Darknet/YOLO v3.0-177-gfa2353b
Object Detection Framework
 
activations.cpp File Reference

Functions

float activate (float x, ACTIVATION a)
 
void activate_array (float *x, const int n, const ACTIVATION a)
 
void activate_array_hard_mish (float *x, const int n, float *activation_input, float *output)
 
void activate_array_mish (float *x, const int n, float *activation_input, float *output)
 
void activate_array_normalize_channels (float *x, const int n, int batch, int channels, int wh_step, float *output)
 
void activate_array_normalize_channels_softmax (float *x, const int n, int batch, int channels, int wh_step, float *output, int use_max_val)
 
void activate_array_swish (float *x, const int n, float *output_sigmoid, float *output)
 
ACTIVATION get_activation (char *s)
 
const char * get_activation_string (ACTIVATION a)
 
float gradient (float x, ACTIVATION a)
 
void gradient_array (const float *x, const int n, const ACTIVATION a, float *delta)
 
void gradient_array_hard_mish (const int n, const float *activation_input, float *delta)
 
void gradient_array_mish (const int n, const float *activation_input, float *delta)
 
void gradient_array_normalize_channels (float *x, const int n, int batch, int channels, int wh_step, float *delta)
 
void gradient_array_normalize_channels_softmax (float *x, const int n, int batch, int channels, int wh_step, float *delta)
 
void gradient_array_swish (const float *x, const int n, const float *sigmoid, float *delta)
 
static float hard_mish_yashas (float x)
 
static float hard_mish_yashas_grad (float x)
 

Function Documentation

◆ activate()

float activate(float x, ACTIVATION a)
Todo:
V3: Why were some activations missing? Was that intentional?
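No per-case documentation is generated for this dispatcher, so the following is only a rough sketch of the usual Darknet pattern: a switch over the ACTIVATION enum that returns the element-wise value. The enum members and constants shown (including the 0.1 leaky slope) are local stand-ins, not a verbatim copy of this file.

#include <cmath>
#include <cstdio>

// Hedged sketch: a local stand-in for the scalar dispatcher in activations.cpp.
enum ACTIVATION_SKETCH { LINEAR, LOGISTIC, RELU, LEAKY };

float activate_sketch(float x, ACTIVATION_SKETCH a)
{
    switch (a)
    {
        case LINEAR:   return x;                             // identity
        case LOGISTIC: return 1.0f / (1.0f + std::exp(-x));  // sigmoid
        case RELU:     return x > 0.0f ? x : 0.0f;
        case LEAKY:    return x > 0.0f ? x : 0.1f * x;       // 0.1 slope assumed
    }
    return x;
}

int main()
{
    std::printf("leaky(-2) = %f\n", activate_sketch(-2.0f, LEAKY));  // prints -0.200000
    return 0;
}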

◆ activate_array()

void activate_array(float *x, const int n, const ACTIVATION a)
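The summary above gives only the signature; by convention such a routine applies the scalar activation to each of the n elements of x in place. A minimal sketch of that loop, with a leaky-ReLU stand-in for the real dispatcher:

// Hedged sketch of an in-place array activation. activation_point() is a stand-in
// for the scalar activate() dispatcher, not the actual Darknet call.
float activation_point(float x) { return x > 0.0f ? x : 0.1f * x; }

void activate_array_sketch(float *x, const int n)
{
    for (int i = 0; i < n; ++i)
    {
        x[i] = activation_point(x[i]);  // overwrite each element with its activated value
    }
}

int main()
{
    float buf[4] = {-1.0f, 0.0f, 0.5f, 2.0f};
    activate_array_sketch(buf, 4);
    return 0;
}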

◆ activate_array_hard_mish()

void activate_array_hard_mish(float *x, const int n, float *activation_input, float *output)

◆ activate_array_mish()

void activate_array_mish(float *x, const int n, float *activation_input, float *output)
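Mish is x * tanh(softplus(x)). The two extra buffers suggest the caching pattern used by the fused activations: the raw input is stored in activation_input so that gradient_array_mish() can recompute the derivative during the backward pass. A hedged sketch (any numerical-stability threshold the real softplus may use is omitted):

#include <cmath>

// Hedged sketch of a buffered mish forward pass: mish(v) = v * tanh(ln(1 + e^v)).
void activate_array_mish_sketch(float *x, const int n,
                                float *activation_input, float *output)
{
    for (int i = 0; i < n; ++i)
    {
        const float v = x[i];
        activation_input[i] = v;                         // cached for the backward pass
        const float softplus = std::log1p(std::exp(v));  // ln(1 + e^v)
        output[i] = v * std::tanh(softplus);
    }
}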

◆ activate_array_normalize_channels()

void activate_array_normalize_channels(float *x, const int n, int batch, int channels, int wh_step, float *output)
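The behaviour has to be inferred from the name and parameters. The sketch below assumes an NCHW layout with n = batch * channels * wh_step and divides each value by the sum of absolute channel values at the same spatial position; the epsilon and the use of absolute values are assumptions.

#include <cmath>

// Hedged sketch of per-location channel normalization.
// Layout assumption: element (b, k, s) lives at index (b * channels + k) * wh_step + s.
void normalize_channels_sketch(const float *x, int batch, int channels,
                               int wh_step, float *output)
{
    const float eps = 0.0001f;                   // guards against division by zero (assumed value)
    for (int b = 0; b < batch; ++b)
    {
        for (int s = 0; s < wh_step; ++s)        // one spatial position at a time
        {
            float sum = eps;
            for (int k = 0; k < channels; ++k)
                sum += std::fabs(x[(b * channels + k) * wh_step + s]);
            for (int k = 0; k < channels; ++k)
            {
                const int idx = (b * channels + k) * wh_step + s;
                output[idx] = x[idx] / sum;      // scale each channel by the per-location sum
            }
        }
    }
}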

◆ activate_array_normalize_channels_softmax()

void activate_array_normalize_channels_softmax(float *x, const int n, int batch, int channels, int wh_step, float *output, int use_max_val)
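This variant presumably replaces the plain per-location sum with a softmax over the channel dimension, and use_max_val most likely toggles the standard max-subtraction trick that keeps exp() from overflowing. Both readings are inferred from the parameter names; the sketch below reflects that interpretation only.

#include <cmath>

// Hedged sketch of a per-location channel softmax with optional max subtraction.
void normalize_channels_softmax_sketch(const float *x, int batch, int channels,
                                       int wh_step, float *output, int use_max_val)
{
    for (int b = 0; b < batch; ++b)
    {
        for (int s = 0; s < wh_step; ++s)
        {
            float max_val = 0.0f;
            if (use_max_val)                     // optional shift for numerical stability
            {
                max_val = x[(b * channels + 0) * wh_step + s];
                for (int k = 1; k < channels; ++k)
                {
                    const float v = x[(b * channels + k) * wh_step + s];
                    if (v > max_val) max_val = v;
                }
            }
            float sum = 0.0f;
            for (int k = 0; k < channels; ++k)
                sum += std::exp(x[(b * channels + k) * wh_step + s] - max_val);
            for (int k = 0; k < channels; ++k)
            {
                const int idx = (b * channels + k) * wh_step + s;
                output[idx] = std::exp(x[idx] - max_val) / sum;
            }
        }
    }
}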

◆ activate_array_swish()

void activate_array_swish(float *x, const int n, float *output_sigmoid, float *output)
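Swish (SiLU) is x * sigmoid(x). The output_sigmoid parameter suggests the sigmoid values are cached so that gradient_array_swish() can reuse them instead of recomputing the exponential; a hedged sketch of that convention:

#include <cmath>

// Hedged sketch of a buffered swish forward pass: swish(v) = v * sigmoid(v).
void activate_array_swish_sketch(float *x, const int n,
                                 float *output_sigmoid, float *output)
{
    for (int i = 0; i < n; ++i)
    {
        const float s = 1.0f / (1.0f + std::exp(-x[i]));
        output_sigmoid[i] = s;    // cached so the backward pass can skip the exp()
        output[i] = x[i] * s;
    }
}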

◆ get_activation()

ACTIVATION get_activation(char *s)
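get_activation() maps an activation name, as it appears in a layer's .cfg entry (for example activation=leaky), to the ACTIVATION enum; get_activation_string() below is its inverse. A hedged, self-contained sketch of the parsing pattern with a local stand-in enum; the real function covers many more names, and its fallback for unknown strings is an assumption here:

#include <cstdio>
#include <cstring>

// Hedged sketch of string-to-enum activation parsing.
enum PARSE_SKETCH { SK_LOGISTIC, SK_RELU, SK_LEAKY, SK_MISH };

PARSE_SKETCH get_activation_sketch(const char *s)
{
    if (std::strcmp(s, "logistic") == 0) return SK_LOGISTIC;
    if (std::strcmp(s, "relu")     == 0) return SK_RELU;
    if (std::strcmp(s, "leaky")    == 0) return SK_LEAKY;
    if (std::strcmp(s, "mish")     == 0) return SK_MISH;
    std::fprintf(stderr, "unknown activation \"%s\", using leaky\n", s);  // fallback assumed
    return SK_LEAKY;
}

int main()
{
    return get_activation_sketch("mish") == SK_MISH ? 0 : 1;
}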

◆ get_activation_string()

const char *get_activation_string(ACTIVATION a)

◆ gradient()

float gradient(float x, ACTIVATION a)
Todo:
V3: Why were these 3 missed?
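gradient() is the scalar counterpart of activate(): it returns the derivative of the selected activation so that gradient_array() can fold it into the incoming delta. In Darknet-style code the value passed in is often the already-activated output (which is why a logistic gradient can be written as x * (1 - x)); whether every case in this file follows that convention is not documented, so the sketch below is illustrative only.

// Hedged sketch of the scalar gradient dispatcher with a local stand-in enum.
enum GRAD_SKETCH { G_LINEAR, G_LOGISTIC, G_RELU, G_LEAKY };

float gradient_sketch(float x, GRAD_SKETCH a)
{
    switch (a)
    {
        case G_LINEAR:   return 1.0f;
        case G_LOGISTIC: return (1.0f - x) * x;          // derivative expressed via the output
        case G_RELU:     return x > 0.0f ? 1.0f : 0.0f;
        case G_LEAKY:    return x > 0.0f ? 1.0f : 0.1f;  // 0.1 slope assumed
    }
    return 1.0f;
}

int main()
{
    return gradient_sketch(0.5f, G_RELU) == 1.0f ? 0 : 1;
}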

◆ gradient_array()

void gradient_array(const float *x, const int n, const ACTIVATION a, float *delta)
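The array form applies the chain rule element-wise: each delta is scaled by the activation's derivative at the matching x. A minimal sketch of that loop, again with a leaky-ReLU stand-in for the scalar gradient():

// Hedged sketch of the element-wise chain rule: delta[i] *= f'(x[i]).
float gradient_point(float x) { return x > 0.0f ? 1.0f : 0.1f; }  // leaky-ReLU derivative stand-in

void gradient_array_sketch(const float *x, const int n, float *delta)
{
    for (int i = 0; i < n; ++i)
    {
        delta[i] *= gradient_point(x[i]);  // multiply the incoming gradient by the local derivative
    }
}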

◆ gradient_array_hard_mish()

void gradient_array_hard_mish(const int n, const float *activation_input, float *delta)

◆ gradient_array_mish()

void gradient_array_mish(const int n, const float *activation_input, float *delta)
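Unlike gradient_array(), the mish backward pass takes no x argument: it reads the raw inputs cached in activation_input by activate_array_mish() and recomputes the derivative from them. A hedged sketch using the analytic derivative of v * tanh(softplus(v)):

#include <cmath>

// Hedged sketch of the mish backward pass driven by the cached forward inputs.
// d/dv [v * tanh(softplus(v))] = tanh(sp) + v * (1 - tanh(sp)^2) * sigmoid(v)
void gradient_array_mish_sketch(const int n, const float *activation_input, float *delta)
{
    for (int i = 0; i < n; ++i)
    {
        const float v   = activation_input[i];           // raw input saved by the forward pass
        const float sp  = std::log1p(std::exp(v));       // softplus(v)
        const float tsp = std::tanh(sp);
        const float sig = 1.0f / (1.0f + std::exp(-v));
        delta[i] *= tsp + v * (1.0f - tsp * tsp) * sig;  // chain rule
    }
}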

◆ gradient_array_normalize_channels()

void gradient_array_normalize_channels(float *x, const int n, int batch, int channels, int wh_step, float *delta)

◆ gradient_array_normalize_channels_softmax()

void gradient_array_normalize_channels_softmax(float *x, const int n, int batch, int channels, int wh_step, float *delta)

◆ gradient_array_swish()

void gradient_array_swish(const float *x, const int n, const float *sigmoid, float *delta)
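The swish backward pass reuses the sigmoid buffer filled by activate_array_swish(). With s = sigmoid(x) and swish = x * s, the derivative d(swish)/dx = s + x * s * (1 - s) can be rewritten as swish + s * (1 - swish), which needs no extra exponentials; the sketch below assumes that is the intent of the cached buffer:

// Hedged sketch of the swish backward pass using the cached sigmoid values.
void gradient_array_swish_sketch(const float *x, const int n,
                                 const float *sigmoid, float *delta)
{
    for (int i = 0; i < n; ++i)
    {
        const float s     = sigmoid[i];        // cached by the forward pass
        const float swish = x[i] * s;
        delta[i] *= swish + s * (1.0f - swish);
    }
}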

◆ hard_mish_yashas()

static float hard_mish_yashas(float x)

◆ hard_mish_yashas_grad()

static float hard_mish_yashas_grad(float x)
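These two static helpers are the per-element kernels behind activate_array_hard_mish() and gradient_array_hard_mish(). The name points to Yashas Samaga's piecewise hard-mish approximation; the breakpoints and coefficients below are the commonly published ones and should be read as an assumption rather than a verbatim copy of this file.

// Hedged sketch of the piecewise hard-mish approximation and its derivative.
float hard_mish_point(float x)
{
    if (x > 0.0f)  return x;                 // identity on the positive side
    if (x > -2.0f) return x * x / 2.0f + x;  // quadratic segment on (-2, 0]
    return 0.0f;                             // zero below -2
}

float hard_mish_grad_point(float x)
{
    if (x > 0.0f)  return 1.0f;
    if (x > -2.0f) return x + 1.0f;          // derivative of x*x/2 + x
    return 0.0f;
}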