Darknet/YOLO v3.0-177-gfa2353b
Object Detection Framework
 
activations.hpp File Reference
Include dependency graph for activations.hpp: shows which files directly or indirectly include this file.

Functions

float activate (float x, ACTIVATION a)
 
void activate_array (float *x, const int n, const ACTIVATION a)
 
void activate_array_hard_mish (float *x, const int n, float *activation_input, float *output)
 
void activate_array_hard_mish_ongpu (float *x, int n, float *activation_input_gpu, float *output_gpu)
 
void activate_array_mish (float *x, const int n, float *activation_input, float *output)
 
void activate_array_mish_ongpu (float *x, int n, float *activation_input_gpu, float *output_gpu)
 
void activate_array_normalize_channels (float *x, const int n, int batch, int channels, int wh_step, float *output)
 
void activate_array_normalize_channels_ongpu (float *x, int n, int batch, int channels, int wh_step, float *output_gpu)
 
void activate_array_normalize_channels_softmax (float *x, const int n, int batch, int channels, int wh_step, float *output, int use_max_val)
 
void activate_array_normalize_channels_softmax_ongpu (float *x, int n, int batch, int channels, int wh_step, float *output_gpu, int use_max_val)
 
void activate_array_ongpu (float *x, int n, ACTIVATION a)
 
void activate_array_swish (float *x, const int n, float *output_sigmoid, float *output)
 
void activate_array_swish_ongpu (float *x, int n, float *output_sigmoid_gpu, float *output_gpu)
 
static float elu_activate (float x)
 
static float elu_gradient (float x)
 
static float gelu_activate (float x)
 
static float gelu_gradient (float x)
 
ACTIVATION get_activation (char *s)
 
const char * get_activation_string (ACTIVATION a)
 
float gradient (float x, ACTIVATION a)
 
void gradient_array (const float *x, const int n, const ACTIVATION a, float *delta)
 
void gradient_array_hard_mish (const int n, const float *activation_input, float *delta)
 
void gradient_array_hard_mish_ongpu (int n, float *activation_input_gpu, float *delta)
 
void gradient_array_mish (const int n, const float *activation_input, float *delta)
 
void gradient_array_mish_ongpu (int n, float *activation_input_gpu, float *delta)
 
void gradient_array_normalize_channels (float *x, const int n, int batch, int channels, int wh_step, float *delta)
 
void gradient_array_normalize_channels_ongpu (float *output_gpu, int n, int batch, int channels, int wh_step, float *delta_gpu)
 
void gradient_array_normalize_channels_softmax (float *x, const int n, int batch, int channels, int wh_step, float *delta)
 
void gradient_array_normalize_channels_softmax_ongpu (float *output_gpu, int n, int batch, int channels, int wh_step, float *delta_gpu)
 
void gradient_array_ongpu (float *x, int n, ACTIVATION a, float *delta)
 
void gradient_array_swish (const float *x, const int n, const float *sigmoid, float *delta)
 
void gradient_array_swish_ongpu (float *x, int n, float *sigmoid_gpu, float *delta)
 
static float hardtan_activate (float x)
 
static float hardtan_gradient (float x)
 
static float leaky_activate (float x)
 
static float leaky_gradient (float x)
 
static float lhtan_activate (float x)
 
static float lhtan_gradient (float x)
 
static float linear_activate (float x)
 
static float linear_gradient (float x)
 
static float loggy_activate (float x)
 
static float loggy_gradient (float x)
 
static float logistic_activate (float x)
 
static float logistic_gradient (float x)
 
static float plse_activate (float x)
 
static float plse_gradient (float x)
 
static float ramp_activate (float x)
 
static float ramp_gradient (float x)
 
static float relie_activate (float x)
 
static float relie_gradient (float x)
 
static float relu6_activate (float x)
 
static float relu6_gradient (float x)
 
static float relu_activate (float x)
 
static float relu_gradient (float x)
 
static float sech (float x)
 
static float selu_activate (float x)
 
static float selu_gradient (float x)
 
static float softplus_activate (float x, float threshold)
 
static float stair_activate (float x)
 
static float stair_gradient (float x)
 
static float tanh_activate (float x)
 
static float tanh_gradient (float x)
 

Function Documentation

◆ activate()

float activate(float x, ACTIVATION a)

Todo:
V3 Why were some activations missing? Was that intentional?
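
activate() applies the selected activation to a single scalar. A minimal usage sketch, assuming the ACTIVATION enumerators (RELU, LEAKY, ...) are visible through this header as in the Darknet sources:

    #include "activations.hpp"
    #include <cstdio>

    int main()
    {
        // ReLU clamps negatives to zero; Darknet's leaky ReLU uses a fixed 0.1 slope.
        float r = activate(-2.0f, RELU);   // expected 0.0
        float l = activate(-2.0f, LEAKY);  // expected -0.2
        std::printf("relu=%f leaky=%f\n", r, l);
        return 0;
    }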

◆ activate_array()

void activate_array(float *x, const int n, const ACTIVATION a)
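
activate_array() applies one activation to every element of a buffer, in place. A short sketch, again assuming LEAKY from the ACTIVATION enum:

    #include "activations.hpp"
    #include <cstdio>

    int main()
    {
        float x[4] = {-1.0f, -0.5f, 0.5f, 1.0f};
        activate_array(x, 4, LEAKY);   // each x[i] is replaced by leaky(x[i])
        for (int i = 0; i < 4; ++i) std::printf("%f ", x[i]);
        std::printf("\n");
        return 0;
    }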

◆ activate_array_hard_mish()

void activate_array_hard_mish(float *x, const int n, float *activation_input, float *output)

◆ activate_array_hard_mish_ongpu()

void activate_array_hard_mish_ongpu(float *x, int n, float *activation_input_gpu, float *output_gpu)

◆ activate_array_mish()

void activate_array_mish(float *x, const int n, float *activation_input, float *output)
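
Mish is x * tanh(softplus(x)). The activation_input buffer appears to cache the pre-activation values so that gradient_array_mish() can recompute the derivative during the backward pass. A standalone reference computation for comparison, not the library's code path (the library's softplus_activate() additionally takes a threshold for numerical stability):

    #include <cmath>
    #include <cstdio>

    // Reference Mish: mish(x) = x * tanh(ln(1 + e^x)).
    static float mish_ref(float x)
    {
        float sp = std::log1p(std::exp(x));   // softplus(x)
        return x * std::tanh(sp);
    }

    int main()
    {
        const float xs[] = {-2.0f, 0.0f, 2.0f};
        for (float x : xs)
            std::printf("mish(%.1f) = %f\n", x, mish_ref(x));
        return 0;
    }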

◆ activate_array_mish_ongpu()

void activate_array_mish_ongpu(float *x, int n, float *activation_input_gpu, float *output_gpu)

◆ activate_array_normalize_channels()

void activate_array_normalize_channels(float *x, const int n, int batch, int channels, int wh_step, float *output)

◆ activate_array_normalize_channels_ongpu()

void activate_array_normalize_channels_ongpu(float *x, int n, int batch, int channels, int wh_step, float *output_gpu)

◆ activate_array_normalize_channels_softmax()

void activate_array_normalize_channels_softmax(float *x, const int n, int batch, int channels, int wh_step, float *output, int use_max_val)

◆ activate_array_normalize_channels_softmax_ongpu()

void activate_array_normalize_channels_softmax_ongpu(float *x, int n, int batch, int channels, int wh_step, float *output_gpu, int use_max_val)

◆ activate_array_ongpu()

void activate_array_ongpu(float *x, int n, ACTIVATION a)

◆ activate_array_swish()

void activate_array_swish(float *x, const int n, float *output_sigmoid, float *output)
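
Swish (SiLU) is x * sigmoid(x); the output_sigmoid buffer appears to cache sigmoid(x) so that gradient_array_swish() can reuse it on the backward pass. A standalone reference computation for comparison, not the library's code path:

    #include <cmath>
    #include <cstdio>

    // Reference Swish: swish(x) = x * sigmoid(x).
    static float swish_ref(float x)
    {
        float s = 1.0f / (1.0f + std::exp(-x));
        return x * s;
    }

    int main()
    {
        const float xs[] = {-2.0f, 0.0f, 2.0f};
        for (float x : xs)
            std::printf("swish(%.1f) = %f\n", x, swish_ref(x));
        return 0;
    }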

◆ activate_array_swish_ongpu()

void activate_array_swish_ongpu(float *x, int n, float *output_sigmoid_gpu, float *output_gpu)

◆ elu_activate()

static inline float elu_activate(float x)

◆ elu_gradient()

static inline float elu_gradient(float x)

◆ gelu_activate()

static inline float gelu_activate(float x)

◆ gelu_gradient()

static inline float gelu_gradient(float x)

◆ get_activation()

ACTIVATION get_activation(char *s)

◆ get_activation_string()

const char * get_activation_string(ACTIVATION a)
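
get_activation() parses an activation name as it appears in a .cfg file, and get_activation_string() performs the reverse mapping. A round-trip sketch; "leaky" is assumed here to be one of the recognised names:

    #include "activations.hpp"
    #include <cstdio>

    int main()
    {
        char name[] = "leaky";                 // mutable buffer because the parameter is char*
        ACTIVATION a = get_activation(name);
        std::printf("round trip: %s\n", get_activation_string(a));
        return 0;
    }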

◆ gradient()

float gradient(float x, ACTIVATION a)

Todo:
V3 why were these 3 missed?
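
gradient() returns the derivative of the chosen activation at x. For several activations the Darknet helpers appear to be written in terms of the already-activated output (for example the logistic derivative y * (1 - y)), so the forward value is usually what gets passed back in. A small sketch under that assumption:

    #include "activations.hpp"
    #include <cstdio>

    int main()
    {
        float y = activate(0.5f, LOGISTIC);   // forward value
        float g = gradient(y, LOGISTIC);      // derivative expressed via the output: y * (1 - y)
        std::printf("y=%f dy/dx=%f\n", y, g);
        return 0;
    }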

◆ gradient_array()

void gradient_array(const float *x, const int n, const ACTIVATION a, float *delta)
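
gradient_array() scales an upstream delta buffer element-wise by the activation derivative. A minimal forward/backward pair, following the same output-based convention noted under gradient():

    #include "activations.hpp"
    #include <cstdio>

    int main()
    {
        float x[3]     = {-1.0f, 0.0f, 2.0f};
        float delta[3] = { 1.0f, 1.0f, 1.0f };   // incoming gradients

        activate_array(x, 3, LEAKY);             // forward pass, in place
        gradient_array(x, 3, LEAKY, delta);      // backward: delta[i] *= d(activation)/d(input)

        for (int i = 0; i < 3; ++i) std::printf("%f ", delta[i]);
        std::printf("\n");
        return 0;
    }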

◆ gradient_array_hard_mish()

void gradient_array_hard_mish(const int n, const float *activation_input, float *delta)

◆ gradient_array_hard_mish_ongpu()

void gradient_array_hard_mish_ongpu(int n, float *activation_input_gpu, float *delta)

◆ gradient_array_mish()

void gradient_array_mish(const int n, const float *activation_input, float *delta)

◆ gradient_array_mish_ongpu()

void gradient_array_mish_ongpu(int n, float *activation_input_gpu, float *delta)

◆ gradient_array_normalize_channels()

void gradient_array_normalize_channels(float *x, const int n, int batch, int channels, int wh_step, float *delta)

◆ gradient_array_normalize_channels_ongpu()

void gradient_array_normalize_channels_ongpu(float *output_gpu, int n, int batch, int channels, int wh_step, float *delta_gpu)

◆ gradient_array_normalize_channels_softmax()

void gradient_array_normalize_channels_softmax(float *x, const int n, int batch, int channels, int wh_step, float *delta)

◆ gradient_array_normalize_channels_softmax_ongpu()

void gradient_array_normalize_channels_softmax_ongpu(float *output_gpu, int n, int batch, int channels, int wh_step, float *delta_gpu)

◆ gradient_array_ongpu()

void gradient_array_ongpu(float *x, int n, ACTIVATION a, float *delta)

◆ gradient_array_swish()

void gradient_array_swish(const float *x, const int n, const float *sigmoid, float *delta)

◆ gradient_array_swish_ongpu()

void gradient_array_swish_ongpu(float *x, int n, float *sigmoid_gpu, float *delta)

◆ hardtan_activate()

static inline float hardtan_activate(float x)

◆ hardtan_gradient()

static inline float hardtan_gradient(float x)

◆ leaky_activate()

static inline float leaky_activate(float x)
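
Darknet's leaky ReLU uses a fixed negative slope of 0.1 rather than a configurable one. A reference sketch of the assumed formula:

    #include <cstdio>

    // Assumed formula: leaky(x) = x for x > 0, otherwise 0.1 * x.
    static inline float leaky_ref(float x)
    {
        return (x > 0.0f) ? x : 0.1f * x;
    }

    int main()
    {
        std::printf("%f %f\n", leaky_ref(2.0f), leaky_ref(-2.0f));   // 2.0, -0.2
        return 0;
    }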

◆ leaky_gradient()

static inline float leaky_gradient(float x)

◆ lhtan_activate()

static inline float lhtan_activate(float x)

◆ lhtan_gradient()

static inline float lhtan_gradient(float x)

◆ linear_activate()

static inline float linear_activate(float x)

◆ linear_gradient()

static inline float linear_gradient(float x)

◆ loggy_activate()

static inline float loggy_activate(float x)

◆ loggy_gradient()

static inline float loggy_gradient(float x)

◆ logistic_activate()

static inline float logistic_activate(float x)

◆ logistic_gradient()

static inline float logistic_gradient(float x)

◆ plse_activate()

static inline float plse_activate(float x)

◆ plse_gradient()

static inline float plse_gradient(float x)

◆ ramp_activate()

static inline float ramp_activate(float x)

◆ ramp_gradient()

static inline float ramp_gradient(float x)

◆ relie_activate()

static inline float relie_activate(float x)

◆ relie_gradient()

static inline float relie_gradient(float x)

◆ relu6_activate()

static inline float relu6_activate(float x)

◆ relu6_gradient()

static inline float relu6_gradient(float x)

◆ relu_activate()

static inline float relu_activate(float x)

◆ relu_gradient()

static inline float relu_gradient(float x)

◆ sech()

static inline float sech(float x)

◆ selu_activate()

static inline float selu_activate(float x)

◆ selu_gradient()

static inline float selu_gradient(float x)

◆ softplus_activate()

static inline float softplus_activate(float x, float threshold)
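
Unlike the other inline helpers, softplus_activate() takes a threshold parameter, presumably to keep log(1 + e^x) numerically stable: for large positive x softplus(x) is effectively x, and for large negative x it is effectively e^x. A reference sketch under that assumption:

    #include <cmath>
    #include <cstdio>

    // Assumed behaviour: clamp the overflow-prone log(1 + e^x) to its asymptotes.
    static inline float softplus_ref(float x, float threshold)
    {
        if (x > threshold)  return x;               // softplus(x) ~= x for very positive x
        if (x < -threshold) return std::exp(x);     // softplus(x) ~= e^x for very negative x
        return std::log(std::exp(x) + 1.0f);
    }

    int main()
    {
        std::printf("%f\n", softplus_ref(0.0f, 20.0f));   // ln(2) ~= 0.693
        return 0;
    }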

◆ stair_activate()

static inline float stair_activate(float x)

◆ stair_gradient()

static inline float stair_gradient(float x)

◆ tanh_activate()

static inline float tanh_activate(float x)

◆ tanh_gradient()

static inline float tanh_gradient(float x)