Skip to content

Commit

Permalink
r414: added the exp() operator
Browse files Browse the repository at this point in the history
  • Loading branch information
lh3 committed Feb 15, 2017
1 parent 401a77d commit 111949a
Show file tree
Hide file tree
Showing 2 changed files with 23 additions and 4 deletions.
24 changes: 21 additions & 3 deletions kautodiff.c
Original file line number Diff line number Diff line change
Expand Up @@ -139,6 +139,7 @@ KAD_FUNC_OP2(kad_mse, 29)
#define KAD_FUNC_OP1(fname, op) kad_node_t *fname(kad_node_t *x) { return kad_op1_core((op), x); }

KAD_FUNC_OP1(kad_log, 27)
KAD_FUNC_OP1(kad_exp, 33)
KAD_FUNC_OP1(kad_square, 5)
KAD_FUNC_OP1(kad_sigm, 6)
KAD_FUNC_OP1(kad_tanh, 7)
Expand Down Expand Up @@ -1149,6 +1150,22 @@ int kad_op_1minus(kad_node_t *p, int action)
return 0;
}

/* Element-wise exponential operator: p = exp(q), where q is the sole child.
 * Dispatches on the autodiff action:
 *   KAD_SYNC_DIM  - copy the child's dimensions onto p
 *   KAD_FORWARD   - compute p->x[k] = expf(q->x[k]) for every element
 *   KAD_BACKWARD  - accumulate the gradient; since d/dx exp(x) = exp(x),
 *                   the cached forward output p->x is reused directly
 * Always returns 0. */
int kad_op_exp(kad_node_t *p, int action)
{
	kad_node_t *in = p->child[0].p;
	int k, len = kad_len(in);
	switch (action) {
	case KAD_SYNC_DIM:
		kad_sync_dim1(p, in);
		break;
	case KAD_FORWARD:
		for (k = 0; k < len; ++k) p->x[k] = expf(in->x[k]);
		break;
	case KAD_BACKWARD:
		if (kad_is_back(in)) /* skip gradient work if the child needs no backprop */
			for (k = 0; k < len; ++k)
				in->g[k] += p->g[k] * p->x[k];
		break;
	}
	return 0;
}

int kad_op_log(kad_node_t *p, int action)
{
int i, n;
Expand Down Expand Up @@ -2148,12 +2165,13 @@ kad_op_f kad_op_list[KAD_MAX_OP] = {
kad_op_sample_normal, // 24: sample from a normal distribution
kad_op_reduce_sum, // 25
kad_op_reduce_mean, // 26
kad_op_log, // 27
kad_op_log, // 27: log()
kad_op_avg1d, // 28: 1D average pooling (for 1D ConvNet)
kad_op_mse, // 29: mean square error
kad_op_reshape, // 30
kad_op_concat, // 31
kad_op_stdnorm // 32: layer normalization
kad_op_stdnorm, // 32: layer normalization
kad_op_exp // 33: exp()
};

/**************************
Expand All @@ -2171,7 +2189,7 @@ void kad_print_graph(FILE *fp, int n, kad_node_t **v)
{
static const char *op[] = { 0, "add", "mul", "cmul", "ce_bin_neg", "square", "sigm", "tanh", "relu", "matmul", "avg", "1minus", "switch", "ce_multi", "softmax",
"dropout", "conv2d", "max2d", "conv1d", "max1d", "slice", "max", "ce_bin", "sub", "sample_normal", "reduce_sum", "reduce_mean", "log",
"avg1d", "mse", "reshape", "concat", "stdnorm" };
"avg1d", "mse", "reshape", "concat", "stdnorm", "exp" };
int i, j;
for (i = 0; i < n; ++i) v[i]->tmp = i;
for (i = 0; i < n; ++i) {
Expand Down
3 changes: 2 additions & 1 deletion kautodiff.h
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,7 @@
#ifndef KANN_AUTODIFF_H
#define KANN_AUTODIFF_H

#define KAD_VERSION "r403"
#define KAD_VERSION "r414"

#include <stdio.h>
#include <stdint.h>
Expand Down Expand Up @@ -197,6 +197,7 @@ kad_node_t *kad_tanh(kad_node_t *x); // f(x) = (1-exp(-2x)) / (1+exp(-2x)) (el
kad_node_t *kad_relu(kad_node_t *x); // f(x) = max{0,x} (element-wise rectifier, aka ReLU)
kad_node_t *kad_softmax(kad_node_t *x);// f_i(x_1,...,x_n) = exp(x_i) / \sum_j exp(x_j) (softmax)
kad_node_t *kad_1minus(kad_node_t *x); // f(x) = 1 - x
kad_node_t *kad_exp(kad_node_t *x); // f(x) = exp(x)
kad_node_t *kad_log(kad_node_t *x); // f(x) = log(x)

kad_node_t *kad_stdnorm(kad_node_t *x); // layer normalization
Expand Down

0 comments on commit 111949a

Please sign in to comment.