Skip to content

Commit

Permalink
implemented working twod layer
Browse files Browse the repository at this point in the history
  • Loading branch information
tmbdev committed Nov 30, 2015
1 parent f2ba050 commit 04cf1de
Show file tree
Hide file tree
Showing 2 changed files with 79 additions and 12 deletions.
62 changes: 62 additions & 0 deletions clstm_prefab.cc
Original file line number Diff line number Diff line change
Expand Up @@ -66,6 +66,19 @@ Network make_bidi(const Assoc &params) {
layer(output_type, 2 * nhidden, noutput, params, {})});
}

// A bare 1D bidirectional LSTM: forward and reversed LSTM passes run in
// parallel and their outputs are concatenated (2 * noutput units total).
// Unlike make_bidi, no Softmax/Sigmoid output layer is attached.

Network make_bidi0(const Assoc &params) {
  int ninput = params.get("ninput");
  int noutput = params.get("noutput");
  string lstm_type = get(params, "lstm_type", "NPLSTM");
  // The Parallel container concatenates its sublayers' outputs, hence the
  // declared output size of 2 * noutput.
  return layer("Parallel", ninput, 2 * noutput, {}, {
      layer(lstm_type, ninput, noutput, params, {}),
      // BUGFIX: the Reversed wrapper's declared output size must match its
      // sublayer's output (noutput, not ninput) — see the same pattern in
      // make_bidi, where Reversed is declared with the sublayer's nhidden.
      layer("Reversed", ninput, noutput, {},
            {layer(lstm_type, ninput, noutput, params, {})}),
  });
}

// Two stacked 1D bidirectional LSTMs with a Softmax/Sigmoid output layer.

Network make_bidi2(const Assoc &params) {
Expand Down Expand Up @@ -93,11 +106,60 @@ Network make_bidi2(const Assoc &params) {
layer(output_type, 2 * nhidden2, noutput, params, {})});
}

// A 1D bidirectional LSTM intended to run along the perpendicular
// (batch) axis. Currently just a Stacked wrapper around make_bidi; the
// Btswitch transposes that would reorient the scan axis are not enabled.
Network make_perplstm(const Assoc &params) {
  int ni = params.get("ninput");
  int nh = params.get("nhidden");
  int no = params.get("noutput");
  string otype = get(params, "output_type", "SigmoidLayer");
  Network inner = make_bidi({{"ninput", ni},
                             {"nhidden", nh},
                             {"noutput", no},
                             {"output_type", otype}});
  return layer("Stacked", ni, no, {}, {inner});
}

// Two dimensional LSTM: a bidi scan along one axis, an axis swap, a bidi
// scan along the other axis, then a swap back.
// NOTE(review): Btswitch presumably transposes the batch and time axes —
// confirm against the Btswitch layer implementation.

Network make_twod(const Assoc &params) {
  int ninput = params.get("ninput");
  int nhidden = params.get("nhidden");
  // nhidden2/nhidden3 cascade from nhidden when not given explicitly.
  int nhidden2 = params.get("nhidden2", nhidden);
  int nhidden3 = params.get("nhidden3", nhidden2);
  int noutput = params.get("noutput");
  // Default nonlinearity: sigmoid for a single output unit, softmax for
  // a multi-class output.
  string output_type = get(params, "output_type",
                           noutput == 1 ? "SigmoidLayer" : "SoftmaxLayer");
  // First bidi pass over the original sequence axis, squashed to nhidden2.
  Network first_pass = make_bidi({{"ninput", ninput},
                                  {"nhidden", nhidden},
                                  {"noutput", nhidden2},
                                  {"output_type", "SigmoidLayer"}});
  // Second bidi pass over the swapped axis, producing the final output.
  Network second_pass = make_bidi({{"ninput", nhidden2},
                                   {"nhidden", nhidden3},
                                   {"noutput", noutput},
                                   {"output_type", output_type}});
  return layer("Stacked", ninput, noutput, {},
               {first_pass,
                layer("Btswitch", nhidden2, nhidden2, {}, {}),
                second_pass,
                layer("Btswitch", noutput, noutput, {}, {})});
}

// Register every prefab network constructor under its factory name so
// make_net(name, params) can find it.
void init_clstm_prefab() {
  auto &registry = network_factories;
  registry["lstm1"] = make_lstm1;
  registry["revlstm1"] = make_revlstm1;
  registry["bidi"] = make_bidi;
  registry["bidi0"] = make_bidi0;
  registry["bidi2"] = make_bidi2;
  registry["twod"] = make_twod;
  registry["perplstm"] = make_perplstm;
}

// Force init_clstm_prefab() to run during static initialization: the
// comma operator discards the call's result and initializes init_ to 0.
static int init_ = (init_clstm_prefab(), 0);
Expand Down
29 changes: 17 additions & 12 deletions test-deriv.cc
Original file line number Diff line number Diff line change
Expand Up @@ -97,10 +97,9 @@ struct Maximizer {
}
};

void test_net(Network net, string id="", int bs=1) {
if (id=="") id = net->kind;
void test_net(Network net, string id = "", int N = 4, int bs = 1) {
if (id == "") id = net->kind;
print("testing", id);
int N = 4;
int ninput = net->ninput();
int noutput = net->noutput();
;
Expand Down Expand Up @@ -179,26 +178,32 @@ void test_net(Network net, string id="", int bs=1) {

int main(int argc, char **argv) {
TRY {
test_net(
make_net("perplstm", {{"ninput", 3}, {"nhidden", 4}, {"noutput", 5}}),
"perplstm", 11, 13);
test_net(make_net("twod", {{"ninput", 3},
{"nhidden", 4},
{"noutput", 5},
{"output_type", "SigmoidLayer"}}),
"twod", 11, 13);
test_net(layer("LinearLayer", 7, 3, {}, {}));
test_net(layer("SigmoidLayer", 7, 3, {}, {}));
test_net(layer("TanhLayer", 7, 3, {}, {}));
test_net(layer("NPLSTM", 7, 3, {}, {}));
test_net(
layer("Reversed", 7, 3, {}, {layer("SigmoidLayer", 7, 3, {}, {})}));
test_net(layer("Parallel", 7, 3, {}, {layer("SigmoidLayer", 7, 3, {}, {}),
layer("LinearLayer", 7, 3, {}, {})}),
"parallel(sigmoid,linear)");
layer("LinearLayer", 7, 3, {}, {})}),
"parallel(sigmoid,linear)");
test_net(make_net("bidi", {{"ninput", 7},
{"noutput", 3},
{"nhidden", 5},
{"output_type", "SigmoidLayer"}}),
"bidi");
test_net(layer("Stacked", 3, 3, {}, {
layer("Btswitch", 3, 3, {}, {}),
layer("Btswitch", 3, 3, {}, {})}),
"btswitch");
test_net(layer("Batchstack", 3, 9, {}, {}),
"Batchstack", 5);
"bidi");
test_net(layer("Stacked", 3, 3, {}, {layer("Btswitch", 3, 3, {}, {}),
layer("Btswitch", 3, 3, {}, {})}),
"btswitch");
test_net(layer("Batchstack", 3, 9, {}, {}), "Batchstack", 4, 5);
// not testing: SoftmaxLayer and ReluLayer
}
CATCH(const char *message) { print("ERROR", message); }
Expand Down

0 comments on commit 04cf1de

Please sign in to comment.