Skip to content

Instantly share code, notes, and snippets.

@ChessPoker
Created September 1, 2016 23:30
Show Gist options
  • Select an option

  • Save ChessPoker/b34c830f1366da8c21a155b2d2f1ce30 to your computer and use it in GitHub Desktop.

Select an option

Save ChessPoker/b34c830f1366da8c21a155b2d2f1ce30 to your computer and use it in GitHub Desktop.

Revisions

  1. ChessPoker created this gist Sep 1, 2016.
    20 changes: 20 additions & 0 deletions networkConfig.java
    Original file line number Diff line number Diff line change
    @@ -0,0 +1,20 @@
    // Deeplearning4j (0.x-era API) configuration for a stacked GravesLSTM
    // recurrent network topped with a softmax RNN output layer.
    // NOTE(review): `learningRate`, `nIn`, and `nOut` are defined outside this
    // snippet; `net` is declared here but not assigned in the visible span —
    // presumably assigned `new MultiLayerNetwork(conf)` later. Confirm against
    // the full file.
    MultiLayerNetwork net;
    //two hidden layers of 3 neurons each
    final int[] LSTMLayers = new int[]{3,3};
    // Base configuration: SGD optimization (1 iteration per fit call),
    // L2 regularization, Xavier weight init, ADAM updater with explicit
    // mean/variance decay rates.
    NeuralNetConfiguration.ListBuilder list = new NeuralNetConfiguration.Builder()
    .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).iterations(1)
    .learningRate(learningRate)
    .regularization(true).l2(0.0000001)
    .seed(76692) // fixed seed for reproducible weight initialization
    .weightInit(WeightInit.XAVIER)
    .updater(Updater.ADAM).adamMeanDecay(0.99).adamVarDecay(0.9999)
    .list();
    int layerIdx = 0;
    // Stack one GravesLSTM layer per entry in LSTMLayers. Each layer's input
    // width (nIn) is re-threaded to the previous layer's output width, so the
    // layers chain correctly regardless of the sizes in LSTMLayers.
    for (; layerIdx < LSTMLayers.length; layerIdx++) {
    list = list.layer(layerIdx, new GravesLSTM.Builder().nIn(nIn).nOut(LSTMLayers[layerIdx])
    .activation("softsign").build());
    nIn = LSTMLayers[layerIdx];
    }
    // Output layer: softmax over nOut classes with multi-class cross-entropy
    // (MCXENT) loss. The post-increment on layerIdx is unused afterwards in
    // this visible snippet.
    list.layer(layerIdx++, new RnnOutputLayer.Builder(LossFunctions.LossFunction.MCXENT).activation("softmax")
    .nIn(nIn).nOut(nOut).build());
    // Finalize: no unsupervised pretraining, standard backpropagation.
    MultiLayerConfiguration conf = list.pretrain(false).backprop(true).build();