This article collects a number of Java code examples for the org.deeplearning4j.nn.multilayer.MultiLayerNetwork.setListeners() method, showing how MultiLayerNetwork.setListeners() is used in practice. The examples were extracted from selected projects hosted on platforms such as GitHub, Stack Overflow and Maven, so they should be a useful reference. Details of the MultiLayerNetwork.setListeners() method are as follows:
Package path: org.deeplearning4j.nn.multilayer.MultiLayerNetwork
Class name: MultiLayerNetwork
Method name: setListeners
Description: none provided
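Since no description is available, the following is a minimal usage sketch; it assumes conf is an already-built MultiLayerConfiguration (a placeholder for this sketch), and the score-reporting frequency of 10 is an arbitrary example value:

import org.deeplearning4j.nn.multilayer.MultiLayerNetwork;
import org.deeplearning4j.optimize.listeners.ScoreIterationListener;

// conf is assumed to be an existing MultiLayerConfiguration (placeholder for this sketch)
MultiLayerNetwork net = new MultiLayerNetwork(conf);
net.init();
// print the training score every 10 iterations
net.setListeners(new ScoreIterationListener(10));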
Code example origin: deeplearning4j/dl4j-examples
network.setListeners(new StatsListener(statsStorage), new ScoreIterationListener(10));
Code example origin: deeplearning4j/dl4j-examples
net.setListeners(new PerformanceListener(10, true));
Code example origin: deeplearning4j/dl4j-examples
model.setListeners(new ScoreIterationListener(100));
long timeX = System.currentTimeMillis();
Code example origin: deeplearning4j/dl4j-examples
model.init();
model.setListeners(new ScoreIterationListener(100));
Code example origin: deeplearning4j/dl4j-examples
net.setListeners(new ScoreIterationListener(1), new IterationListener() {
    @Override
    public void iterationDone(Model model, int iteration, int epoch) {
        // ... (listener body elided in the original snippet)
    }
});
Code example origin: deeplearning4j/dl4j-examples
net.setListeners(new ScoreIterationListener(1));
Code example origin: org.deeplearning4j/deeplearning4j-nn
@Override
public void setListeners(IterationListener... listeners) {
    Collection<IterationListener> cListeners = new ArrayList<>();
    //Check: user might have done setListeners(null) thinking this would clear the current listeners.
    //This results in an IterationListener[1] with a single null value -> results in a NPE later
    if (listeners != null && listeners.length > 0) {
        for (IterationListener i : listeners) {
            if (i != null)
                cListeners.add(i);
        }
    }
    setListeners(cListeners);
}
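A brief note on the null handling in the override above: null entries in the varargs array are filtered out before delegating to the Collection-based overload, so calls like the following are safe (net is assumed to be an initialized MultiLayerNetwork, and the listener frequency is an arbitrary example value):

net.setListeners(new ScoreIterationListener(10), null);   // the null element is simply ignored
net.setListeners((IterationListener[]) null);             // delegates an empty collection instead of throwing a NPE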
Code example origin: org.deeplearning4j/deeplearning4j-nn
/**
 * This method ADDS additional IterationListener to existing listeners
 *
 * @param listeners
 */
@Override
public void addListeners(IterationListener... listeners) {
    if (this.listeners == null) {
        setListeners(listeners);
        return;
    }
    for (IterationListener listener : listeners) {
        this.listeners.add(listener);
        if (listener instanceof TrainingListener) {
            this.trainingListeners.add((TrainingListener) listener);
        }
    }
    if (solver != null) {
        solver.setListeners(this.listeners);
    }
}
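As the Javadoc above suggests, addListeners(...) appends to whatever listeners are already registered, while setListeners(...) replaces them. A short sketch of the difference (net is assumed to be an initialized MultiLayerNetwork, and the listener types and frequencies are arbitrary example values):

net.setListeners(new ScoreIterationListener(10));      // listeners: {ScoreIterationListener}
net.addListeners(new PerformanceListener(10, true));   // listeners: {ScoreIterationListener, PerformanceListener}
net.setListeners(new ScoreIterationListener(1));       // previously registered listeners are replaced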
Code example origin: sjsdfg/dl4j-tutorials
public static void main(final String[] args) {
    //Switch these two options to do different functions with different networks
    final MathFunction fn = new SinXDivXMathFunction();
    final MultiLayerConfiguration conf = getDeepDenseLayerNetworkConfiguration();

    //Generate the training data
    final INDArray x = Nd4j.linspace(-10, 10, nSamples).reshape(nSamples, 1);
    final DataSetIterator iterator = getTrainingData(x, fn, batchSize, rng);

    //Create the network
    final MultiLayerNetwork net = new MultiLayerNetwork(conf);
    net.init();
    net.setListeners(new ScoreIterationListener(1));

    //Train the network on the full data set, and evaluate it periodically
    final INDArray[] networkPredictions = new INDArray[nEpochs / plotFrequency];
    for (int i = 0; i < nEpochs; i++) {
        iterator.reset();
        net.fit(iterator);
        if ((i + 1) % plotFrequency == 0) networkPredictions[i / plotFrequency] = net.output(x, false);
    }

    //Plot the target data and the network predictions
    plot(fn, x, fn.getFunctionValues(x), networkPredictions);
}
Code example origin: apache/opennlp-sandbox
public static MultiLayerNetwork train(WordVectors wordVectors, ObjectStream<NameSample> samples,
        int epochs, int windowSize, String[] labels) throws IOException {
    int vectorSize = 300;
    int layerSize = 256;

    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
            .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
            .updater(new RmsProp(0.01)).l2(0.001)
            .weightInit(WeightInit.XAVIER)
            .list()
            .layer(0, new GravesLSTM.Builder().nIn(vectorSize).nOut(layerSize)
                    .activation(Activation.TANH).build())
            .layer(1, new RnnOutputLayer.Builder().activation(Activation.SOFTMAX)
                    .lossFunction(LossFunctions.LossFunction.MCXENT).nIn(layerSize).nOut(3).build())
            .pretrain(false).backprop(true).build();

    MultiLayerNetwork net = new MultiLayerNetwork(conf);
    net.init();
    net.setListeners(new ScoreIterationListener(5));

    // TODO: Extract labels on the fly from the data
    DataSetIterator train = new NameSampleDataSetIterator(samples, wordVectors, windowSize, labels);

    System.out.println("Starting training");
    for (int i = 0; i < epochs; i++) {
        net.fit(train);
        train.reset();
        System.out.println(String.format("Finished epoch %d", i));
    }
    return net;
}
Code example origin: rahul-raj/Deeplearning4J
model.init();
model.setListeners(new ScoreIterationListener(100));
Code example origin: sjsdfg/dl4j-tutorials
public static void main(String[] args) {
    //Generate the training data
    DataSetIterator iterator = getTrainingData(batchSize, rng);

    //Create the network
    int numInput = 2;
    int numOutputs = 1;
    MultiLayerNetwork net = new MultiLayerNetwork(new NeuralNetConfiguration.Builder()
            .seed(seed)
            .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
            .weightInit(WeightInit.XAVIER)
            .updater(new Sgd(learningRate))
            .list()
            .layer(0, new OutputLayer.Builder(LossFunctions.LossFunction.MSE)
                    .activation(Activation.IDENTITY)
                    .nIn(numInput).nOut(numOutputs).build())
            .pretrain(false).backprop(true).build()
    );
    net.init();
    net.setListeners(new ScoreIterationListener(1));

    for (int i = 0; i < nEpochs; i++) {
        iterator.reset();
        net.fit(iterator);
    }

    final INDArray input = Nd4j.create(new double[] { 0.111111, 0.3333333333333 }, new int[] { 1, 2 });
    INDArray out = net.output(input, false);
    System.out.println(out);
}
Code example origin: mccorby/FederatedAndroidTrainer
model.setListeners(mIterationListener); //print the score with every iteration
Code example origin: apache/opennlp-sandbox
net.setListeners(new ScoreIterationListener(1));
return net;
Code example origin: sjsdfg/dl4j-tutorials
);
net.init();
net.setListeners(new ScoreIterationListener(1));
Code example origin: sjsdfg/dl4j-tutorials
public static void main(String[] args) {
    List<Data> data = readFile("");
    RegIterator trainIter = new RegIterator(data, 1, 5, 5);

    // build the model
    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
            .seed(1234)
            .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
            .weightInit(WeightInit.XAVIER)
            .updater(new Nesterovs(0.01, 0.9))
            .list().layer(0, new GravesLSTM.Builder().activation(Activation.TANH).nIn(1).nOut(32)
                    .build())
            .layer(1, new RnnOutputLayer.Builder(LossFunctions.LossFunction.MSE)
                    .activation(Activation.IDENTITY).nIn(32).nOut(1).build())
            .build();

    MultiLayerNetwork network = new MultiLayerNetwork(conf);
    network.setListeners(new ScoreIterationListener(1));
    network.init();

    int epoch = 10;
    for (int i = 0; i < epoch; i++) {
        while (trainIter.hasNext()) {
            DataSet dataSets = trainIter.next();
            network.fit(dataSets);
        }
        trainIter.reset();
    }
}
Code example origin: mccorby/FederatedAndroidTrainer
public void buildModel() {
    if (model == null) {
        int iterations = 1000;
        long seed = 6;
        MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                .seed(seed)
                .iterations(iterations)
                .activation(Activation.TANH)
                .weightInit(WeightInit.XAVIER)
                .learningRate(0.1)
                .regularization(true).l2(1e-4)
                .list()
                .layer(0, new DenseLayer.Builder().nIn(numInputs).nOut(3)
                        .build())
                .layer(1, new DenseLayer.Builder().nIn(3).nOut(3)
                        .build())
                .layer(2, new OutputLayer.Builder(LossFunctions.LossFunction.MEAN_SQUARED_LOGARITHMIC_ERROR)
                        .activation(Activation.SOFTMAX)
                        .nIn(3).nOut(numClasses).build())
                .backprop(true).pretrain(false)
                .build();

        //run the model
        model = new MultiLayerNetwork(conf);
        model.init();
        model.setListeners(iterationListener);
    }
}
Code example origin: mccorby/FederatedAndroidTrainer
public void buildModel() {
    if (model == null) {
        int iterations = 1000;
        long seed = 6;
        MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                .seed(seed)
                .iterations(iterations)
                .activation(Activation.TANH)
                .weightInit(WeightInit.XAVIER)
                .learningRate(0.1)
                .regularization(true).l2(1e-4)
                .list()
                .layer(0, new DenseLayer.Builder().nIn(numInputs).nOut(3)
                        .build())
                .layer(1, new DenseLayer.Builder().nIn(3).nOut(3)
                        .build())
                .layer(2, new OutputLayer.Builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD)
                        .activation(Activation.SOFTMAX)
                        .nIn(3).nOut(numClasses).build())
                .backprop(true).pretrain(false)
                .build();

        //run the model
        model = new MultiLayerNetwork(conf);
        model.init();
        model.setListeners(iterationListener);
    }
}
Code example origin: mccorby/FederatedAndroidTrainer
public void buildModel() {
    //Create the network
    int numInput = 2;
    int numOutputs = 1;
    int nHidden = 10;
    mNetwork = new MultiLayerNetwork(new NeuralNetConfiguration.Builder()
            .seed(mSeed)
            .iterations(ITERATIONS)
            .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
            .learningRate(LEARNING_RATE)
            .weightInit(WeightInit.XAVIER)
            .updater(Updater.NESTEROVS)
            .list()
            .layer(0, new DenseLayer.Builder().nIn(numInput).nOut(nHidden)
                    .activation(Activation.TANH)
                    .name("input")
                    .build())
            .layer(1, new OutputLayer.Builder(LossFunctions.LossFunction.MSE)
                    .activation(Activation.IDENTITY)
                    .name("output")
                    .nIn(nHidden).nOut(numOutputs).build())
            .pretrain(false)
            .backprop(true)
            .build()
    );
    mNetwork.init();
    mNetwork.setListeners(mIterationListener);
}