This article collects code examples of the Java method org.deeplearning4j.nn.multilayer.MultiLayerNetwork.getLayers() and shows how MultiLayerNetwork.getLayers() is used in practice. The examples are drawn mainly from GitHub, Stack Overflow, Maven and similar platforms, extracted from a number of selected open-source projects, so they should be useful as references. Details of the MultiLayerNetwork.getLayers() method:
Package path: org.deeplearning4j.nn.multilayer.MultiLayerNetwork
Class: MultiLayerNetwork
Method: getLayers
Description: none provided
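Before the extracted snippets, here is a minimal, self-contained sketch of a typical getLayers() call. The network architecture below (layer sizes, activations, loss function) is purely illustrative and not taken from any of the projects quoted later; exact builder options also vary slightly between DL4J versions.
import org.deeplearning4j.nn.api.Layer;
import org.deeplearning4j.nn.conf.MultiLayerConfiguration;
import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
import org.deeplearning4j.nn.conf.layers.DenseLayer;
import org.deeplearning4j.nn.conf.layers.OutputLayer;
import org.deeplearning4j.nn.multilayer.MultiLayerNetwork;
import org.nd4j.linalg.activations.Activation;
import org.nd4j.linalg.lossfunctions.LossFunctions.LossFunction;

public class GetLayersExample {
    public static void main(String[] args) {
        // Illustrative two-layer network; the sizes are arbitrary
        MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                .seed(123)
                .list()
                .layer(0, new DenseLayer.Builder().nIn(4).nOut(8)
                        .activation(Activation.RELU).build())
                .layer(1, new OutputLayer.Builder(LossFunction.MCXENT).nIn(8).nOut(3)
                        .activation(Activation.SOFTMAX).build())
                .build();

        MultiLayerNetwork net = new MultiLayerNetwork(conf);
        net.init();

        // getLayers() returns the initialized Layer instances in network order
        Layer[] layers = net.getLayers();
        for (int i = 0; i < layers.length; i++) {
            System.out.println("Layer " + i + ": " + layers[i].getClass().getSimpleName()
                    + ", numParams = " + layers[i].numParams());
        }
    }
}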
Code example source: deeplearning4j/dl4j-examples
Layer[] layers = net.getLayers();
long totalNumParams = 0;
for (int i = 0; i < layers.length; i++) {
    totalNumParams += layers[i].numParams();   // accumulate the parameter count of each layer
}
System.out.println("Total number of network parameters: " + totalNumParams);
Code example source: org.deeplearning4j/deeplearning4j-nn
/**
* Get the output layer
*
* @return the output layer (the last element of getLayers())
*/
public Layer getOutputLayer() {
return getLayers()[getLayers().length - 1];
}
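For reference, a tiny sketch (assuming an already initialized MultiLayerNetwork named net) showing that getOutputLayer() simply returns the last element of getLayers():
Layer[] layers = net.getLayers();
System.out.println(net.getOutputLayer() == layers[layers.length - 1]);   // expected: true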
Code example source: org.deeplearning4j/deeplearning4j-nn
@Override
protected Layer[] getOrderedLayers() {
return network.getLayers();
}
Code example source: org.deeplearning4j/deeplearning4j-nn
/**
* Triggers the activation of the last hidden layer, i.e. not logistic regression
*
* @return the activation of the last hidden layer given the last input to the network
*/
public INDArray activate() {
return getLayers()[getLayers().length - 1].activate();
}
Code example source: org.deeplearning4j/deeplearning4j-nn
public MultiLayerUpdater(MultiLayerNetwork network, INDArray updaterState) {
super(network, updaterState);
layersByName = new HashMap<>();
Layer[] l = network.getLayers();
for (int i = 0; i < l.length; i++) {
layersByName.put(String.valueOf(i), l[i]);
}
}
Code example source: org.deeplearning4j/deeplearning4j-nn
/**
 * Return the weight matrices for a multi-layer network
 * @param network the network to get the weights for
 * @return the weight matrices for a given multi-layer network
*/
public static List<INDArray> weightMatrices(MultiLayerNetwork network) {
List<INDArray> ret = new ArrayList<>();
for (int i = 0; i < network.getLayers().length; i++) {
ret.add(network.getLayers()[i].getParam(DefaultParamInitializer.WEIGHT_KEY));
}
return ret;
}
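A related sketch, assuming an already initialized MultiLayerNetwork named net: layers without weights (e.g. subsampling layers) have no WEIGHT_KEY entry, so it is safer to check the parameter table before calling getParam():
for (Layer layer : net.getLayers()) {
    // paramTable() maps parameter names such as "W" and "b" to their arrays
    if (layer.paramTable() != null && layer.paramTable().containsKey(DefaultParamInitializer.WEIGHT_KEY)) {
        INDArray w = layer.getParam(DefaultParamInitializer.WEIGHT_KEY);
        System.out.println("Layer " + layer.getIndex() + " weight shape: " + java.util.Arrays.toString(w.shape()));
    }
}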
Code example source: CampagneLaboratory/variationanalysis
private int getModelActivationNumber(MultiLayerNetwork model, FeatureMapper modelFeatureMapper) {
int numActivations = 0;
Layer[] layers = model.getLayers();
INDArray inputFeatures = Nd4j.zeros(1, modelFeatureMapper.numberOfFeatures());
int sum = model.feedForward(inputFeatures, false).stream().mapToInt(indArray ->
indArray.data().asFloat().length).sum();
System.out.println("Number of activations: " + sum);
return sum;
}
Code example source: org.deeplearning4j/deeplearning4j-nn
/**
* Returns a 1 x m vector where the vector is composed of
* a flattened vector of all of the weights for the
* various neuralNets(w,hbias NOT VBIAS) and output layer
*
* @return the params for this neural net
*/
public INDArray params(boolean backwardOnly) {
if (backwardOnly)
return params();
List<INDArray> params = new ArrayList<>();
for (Layer layer : getLayers()) {
INDArray layerParams = layer.params();
if (layerParams != null)
params.add(layerParams); //may be null: subsampling etc layers
}
return Nd4j.toFlattened('f', params);
}
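As a quick sanity check (a sketch, assuming an already initialized MultiLayerNetwork named net), the length of the flattened parameter vector returned by params() should equal the sum of numParams() over all layers returned by getLayers():
long total = 0;
for (Layer layer : net.getLayers()) {
    total += layer.numParams();                        // per-layer parameter count
}
System.out.println(total == net.params().length());   // expected: true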
Code example source: org.deeplearning4j/deeplearning4j-modelimport
if (model instanceof MultiLayerNetwork)
    layersFromModel = ((MultiLayerNetwork) model).getLayers();
else
    layersFromModel = ((ComputationGraph) model).getLayers();
Code example source: org.deeplearning4j/deeplearning4j-ui_2.11
if (model instanceof MultiLayerNetwork) {
    layers = ((MultiLayerNetwork) model).getLayers();
} else if (model instanceof ComputationGraph) {
    layers = ((ComputationGraph) model).getLayers();
Code example source: org.deeplearning4j/deeplearning4j-ui_2.10
if (model instanceof MultiLayerNetwork) {
MultiLayerNetwork l = (MultiLayerNetwork) model;
for (Layer layer : l.getLayers()) {
if (!(layer instanceof FrozenLayer) && layer.type() == Layer.Type.CONVOLUTIONAL) {
INDArray output = layer.activate();
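The UI snippet above walks getLayers() to pick out convolutional layers. A condensed sketch of that filtering pattern, assuming an already initialized MultiLayerNetwork named net:
int convCount = 0;
for (Layer layer : net.getLayers()) {
    // skip frozen layers and anything that is not a convolution layer
    if (!(layer instanceof FrozenLayer) && layer.type() == Layer.Type.CONVOLUTIONAL) {
        convCount++;
    }
}
System.out.println("Convolutional layers: " + convCount);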
Code example source: org.deeplearning4j/deeplearning4j-nn
org.deeplearning4j.nn.api.Layer[] layers = editedModel.getLayers();
for (int i = frozenTill; i >= 0; i--) {
    layers[i] = new FrozenLayer(layers[i]);   // wrap the layer so its parameters are no longer updated
}
Code example source: org.deeplearning4j/deeplearning4j-nn
flattenedParams = params.dup();
int idx = 0;
for (int i = 0; i < getLayers().length; i++) {
Layer layer = getLayer(i);
int range = layer.numParams();
Code example source: neo4j-contrib/neo4j-ml-procedures
List<Node> result = new ArrayList<>();
int layerCount = model.getnLayers();
for (Layer layer : model.getLayers()) {
Node node = node("Layer",
"type", layer.type().name(), "index", layer.getIndex(),
Code example source: org.deeplearning4j/deeplearning4j-nn
MultiLayerNetwork network = (MultiLayerNetwork) model;
try {
if (network.getLayers() != null && network.getLayers().length > 0) {
for (Layer layer : network.getLayers()) {
if (layer instanceof RBM
|| layer instanceof org.deeplearning4j.nn.layers.feedforward.rbm.RBM) {
Code example source: org.deeplearning4j/deeplearning4j-nn
/**
 * Clones the MultiLayerNetwork
 * @return a clone of this network, including parameters and (if present) updater state
 */
@Override
public MultiLayerNetwork clone() {
MultiLayerConfiguration conf = this.layerWiseConfigurations.clone();
MultiLayerNetwork ret = new MultiLayerNetwork(conf);
ret.init(this.params().dup(), false);
if (solver != null) {
//If solver is null: updater hasn't been initialized -> getUpdater call will force initialization, however
Updater u = this.getUpdater();
INDArray updaterState = u.getStateViewArray();
if (updaterState != null) {
ret.getUpdater().setStateViewArray(ret, updaterState.dup(), false);
}
}
if (hasAFrozenLayer()) {
//correct layers to frozen layers
Layer[] clonedLayers = ret.getLayers();
for (int i = 0; i < layers.length; i++) {
if (layers[i] instanceof FrozenLayer) {
clonedLayers[i] = new FrozenLayer(ret.getLayer(i));
}
}
ret.setLayers(clonedLayers);
}
return ret;
}
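A short sketch of clone() together with getLayers(), assuming an already initialized MultiLayerNetwork named net; the clone holds its own Layer instances and parameter arrays:
MultiLayerNetwork copy = net.clone();
System.out.println(copy.params().equals(net.params()));                   // same parameter values: expected true
System.out.println(copy.getLayers() == net.getLayers());                  // independent Layer arrays: expected false
System.out.println(copy.getLayers().length == net.getLayers().length);    // same depth: expected true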
Code example source: org.deeplearning4j/deeplearning4j-nn
private void initHelperMLN() {
if (applyFrozen) {
org.deeplearning4j.nn.api.Layer[] layers = origMLN.getLayers();
for (int i = frozenTill; i >= 0; i--) {
//unchecked?
layers[i] = new FrozenLayer(layers[i]);
}
origMLN.setLayers(layers);
}
for (int i = 0; i < origMLN.getnLayers(); i++) {
if (origMLN.getLayer(i) instanceof FrozenLayer) {
frozenInputLayer = i;
}
}
List<NeuralNetConfiguration> allConfs = new ArrayList<>();
for (int i = frozenInputLayer + 1; i < origMLN.getnLayers(); i++) {
allConfs.add(origMLN.getLayer(i).conf());
}
MultiLayerConfiguration c = origMLN.getLayerWiseConfigurations();
unFrozenSubsetMLN = new MultiLayerNetwork(new MultiLayerConfiguration.Builder().backprop(c.isBackprop())
.inputPreProcessors(c.getInputPreProcessors()).pretrain(c.isPretrain())
.backpropType(c.getBackpropType()).tBPTTForwardLength(c.getTbpttFwdLength())
.tBPTTBackwardLength(c.getTbpttBackLength()).confs(allConfs).build());
unFrozenSubsetMLN.init();
//copy over params
for (int i = frozenInputLayer + 1; i < origMLN.getnLayers(); i++) {
unFrozenSubsetMLN.getLayer(i - frozenInputLayer - 1).setParams(origMLN.getLayer(i).params());
}
//unFrozenSubsetMLN.setListeners(origMLN.getListeners());
}
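The helper above freezes the leading layers by replacing them with FrozenLayer wrappers through getLayers()/setLayers(). A condensed sketch of that pattern, assuming an already initialized MultiLayerNetwork named net in which only the first layer should be frozen (the index is illustrative):
int frozenTill = 0;                           // illustrative: freeze layers 0..frozenTill
Layer[] layers = net.getLayers();
for (int i = frozenTill; i >= 0; i--) {
    layers[i] = new FrozenLayer(layers[i]);   // a FrozenLayer keeps its weights fixed during further training
}
net.setLayers(layers);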
The content above is collected from the web; if it infringes any rights, please contact the author to have it removed.