public class FineTuneConfiguration
extends java.lang.Object
Modifier and Type | Class and Description
---|---
static class | FineTuneConfiguration.Builder
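A minimal usage sketch of the nested Builder, assuming it exposes fluent setters named after the fields summarized below (for example `updater`, `learningRate`, and `seed`) plus a `build()` method, and that the import paths follow the standard DL4J package layout. Fields left unset remain `null` and keep the original values of whatever configuration this is later applied to.

```java
import org.deeplearning4j.nn.conf.Updater;
import org.deeplearning4j.nn.transferlearning.FineTuneConfiguration;

public class FineTuneConfigurationExample {
    public static void main(String[] args) {
        // Override only the hyperparameters that should change during fine-tuning;
        // any field left unset keeps the value from the network being fine-tuned.
        FineTuneConfiguration fineTuneConf = new FineTuneConfiguration.Builder()
                .updater(Updater.ADAM)   // example choice of updater (assumed setter)
                .learningRate(1e-4)      // reduced learning rate for fine-tuning
                .seed(123L)              // fixed RNG seed for reproducibility
                .build();

        // Inspect the resulting configuration via its documented JSON form.
        System.out.println(fineTuneConf.toJson());
    }
}
```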
Modifier and Type | Field and Description
---|---
protected org.nd4j.linalg.activations.IActivation | activationFn
protected java.lang.Double | adamMeanDecay
protected java.lang.Double | adamVarDecay
protected java.lang.Boolean | backprop
protected BackpropType | backpropType
protected java.lang.Double | biasInit
protected java.lang.Double | biasLearningRate
protected ConvolutionMode | convolutionMode
protected Distribution | dist
protected java.lang.Double | dropOut
protected java.lang.Double | epsilon
protected GradientNormalization | gradientNormalization
protected java.lang.Double | gradientNormalizationThreshold
protected java.lang.Double | l1
protected java.lang.Double | l1Bias
protected java.lang.Double | l2
protected java.lang.Double | l2Bias
protected java.lang.Double | learningRate
protected LearningRatePolicy | learningRatePolicy
protected java.util.Map<java.lang.Integer,java.lang.Double> | learningRateSchedule
protected java.lang.Double | lrPolicyDecayRate
protected java.lang.Double | lrPolicyPower
protected java.lang.Double | lrPolicySteps
protected java.lang.Double | lrScoreBasedDecay
protected java.lang.Integer | maxNumLineSearchIterations
protected java.lang.Boolean | miniBatch
protected java.lang.Boolean | minimize
protected java.lang.Double | momentum
protected java.util.Map<java.lang.Integer,java.lang.Double> | momentumSchedule
protected java.lang.Integer | numIterations
protected OptimizationAlgorithm | optimizationAlgo
protected java.lang.Boolean | pretrain
protected java.lang.Double | rho
protected java.lang.Double | rmsDecay
protected java.lang.Long | seed
protected StepFunction | stepFunction
protected java.lang.Integer | tbpttBackLength
protected java.lang.Integer | tbpttFwdLength
protected Updater | updater
protected java.lang.Boolean | useDropConnect
protected java.lang.Boolean | useRegularization
protected WeightInit | weightInit
Constructor and Description
---
FineTuneConfiguration()
Modifier and Type | Method and Description
---|---
NeuralNetConfiguration | appliedNeuralNetConfiguration(NeuralNetConfiguration nnc)
NeuralNetConfiguration.Builder | appliedNeuralNetConfigurationBuilder()
void | applyToComputationGraphConfiguration(ComputationGraphConfiguration conf)
void | applyToMultiLayerConfiguration(MultiLayerConfiguration conf)
void | applyToNeuralNetConfiguration(NeuralNetConfiguration nnc)
static FineTuneConfiguration | fromJson(java.lang.String json)
static FineTuneConfiguration | fromYaml(java.lang.String yaml)
java.lang.String | toJson()
java.lang.String | toYaml()
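The toJson()/toYaml() and fromJson(String)/fromYaml(String) methods above let a configuration be persisted and restored as text. A minimal round-trip sketch using only the no-argument constructor and the serialization methods listed on this page (in practice the instance would normally come from FineTuneConfiguration.Builder):

```java
import org.deeplearning4j.nn.transferlearning.FineTuneConfiguration;

public class FineTuneSerializationExample {
    public static void main(String[] args) {
        // A default instance; all fields are unset until configured.
        FineTuneConfiguration conf = new FineTuneConfiguration();

        // JSON round-trip via the documented toJson()/fromJson(String) methods.
        String json = conf.toJson();
        FineTuneConfiguration fromJson = FineTuneConfiguration.fromJson(json);

        // The equivalent YAML round-trip via toYaml()/fromYaml(String).
        String yaml = conf.toYaml();
        FineTuneConfiguration fromYaml = FineTuneConfiguration.fromYaml(yaml);

        System.out.println(json);
        System.out.println(yaml);
    }
}
```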
protected org.nd4j.linalg.activations.IActivation activationFn
protected WeightInit weightInit
protected java.lang.Double biasInit
protected Distribution dist
protected java.lang.Double learningRate
protected java.lang.Double biasLearningRate
protected java.util.Map<java.lang.Integer,java.lang.Double> learningRateSchedule
protected java.lang.Double lrScoreBasedDecay
protected java.lang.Double l1
protected java.lang.Double l2
protected java.lang.Double l1Bias
protected java.lang.Double l2Bias
protected java.lang.Double dropOut
protected Updater updater
protected java.lang.Double momentum
protected java.util.Map<java.lang.Integer,java.lang.Double> momentumSchedule
protected java.lang.Double epsilon
protected java.lang.Double rho
protected java.lang.Double rmsDecay
protected java.lang.Double adamMeanDecay
protected java.lang.Double adamVarDecay
protected java.lang.Boolean miniBatch
protected java.lang.Integer numIterations
protected java.lang.Integer maxNumLineSearchIterations
protected java.lang.Long seed
protected java.lang.Boolean useRegularization
protected OptimizationAlgorithm optimizationAlgo
protected StepFunction stepFunction
protected java.lang.Boolean useDropConnect
protected java.lang.Boolean minimize
protected GradientNormalization gradientNormalization
protected java.lang.Double gradientNormalizationThreshold
protected LearningRatePolicy learningRatePolicy
protected java.lang.Double lrPolicyDecayRate
protected java.lang.Double lrPolicySteps
protected java.lang.Double lrPolicyPower
protected ConvolutionMode convolutionMode
protected java.lang.Boolean pretrain
protected java.lang.Boolean backprop
protected BackpropType backpropType
protected java.lang.Integer tbpttFwdLength
protected java.lang.Integer tbpttBackLength
public NeuralNetConfiguration appliedNeuralNetConfiguration(NeuralNetConfiguration nnc)
public void applyToNeuralNetConfiguration(NeuralNetConfiguration nnc)
public void applyToMultiLayerConfiguration(MultiLayerConfiguration conf)
public void applyToComputationGraphConfiguration(ComputationGraphConfiguration conf)
public NeuralNetConfiguration.Builder appliedNeuralNetConfigurationBuilder()
public java.lang.String toJson()
public java.lang.String toYaml()
public static FineTuneConfiguration fromJson(java.lang.String json)
public static FineTuneConfiguration fromYaml(java.lang.String yaml)
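In typical use a FineTuneConfiguration is handed to DL4J's transfer-learning API rather than applied to a configuration by hand. The sketch below assumes the companion TransferLearning.Builder from the same transferlearning package (not documented on this page) accepts a pretrained MultiLayerNetwork and a fineTuneConfiguration(...) setter; fields left unset in the FineTuneConfiguration keep the pretrained model's original values.

```java
import org.deeplearning4j.nn.conf.Updater;
import org.deeplearning4j.nn.multilayer.MultiLayerNetwork;
import org.deeplearning4j.nn.transferlearning.FineTuneConfiguration;
import org.deeplearning4j.nn.transferlearning.TransferLearning;

public class TransferLearningSketch {
    // Returns a copy of the pretrained network with the fine-tuning
    // overrides applied to its configuration (assumed TransferLearning API).
    public static MultiLayerNetwork fineTune(MultiLayerNetwork pretrained) {
        FineTuneConfiguration fineTuneConf = new FineTuneConfiguration.Builder()
                .updater(Updater.NESTEROVS)  // example override of the updater
                .learningRate(1e-4)          // smaller step size for fine-tuning
                .build();

        return new TransferLearning.Builder(pretrained)
                .fineTuneConfiguration(fineTuneConf)
                .build();
    }
}
```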