public abstract static class Layer.Builder<T extends Layer.Builder<T>>
extends java.lang.Object
Modifier and Type | Field and Description |
---|---|
protected org.nd4j.linalg.activations.IActivation | activationFn |
protected double | adamMeanDecay |
protected double | adamVarDecay |
protected double | biasInit |
protected double | biasLearningRate |
protected Distribution | dist |
protected double | dropOut |
protected double | epsilon |
protected GradientNormalization | gradientNormalization |
protected double | gradientNormalizationThreshold |
protected double | l1 |
protected double | l1Bias |
protected double | l2 |
protected double | l2Bias |
protected java.lang.String | layerName |
protected double | learningRate |
protected LearningRatePolicy | learningRatePolicy |
protected java.util.Map<java.lang.Integer,java.lang.Double> | learningRateSchedule |
protected double | momentum |
protected java.util.Map<java.lang.Integer,java.lang.Double> | momentumAfter |
protected double | rho |
protected double | rmsDecay |
protected Updater | updater |
protected WeightInit | weightInit |
Constructor and Description |
---|
Builder() |
Modifier and Type | Method and Description |
---|---|
T | activation(org.nd4j.linalg.activations.Activation activation) |
T | activation(org.nd4j.linalg.activations.IActivation activationFunction) |
T | activation(java.lang.String activationFunction) Deprecated. Use activation(Activation) or activation(IActivation) instead. |
T | adamMeanDecay(double adamMeanDecay) Mean decay rate for the Adam updater. |
T | adamVarDecay(double adamVarDecay) Variance decay rate for the Adam updater. |
T | biasInit(double biasInit) |
T | biasLearningRate(double biasLearningRate) Bias learning rate. |
abstract <E extends Layer> E | build() |
T | dist(Distribution dist) Distribution to sample initial weights from. |
T | dropOut(double dropOut) Dropout. |
T | epsilon(double epsilon) Epsilon value for the Adagrad and Adadelta updaters. |
T | gradientNormalization(GradientNormalization gradientNormalization) Gradient normalization strategy. |
T | gradientNormalizationThreshold(double threshold) Threshold for gradient normalization; used only for GradientNormalization.ClipL2PerLayer, GradientNormalization.ClipL2PerParamType, and GradientNormalization.ClipElementWiseAbsoluteValue. Interpreted as an L2 threshold for the first two clipping types and as an absolute-value threshold for the last; not used otherwise. |
T | l1(double l1) L1 regularization coefficient (weights only). |
T | l1Bias(double l1Bias) L1 regularization coefficient for the bias. |
T | l2(double l2) L2 regularization coefficient (weights only). |
T | l2Bias(double l2Bias) L2 regularization coefficient for the bias. |
T | learningRate(double learningRate) Learning rate. |
T | learningRateDecayPolicy(LearningRatePolicy policy) Learning rate decay policy. |
T | learningRateSchedule(java.util.Map<java.lang.Integer,java.lang.Double> learningRateSchedule) Learning rate schedule. |
T | momentum(double momentum) Momentum rate. |
T | momentumAfter(java.util.Map<java.lang.Integer,java.lang.Double> momentumAfter) Momentum schedule. |
T | name(java.lang.String layerName) Assigns a string name to the layer. |
T | rho(double rho) Adadelta coefficient, rho. |
T | rmsDecay(double rmsDecay) Decay rate for RMSProp. |
T | updater(Updater updater) Gradient updater. |
T | weightInit(WeightInit weightInit) Weight initialization scheme. |
protected java.lang.String layerName
protected org.nd4j.linalg.activations.IActivation activationFn
protected WeightInit weightInit
protected double biasInit
protected Distribution dist
protected double learningRate
protected double biasLearningRate
protected java.util.Map<java.lang.Integer,java.lang.Double> learningRateSchedule
protected double momentum
protected java.util.Map<java.lang.Integer,java.lang.Double> momentumAfter
protected double l1
protected double l2
protected double l1Bias
protected double l2Bias
protected double dropOut
protected Updater updater
protected double rho
protected double epsilon
protected double rmsDecay
protected double adamMeanDecay
protected double adamVarDecay
protected GradientNormalization gradientNormalization
protected double gradientNormalizationThreshold
protected LearningRatePolicy learningRatePolicy
public T name(java.lang.String layerName)
@Deprecated public T activation(java.lang.String activationFunction)
Deprecated. Use activation(Activation) or activation(IActivation) instead.
public T activation(org.nd4j.linalg.activations.IActivation activationFunction)
public T activation(org.nd4j.linalg.activations.Activation activation)
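As an illustration, a minimal sketch of the two non-deprecated overloads. DenseLayer (a concrete Layer subclass whose builder inherits these methods), its nIn/nOut settings, and ActivationLReLU as an example IActivation implementation are assumptions drawn from the wider DL4J/ND4J API, not from this class:

```java
import org.deeplearning4j.nn.conf.layers.DenseLayer;
import org.nd4j.linalg.activations.Activation;
import org.nd4j.linalg.activations.impl.ActivationLReLU;

public class ActivationExample {
    public static void main(String[] args) {
        // Preferred: pass an Activation enum value.
        DenseLayer relu = new DenseLayer.Builder()
                .nIn(784).nOut(256)
                .activation(Activation.RELU)
                .build();

        // Equivalent: pass an IActivation instance directly,
        // e.g. a leaky ReLU with a custom alpha.
        DenseLayer leaky = new DenseLayer.Builder()
                .nIn(784).nOut(256)
                .activation(new ActivationLReLU(0.05))
                .build();
    }
}
```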
public T weightInit(WeightInit weightInit)
See also: WeightInit
public T biasInit(double biasInit)
public T dist(Distribution dist)
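A hedged sketch of how weightInit, biasInit and dist typically interact: dist(...) supplies the sampling distribution when the weight initialization scheme is DISTRIBUTION. The DenseLayer subclass and NormalDistribution class are assumptions from the surrounding DL4J API:

```java
import org.deeplearning4j.nn.conf.distribution.NormalDistribution;
import org.deeplearning4j.nn.conf.layers.DenseLayer;
import org.deeplearning4j.nn.weights.WeightInit;

public class WeightInitExample {
    public static void main(String[] args) {
        // Xavier initialization for the weights, bias initialized to 0.1.
        DenseLayer xavier = new DenseLayer.Builder()
                .nIn(100).nOut(50)
                .weightInit(WeightInit.XAVIER)
                .biasInit(0.1)
                .build();

        // Sample initial weights from N(0, 0.01); dist(...) is consulted
        // when weightInit is set to DISTRIBUTION.
        DenseLayer sampled = new DenseLayer.Builder()
                .nIn(100).nOut(50)
                .weightInit(WeightInit.DISTRIBUTION)
                .dist(new NormalDistribution(0.0, 0.01))
                .build();
    }
}
```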
public T learningRate(double learningRate)
public T biasLearningRate(double biasLearningRate)
public T learningRateSchedule(java.util.Map<java.lang.Integer,java.lang.Double> learningRateSchedule)
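A sketch of a per-layer learning rate with a schedule map, assuming the map is keyed by iteration and activated via LearningRatePolicy.Schedule (see learningRateDecayPolicy below), using the same example DenseLayer setup as above:

```java
import java.util.HashMap;
import java.util.Map;

import org.deeplearning4j.nn.conf.LearningRatePolicy;
import org.deeplearning4j.nn.conf.layers.DenseLayer;

public class LearningRateExample {
    public static void main(String[] args) {
        // Drop the learning rate at iteration 1000 and again at 3000.
        Map<Integer, Double> schedule = new HashMap<>();
        schedule.put(1000, 1e-2);
        schedule.put(3000, 1e-3);

        DenseLayer layer = new DenseLayer.Builder()
                .nIn(100).nOut(50)
                .learningRate(1e-1)          // initial learning rate
                .biasLearningRate(2e-1)      // separate rate for bias parameters
                .learningRateDecayPolicy(LearningRatePolicy.Schedule)
                .learningRateSchedule(schedule)
                .build();
    }
}
```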
public T l1(double l1)
L1 regularization coefficient (weights only). Use l1Bias(double) to configure the L1 regularization coefficient for the bias.
public T l2(double l2)
L2 regularization coefficient (weights only). Use l2Bias(double) to configure the L2 regularization coefficient for the bias.
public T l1Bias(double l1Bias)
L1 regularization coefficient for the bias. See also: l1(double)
public T l2Bias(double l2Bias)
L2 regularization coefficient for the bias. See also: l2(double)
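A short sketch of the weight/bias split for regularization, using the same assumed DenseLayer setup. Note that, depending on the DL4J version, regularization may also need to be enabled on the enclosing network configuration builder for these coefficients to take effect:

```java
import org.deeplearning4j.nn.conf.layers.DenseLayer;

public class RegularizationExample {
    public static void main(String[] args) {
        // L2 on the weights, a small L1 on the bias; weight and bias terms
        // are configured independently.
        DenseLayer layer = new DenseLayer.Builder()
                .nIn(100).nOut(50)
                .l2(1e-4)        // weights only
                .l1Bias(1e-5)    // bias only
                .build();
    }
}
```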
public T dropOut(double dropOut)
public T momentum(double momentum)
public T momentumAfter(java.util.Map<java.lang.Integer,java.lang.Double> momentumAfter)
public T updater(Updater updater)
See also: Updater
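A sketch combining dropOut, updater, momentum and momentumAfter, assuming the Updater enum value NESTEROVS (SGD with Nesterov momentum) and the example DenseLayer setup used above:

```java
import java.util.Collections;
import java.util.Map;

import org.deeplearning4j.nn.conf.Updater;
import org.deeplearning4j.nn.conf.layers.DenseLayer;

public class UpdaterExample {
    public static void main(String[] args) {
        // Raise the momentum from 0.5 to 0.9 after iteration 100.
        Map<Integer, Double> momentumSchedule = Collections.singletonMap(100, 0.9);

        DenseLayer layer = new DenseLayer.Builder()
                .nIn(100).nOut(50)
                .dropOut(0.5)
                .updater(Updater.NESTEROVS)  // SGD with Nesterov momentum
                .momentum(0.5)
                .momentumAfter(momentumSchedule)
                .build();
    }
}
```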
public T rho(double rho)
Parameters: rho - Adadelta coefficient
public T rmsDecay(double rmsDecay)
public T epsilon(double epsilon)
Parameters: epsilon - Epsilon value to use for the Adagrad and Adadelta updaters
public T adamMeanDecay(double adamMeanDecay)
public T adamVarDecay(double adamVarDecay)
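A sketch of which hyperparameters pair with which updater (rho and epsilon with ADADELTA, rmsDecay with RMSPROP, adamMeanDecay and adamVarDecay with ADAM), again assuming the example DenseLayer setup:

```java
import org.deeplearning4j.nn.conf.Updater;
import org.deeplearning4j.nn.conf.layers.DenseLayer;

public class UpdaterHyperparamsExample {
    public static void main(String[] args) {
        // Adadelta: rho and epsilon are the relevant knobs.
        DenseLayer adadelta = new DenseLayer.Builder()
                .nIn(100).nOut(50)
                .updater(Updater.ADADELTA)
                .rho(0.95)
                .epsilon(1e-6)
                .build();

        // RMSProp: rmsDecay controls the moving-average decay.
        DenseLayer rmsprop = new DenseLayer.Builder()
                .nIn(100).nOut(50)
                .updater(Updater.RMSPROP)
                .rmsDecay(0.95)
                .build();

        // Adam: separate decay rates for the first (mean) and second (variance) moments.
        DenseLayer adam = new DenseLayer.Builder()
                .nIn(100).nOut(50)
                .updater(Updater.ADAM)
                .adamMeanDecay(0.9)
                .adamVarDecay(0.999)
                .build();
    }
}
```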
public T gradientNormalization(GradientNormalization gradientNormalization)
Parameters: gradientNormalization - Type of normalization to use. Defaults to None.
See also: GradientNormalization
public T gradientNormalizationThreshold(double threshold)
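A sketch of element-wise gradient clipping, where the threshold is interpreted as an absolute value (for the ClipL2* strategies it would be an L2 norm instead), assuming the example DenseLayer setup:

```java
import org.deeplearning4j.nn.conf.GradientNormalization;
import org.deeplearning4j.nn.conf.layers.DenseLayer;

public class GradientClippingExample {
    public static void main(String[] args) {
        // Clip each gradient element to [-1.0, 1.0].
        DenseLayer layer = new DenseLayer.Builder()
                .nIn(100).nOut(50)
                .gradientNormalization(GradientNormalization.ClipElementWiseAbsoluteValue)
                .gradientNormalizationThreshold(1.0)
                .build();
    }
}
```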
public T learningRateDecayPolicy(LearningRatePolicy policy)
Parameters: policy - Type of policy to use. Defaults to None.
public abstract <E extends Layer> E build()
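Putting it together, a hedged end-to-end sketch: build() returns the concrete layer type of the builder subclass, and the resulting layer is then passed to a network configuration. NeuralNetConfiguration, MultiLayerConfiguration, OutputLayer and the loss function are assumptions drawn from the wider DL4J API, not from this page:

```java
import org.deeplearning4j.nn.conf.MultiLayerConfiguration;
import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
import org.deeplearning4j.nn.conf.layers.DenseLayer;
import org.deeplearning4j.nn.conf.layers.OutputLayer;
import org.deeplearning4j.nn.weights.WeightInit;
import org.nd4j.linalg.activations.Activation;
import org.nd4j.linalg.lossfunctions.LossFunctions;

public class BuildExample {
    public static void main(String[] args) {
        // build() returns the concrete layer type of this builder subclass.
        DenseLayer hidden = new DenseLayer.Builder()
                .name("hidden")
                .nIn(784).nOut(256)
                .activation(Activation.RELU)
                .weightInit(WeightInit.XAVIER)
                .build();

        // The built layer is used inside a network configuration.
        MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                .seed(123)
                .list()
                .layer(0, hidden)
                .layer(1, new OutputLayer.Builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD)
                        .nIn(256).nOut(10)
                        .activation(Activation.SOFTMAX)
                        .build())
                .build();
    }
}
```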