variable name changes

parent 139baf52
@@ -13,7 +13,7 @@ import java.util.*;
 public class SerialCompositeElementSymbol extends CompositeElementSymbol {
 protected List<List<ArchitectureElementSymbol>> episodicSubNetworks = new ArrayList<>(new ArrayList<>());
-protected boolean anyEpisodicLocalAdaption = false;
+protected boolean anyEpisodicLocalAdaptation = false;
 protected void setElements(List<ArchitectureElementSymbol> elements) {
 ArchitectureElementSymbol previous = null;
@@ -60,10 +60,10 @@ public class SerialCompositeElementSymbol extends CompositeElementSymbol {
 public List<List<ArchitectureElementSymbol>> getEpisodicSubNetworks() {
 return episodicSubNetworks;
 }
-protected void setAnyEpisodicLocalAdaption(boolean isUsed){ anyEpisodicLocalAdaption = isUsed; }
+protected void setAnyEpisodicLocalAdaptation(boolean value) { anyEpisodicLocalAdaptation = value; }
-public boolean getAnyEpisodicLocalAdaption(){ return anyEpisodicLocalAdaption; }
+public boolean getAnyEpisodicLocalAdaptation() { return anyEpisodicLocalAdaptation; }
 @Override
 public void setInputElement(ArchitectureElementSymbol inputElement) {
......
@@ -117,9 +117,15 @@ public class AllPredefinedLayers {
 public static final String REPLAY_BATCH_SIZE_NAME = "replayBatchSize";
 public static final String REPLAY_STEPS_NAME = "replaySteps";
 public static final String REPLAY_GRADIENT_STEPS_NAME = "replayGradientSteps";
-public static final String USE_LOCAL_ADAPTION_NAME = "useLocalAdaption";
-public static final String LOCAL_ADAPTION_K_NAME = "localAdaptionK";
-public static final String LOCAL_ADAPTION_GRADIENT_STEPS_NAME = "localAdaptionGradientSteps";
+public static final String USE_LOCAL_ADAPTATION_NAME = "useLocalAdaptation";
+public static final String LOCAL_ADAPTATION_K_NAME = "localAdaptationK";
+public static final String LOCAL_ADAPTATION_GRADIENT_STEPS_NAME = "localAdaptationGradientSteps";
+public static final String MAX_STORED_SAMPLES_NAME = "maxStoredSamples";
+public static final String MEMORY_REPLACEMENT_STRATEGY_NAME = "memoryReplacementStrategy";
+public static final String MEMORY_STORE_PROB_NAME = "memoryStoreProb";
+public static final String QUERY_NET_DIR_NAME = "queryNetDir";
+public static final String QUERY_NET_PREFIX_NAME = "queryNetPrefix";
+public static final String QUERY_NET_NUM_INPUTS_NAME = "queryNetNumInputs";
 //parameters for large memory layer
 public static final String SUB_KEY_SIZE_NAME = "subKeySize";
@@ -128,14 +134,6 @@
 public static final String K_NAME = "k";
 public static final String NUM_HEADS_NAME = "numHeads";
 public static final String VALUES_DIM_NAME = "valuesDim";
-public static final String MEMORY_REPLACEMENT_STRATEGY_NAME = "memoryReplacementStrategy";
-//parameters for episodic memory layer
-public static final String MAX_STORED_SAMPLES_NAME = "maxStoredSamples";
-public static final String REPLAY_MEMORY_STORE_PROB_NAME = "replayMemoryStoreProb";
-public static final String QUERY_NET_DIR_NAME = "queryNetDir";
-public static final String QUERY_NET_PREFIX_NAME = "queryNetPrefix";
-public static final String QUERY_NET_NUM_INPUTS_NAME = "queryNetNumInputs";
 //possible String values
 public static final String PADDING_VALID = "valid";
......
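For reference (not part of this commit): the AllPredefinedLayers hunks above amount to renaming useLocalAdaption, localAdaptionK, localAdaptionGradientSteps and replayMemoryStoreProb to useLocalAdaptation, localAdaptationK, localAdaptationGradientSteps and memoryStoreProb, and moving the episodic-memory constants next to the replay constants. The sketch below is a hypothetical migration helper built only from that mapping; it is not something the repository provides, and the layer call in main is an illustrative network snippet, not taken from the source.

```java
import java.util.LinkedHashMap;
import java.util.Map;

// Hypothetical helper (illustration only): maps the parameter names removed in this
// commit to their new spellings so existing network descriptions can be updated.
public final class ParameterNameMigration {

    private static final Map<String, String> RENAMES = new LinkedHashMap<>();
    static {
        RENAMES.put("useLocalAdaption", "useLocalAdaptation");
        RENAMES.put("localAdaptionK", "localAdaptationK");
        RENAMES.put("localAdaptionGradientSteps", "localAdaptationGradientSteps");
        RENAMES.put("replayMemoryStoreProb", "memoryStoreProb");
    }

    // Replaces every old parameter name in a network description with its new name.
    static String migrate(String networkSource) {
        String result = networkSource;
        for (Map.Entry<String, String> rename : RENAMES.entrySet()) {
            result = result.replace(rename.getKey(), rename.getValue());
        }
        return result;
    }

    public static void main(String[] args) {
        System.out.println(migrate("EpisodicMemory(useLocalAdaption=true, localAdaptionK=4)"));
        // -> EpisodicMemory(useLocalAdaptation=true, localAdaptationK=4)
    }
}
```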
@@ -50,7 +50,7 @@ public class DotProductSelfAttention extends PredefinedLayerDeclaration {
 List<ParameterSymbol> parameters = new ArrayList<>(Arrays.asList(
 new ParameterSymbol.Builder()
 .name(AllPredefinedLayers.SCALE_FACTOR_NAME)
-.constraints(Constraints.POSITIVE)
+.constraints(Constraints.POSITIVE_OR_MINUS_ONE)
 .defaultValue(-1)
 .build(),
 new ParameterSymbol.Builder()
@@ -60,12 +60,12 @@ public class DotProductSelfAttention extends PredefinedLayerDeclaration {
 .build(),
 new ParameterSymbol.Builder()
 .name(AllPredefinedLayers.DIM_KEYS_NAME)
-.constraints(Constraints.INTEGER, Constraints.POSITIVE)
+.constraints(Constraints.INTEGER, Constraints.POSITIVE_OR_MINUS_ONE)
 .defaultValue(-1)
 .build(),
 new ParameterSymbol.Builder()
 .name(AllPredefinedLayers.DIM_VALUES_NAME)
-.constraints(Constraints.INTEGER, Constraints.POSITIVE)
+.constraints(Constraints.INTEGER, Constraints.POSITIVE_OR_MINUS_ONE)
 .defaultValue(-1)
 .build(),
 new ParameterSymbol.Builder()
......
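The DotProductSelfAttention change relaxes POSITIVE to POSITIVE_OR_MINUS_ONE so that the existing defaultValue(-1) of scaleFactor, dimKeys and dimValues passes validation, -1 presumably acting as a "not set / derive from the input" sentinel. A minimal sketch of that convention as assumed here; the real check lives in the Constraints enum, which this diff does not show.

```java
// Minimal sketch of the assumed POSITIVE_OR_MINUS_ONE convention: a value is accepted
// if it is strictly positive, or if it is exactly -1 (the default used above).
// Illustration only, not the Constraints enum's actual implementation.
public final class PositiveOrMinusOneSketch {

    private PositiveOrMinusOneSketch() { }

    static boolean isPositiveOrMinusOne(int value) {
        return value > 0 || value == -1;
    }

    public static void main(String[] args) {
        System.out.println(isPositiveOrMinusOne(-1)); // true: keep the default / derive the value
        System.out.println(isPositiveOrMinusOne(64)); // true: explicit positive dimension
        System.out.println(isPositiveOrMinusOne(0));  // false: rejected
    }
}
```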
@@ -74,29 +74,34 @@ public class EpisodicMemory extends PredefinedLayerDeclaration {
 .defaultValue(1)
 .build(),
 new ParameterSymbol.Builder()
-.name(AllPredefinedLayers.REPLAY_MEMORY_STORE_PROB_NAME)
-.constraints(Constraints.NUMBER, Constraints.BETWEEN_ZERO_AND_ONE)
-.defaultValue(1)
-.build(),
-new ParameterSymbol.Builder()
-.name(AllPredefinedLayers.USE_LOCAL_ADAPTION_NAME)
+.name(AllPredefinedLayers.USE_LOCAL_ADAPTATION_NAME)
 .constraints(Constraints.BOOLEAN)
 .defaultValue(true)
 .build(),
 new ParameterSymbol.Builder()
-.name(AllPredefinedLayers.LOCAL_ADAPTION_GRADIENT_STEPS_NAME)
+.name(AllPredefinedLayers.LOCAL_ADAPTATION_GRADIENT_STEPS_NAME)
 .constraints(Constraints.INTEGER, Constraints.POSITIVE)
 .defaultValue(1)
 .build(),
 new ParameterSymbol.Builder()
+.name(AllPredefinedLayers.LOCAL_ADAPTATION_K_NAME)
+.constraints(Constraints.INTEGER, Constraints.POSITIVE)
+.defaultValue(1)
+.build(),
+new ParameterSymbol.Builder()
+.name(AllPredefinedLayers.MEMORY_STORE_PROB_NAME)
+.constraints(Constraints.NUMBER, Constraints.BETWEEN_ZERO_AND_ONE)
+.defaultValue(1)
+.build(),
+new ParameterSymbol.Builder()
 .name(AllPredefinedLayers.MAX_STORED_SAMPLES_NAME)
 .constraints(Constraints.INTEGER, Constraints.POSITIVE_OR_MINUS_ONE)
 .defaultValue(-1)
 .build(),
 new ParameterSymbol.Builder()
-.name(AllPredefinedLayers.LOCAL_ADAPTION_K_NAME)
-.constraints(Constraints.INTEGER, Constraints.POSITIVE)
-.defaultValue(1)
+.name(AllPredefinedLayers.MEMORY_REPLACEMENT_STRATEGY_NAME)
+.constraints(Constraints.MEMORY_REPLACEMENT_STRATEGY_TYPE)
+.defaultValue(AllPredefinedLayers.REPLACE_OLDEST)
 .build(),
 new ParameterSymbol.Builder()
 .name(AllPredefinedLayers.QUERY_NET_DIR_NAME)
@@ -109,11 +114,6 @@ public class EpisodicMemory extends PredefinedLayerDeclaration {
 new ParameterSymbol.Builder()
 .name(AllPredefinedLayers.QUERY_NET_NUM_INPUTS_NAME)
 .constraints(Constraints.INTEGER, Constraints.POSITIVE)
-.build(),
-new ParameterSymbol.Builder()
-.name(AllPredefinedLayers.MEMORY_REPLACEMENT_STRATEGY_NAME)
-.constraints(Constraints.MEMORY_REPLACEMENT_STRATEGY_TYPE)
-.defaultValue(AllPredefinedLayers.REPLACE_OLDEST)
 .build()));
 declaration.setParameters(parameters);
 return declaration;
......
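Read together, the EpisodicMemory hunks above leave the layer with the renamed parameters and the defaults transcribed below. The map is a reading aid only, not generator code; the string shown for memoryReplacementStrategy is a stand-in for AllPredefinedLayers.REPLACE_OLDEST, whose literal value is not visible in this diff.

```java
import java.util.LinkedHashMap;
import java.util.Map;

// Reading aid only: EpisodicMemory parameters and defaults as declared after this commit,
// transcribed from the builders above. "replace_oldest" is an assumed stand-in for
// AllPredefinedLayers.REPLACE_OLDEST.
public final class EpisodicMemoryDefaultsSketch {

    private EpisodicMemoryDefaultsSketch() { }

    static Map<String, Object> defaults() {
        Map<String, Object> d = new LinkedHashMap<>();
        d.put("useLocalAdaptation", true);
        d.put("localAdaptationGradientSteps", 1);
        d.put("localAdaptationK", 1);
        d.put("memoryStoreProb", 1);           // constrained to [0, 1]
        d.put("maxStoredSamples", -1);         // -1 allowed via POSITIVE_OR_MINUS_ONE
        d.put("memoryReplacementStrategy", "replace_oldest"); // stand-in for REPLACE_OLDEST
        // queryNetDir, queryNetPrefix and queryNetNumInputs carry no defaults in the hunks shown
        return d;
    }

    public static void main(String[] args) {
        defaults().forEach((name, value) -> System.out.println(name + " = " + value));
    }
}
```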
@@ -61,11 +61,11 @@ public class LoadNetwork extends PredefinedLayerDeclaration {
 .build(),
 new ParameterSymbol.Builder()
 .name(AllPredefinedLayers.NUM_INPUTS_NAME)
-.constraints(Constraints.INTEGER)
+.constraints(Constraints.INTEGER, Constraints.POSITIVE)
 .build(),
 new ParameterSymbol.Builder()
 .name(AllPredefinedLayers.OUTPUT_SHAPE_NAME)
-.constraints(Constraints.INTEGER_OR_INTEGER_TUPLE)
+.constraints(Constraints.INTEGER_OR_INTEGER_TUPLE, Constraints.POSITIVE)
 .build()));
 declaration.setParameters(parameters);
 return declaration;
......
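The LoadNetwork change adds Constraints.POSITIVE to numInputs and outputShape. The sketch below only illustrates what that tightening implies for an integer-or-tuple shape value (every dimension must be strictly greater than zero); it is not the generator's validation code.

```java
import java.util.Arrays;
import java.util.List;

// Illustration of the added POSITIVE constraint on LoadNetwork's outputShape:
// whether given as a single integer or a tuple, every dimension must be > 0.
// Not the generator's actual validation code.
public final class LoadNetworkShapeCheckSketch {

    private LoadNetworkShapeCheckSketch() { }

    static boolean isValidOutputShape(List<Integer> shape) {
        return !shape.isEmpty() && shape.stream().allMatch(dim -> dim > 0);
    }

    public static void main(String[] args) {
        System.out.println(isValidOutputShape(Arrays.asList(64, 64, 3))); // true
        System.out.println(isValidOutputShape(Arrays.asList(10)));        // true: single integer
        System.out.println(isValidOutputShape(Arrays.asList(0, 10)));     // false: non-positive entry
    }
}
```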