MeLOn
example_training_of_ANN Namespace Reference

Variables

string problem_name = "peaks"
 LOAD DATA — enter the data set information. More...
 
string filename_data = "./data/peaks.csv"
 
int input_dim = 2
 
int output_dim = 1
 
bool scaleInput = True
 
bool normalizeOutput = True
 
 data = np.loadtxt(open(filename_data, "rb"), delimiter=",")
 
 X = data[:, :-output_dim]
 
 y = data[:, input_dim:]
 
 X_norm = utils.scale(X, scaleInput)
 
 y_norm = utils.normalize(y, normalizeOutput)
 
 x_train
 
 x_val
 
 y_train
 
 y_val
 
 test_size
 
 n_train = x_train.shape[0]
 
string output_folder = "./data/Output/"
 SET PARAMETERS — set the output filename. More...
 
string filename_out = output_folder + problem_name
 
list network_layout = [10, 10]
 
string activation_function = 'relu'
 
string activation_function_out = 'linear'
 
float learning_rate = 0.001
 
 kernel_regularizer = tf.keras.regularizers.l2(l=0.0001)
 
string kernel_initializer = 'he_normal'
 
string optimizer = 'adam'
 
int epochs = 100
 
int batch_size = 128
 
int random_state = 1
 
 model = tf.keras.Sequential()
 BUILD MODEL. More...
 
 loss
 
 metrics
 
 training_time = time.time()
 TRAINING. More...
 
 history
 
 y_pred = model.predict(X_norm)
 SAVE MODEL. More...
 

Variable Documentation

◆ activation_function

string example_training_of_ANN.activation_function = 'relu'

◆ activation_function_out

string example_training_of_ANN.activation_function_out = 'linear'

◆ batch_size

int example_training_of_ANN.batch_size = 128

◆ data

example_training_of_ANN.data = np.loadtxt(open(filename_data, "rb"), delimiter=",")

◆ epochs

int example_training_of_ANN.epochs = 100

◆ filename_data

string example_training_of_ANN.filename_data = "./data/peaks.csv"

◆ filename_out

string example_training_of_ANN.filename_out = output_folder + problem_name

◆ history

example_training_of_ANN.history
Initial value:
1 = model.fit(x_train, y_train, validation_data=(x_val, y_val),
2  epochs=epochs, batch_size=batch_size, verbose=1)

◆ input_dim

int example_training_of_ANN.input_dim = 2

◆ kernel_initializer

string example_training_of_ANN.kernel_initializer = 'he_normal'

◆ kernel_regularizer

example_training_of_ANN.kernel_regularizer = tf.keras.regularizers.l2(l=0.0001)

◆ learning_rate

float example_training_of_ANN.learning_rate = 0.001

◆ loss

example_training_of_ANN.loss

◆ metrics

example_training_of_ANN.metrics

◆ model

example_training_of_ANN.model = tf.keras.Sequential()

BUILD MODEL.

◆ n_train

example_training_of_ANN.n_train = x_train.shape[0]

◆ network_layout

list example_training_of_ANN.network_layout = [10, 10]

◆ normalizeOutput

bool example_training_of_ANN.normalizeOutput = True

◆ optimizer

string example_training_of_ANN.optimizer = 'adam'

◆ output_dim

int example_training_of_ANN.output_dim = 1

◆ output_folder

string example_training_of_ANN.output_folder = "./data/Output/"

SET PARAMETERS — set the output filename.

◆ problem_name

string example_training_of_ANN.problem_name = "peaks"

LOAD DATA — enter the data set information.

◆ random_state

int example_training_of_ANN.random_state = 1

◆ scaleInput

bool example_training_of_ANN.scaleInput = True

◆ test_size

example_training_of_ANN.test_size

◆ training_time

example_training_of_ANN.training_time = time.time()

TRAINING.

◆ X

example_training_of_ANN.X = data[:, :-output_dim]

◆ X_norm

example_training_of_ANN.X_norm = utils.scale(X, scaleInput)

◆ x_train

example_training_of_ANN.x_train

◆ x_val

example_training_of_ANN.x_val

◆ y

example_training_of_ANN.y = data[:, input_dim:]

◆ y_norm

example_training_of_ANN.y_norm = utils.normalize(y, normalizeOutput)

◆ y_pred

example_training_of_ANN.y_pred = model.predict(X_norm)

SAVE MODEL.

◆ y_train

example_training_of_ANN.y_train

◆ y_val

example_training_of_ANN.y_val