MeLOn
example_training_of_ANN_with_pruning Namespace Reference

Variables

string problem_name = "peaks"
 LOAD DATA: enter data set information. More...
 
string filename_data = "./data/peaks.csv"
 
int input_dim = 2
 
int output_dim = 1
 
bool scaleInput = True
 
bool normalizeOutput = True
 
 data = np.loadtxt(open(filename_data, "rb"), delimiter=",")
 
 X = data[:, :-output_dim]
 
 y = data[:, input_dim:]
 
 X_norm = utils.scale(X, scaleInput)
 
 y_norm = utils.normalize(y, normalizeOutput)
 
 x_train
 
 x_val
 
 y_train
 
 y_val
 
 test_size
 
 n_train = x_train.shape[0]
 
string output_folder = "./data/Output/"
 SET PARAMETERS: output filename. More...
 
string filename_out = output_folder + problem_name
 
list network_layout = [10, 10]
 
string activation_function = 'relu'
 
string activation_function_out = 'linear'
 
float learning_rate = 0.001
 
 kernel_regularizer = tf.keras.regularizers.l2(l=0.0001)
 
string kernel_initializer = 'he_normal'
 
string optimizer = 'adam'
 
int epochs = 100
 
int batch_size = 128
 
float initial_sparsity = 0.0
 
float final_sparsity = 0.4
 
int begin_step = 30
 
int end_step = np.ceil(n_train / batch_size).astype(np.int32) * epochs
 
int frequency = 10
 
int random_state = 1
 
dictionary pruning_params
 BUILD MODEL: set pruning parameters. More...
 
 pruned_model = tf.keras.Sequential()
 
 loss
 
 metrics
 
 training_time = time.time()
 TRAINING: fit the pruned model. More...
 
 history
 
 stripped_model = sparsity.strip_pruning(pruned_model)
 SAVE MODEL: strip pruning wrappers from the trained model. More...
 
 y_pred = stripped_model.predict(X_norm)
 

Variable Documentation

◆ activation_function

string example_training_of_ANN_with_pruning.activation_function = 'relu'

◆ activation_function_out

string example_training_of_ANN_with_pruning.activation_function_out = 'linear'

◆ batch_size

int example_training_of_ANN_with_pruning.batch_size = 128

◆ begin_step

int example_training_of_ANN_with_pruning.begin_step = 30

◆ data

example_training_of_ANN_with_pruning.data = np.loadtxt(open(filename_data, "rb"), delimiter=",")

◆ end_step

int example_training_of_ANN_with_pruning.end_step = np.ceil(n_train / batch_size).astype(np.int32) * epochs

◆ epochs

int example_training_of_ANN_with_pruning.epochs = 100

◆ filename_data

string example_training_of_ANN_with_pruning.filename_data = "./data/peaks.csv"

◆ filename_out

string example_training_of_ANN_with_pruning.filename_out = output_folder + problem_name

◆ final_sparsity

float example_training_of_ANN_with_pruning.final_sparsity = 0.4

◆ frequency

int example_training_of_ANN_with_pruning.frequency = 10

◆ history

example_training_of_ANN_with_pruning.history
Initial value:
1 = pruned_model.fit(x_train, y_train, validation_data=(x_val, y_val),
2  epochs=epochs, batch_size=batch_size, verbose=1,
3  callbacks=[sparsity.UpdatePruningStep()])

◆ initial_sparsity

float example_training_of_ANN_with_pruning.initial_sparsity = 0.0

◆ input_dim

int example_training_of_ANN_with_pruning.input_dim = 2

◆ kernel_initializer

string example_training_of_ANN_with_pruning.kernel_initializer = 'he_normal'

◆ kernel_regularizer

example_training_of_ANN_with_pruning.kernel_regularizer = tf.keras.regularizers.l2(l=0.0001)

◆ learning_rate

float example_training_of_ANN_with_pruning.learning_rate = 0.001

◆ loss

example_training_of_ANN_with_pruning.loss

◆ metrics

example_training_of_ANN_with_pruning.metrics

◆ n_train

example_training_of_ANN_with_pruning.n_train = x_train.shape[0]

◆ network_layout

list example_training_of_ANN_with_pruning.network_layout = [10, 10]

◆ normalizeOutput

bool example_training_of_ANN_with_pruning.normalizeOutput = True

◆ optimizer

example_training_of_ANN_with_pruning.optimizer = 'adam'

◆ output_dim

int example_training_of_ANN_with_pruning.output_dim = 1

◆ output_folder

string example_training_of_ANN_with_pruning.output_folder = "./data/Output/"

SET PARAMETERS: output filename.

◆ problem_name

string example_training_of_ANN_with_pruning.problem_name = "peaks"

LOAD DATA: enter data set information.

◆ pruned_model

example_training_of_ANN_with_pruning.pruned_model = tf.keras.Sequential()

◆ pruning_params

dictionary example_training_of_ANN_with_pruning.pruning_params
Initial value:
1 = {
2  'pruning_schedule': sparsity.PolynomialDecay(initial_sparsity=initial_sparsity,
3  final_sparsity=final_sparsity,
4  begin_step=begin_step,
5  end_step=end_step,
6  frequency=frequency)
7 }

BUILD MODEL: set pruning parameters.

◆ random_state

int example_training_of_ANN_with_pruning.random_state = 1

◆ scaleInput

bool example_training_of_ANN_with_pruning.scaleInput = True

◆ stripped_model

example_training_of_ANN_with_pruning.stripped_model = sparsity.strip_pruning(pruned_model)

SAVE MODEL: strip pruning wrappers from the trained model.

◆ test_size

example_training_of_ANN_with_pruning.test_size

◆ training_time

example_training_of_ANN_with_pruning.training_time = time.time()

TRAINING: fit the pruned model.

◆ X

example_training_of_ANN_with_pruning.X = data[:, :-output_dim]

◆ X_norm

example_training_of_ANN_with_pruning.X_norm = utils.scale(X, scaleInput)

◆ x_train

example_training_of_ANN_with_pruning.x_train

◆ x_val

example_training_of_ANN_with_pruning.x_val

◆ y

example_training_of_ANN_with_pruning.y = data[:, input_dim:]

◆ y_norm

example_training_of_ANN_with_pruning.y_norm = utils.normalize(y, normalizeOutput)

◆ y_pred

example_training_of_ANN_with_pruning.y_pred = stripped_model.predict(X_norm)

◆ y_train

example_training_of_ANN_with_pruning.y_train

◆ y_val

example_training_of_ANN_with_pruning.y_val