Commit 4c700ad7 authored by Marius Laska

minor changes

parent aed5c9bb
@@ -19,15 +19,16 @@ PI = 3.14159265359
def main():
p_reg: Pipeline = Storable.load("../evaluation/gia/evaluation/grid/output/REG_2")
p_reg: Pipeline = None # Storable.load("../evaluation/lohan/evaluation/grid/output/REG")
p_circle: Pipeline = None#Storable.load("evaluation/lohan/full_cmp/output/CIRCLE")
p_quantile: Pipeline = None#Storable.load("evaluation/lohan/full_cmp/output/QUANTILE")
p_box: Pipeline = Storable.load("../evaluation/gia/evaluation/grid/output/BBOX_2")#"evaluation/lohan/scaled_output_2/output/BBOX") # Storable.load("evaluation/gia/full_cmp/output/BBOX")
p_box: Pipeline = Storable.load("../evaluation/lohan/evaluation/grid/output/BBOX_NEW")#"evaluation/lohan/scaled_output_2/output/BBOX") # Storable.load("evaluation/gia/full_cmp/output/BBOX")
fp_dims = (83.32, 17.16)
#convert_from_2dim_grid(p_box, grid_size=20)
convert_from_grid(p_box)
# for fold_idx in range(p_box.data_provider.num_splits):
# p_box.summary.y_true_labels[fold_idx][:, 0] *= fp_dims[0]
# p_box.summary.y_true_labels[fold_idx][:, 1] *= fp_dims[1]
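Note: the commented block above rescales each fold's normalized labels back to floor-plan metres. A stand-alone sketch of that rescaling, assuming labels are normalized per axis to [0, 1] (the helper name denormalize_labels is hypothetical and not part of the repository; convert_from_grid presumably does the analogous conversion for the grid-encoded predictions):

import numpy as np

def denormalize_labels(y, fp_dims):
    # Scale [0, 1]-normalized positions back to floor-plan metres.
    # Column 0 is x, column 1 is y; any further columns are left untouched.
    y = np.array(y, dtype=float, copy=True)
    y[:, 0] *= fp_dims[0]
    y[:, 1] *= fp_dims[1]
    return y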
@@ -106,6 +107,7 @@ def main():
print("\n --------REG--------- \n")
y_true = np.concatenate(p_reg.summary.y_true_labels, axis=0)
y_pred = np.concatenate(p_reg.summary.y_pred, axis=0)
print("center-dist: {}".format(center_diff(y_true, y_pred)))
# comparison to REG
#radius_c = find_radius_for_acc(p_reg, accuracy=success)
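Note: center_diff and find_radius_for_acc are defined elsewhere and not shown in this diff. Assuming center_diff reports the mean Euclidean distance between true positions and predicted centres, and find_radius_for_acc returns the radius that covers the requested fraction of REG predictions, minimal sketches could look like this (the _sketch names are illustrative; the real implementations may differ, e.g. median instead of mean):

import numpy as np

def center_diff_sketch(y_true, y_pred):
    # Mean Euclidean distance between true (x, y) and predicted centres.
    return np.mean(np.linalg.norm(y_true[:, :2] - y_pred[:, :2], axis=1))

def find_radius_for_acc_sketch(pipe, accuracy=0.9):
    # Radius such that `accuracy` of the REG predictions fall within it,
    # i.e. the corresponding percentile of the centre errors.
    y_true = np.concatenate(pipe.summary.y_true_labels, axis=0)
    y_pred = np.concatenate(pipe.summary.y_pred, axis=0)
    d = np.linalg.norm(y_true[:, :2] - y_pred[:, :2], axis=1)
    return np.percentile(d, accuracy * 100)

The hard-coded radii used later in visualize() (4.7243, 12.506) are plausibly values obtained from such a search.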
@@ -119,10 +121,10 @@ def main():
def visualize(p_reg: Pipeline=None, p_circle: Pipeline=None, p_quantile: Pipeline=None, p_box: Pipeline=None):
img = "../evaluation/gia/gia_floor_4.jpg"
fp_dims = (83.32, 17.16)
#img = resource_filename('data', 'lohan/CrowdsourcedDS1floor.png')
#fp_dims = (200, 80)
#img = "../evaluation/gia/gia_floor_4.jpg"
#fp_dims = (83.32, 17.16)
img = resource_filename('data', 'lohan/CrowdsourcedDS1floor.png')
fp_dims = (200, 80)
#fp = FloorPlanPlotRec((83.32, 17.16), 20, floorplan_bg_img=img)
# plot_data_heatmap(pipe, floor_plotter=fp)
if p_reg is not None:
@@ -155,17 +157,17 @@ def visualize(p_reg: Pipeline=None, p_circle: Pipeline=None, p_quantile: Pipelin
alpha=0.5)
fp.show_plot()
#fp = FloorPlanPlotRec(fp_dims, 2, floorplan_bg_img=img)
for idx in range(len(y_true)):#len(y_true)):
fp = FloorPlanPlotRec(fp_dims, 2, floorplan_bg_img=img)
for idx in range(50):#len(y_true)):
idx += 0
fp = FloorPlanPlotRec(fp_dims, 2, floorplan_bg_img=img)
#fp = FloorPlanPlotRec(fp_dims, 2, floorplan_bg_img=img)
fp.draw_points(y_true[idx, 0], y_true[idx, 1], color='g', alpha=0.5)
# box_size = y_pred[:]
if p_circle is not None:
fp.draw_circles(centers=y_pred_circle[idx, :2], radius=y_pred_circle[idx, 2], color='r')
if p_reg is not None:
fp.draw_circles(centers=y_pred_reg[idx, :2], radius=4.7243, color='b')
fp.draw_circles(centers=y_pred_reg[idx, :2], radius=12.506, color='b')
if p_quantile is not None:
fp.draw_points(y_pred_quantile[idx, 0], y_pred_quantile[idx, 1], color="r")
fp.draw_points(y_pred_quantile[idx, 2], y_pred_quantile[idx, 3],
@@ -175,7 +177,7 @@ def visualize(p_reg: Pipeline=None, p_circle: Pipeline=None, p_quantile: Pipelin
if p_box is not None:
fp.draw_rectangles_new(anchors=y_pred_box[idx, :], color='black')
fp.show_plot()
fp.show_plot()
if __name__ == "__main__":
......
@@ -55,6 +55,7 @@ def plot_mean_error_per_grid(pipe: Pipeline):
pred_size = np.full((num_g_y, num_g_x), np.nan)
succ_rate = np.full((num_g_y, num_g_x), np.nan)
num_grid = np.full((num_g_y, num_g_x), np.nan)
std_size_grid = np.full((num_g_y, num_g_x), np.nan)
for y_idx in range(num_g_y):
for x_idx in range(num_g_x):
@@ -62,12 +63,13 @@ def plot_mean_error_per_grid(pipe: Pipeline):
upper_right = np.array([(x_idx + 1) * g_size, (y_idx + 1) * g_size])
# get average error per grid
error, size, succ = error_grid(y_pred, y_true, y_pred_reg, lower_left, upper_right)
error, size, size_std, succ = error_grid(y_pred, y_true, y_pred_reg, lower_left, upper_right)
num = get_num_grid(y_pred, y_true, y_pred_reg, lower_left, upper_right)
overlap_count[num_g_y - y_idx - 1, x_idx] = error
pred_size[num_g_y - y_idx - 1, x_idx] = size #avg_box_size_grid(y_pred, lower_left, upper_right)
succ_rate[num_g_y - y_idx - 1, x_idx] = succ
num_grid[num_g_y - y_idx - 1, x_idx] = num
std_size_grid[num_g_y - y_idx - 1, x_idx] = size_std
overlap_count = (((overlap_count - np.nanmin(overlap_count)) * (255 - 1)) / (
np.nanmax(overlap_count) - np.nanmin(overlap_count))) + 1
@@ -88,6 +90,10 @@ def plot_mean_error_per_grid(pipe: Pipeline):
255 - 0)) / (np.nanmax(num_grid) - np.nanmin(num_grid))) + 0
succ_rate[np.where(np.isnan(num_grid))] = 0
std_size_grid = (((std_size_grid - np.nanmin(std_size_grid)) * (
255 - 0)) / (np.nanmax(std_size_grid) - np.nanmin(std_size_grid))) + 0
std_size_grid[np.where(np.isnan(std_size_grid))] = 0
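Note: the same min-max scaling to the colormap index range (1-255 for overlap_count, 0-255 for the others) is applied to each per-cell grid, after which NaN cells are mapped to the low end. A small helper capturing the pattern (a hypothetical refactoring, not part of the diff; like the original code it assumes the grid is not constant, so the denominator is non-zero):

import numpy as np

def to_cmap_index(arr, low=0, high=255):
    # Min-max scale to [low, high] so cell values can index a 256-entry
    # colormap; NaN cells (grid cells without samples) are mapped to `low`.
    scaled = (arr - np.nanmin(arr)) * (high - low) / (np.nanmax(arr) - np.nanmin(arr)) + low
    scaled[np.isnan(scaled)] = low
    return scaled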
# colormap
cmap = plt.cm.jet # define the colormap
# extract all colors from the .jet map
@@ -107,10 +113,11 @@ def plot_mean_error_per_grid(pipe: Pipeline):
#fp_dims = (83.32, 17.16)
fp_dims = (200, 80)
fp = FloorPlanPlotRec(fp_dims, 20, floorplan_bg_img=img)
fp_size = FloorPlanPlotRec(fp_dims, 20, floorplan_bg_img=img)
fp_succ = FloorPlanPlotRec(fp_dims, 20, floorplan_bg_img=img)
fp_num = FloorPlanPlotRec(fp_dims, 20, floorplan_bg_img=img)
fp = FloorPlanPlotRec(fp_dims, 20, floorplan_bg_img=img, title="Error")
fp_size = FloorPlanPlotRec(fp_dims, 20, floorplan_bg_img=img, title="BoxSize")
fp_succ = FloorPlanPlotRec(fp_dims, 20, floorplan_bg_img=img, title="ACC")
fp_num = FloorPlanPlotRec(fp_dims, 20, floorplan_bg_img=img, title="#Test")
fp_size_std = FloorPlanPlotRec(fp_dims, 20, floorplan_bg_img=img, title="BoxSizeSTD")
for y_idx in range(num_g_y):
for x_idx in range(num_g_x):
lower_left = np.array([x_idx * g_size, (y_idx) * g_size])
@@ -120,6 +127,7 @@ def plot_mean_error_per_grid(pipe: Pipeline):
size = pred_size[num_g_y - y_idx - 1, x_idx]
succ = succ_rate[num_g_y - y_idx - 1, x_idx]
num = num_grid[num_g_y - y_idx - 1, x_idx]
s_std = std_size_grid[num_g_y - y_idx - 1, x_idx]
fp.draw_rectangles(np.concatenate((lower_left, upper_right)),
color=my_cmap[int(count)], fill=True)
@@ -129,6 +137,8 @@ def plot_mean_error_per_grid(pipe: Pipeline):
color=my_cmap[int(succ)], fill=True)
fp_num.draw_rectangles(np.concatenate((lower_left, upper_right)),
color=my_cmap[int(num)], fill=True)
fp_size_std.draw_rectangles(np.concatenate((lower_left, upper_right)),
color=my_cmap[int(s_std)], fill=True)
fp.show_plot()
@@ -217,6 +227,7 @@ def avg_box_size_grid(y_pred, lower_left, upper_right):
# size of prediction cells
return np.mean(np.prod(y_pred[mask, 2:], axis=1))
def get_num_grid(y_pred, y_true, y_pred_reg, lower_left, upper_right):
min_x = lower_left[0]
min_y = lower_left[1]
@@ -247,8 +258,8 @@ def error_grid(y_pred, y_true, y_pred_reg, lower_left, upper_right):
mask = np.where(range)[0]
if len(mask) == 0:
return np.nan, np.nan, np.nan
if len(mask) < 5:
return np.nan, np.nan, np.nan, np.nan
#size = np.abs(y_pred[mask, 0] - y_pred[mask, 2]) * np.abs(y_pred[mask, 1] - y_pred[mask, 3])
@@ -263,7 +274,7 @@ def error_grid(y_pred, y_true, y_pred_reg, lower_left, upper_right):
size = np.prod(y_pred[mask, 2:], axis=1)
diff = np.linalg.norm(y_true[mask] - y_pred_reg[mask, :2], axis=1)
return np.median(diff), np.median(size), len(correct)/len(mask)
return np.mean(diff), np.mean(size), np.std(size), len(correct)/len(mask)
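Note: `correct` is computed in lines not shown in this diff; presumably it holds the indices of the masked samples whose true position lies inside the predicted box, so len(correct)/len(mask) is the per-cell containment (success) rate. A hypothetical version of that check, under the assumption that y_pred rows are encoded as [center_x, center_y, width, height] (consistent with size = np.prod(y_pred[mask, 2:], axis=1) above):

import numpy as np

def contained_indices(y_true_cell, y_pred_cell):
    # y_pred_cell rows assumed to be [cx, cy, w, h]; returns the indices of
    # samples whose true (x, y) lies inside the predicted box.
    lower = y_pred_cell[:, :2] - y_pred_cell[:, 2:4] / 2.0
    upper = y_pred_cell[:, :2] + y_pred_cell[:, 2:4] / 2.0
    hit = np.all((y_true_cell[:, :2] >= lower) & (y_true_cell[:, :2] <= upper), axis=1)
    return np.where(hit)[0]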
def overlap_reg(y_pred, radius, lower_left, upper_right):
@@ -314,6 +325,6 @@ if __name__ == "__main__":
for id in [128]:
pipe: Pipeline = Storable.load(
"../evaluation/lohan/evaluation/grid/output/BBOX")#{}".format(id))
"../evaluation/lohan/evaluation/grid/output/BBOX_NEW")#{}".format(id))
plot_mean_error_per_grid(pipe)
\ No newline at end of file
from tensorflow.keras import regularizers, initializers
from tensorflow.keras.callbacks import ModelCheckpoint, EarlyStopping
from tensorflow.keras.layers import Dense, Dropout, Conv2D, MaxPooling2D, Flatten
from tensorflow.keras.layers import Dense, Dropout, Conv2D, MaxPooling2D, Flatten, BatchNormalization
from tensorflow.keras.models import Sequential
from il_pipeline.models.layer_generator import hidden_layers
from il_pipeline.models.lr_normalizer import lr_normalizer
def bbox_model_for_talos(x_train, y_train, x_val, y_val, params):
model = Sequential()
model.add(Dense(params['first_neuron'], input_dim=x_train.shape[1],
activation=params['activation'],
kernel_regularizer=regularizers.l2(
params['regularization_penalty'])
))
model.add(Dropout(params['dropout']))
hidden_layers(model, params, x_train.shape[1])
model.add(Dense(5*3, activation=params['last_activation'],
# kernel_initializer=params['kernel_initializer'],
kernel_regularizer=regularizers.l2(
params['regularization_penalty']))) # ,
# kernel_initializer=initializers.RandomUniform(minval=-10, maxval=15)))#,
# bias_initializer=initializers.RandomUniform(minval=0, maxval=0)))
# compile the model
model.compile(loss=params['losses'],
optimizer=params['optimizer'](
lr=lr_normalizer(params['lr'], params['optimizer'])))
out = model.fit(x_train, y_train,
batch_size=params['batch_size'],
epochs=params['epochs'],
validation_data=[x_val, y_val],
verbose=0)
return out, model
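Note: bbox_model_for_talos pulls every hyper-parameter from the params dict that talos passes in. The keys it reads are visible above; the values below are placeholders for illustration, not the repository's actual search space (hidden_layers() from il_pipeline may require additional keys):

from tensorflow.keras.optimizers import Adam

params_example = {
    'first_neuron': 256,
    'activation': 'relu',
    'regularization_penalty': 1e-4,
    'dropout': 0.3,
    'last_activation': 'linear',
    'losses': 'mse',
    'optimizer': Adam,   # passed as a class, not an instance; the learning
                         # rate is derived via lr_normalizer(params['lr'], ...)
    'lr': 1.0,
    'batch_size': 32,
    'epochs': 100,
}
# usage (given x_train, y_train, x_val, y_val):
# out, model = bbox_model_for_talos(x_train, y_train, x_val, y_val, params_example)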
def bbox_model_for_generator(metrics):
def define_classification_model_for_generator(X_cols, Y_cols, params) -> Sequential:
@@ -17,6 +51,8 @@ def bbox_model_for_generator(metrics):
params['regularization_penalty'])
))
#model.add(BatchNormalization())
model.add(Dropout(params['dropout']))
hidden_layers(model, params, X_cols)
......
@@ -8,11 +8,17 @@ class FloorPlanPlotRec(FloorplanPlot):
def __init__(self, floorplan_dimensions, grid_size, draw_grid=False, floorplan_bg_img="",
filename=None, sample_points_file=None, add_points=False,
walls_file=None, add_walls=False, xtick_freq=None, artificial_labels=False,
correct_walls=False):
correct_walls=False, title=None):
super().__init__(floorplan_dimensions, grid_size, draw_grid, floorplan_bg_img,
filename, sample_points_file, add_points, walls_file, add_walls,
xtick_freq, artificial_labels, correct_walls)
if title is not None:
self.set_title(title)
def set_title(self, title="title"):
self.axis.set_title(title)
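Note: usage of the new optional title argument (set_title assumes the parent FloorplanPlot exposes its matplotlib axes as self.axis; img stands for a floor-plan background image path, as in the scripts above):

fp = FloorPlanPlotRec((200, 80), 20, floorplan_bg_img=img, title="Error")
fp.set_title("BoxSize")  # the title can also be changed after construction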
def draw_circles(self, centers, radius, color='r'):
if centers.ndim == 1:
centers = np.array([centers])
......
@@ -197,7 +197,7 @@ def main():
file = "gia_evaluation.yml"
file = "ujiindoor_circle.yml"
#file = "config_lohan.yml"
file = "config/gia_grid.yml"
file = "config/lohan_deep.yml"
execute(file)
#point_uncertainty_estimation(file, "evaluation/lohan/old_loss/progress/output/BBOX_256_base.hdf5")
......
import copy
import logging
import numpy as np
from debug_tools.logger import getLogger
from il_pipeline.utility.config_reader import ConfigReader
from ldce.base import ClusterBase
from ldce.plotting.floorplan_plot import FloorplanPlot
from base.BboxModel import BboxModel
from base.bbox_model_definition import bbox_model_for_talos
log = getLogger(level=logging.INFO)
from base.bbox_pipeline import BboxPipeline
import talos as ta
def execute(conf_file):
area_classification = True
logging.basicConfig(level="INFO")
conf = ConfigReader(conf_file)
conf.setup_directories()
# only PD
conf.download_floor_plan()
# sequentially execute all training pipelines
for p_idx, pipeline_params in enumerate(conf.pipelines):
num_iterations = conf.get_params('repetitions', pipeline_params)
log.info('Train and evaluate il_pipeline "{}"... ({}/{})'.format(
pipeline_params['name'], p_idx + 1, len(conf.pipelines)))
log.info('Repeat {} time(s) and build average...'.format(num_iterations))
# read pipeline parameters
pre_params = conf.get_params('preprocessing', pipeline_params, merge_level=1)
model_params = conf.get_params('model_params', pipeline_params, merge_level=1)
fp_params = conf.get_params('floor_plan', pipeline_params, merge_level=1)
assign_closest = conf.get_params('assign_closest', pre_params, 'preprocessing')
area_mode = conf.get_params('area_assignment', pre_params, 'preprocessing')
# get data provider
base_data_provider = BboxPipeline.get_data_provider(conf.data_params, pre_params)
# for storing pipeline iteration names
p_names = []
for run in range(num_iterations):
pipe_params = copy.deepcopy(pipeline_params)
p_name = pipe_params['name']# + "[{}]".format(run + 1)
pipe_params['name'] = p_name
p_names.append(p_name)
data_provider = copy.deepcopy(base_data_provider)
clusterer: ClusterBase = BboxPipeline.get_floor_plan_segmentation(
data_provider, fp_params)
# either use segmented floor plan or apply regression
if fp_params['type'] == 'segmentation':
area_labels, _, coverage, mask = clusterer.get_cluster_labels_for_areas(
data_provider.labels, area_mode=area_mode,
assign_closest=assign_closest)
data_provider.set_area_labels(area_labels,
delete_uncovered=True,
pre_params=pre_params)
data_provider.remove_APs_with_low_correlation_to_areas(
pre_params)
if data_provider.get_data_dims(
model_type="classification")[1] < 2:
log.info("Skipping segmentation (only single class)")
continue
elif fp_params['type'] == 'regression':
data_provider.generate_random_splits_from_clusters(clusterer.base_cluster_mappings)
elif fp_params['type'] == 'floor_classification':
data_provider.area_labels = data_provider.labels
# compute grid encoding
data_provider.transform_to_grid_encoding()
#data_provider.transform_to_2dim_grid_encoding(grid_size=20)
pipeline = BboxPipeline(data_provider,
clusterer,
conf,
model_params,
pipe_params['name'])
if model_params is not None:
train_model(model_params, pipeline)
pipeline.store()
if model_params is not None:
pipe_files = [conf.output_dir + p_name for p_name in p_names]
BboxPipeline.merge_summaries(pipe_files, pipeline_params['name'])
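Note: execute() reads everything from the ConfigReader. The keys accessed above imply a per-pipeline configuration roughly shaped like the dict below (an illustrative sketch with placeholder values; the authoritative schema is whatever ConfigReader and the YAML files under config/ define):

pipeline_params_sketch = {
    'name': 'BBOX_NEW',                # placeholder
    'repetitions': 1,
    'preprocessing': {
        'assign_closest': False,
        'area_assignment': 'standard',  # placeholder value
    },
    'floor_plan': {
        'type': 'regression',           # or 'segmentation' / 'floor_classification'
    },
    'model_params': {
        'type': 'BBOX',                 # placeholder; read again in train_model()
        'epochs': 100,
    },
}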
def train_model(params, pipe: BboxPipeline):
dp = pipe.data_provider
x_train, y_train = dp.get_train_data(labels=dp.grid_labels, split_idx=0, area_labels=False)
x_test, y_test = dp.get_test_data(labels=dp.grid_labels, split_idx=0, area_labels=False)
x_val, y_val = pipe.data_provider.get_val_data(labels=dp.grid_labels, split_idx=0, area_labels=False)
x_train_val = np.concatenate((x_train, x_val), axis=0)
y_train_val = np.concatenate((y_train, y_val), axis=0)
m_type = params['type']
model = BboxModel(params['type'], pipe.summary, pipe.data_provider, params,
pipe.config.output_dir, pipe.filename)
model.setup_params()
num_epochs = params['epochs']
if 'pretrain' in params:
model.pre_train_model(params['pretrain'])
model.type = m_type
model.params.update({'epochs': num_epochs})
m_params = model.params
del m_params['loss']
del m_params['type']
#del m_params['pred']
del m_params['augmentation']
# put every element in list
m_params = {k: [v] if type(v) not in [list, tuple] else v for (k, v) in m_params.items()}
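Note: talos expects every entry of the parameter dict to be a list of candidate values, which is what the comprehension above ensures. For illustration (hypothetical values):

before = {'dropout': 0.3, 'batch_size': [16, 32], 'activation': 'relu'}
after = {k: [v] if type(v) not in [list, tuple] else v for (k, v) in before.items()}
# after == {'dropout': [0.3], 'batch_size': [16, 32], 'activation': ['relu']}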
t = ta.Scan(x=x_train_val,
y=y_train_val,
x_val=x_test,
y_val=y_test,
model=bbox_model_for_talos,
grid_downsample=0.1,
params=m_params,
dataset_name='indoor_loc_box',
experiment_no='1')
print("test")
def report():
r = ta.Reporting("indoor_loc_2.csv")
data = r.data
test = data.sort_values(by=['val_loss'], ascending=True).head(5)
test = r.best_params(metric="val_loss")
r.plot_corr(metric="val_loss")
print(test)
def main():
file = "config/lohan_talos.yml"
execute(file)
#point_uncertainty_estimation(file, "evaluation/lohan/old_loss/progress/output/BBOX_256_base.hdf5")
if __name__ == "__main__":
#report()
main()
\ No newline at end of file