Commit 366f2863 authored by Jan Habscheid

Commit message: "-e1 drin" (German: "-e1 is in")

Parent: 5009b6ec
1 merge request: !5 h-refinement for order 0 and 1
@@ -22,12 +22,21 @@ N_REFINEMENTS = 0
 parser = argparse.ArgumentParser(
     description="Script that reads the Elevation Degrees from CMD"
 )
-parser.add_argument("--r", required=True, type=int)
+parser.add_argument("--e", required=False, type=int)
+parser.add_argument("--r", required=False, type=int)
 args = parser.parse_args()
-DEGREE_ELEVATIONS = args.r
+if args.e is not None:
+    DEGREE_ELEVATIONS = args.e
+else:
+    DEGREE_ELEVATIONS = 1
+if args.r is not None:
+    H_REFINEMENTS = args.r
+else:
+    H_REFINEMENTS = 0
 print('DEGREE_ELEVATIONS:', DEGREE_ELEVATIONS)
-if not os.path.exists(f'Data/DegreeElevations_{DEGREE_ELEVATIONS}'):
-    os.mkdir(f'Data/DegreeElevations_{DEGREE_ELEVATIONS}')
+print('H_REFINEMENTS:', H_REFINEMENTS)
+if not os.path.exists(f'Data/H_REFINEMENTS_{H_REFINEMENTS}'):
+    os.mkdir(f'Data/H_REFINEMENTS_{H_REFINEMENTS}')
 INLET_BOUNDARY_ID = 2
 OUTLET_BOUNDARY_ID = 3
 INLET_PEAK_VELOCITY = 1.4336534897721067
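
Note: the fallback logic added above (an explicit if-args-is-not-None block per flag) can be expressed more compactly with argparse defaults. A minimal sketch under that assumption, not the committed code; the flag names and fallback values (1 degree elevation, 0 h-refinements) are taken from the hunk above:

import argparse

parser = argparse.ArgumentParser(
    description="Script that reads the Elevation Degrees from CMD"
)
# default= takes over the role of the explicit if/else fallback blocks
parser.add_argument("--e", type=int, default=1, help="number of degree elevations")
parser.add_argument("--r", type=int, default=0, help="number of h-refinements")
args = parser.parse_args()

DEGREE_ELEVATIONS = args.e  # already 1 if --e was omitted
H_REFINEMENTS = args.r      # already 0 if --r was omitted
print('DEGREE_ELEVATIONS:', DEGREE_ELEVATIONS)
print('H_REFINEMENTS:', H_REFINEMENTS)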
@@ -60,6 +69,7 @@ data_setup = {
     'TILING': TILING,
     'N_REFINEMENTS': N_REFINEMENTS,
     'DEGREE_ELEVATIONS': DEGREE_ELEVATIONS,
+    'H_REFINEMENTS': H_REFINEMENTS,
     'INLET_BOUNDARY_ID': INLET_BOUNDARY_ID,
     'OUTLET_BOUNDARY_ID': OUTLET_BOUNDARY_ID,
     'INLET_PEAK_VELOCITY': INLET_PEAK_VELOCITY,
@@ -102,15 +112,15 @@ if __name__ == "__main__":
     # print(f'knots_y: {knots_y}')
     train_index = 0
     # Train models
-    if not os.path.exists(f'Data/DegreeElevations_{DEGREE_ELEVATIONS}'): os.mkdir(f'Data/DegreeElevations_{DEGREE_ELEVATIONS}')
-    if os.path.exists(f'Data/DegreeElevations_{DEGREE_ELEVATIONS}/train'): shutil.rmtree(f'Data/DegreeElevations_{DEGREE_ELEVATIONS}/train')
-    os.mkdir(f'Data/DegreeElevations_{DEGREE_ELEVATIONS}/train')
-    if os.path.exists(f'Data/DegreeElevations_{DEGREE_ELEVATIONS}/train/microstructure'): shutil.rmtree(f'Data/DegreeElevations_{DEGREE_ELEVATIONS}/train/microstructure/')
-    os.mkdir(f'Data/DegreeElevations_{DEGREE_ELEVATIONS}/train/microstructure')
-    if os.path.exists(f'Data/DegreeElevations_{DEGREE_ELEVATIONS}/test'): shutil.rmtree(f'Data/DegreeElevations_{DEGREE_ELEVATIONS}/test')
-    os.mkdir(f'Data/DegreeElevations_{DEGREE_ELEVATIONS}/test')
-    if os.path.exists(f'Data/DegreeElevations_{DEGREE_ELEVATIONS}/test/microstructure'): shutil.rmtree(f'Data/DegreeElevations_{DEGREE_ELEVATIONS}/test/microstructure/')
-    os.mkdir(f'Data/DegreeElevations_{DEGREE_ELEVATIONS}/test/microstructure')
+    if not os.path.exists(f'Data/H_REFINEMENTS_{H_REFINEMENTS}'): os.mkdir(f'Data/H_REFINEMENTS_{H_REFINEMENTS}')
+    if os.path.exists(f'Data/H_REFINEMENTS_{H_REFINEMENTS}/train'): shutil.rmtree(f'Data/H_REFINEMENTS_{H_REFINEMENTS}/train')
+    os.mkdir(f'Data/H_REFINEMENTS_{H_REFINEMENTS}/train')
+    if os.path.exists(f'Data/H_REFINEMENTS_{H_REFINEMENTS}/train/microstructure'): shutil.rmtree(f'Data/H_REFINEMENTS_{H_REFINEMENTS}/train/microstructure/')
+    os.mkdir(f'Data/H_REFINEMENTS_{H_REFINEMENTS}/train/microstructure')
+    if os.path.exists(f'Data/H_REFINEMENTS_{H_REFINEMENTS}/test'): shutil.rmtree(f'Data/H_REFINEMENTS_{H_REFINEMENTS}/test')
+    os.mkdir(f'Data/H_REFINEMENTS_{H_REFINEMENTS}/test')
+    if os.path.exists(f'Data/H_REFINEMENTS_{H_REFINEMENTS}/test/microstructure'): shutil.rmtree(f'Data/H_REFINEMENTS_{H_REFINEMENTS}/test/microstructure/')
+    os.mkdir(f'Data/H_REFINEMENTS_{H_REFINEMENTS}/test/microstructure')
     for size_1 in SIZES_TRAIN:
         for size_2 in SIZES_TRAIN:
             for size_3 in SIZES_TRAIN:
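
Note: the remove-then-recreate sequence in this hunk can be condensed with os.makedirs and shutil.rmtree. A minimal sketch, assuming the same Data/H_REFINEMENTS_<n> folder layout as in this commit and that H_REFINEMENTS is set as above (not the committed code):

import os
import shutil

base = f'Data/H_REFINEMENTS_{H_REFINEMENTS}'
for split in ('train', 'test'):
    # Wipe any previous output for this split, then recreate the nested folders in one call
    shutil.rmtree(f'{base}/{split}', ignore_errors=True)
    os.makedirs(f'{base}/{split}/microstructure', exist_ok=True)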
@@ -122,7 +132,7 @@ if __name__ == "__main__":
                     size_1, size_2, size_3, BOX_LENGTH, BOX_HEIGHT, EPS, INLET_BOUNDARY_ID, OUTLET_BOUNDARY_ID, knots_y, TILING, CLOSING_FACE, MICROTILE
                 )
                 export(
-                    fname=f'Data/DegreeElevations_{DEGREE_ELEVATIONS}/train/microstructure/index_{train_index}.xml', multipatch=microstructure,
+                    fname=f'Data/H_REFINEMENTS_{H_REFINEMENTS}/train/microstructure/index_{train_index}.xml', multipatch=microstructure,
                     indent=True,
                     additional_blocks=gismo_export_options,
                     as_base64=False
@@ -132,7 +142,7 @@ if __name__ == "__main__":
                 train_index+=1
     data_setup['N_SIZES_TRAIN'] = train_index
     df_sizes = pd.DataFrame(data_sizes)
-    df_sizes.to_excel(f'Data/DegreeElevations_{DEGREE_ELEVATIONS}/train/parameter_input.xlsx', index=True)
+    df_sizes.to_excel(f'Data/H_REFINEMENTS_{H_REFINEMENTS}/train/parameter_input.xlsx', index=True)

     # Test models
     test_index = 0
@@ -147,7 +157,7 @@ if __name__ == "__main__":
                     size_1, size_2, size_3, BOX_LENGTH, BOX_HEIGHT, EPS, INLET_BOUNDARY_ID, OUTLET_BOUNDARY_ID, knots_y, TILING, CLOSING_FACE, MICROTILE
                 )
                 export(
-                    fname=f'Data/DegreeElevations_{DEGREE_ELEVATIONS}/test/microstructure/index_{test_index}.xml', multipatch=microstructure,
+                    fname=f'Data/H_REFINEMENTS_{H_REFINEMENTS}/test/microstructure/index_{test_index}.xml', multipatch=microstructure,
                     indent=True,
                     additional_blocks=gismo_export_options,
                     as_base64=False
@@ -157,10 +167,10 @@ if __name__ == "__main__":
     data_setup['N_SIZES_TEST'] = test_index
     df_sizes = pd.DataFrame(data_sizes).iloc[train_index:]
-    df_sizes.to_excel(f'Data/DegreeElevations_{DEGREE_ELEVATIONS}/test/parameter_input.xlsx', index=True)
+    df_sizes.to_excel(f'Data/H_REFINEMENTS_{H_REFINEMENTS}/test/parameter_input.xlsx', index=True)

     # df = pd.DataFrame(data)
     df_setup = pd.DataFrame.from_dict(data_setup, orient='index')
     df_setup = df_setup.transpose()
-    df_setup.to_excel(f'Data/DegreeElevations_{DEGREE_ELEVATIONS}/HollowOctagon_Setup.xlsx', index=False)
+    df_setup.to_excel(f'Data/H_REFINEMENTS_{H_REFINEMENTS}/HollowOctagon_Setup.xlsx', index=False)
\ No newline at end of file
@@ -9,12 +9,21 @@ import numpy as np
 parser = argparse.ArgumentParser(
     description="Script that reads the Elevation Degrees from CMD"
 )
-parser.add_argument("--r", required=True, type=int)
+parser.add_argument("--e", required=False, type=int)
+parser.add_argument("--r", required=False, type=int)
 parser.add_argument("--Architecture", required=True, type=str)
 args = parser.parse_args()
-DEGREE_ELEVATIONS = args.r
+if args.e is not None:
+    DEGREE_ELEVATIONS = args.e
+else:
+    DEGREE_ELEVATIONS = 1
+if args.r is not None:
+    H_REFINEMENTS = args.r
+else:
+    H_REFINEMENTS = 0
 print('DEGREE_ELEVATIONS:', DEGREE_ELEVATIONS)
-DataFolder = f'Data/DegreeElevations_{DEGREE_ELEVATIONS}'
+print('H_REFINEMENTS:', H_REFINEMENTS)
+DataFolder = f'Data/H_REFINEMENTS_{H_REFINEMENTS}'
 # path to stokes executable
 stokes_executable = 'gismo/stokes_example'
@@ -50,7 +59,7 @@ for stage in ['train', 'test']:
         os.system(f'rm -rf {DataFolder}/{stage}/paraview/index_{index}')
         os.mkdir(f'{DataFolder}/{stage}/paraview/index_{index}')
-        os.system(f'./{stokes_executable} -f {file} --export-xml -r {DEGREE_ELEVATIONS} --no-plot')
+        os.system(f'./{stokes_executable} -f {file} --export-xml -e {DEGREE_ELEVATIONS} -r {H_REFINEMENTS} --no-plot')
         # Move ParaView output
         # os.system(f'mv ParaViewOutput {DataFolder}/{stage}/paraview/index_{index}')
......
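
Note: the os.system call above interpolates every argument into one shell string. An equivalent invocation with subprocess.run passes the arguments as a list and checks the return code. A minimal sketch, assuming stokes_executable, file, DEGREE_ELEVATIONS and H_REFINEMENTS are defined as in the script; the -f, --export-xml, -e, -r and --no-plot flags are taken from the diff:

import subprocess

subprocess.run(
    [f'./{stokes_executable}',
     '-f', file,
     '--export-xml',
     '-e', str(DEGREE_ELEVATIONS),
     '-r', str(H_REFINEMENTS),
     '--no-plot'],
    check=True,  # raise if the solver exits with a non-zero status
)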
@@ -11,11 +11,20 @@ import shutil
 parser = argparse.ArgumentParser(
     description="Script that reads the Elevation Degrees from CMD"
 )
-parser.add_argument("--r", required=True, type=int)
+parser.add_argument("--e", required=False, type=int)
+parser.add_argument("--r", required=False, type=int)
 args = parser.parse_args()
-DEGREE_ELEVATIONS = args.r
+if args.e is not None:
+    DEGREE_ELEVATIONS = args.e
+else:
+    DEGREE_ELEVATIONS = 1
+if args.r is not None:
+    H_REFINEMENTS = args.r
+else:
+    H_REFINEMENTS = 0
 print('DEGREE_ELEVATIONS:', DEGREE_ELEVATIONS)
-DataFolder = f'Data/DegreeElevations_{DEGREE_ELEVATIONS}'
+print('H_REFINEMENTS:', H_REFINEMENTS)
+DataFolder = f'Data/H_REFINEMENTS_{H_REFINEMENTS}'
 # Sizes of the microstructures
 VELOCITY_SIZE = get_matrix_size(f'{DataFolder}/train/velocities/velocity_field_0.xml')
......
@@ -9,27 +9,35 @@ from models.LinearRegression import LinearRegressionModel
 parser = argparse.ArgumentParser(
     description="Script that reads the Elevation Degrees from CMD"
 )
+parser.add_argument("--e", required=False, type=int)
 parser.add_argument("--r", required=True, type=int)
 parser.add_argument("--n", required=False, type=int)
 parser.add_argument("--R_lower", required=False, type=int)
 parser.add_argument("--R_upper", required=False, type=int)
 parser.add_argument("--R_step", required=False, type=int)
 args = parser.parse_args()
-DEGREE_ELEVATIONS = args.r
+if args.e is not None:
+    DEGREE_ELEVATIONS = args.e
+else:
+    DEGREE_ELEVATIONS = 1
+if args.r is not None:
+    H_REFINEMENTS = args.r
+else:
+    H_REFINEMENTS = 0
+print('DEGREE_ELEVATIONS:', DEGREE_ELEVATIONS)
+print('H_REFINEMENTS:', H_REFINEMENTS)
 if args.n is not None:
     N_RUNS = args.n
 else:
     N_RUNS = 1
-print('DEGREE_ELEVATIONS:', DEGREE_ELEVATIONS)
-DataFolder = f'Data/DegreeElevations_{DEGREE_ELEVATIONS}'
 # Create the folder to store the trained models
-DataFolder = f'Data/DegreeElevations_{DEGREE_ELEVATIONS}'
+DataFolder = f'Data/H_REFINEMENTS_{H_REFINEMENTS}'
 if not os.path.exists(f'{DataFolder}/TrainedModels'):
     os.mkdir(f'{DataFolder}/TrainedModels')

 models = {
-    'LinearRegression': LinearRegressionModel(DEGREE_ELEVATIONS=DEGREE_ELEVATIONS),
+    'LinearRegression': LinearRegressionModel(H_REFINEMENTS=H_REFINEMENTS),
 }

 # Training for different number of POD modes
......
@@ -16,6 +16,7 @@ from miscellaneous.DataPreparation import save_numpy_to_xml, get_matrix_size
 parser = argparse.ArgumentParser(
     description="Script that reads the Elevation Degrees from CMD"
 )
+parser.add_argument("--e", required=False, type=int)
 parser.add_argument("--r", required=True, type=int)
 parser.add_argument("--n", required=False, type=int)
 parser.add_argument("--R_lower", required=False, type=int)
@@ -23,13 +24,21 @@ parser.add_argument("--R_upper", required=False, type=int)
 parser.add_argument("--R_step", required=False, type=int)
 parser.add_argument("--Architecture", required=False, type=str)
 args = parser.parse_args()
-DEGREE_ELEVATIONS = args.r
+if args.e is not None:
+    DEGREE_ELEVATIONS = args.e
+else:
+    DEGREE_ELEVATIONS = 1
+if args.r is not None:
+    H_REFINEMENTS = args.r
+else:
+    H_REFINEMENTS = 0
 if args.n is not None:
     N_RUNS = args.n
 else:
     N_RUNS = 1
 print('DEGREE_ELEVATIONS:', DEGREE_ELEVATIONS)
-DataFolder = f'Data/DegreeElevations_{DEGREE_ELEVATIONS}'
+print('H_REFINEMENTS:', H_REFINEMENTS)
+DataFolder = f'Data/H_REFINEMENTS_{H_REFINEMENTS}'
 PatchesFolder = f'{DataFolder}/Patches'
 if not os.path.exists(PatchesFolder):
     os.mkdir(PatchesFolder)
@@ -53,11 +62,11 @@ models = {
     # 'GP_DotWhite': GaussianProcessRegressionModel(),
     # 'GP_RBF': GaussianProcessRegressionModel(),
     # 'GP_Matern': GaussianProcessRegressionModel(),
-    'GP_RationalQuadratic': GaussianProcessRegressionModel(),
+    # 'GP_RationalQuadratic': GaussianProcessRegressionModel(),
     # 'GP_ExpSineSquared': GaussianProcessRegressionModel(),
-    'RBF_Linear': RadialBasisRegressionModel(),
-    'RBF_thinplatespline': RadialBasisRegressionModel(),
-    'RBF_cubic': RadialBasisRegressionModel(),
+    # 'RBF_Linear': RadialBasisRegressionModel(),
+    # 'RBF_thinplatespline': RadialBasisRegressionModel(),
+    # 'RBF_cubic': RadialBasisRegressionModel(),
     # 'RBF_quintic': RadialBasisRegressionModel(),
     # 'RBF_multiquadric': RadialBasisRegressionModel(),
     # 'RBF_inversemultiquadric': RadialBasisRegressionModel(),
@@ -111,7 +120,6 @@ for model_name in models:
     # Iterate over each testcase
     for sample in range(SAMPLES_TEST):
-    # for sample in range(3):
         save_numpy_to_xml(test_velocity_predict[:,sample], f'temp_{model_name}/predict_velocity_field_{sample}.xml', rows=VELOCITY_SIZE, cols='1')
         save_numpy_to_xml(test_pressure_predict[:,sample], f'temp_{model_name}/predict_pressure_field_{sample}.xml', rows=PRESSURE_SIZE, cols='1')
@@ -125,7 +133,7 @@ for model_name in models:
         # Create patches for the geometry with gismo script
         program_output = subprocess.Popen(
-            [f'./{path_to_executable}', f'-f {geometry_file}', f'-v {velocity_solution}', f'-p {pressure_solution}', f'-w {velocity_predicted}', f'-q {pressure_predicted}', f'-r {DEGREE_ELEVATIONS}', '--no-plot'],
+            [f'./{path_to_executable}', f'-f {geometry_file}', f'-v {velocity_solution}', f'-p {pressure_solution}', f'-w {velocity_predicted}', f'-q {pressure_predicted}', f'-e {DEGREE_ELEVATIONS}', f'-r {H_REFINEMENTS}', '--no-plot'],
             stdout=subprocess.PIPE,
             text=True
         ).communicate()[0]
......
@@ -8,19 +8,26 @@ from miscellaneous.error_evaluation import get_solution_vectors, load_geometry,
 parser = argparse.ArgumentParser(
     description="Script that reads the Elevation Degrees from CMD"
 )
+parser.add_argument("--e", required=False, type=int)
 parser.add_argument("--r", required=True, type=int)
 parser.add_argument("--R_lower", required=False, type=int)
 parser.add_argument("--R_upper", required=False, type=int)
 parser.add_argument("--R_step", required=False, type=int)
 parser.add_argument("--plot", required=False, type=bool)
 args = parser.parse_args()
-DEGREE_ELEVATIONS = args.r
-print(f'Plot: \t\t\t{args.plot}')
-print(f'type(args.plot): \t{type(args.plot)}')
+if args.e is not None:
+    DEGREE_ELEVATIONS = args.e
+else:
+    DEGREE_ELEVATIONS = 1
+if args.r is not None:
+    H_REFINEMENTS = args.r
+else:
+    H_REFINEMENTS = 0
 if args.plot:
     print('Plotting is enabled')
-DataFolder = f'Data/DegreeElevations_{DEGREE_ELEVATIONS}'
+DataFolder = f'Data/H_REFINEMENTS_{H_REFINEMENTS}'
 print(f'Degree Elevation: \t{DEGREE_ELEVATIONS}')
+print(f'H_REFINEMENTS: \t\t{H_REFINEMENTS}')
 PatchesFolder = f'{DataFolder}/Patches'

 models = {
@@ -28,11 +35,11 @@ models = {
     # 'GP_DotWhite',
     # 'GP_RBF',
     # 'GP_Matern',
-    'GP_RationalQuadratic',
+    # 'GP_RationalQuadratic',
     # 'GP_ExpSineSquared',
-    'RBF_Linear',
-    'RBF_thinplatespline',
-    'RBF_cubic',
+    # 'RBF_Linear',
+    # 'RBF_thinplatespline',
+    # 'RBF_cubic',
     # 'RBF_quintic',
     # 'RBF_multiquadric',
     # 'RBF_inversemultiquadric',
@@ -106,7 +113,10 @@ for model_name in models:
         velocity_rec_data = get_solution_vectors(file_path=VELOCITY_REC_FILE, two_dimensional=True)
         # Load the geometry
-        microstructure, ms_vel = load_geometry(GEOMETRY_FILE, degree_elevations=1, h_refinements=args.r)
+        print(f'Geometry: \t\t{GEOMETRY_FILE}')
+        print(f'degree_elevations: \t{DEGREE_ELEVATIONS}')
+        print(f'h_refinements: \t{H_REFINEMENTS}')
+        microstructure, ms_vel = load_geometry(GEOMETRY_FILE, degree_elevations=DEGREE_ELEVATIONS, h_refinements=H_REFINEMENTS)
         # Show pressure and velocity field
         if args.plot:
......
@@ -107,6 +107,8 @@ def show_multipatch_field(mp, solution_vectors, data_name="solution"):
         List of patch solution vectors
     """
     assert isinstance(solution_vectors, list), "Solution vectors have to be a list"
+    print(f"Number of patches: {len(mp.patches)}")
+    print(f"Number of solution vectors: {len(solution_vectors)}")
     assert len(mp.patches) == len(solution_vectors), "Mismatch between number of patches and patch solution vectors"
     spline_data_list = []
     for mp_patch, sv in zip(mp.patches, solution_vectors):
@@ -130,7 +132,7 @@ def show_multipatch_field(mp, solution_vectors, data_name="solution"):
 #     """
 #     microstructure = sp.io.gismo.load(filename)[0]
 #     if degree_elevations > 0:
-#         [patch.elevate_degrees([0,1]*degree_elevations) for patch in microstructure.patches]
+#         [patch.elevate_degrees([0,1]*(degree_elevations)) for patch in microstructure.patches]
 #     patches_p_elevated = []
 #     for patch in microstructure.patches:
 #         patch_elevated = patch.copy()
@@ -141,52 +143,58 @@ def show_multipatch_field(mp, solution_vectors, data_name="solution"):
 #     return microstructure, microstructure_vel

-def load_geometry(filename, degree_elevations=0, h_refinements=1):
+def load_geometry(filename, degree_elevations=0, h_refinements=0):
     """
     Load geometry and for velocity perform one degree elevation (Taylor-Hood elements)

     filename: str
         Filename of xml-file
     """
+    assert(degree_elevations >= 0), "Degree elevations must be non-negative"
+    assert(h_refinements >= 0), "H-refinements must be non-negative"
+    assert(type(degree_elevations) == int), "Degree elevations must be an integer"
+    assert(type(h_refinements) == int), "H-refinements must be an integer"
     # Load geometry from xml-file
     microstructure = sp.io.gismo.load(filename)[0]
     # Perform degree elevations if simulations also were run with degree elevations
     if degree_elevations > 0:
         [patch.elevate_degrees([0,1]*degree_elevations) for patch in microstructure.patches]
-    # Create two new list of patches for h-refinement
+    # Create new list of patches for h-refinement
     patches_refined = []
-    # Go through patches
+    # Go through patches of microstructure
     for patch in microstructure.patches:
         patch_elevated = patch.copy()
         # Convert Bezier to Bspline
         patch_elevated = patch_elevated.bspline
         # Perform h-refinements
-        for _ in range(h_refinements-1):
+        for _ in range(h_refinements):
             patch_elevated.uniform_refine([0,1])
         # Extract Bezier patches for each knot span
         new_refined_patches = patch_elevated.extract_bezier_patches()
         # Add patches as new refined patches
         patches_refined += new_refined_patches
-        # For velocity and Taylor-Hood elements, elevate degree once per patch
-        new_velocity_patches = new_refined_patches.copy()
-        [patch_refined.elevate_degrees([0,1]) for patch_refined in new_velocity_patches]
     # Group all patches into one Multipatch object
     microstructure_refined = sp.Multipatch(splines=patches_refined)
+    # Determine the interfaces for the multipatch objects
     microstructure_refined.determine_interfaces()
-    # Refinement for velocity
+    # Elevate the degrees of the velocity patches for Taylor-Hood elements
     patches_p_elevated = []
     for patch in microstructure_refined.patches:
         patch_elevated = patch.copy()
         patch_elevated.elevate_degrees([0,1])
         patches_p_elevated.append(patch_elevated)
+    # Create a new Multipatch object for the velocity field
     microstructure_refined_vel = sp.Multipatch(splines=patches_p_elevated)
     microstructure_refined_vel.determine_interfaces()
     return microstructure_refined, microstructure_refined_vel

 if __name__ == "__main__":
     # Get solution vector for pressure and velocity
     pressure_data = get_solution_vectors(file_path=PRESSURE_FILE)
......
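
Note: based on the signature shown above, a typical call to the updated load_geometry looks as follows. The file path is only an example built from the naming scheme used elsewhere in this commit, not a file guaranteed to exist:

# Hypothetical usage; index_0.xml under Data/H_REFINEMENTS_0 follows the export paths above.
geometry_file = 'Data/H_REFINEMENTS_0/train/microstructure/index_0.xml'
microstructure, microstructure_vel = load_geometry(
    geometry_file,
    degree_elevations=1,  # applied to every patch before refinement
    h_refinements=0,      # uniform refinements per patch, followed by Bezier extraction
)
# microstructure_vel carries one extra degree elevation per patch (Taylor-Hood pairing)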
@@ -5,8 +5,8 @@ from sklearn.metrics import mean_absolute_error, root_mean_squared_error, max_er
 from kneed import KneeLocator

 class RegressionModels:
-    def __init__(self, DEGREE_ELEVATIONS=1):
-        DataFolder = f'Data/DegreeElevations_{DEGREE_ELEVATIONS}'
+    def __init__(self, H_REFINEMENTS=0):
+        DataFolder = f'Data/H_REFINEMENTS_{H_REFINEMENTS}'
         # Load unscaled data
         self.velocity_matrix_train = np.loadtxt(f'{DataFolder}/train/matrices/velocity.csv')
         self.pressure_matrix_train = np.loadtxt(f'{DataFolder}/train/matrices/pressure.csv')
......
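
Note: with this change the model classes are parameterised by the h-refinement level instead of the degree elevations. A minimal usage sketch, mirroring the call in the training script above (not additional committed code):

from models.LinearRegression import LinearRegressionModel

# The base class resolves its data folder from the refinement level,
# e.g. Data/H_REFINEMENTS_0/train/matrices/velocity.csv
model = LinearRegressionModel(H_REFINEMENTS=0)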