diff --git a/eg0_affine_thermoelastic_solver.py b/eg00_affine_thermoelastic_solver.py old mode 100644 new mode 100755 similarity index 63% rename from eg0_affine_thermoelastic_solver.py rename to eg00_affine_thermoelastic_solver.py index 29195bb..843c6f4 --- a/eg0_affine_thermoelastic_solver.py +++ b/eg00_affine_thermoelastic_solver.py @@ -7,7 +7,7 @@ from microstructures import * from utilities import verify_data, read_h5 -for microstructure in microstructures: +for microstructure in microstructures[-9:]: file_name, data_path, temp1, temp2 = itemgetter('file_name', 'data_path', 'temp1', 'temp2')(microstructure) @@ -17,6 +17,12 @@ mesh, samples = read_h5(file_name, data_path, temperatures) + # print(mesh.keys()) + # print(samples[0].keys()) + print('strain localication shape:', samples[0]['strain_localization'].shape) + print('material stiffness shape:', samples[0]['mat_stiffness'].shape) + print('plastic modes shape:', samples[0]['plastic_modes'].shape) + for sample in samples: verify_data(mesh, sample) diff --git a/eg1_approximation_of_mat_properties.py b/eg01_approximation_of_mat_properties.py old mode 100644 new mode 100755 similarity index 99% rename from eg1_approximation_of_mat_properties.py rename to eg01_approximation_of_mat_properties.py index 1613e66..404e891 --- a/eg1_approximation_of_mat_properties.py +++ b/eg01_approximation_of_mat_properties.py @@ -6,6 +6,8 @@ from material_parameters import * from optimize_alpha import naive, opt1, opt2, opt4 from utilities import plot_and_save, cm +from matplotlib import rc +rc('text', usetex=True) temp1 = 300 temp2 = 1300 diff --git a/eg2_compare_approximations.py b/eg02_compare_approximations.py old mode 100644 new mode 100755 similarity index 87% rename from eg2_compare_approximations.py rename to eg02_compare_approximations.py index 0e2597c..a04fbfc --- a/eg2_compare_approximations.py +++ b/eg02_compare_approximations.py @@ -7,8 +7,11 @@ from interpolate_fluctuation_modes import interpolate_fluctuation_modes from microstructures import * +from material_parameters import * from optimize_alpha import opt1, opt2, opt4, naive from utilities import read_h5, construct_stress_localization, volume_average, compute_residual_efficient +from matplotlib import rc +rc('text', usetex=True) np.random.seed(0) file_name, data_path, temp1, temp2, n_tests, sampling_alphas = itemgetter('file_name', 'data_path', 'temp1', 'temp2', 'n_tests', @@ -26,11 +29,11 @@ strain_dof = mesh['strain_dof'] global_gradient = mesh['global_gradient'] n_gp = mesh['n_integration_points'] -n_modes = ref[0]['strain_localization'].shape[-1] +n_modes = ref[0]['plastic_modes'].shape[-1] _, samples = read_h5(file_name, data_path, [temp1, temp2], get_mesh=False) -strains = np.random.normal(size=(n_loading_directions, mesh['strain_dof'])) +strains = np.random.normal(size=(n_loading_directions, strain_dof)) strains /= la.norm(strains, axis=1)[:, None] n_approaches = 5 @@ -54,38 +57,46 @@ Eref = ref[idx]['strain_localization'] ref_C = ref[idx]['mat_stiffness'] ref_eps = ref[idx]['mat_thermal_strain'] + ref_C_ = np.stack(([stiffness_cu(temperature), stiffness_wsc(temperature)])) + ref_eps_ = np.expand_dims(np.stack(([thermal_strain_cu(temperature), thermal_strain_wsc(temperature)])), axis=2) + print(np.linalg.norm(ref_C - ref_C_), np.linalg.norm(ref_eps - ref_eps_)) + plastic_modes = ref[idx]['plastic_modes'] normalization_factor_mech = ref[idx]['normalization_factor_mech'] - Sref = construct_stress_localization(Eref, ref_C, ref_eps, mat_id, n_gauss, strain_dof) + Sref = 
construct_stress_localization(Eref, ref_C, ref_eps, plastic_modes, mat_id, n_gauss, strain_dof) effSref = volume_average(Sref) # interpolated quantities using an explicit interpolation scheme with one DOF approx_C, approx_eps = naive(alpha, sampling_C, sampling_eps, ref_C, ref_eps) Enaive = interpolate_temp(E0, E1) - Snaive = construct_stress_localization(Enaive, ref_C, ref_eps, mat_id, n_gauss, strain_dof) + Snaive = construct_stress_localization(Enaive, ref_C, ref_eps, plastic_modes, mat_id, n_gauss, strain_dof) effSnaive = volume_average(Snaive) # interpolated quantities using an explicit interpolation scheme with one DOF - Eopt0, _ = interpolate_fluctuation_modes(E01, approx_C, approx_eps, mat_id, n_gauss, strain_dof, n_modes, n_gp) - Sopt0 = construct_stress_localization(Eopt0, ref_C, ref_eps, mat_id, n_gauss, strain_dof) + Eopt0, _ = interpolate_fluctuation_modes(E01, approx_C, approx_eps, plastic_modes, mat_id, n_gauss, strain_dof, n_modes, + n_gp) + Sopt0 = construct_stress_localization(Eopt0, ref_C, ref_eps, plastic_modes, mat_id, n_gauss, strain_dof) effSopt0 = volume_average(Sopt0) # interpolated quantities using an implicit interpolation scheme with one DOF approx_C, approx_eps = opt1(sampling_C, sampling_eps, ref_C, ref_eps) - Eopt1, _ = interpolate_fluctuation_modes(E01, approx_C, approx_eps, mat_id, n_gauss, strain_dof, n_modes, n_gp) - Sopt1 = construct_stress_localization(Eopt1, ref_C, ref_eps, mat_id, n_gauss, strain_dof) + Eopt1, _ = interpolate_fluctuation_modes(E01, approx_C, approx_eps, plastic_modes, mat_id, n_gauss, strain_dof, n_modes, + n_gp) + Sopt1 = construct_stress_localization(Eopt1, ref_C, ref_eps, plastic_modes, mat_id, n_gauss, strain_dof) effSopt1 = volume_average(Sopt1) # interpolated quantities using an implicit interpolation scheme with two DOF approx_C, approx_eps = opt2(sampling_C, sampling_eps, ref_C, ref_eps) - Eopt2, _ = interpolate_fluctuation_modes(E01, approx_C, approx_eps, mat_id, n_gauss, strain_dof, n_modes, n_gp) - Sopt2 = construct_stress_localization(Eopt2, ref_C, ref_eps, mat_id, n_gauss, strain_dof) + Eopt2, _ = interpolate_fluctuation_modes(E01, approx_C, approx_eps, plastic_modes, mat_id, n_gauss, strain_dof, n_modes, + n_gp) + Sopt2 = construct_stress_localization(Eopt2, ref_C, ref_eps, plastic_modes, mat_id, n_gauss, strain_dof) effSopt2 = volume_average(Sopt2) # interpolated quantities using an implicit interpolation scheme with four DOF approx_C, approx_eps = opt4(sampling_C, sampling_eps, ref_C, ref_eps) - Eopt4, _ = interpolate_fluctuation_modes(E01, approx_C, approx_eps, mat_id, n_gauss, strain_dof, n_modes, n_gp) - Sopt4 = construct_stress_localization(Eopt4, ref_C, ref_eps, mat_id, n_gauss, strain_dof) + Eopt4, _ = interpolate_fluctuation_modes(E01, approx_C, approx_eps, plastic_modes, mat_id, n_gauss, strain_dof, n_modes, + n_gp) + Sopt4 = construct_stress_localization(Eopt4, ref_C, ref_eps, plastic_modes, mat_id, n_gauss, strain_dof) effSopt4 = volume_average(Sopt4) err = lambda x, y: np.mean(la.norm(x - y, axis=(-1, -2)) / la.norm(y, axis=(-1, -2))) * 100 @@ -97,7 +108,7 @@ err(effSopt2, effSref), err(effSopt4, effSref)] for strain_idx, strain in enumerate(strains): - zeta = np.hstack((strain, 1)) + zeta = np.hstack((strain, 1, np.ones(plastic_modes.shape[-1]))) eff_stress_ref = effSref @ zeta err_eff_stress[:, idx * n_loading_directions + strain_idx] = \ @@ -112,7 +123,7 @@ stress_opt4 = np.einsum('ijk,k', Sopt4, zeta, optimize='optimal') residuals = compute_residual_efficient([stress_naive, stress_opt0, stress_opt1, 
stress_opt2, stress_opt4], - mesh['global_gradient']) + global_gradient) err_f[:, idx * n_loading_directions + strain_idx] = la.norm(residuals, np.inf, axis=0) / normalization_factor_mech * 100 diff --git a/eg3_hierarchical_sampling.py b/eg03_hierarchical_sampling.py old mode 100644 new mode 100755 similarity index 90% rename from eg3_hierarchical_sampling.py rename to eg03_hierarchical_sampling.py index f4fa926..d5847e1 --- a/eg3_hierarchical_sampling.py +++ b/eg03_hierarchical_sampling.py @@ -10,10 +10,12 @@ from optimize_alpha import opt4 from utilities import read_h5, construct_stress_localization, volume_average, compute_residual_efficient, \ compute_err_indicator_efficient +from matplotlib import rc +rc('text', usetex=True) np.random.seed(0) file_name, data_path, temp1, temp2, n_tests, sampling_alphas = itemgetter('file_name', 'data_path', 'temp1', 'temp2', 'n_tests', - 'sampling_alphas')(microstructures[6]) + 'sampling_alphas')(microstructures[0]) print(file_name, '\t', data_path) n_loading_directions = 1 @@ -28,7 +30,7 @@ global_gradient = mesh['global_gradient'] n_gp = mesh['n_integration_points'] n_phases = len(np.unique(mat_id)) -n_modes = ref[0]['strain_localization'].shape[-1] +n_modes = ref[0]['plastic_modes'].shape[-1] strains = np.random.normal(size=(n_loading_directions, strain_dof)) strains /= la.norm(strains, axis=1)[:, None] @@ -60,22 +62,27 @@ sampling_eps = np.stack((samples[id0]['mat_thermal_strain'], samples[id1]['mat_thermal_strain'])).transpose([1, 0, 2, 3]) # reference values + Eref = ref[idx]['strain_localization'] ref_C = ref[idx]['mat_stiffness'] ref_eps = ref[idx]['mat_thermal_strain'] + plastic_modes = ref[idx]['plastic_modes'] normalization_factor_mech = ref[idx]['normalization_factor_mech'] - effSref = np.vstack((ref[idx]['eff_stiffness'], -ref[idx]['eff_stiffness'] @ ref[idx]['eff_thermal_strain'])).T + Sref = construct_stress_localization(Eref, ref_C, ref_eps, plastic_modes, mat_id, n_gauss, strain_dof) + effSref = volume_average(Sref) + # effSref = np.vstack((ref[idx]['eff_stiffness'], -ref[idx]['eff_stiffness'] @ ref[idx]['eff_thermal_strain'])).T # interpolated quantities using an implicit interpolation scheme with four DOF approx_C, approx_eps = opt4(sampling_C, sampling_eps, ref_C, ref_eps) - Eopt4, _ = interpolate_fluctuation_modes(E01, approx_C, approx_eps, mat_id, n_gauss, strain_dof, n_modes, n_gp) - Sopt4 = construct_stress_localization(Eopt4, ref_C, ref_eps, mat_id, n_gauss, strain_dof) + Eopt4, _ = interpolate_fluctuation_modes(E01, approx_C, approx_eps, plastic_modes, mat_id, n_gauss, strain_dof, n_modes, + n_gp) + Sopt4 = construct_stress_localization(Eopt4, ref_C, ref_eps, plastic_modes, mat_id, n_gauss, strain_dof) effSopt = volume_average(Sopt4) err_indicators[level, idx] = np.mean(np.max(np.abs(compute_err_indicator_efficient(Sopt4, global_gradient)), axis=0)) / normalization_factor_mech * 100 for strain_idx, strain in enumerate(strains): - zeta = np.hstack((strain, 1)) + zeta = np.hstack((strain, 1, np.ones(plastic_modes.shape[-1]))) stress_opt4 = np.einsum('ijk,k', Sopt4, zeta, optimize='optimal') residual = compute_residual_efficient(stress_opt4, global_gradient) @@ -89,11 +96,11 @@ invL = la.inv(la.cholesky(Cref)) err_eff_C[level, idx] = la.norm(invL @ Capprox @ invL.T - np.eye(6)) / la.norm(np.eye(6)) * 100 - err_eff_eps[level, idx] = err(la.solve(Capprox, effSopt[:, -1]), la.solve(Cref, effSref[:, -1])) + err_eff_eps[level, idx] = err(la.solve(Capprox, effSopt[:, 7]), la.solve(Cref, effSref[:, 7])) # max_err_idx = 
np.argmax(np.mean(err_nodal_force[level], axis=1)) max_err_idx = np.argmax(err_indicators[level]) - alpha_levels.append(np.sort(np.hstack((alphas, test_alphas[max_err_idx])))) + alpha_levels.append(np.unique(np.sort(np.hstack((alphas, test_alphas[max_err_idx]))))) print(f'{np.max(np.mean(err_nodal_force[level], axis=1)) = }') print(f'{np.max(err_indicators[level]) = }') diff --git a/eg4_hierarchical_sampling_efficient.py b/eg04_hierarchical_sampling_efficient.py old mode 100644 new mode 100755 similarity index 81% rename from eg4_hierarchical_sampling_efficient.py rename to eg04_hierarchical_sampling_efficient.py index 90c14e9..8a67346 --- a/eg4_hierarchical_sampling_efficient.py +++ b/eg04_hierarchical_sampling_efficient.py @@ -6,13 +6,13 @@ from interpolate_fluctuation_modes import update_affine_decomposition, effective_S, effective_stress_localization, \ interpolate_fluctuation_modes, get_phi, transform_strain_localization from microstructures import * -from optimize_alpha import opt4_alphas -from utilities import read_h5, construct_stress_localization, compute_err_indicator_efficient +from optimize_alpha import opt4_alphas, opt4 +from utilities import read_h5, construct_stress_localization, compute_err_indicator_efficient, volume_average np.random.seed(0) # np.set_printoptions(precision=3) -for ms_id in [6, 7, 8, 9]: +for ms_id in [0]: file_name, data_path, temp1, temp2, n_tests, sampling_alphas = itemgetter('file_name', 'data_path', 'temp1', 'temp2', 'n_tests', 'sampling_alphas')(microstructures[ms_id]) @@ -36,20 +36,25 @@ global_gradient = mesh['global_gradient'] n_gp = mesh['n_integration_points'] n_phases = len(np.unique(mat_id)) - n_modes = refs[0]['strain_localization'].shape[-1] + N_modes = refs[0]['strain_localization'].shape[2] - 7 # extract temperature dependent data from the reference solutions # such as: material stiffness and thermal strain at each temperature and for all phases + Erefs = np.zeros((n_tests, *refs[0]['strain_localization'].shape)) # n_tests x n_phases x 6 x 6 ref_Cs = np.zeros((n_tests, *refs[0]['mat_stiffness'].shape)) # n_tests x n_phases x 6 x 6 ref_epss = np.zeros((n_tests, *refs[0]['mat_thermal_strain'].shape)) # n_tests x n_phases x 6 x 1 - effSref = np.zeros((n_tests, strain_dof, n_modes)) + effSref = np.zeros((n_tests, strain_dof, N_modes + 7)) # n_tests x 6 x (N + 7) normalization_factor_mech = np.zeros((n_tests)) + plastic_modes = refs[0]['plastic_modes'] # temperature independent for idx, alpha in enumerate(test_alphas): + print(idx) + Erefs[idx] = refs[idx]['strain_localization'] ref_Cs[idx] = refs[idx]['mat_stiffness'] ref_epss[idx] = refs[idx]['mat_thermal_strain'] normalization_factor_mech[idx] = refs[idx]['normalization_factor_mech'] - effSref[idx] = np.hstack( - (refs[idx]['eff_stiffness'], -np.reshape(refs[idx]['eff_stiffness'] @ refs[idx]['eff_thermal_strain'], (-1, 1)))) + Sref = construct_stress_localization(Erefs[idx], ref_Cs[idx], ref_epss[idx], plastic_modes, mat_id, n_gauss, + strain_dof) + effSref[idx] = volume_average(Sref) err_indicators, err_eff_S, err_eff_C, err_eff_eps = [np.zeros((n_hierarchical_levels, n_tests)) for _ in range(4)] interpolate_temp = lambda x1, x2, alpha: x1 + alpha * (x2 - x1) @@ -105,30 +110,39 @@ current_sampling_id = alphas_indexing[idx] K0, K1, F0, F1, F2, F3, S001, S101, S103, S002, S102, S104 = update_affine_decomposition( - E01s[current_sampling_id], sampling_C, sampling_eps, n_modes, n_phases, n_gp, strain_dof, mat_id, n_gauss) + E01s[current_sampling_id], sampling_C, sampling_eps, plastic_modes, 
N_modes, n_phases, + n_gp, strain_dof, + mat_id, n_gauss) phi = get_phi(K0, K1, F0, F1, F2, F3, alpha_C, alpha_eps, alpha_C_eps) speed = 1 - if speed == 0: - C, eps = ref_Cs[idx], ref_epss[idx] - # C, eps = opt4(sampling_C, sampling_eps, ref_Cs[idx], ref_epss[idx]) - _, effSopt = interpolate_fluctuation_modes(E01s[current_sampling_id], C, eps, mat_id, n_gauss, strain_dof, - n_modes, n_gp) - elif speed == 1: - effSopt = effective_stress_localization(E01s[current_sampling_id], phi, ref_Cs[idx], ref_epss[idx], mat_id, - n_gauss, n_gp, strain_dof, n_modes) - elif speed == 2: - # matches the result from interpolate_fluctuation_modes with a difference - # that depends on using ref_Cs[idx],ref_epss[idx] instead of alphas - effSopt, phi = effective_S(phi, S001, S101, S103, S002, S102, S104, alpha_C, np.squeeze(alpha_eps, axis=-1), - np.squeeze(alpha_C_eps, axis=-1)) - else: - raise NotImplementedError() + # if speed == 0: + C, eps = ref_Cs[idx], ref_epss[idx] + # C, eps = opt4(sampling_C, sampling_eps, ref_Cs[idx], ref_epss[idx]) + _, effSopt = interpolate_fluctuation_modes(E01s[current_sampling_id], C, eps, plastic_modes, mat_id, n_gauss, + strain_dof, + N_modes, n_gp) + #elif speed == 1: + # TODO verify the result when plasticity is on + effSopt1 = effective_stress_localization(E01s[current_sampling_id], phi, ref_Cs[idx], ref_epss[idx], plastic_modes, + mat_id, + n_gauss, n_gp, strain_dof, N_modes) + #elif speed == 2: + # TODO verify the result when plasticity is on + # matches the result from interpolate_fluctuatioN_modes with a difference + # that depends on using ref_Cs[idx],ref_epss[idx] instead of alphas + effSopt2, phi2 = effective_S(phi, S001, S101, S103, S002, S102, S104, alpha_C, np.squeeze(alpha_eps, axis=-1), + np.squeeze(alpha_C_eps, axis=-1)) + #else: + # raise NotImplementedError() + print(np.linalg.norm(effSopt - effSopt1)) if not given_alpha_levels: - Eopt4 = transform_strain_localization(E01s[current_sampling_id], phi, n_gp, strain_dof, n_modes) - Sopt4 = construct_stress_localization(Eopt4, ref_Cs[idx], ref_epss[idx], mat_id, n_gauss, strain_dof) + Eopt4 = transform_strain_localization(E01s[current_sampling_id], phi, n_gp, strain_dof, N_modes) + Sopt4 = construct_stress_localization(Eopt4, ref_Cs[idx], ref_epss[idx], plastic_modes, mat_id, n_gauss, + strain_dof) + # effSopt = volume_average(Sopt4) err_indicators[level, idx] = np.mean(np.max(np.abs(compute_err_indicator_efficient(Sopt4, global_gradient)), axis=0)) / normalization_factor_mech[idx] * 100 @@ -140,7 +154,7 @@ invL = la.inv(la.cholesky(Cref)) err_eff_C[level, idx] = la.norm(invL @ Capprox @ invL.T - np.eye(6)) / la.norm(np.eye(6)) * 100 - err_eff_eps[level, idx] = err(la.solve(Capprox, effSopt[:, -1]), la.solve(Cref, effSref[idx][:, -1])) + err_eff_eps[level, idx] = err(la.solve(Capprox, effSopt[:, 7]), la.solve(Cref, effSref[idx][:, 7])) # TODO remove dtype='f' group = file.require_group(f'{data_path}_level{level}') @@ -149,7 +163,7 @@ dset_stiffness = group.require_dataset(f'eff_stiffness_{temperature:07.2f}', (6, 6), dtype='f') dset_thermal_strain = group.require_dataset(f'eff_thermal_strain_{temperature:07.2f}', (6), dtype='f') dset_stiffness[:] = Capprox.T - dset_thermal_strain[:] = la.solve(Capprox, effSopt[:, -1]) + dset_thermal_strain[:] = la.solve(Capprox, effSopt[:, 7]) if not given_alpha_levels: max_err_idx = np.argmax(err_indicators[level]) diff --git a/eg5_FFANN.py b/eg05_FFANN.py old mode 100644 new mode 100755 similarity index 100% rename from eg5_FFANN.py rename to eg05_FFANN.py diff --git 
a/eg6_post_process_ann_vs_proposed.py b/eg06_post_process_ann_vs_proposed.py old mode 100644 new mode 100755 similarity index 100% rename from eg6_post_process_ann_vs_proposed.py rename to eg06_post_process_ann_vs_proposed.py diff --git a/eg7_staggered_model_interpolation.py b/eg07_staggered_model_interpolation.py old mode 100644 new mode 100755 similarity index 99% rename from eg7_staggered_model_interpolation.py rename to eg07_staggered_model_interpolation.py index 92aedfa..f87a893 --- a/eg7_staggered_model_interpolation.py +++ b/eg07_staggered_model_interpolation.py @@ -11,7 +11,7 @@ # Offline stage: Load precomputed optimal data load_start = time.time() -ms_id = 6 +ms_id = 0 level = 4 file_name, data_path, temp1, temp2, n_samples, sampling_alphas = itemgetter('file_name', 'data_path', 'temp1', 'temp2', 'n_tests', 'sampling_alphas')(microstructures[ms_id]) diff --git a/eg8_plot_localization.py b/eg08_plot_localization.py old mode 100644 new mode 100755 similarity index 99% rename from eg8_plot_localization.py rename to eg08_plot_localization.py index 5a92306..2075f34 --- a/eg8_plot_localization.py +++ b/eg08_plot_localization.py @@ -3,12 +3,13 @@ """ #%% from operator import itemgetter - +import numpy as np import numpy.linalg as la import matplotlib.pyplot as plt from microstructures import * from utilities import read_h5, construct_stress_localization + np.random.seed(0) file_name, data_path, temp1, temp2, n_tests, sampling_alphas = itemgetter( "file_name", "data_path", "temp1", "temp2", "n_tests", "sampling_alphas" diff --git a/eg09_compute_ntfa_matrices.py b/eg09_compute_ntfa_matrices.py new file mode 100755 index 0000000..9f8bc1e --- /dev/null +++ b/eg09_compute_ntfa_matrices.py @@ -0,0 +1,71 @@ +""" +Demo code for plastic mode identification and processing, i.e. 
computation of the system matrices +""" +#%% +from operator import itemgetter +import time +from microstructures import * +from utilities import read_h5 +from ntfa import read_snapshots, mode_identification, compute_tabular_data, save_tabular_data + +np.random.seed(0) +for microstructure in microstructures: + file_name, data_path, temp1, temp2, n_tests, sampling_alphas = itemgetter( + "file_name", "data_path", "temp1", "temp2", "n_tests", "sampling_alphas" + )(microstructure) + print(file_name, "\t", data_path) + + sample_temperatures = np.linspace(temp1, temp2, num=n_tests) + + mesh, samples = read_h5(file_name, data_path, sample_temperatures) + mat_id = mesh["mat_id"] + n_gauss = mesh["n_gauss"] + strain_dof = mesh["strain_dof"] + nodal_dof = mesh["nodal_dof"] + n_elements = mesh["n_elements"] + n_integration_points = mesh["n_integration_points"] + global_gradient = mesh["global_gradient"] + n_gp = mesh["n_integration_points"] + disc = mesh["combo_discretisation"] + vol_frac0, vol_frac1 = mesh['volume_fraction'][0], mesh['volume_fraction'][1] + + # Mode identification + + # Read plastic snapshots from h5 file + plastic_snapshots = read_snapshots(file_name, data_path) + print('plastic_snapshots.shape:', plastic_snapshots.shape) + + # Identification of plastic modes + r_min = 1e-3 + plastic_modes_svd = mode_identification(plastic_snapshots, vol_frac0, r_min) + print('plastic_modes_svd.shape:', plastic_modes_svd.shape) + + # Compare computed plastic modes with plastic modes from h5 file + plastic_modes = samples[0]['plastic_modes'] + plastic_modes = plastic_modes_svd + assert np.allclose(plastic_modes / np.sign(np.expand_dims(np.expand_dims(plastic_modes[0,0,:], axis=0), axis=0)), plastic_modes_svd), 'Identified plastic modes do not match plastic modes in h5 file' + + # Mode processing to compute system matrices + + n_temp = 1000 + temperatures = np.linspace(temp1, temp2, num=n_temp) + start_time = time.time() + # TODO: compute system matrices for multiple intermediate temperatures in an efficient way + C_bar, tau_theta, A_bar, tau_xi, D_xi, D_theta, A0, A1, C0, C1 = compute_tabular_data(samples, mesh, temperatures) + elapsed_time = time.time() - start_time + print(f'Computed tabular data for {n_temp} temperatures in {elapsed_time}s') + print('C_bar.shape:', C_bar.shape) + print('tau_theta.shape:', tau_theta.shape) + print('A_bar.shape:', A_bar.shape) + print('tau_xi.shape:', tau_xi.shape) + print('D_xi.shape:', D_xi.shape) + print('D_theta.shape:', D_theta.shape) + print('A_bar/A0/A1 error:', np.linalg.norm(vol_frac0 * A0[:, strain_dof + 1:] + vol_frac1 * A1[:, strain_dof + 1:] - A_bar) / np.linalg.norm(A_bar)) + + # Save system matrices for multiple temperatures as tabular data + save_tabular_data(file_name, data_path, temperatures, C_bar, tau_theta, A_bar, tau_xi, D_xi, D_theta, A0, A1, C0, C1) + # Tabular data is saved to the input h5 file and can be copied to a new h5 file using e.g. + # h5copy -i input/file.h5 -o input/file_ntfa.h5 -s ms_1p/dset0_ntfa -d ms_1p/dset0_ntfa -p + +#%% Compare interpolated NTFA matrices with exact NTFA matrices +# TODO: compute errors diff --git a/eg10_compare_ntfa_matrices.py b/eg10_compare_ntfa_matrices.py new file mode 100755 index 0000000..9418746 --- /dev/null +++ b/eg10_compare_ntfa_matrices.py @@ -0,0 +1,36 @@ +""" +Demo code for plastic mode identification and processing, i.e. 
computation of the system matrices +""" +#%% +from operator import itemgetter +from microstructures import * +from ntfa import read_tabular_data + +file_name_10, data_path_10, temp1_10, temp2_10, n_tests_10, sampling_alphas_10 = itemgetter( + "file_name", "data_path", "temp1", "temp2", "n_tests", "sampling_alphas" +)(microstructures[0]) + +temperatures_10, C_bar_10, tau_theta_10, A_bar_10, tau_xi_10, D_xi_10, D_theta_10, A0_10, A1_10, C0_10, C1_10 = \ + read_tabular_data(file_name_10, data_path_10) + +file_name_100, data_path_100, temp1_100, temp2_100, n_tests_100, sampling_alphas_100 = itemgetter( + "file_name", "data_path", "temp1", "temp2", "n_tests", "sampling_alphas" +)(microstructures[-1]) + +temperatures_100, C_bar_100, tau_theta_100, A_bar_100, tau_xi_100, D_xi_100, D_theta_100, A0_100, A1_100, C0_100, C1_100 = \ + read_tabular_data(file_name_100, data_path_100) + +C_bar_diff = C_bar_10 - C_bar_100 +tau_theta_diff = tau_theta_10 - tau_theta_100 +# A_bar_diff +# tau_xi_diff +# D_xi_diff +D_theta_diff = tau_theta_10 - tau_theta_100 +# A0_diff +# A1_diff +C0_diff = C0_10 - C0_100 +C1_10 = C1_10 - C1_100 +print('C_bar error:', np.linalg.norm(C_bar_diff, axis=(0,1)) / np.linalg.norm(C_bar_100, axis=(0,1))) +print('tau_theta error:', np.linalg.norm(tau_theta_diff, axis=0) / np.linalg.norm(tau_theta_100, axis=0)) +print('D_theta error:', np.linalg.norm(D_theta_diff, axis=0) / np.linalg.norm(D_theta_100, axis=0)) +print(C_bar_10.shape, C_bar_100.shape) \ No newline at end of file diff --git a/interpolate_fluctuation_modes.py b/interpolate_fluctuation_modes.py old mode 100644 new mode 100755 index 5882ccd..f5e20dd --- a/interpolate_fluctuation_modes.py +++ b/interpolate_fluctuation_modes.py @@ -1,18 +1,19 @@ import numpy as np from numba import jit, prange -@jit(nopython=True, cache=True, parallel=True, nogil=True) -def interpolate_fluctuation_modes(E01, mat_stiffness, mat_thermal_strain, mat_id, n_gauss, strain_dof, n_modes, n_gp): - K = np.zeros((2 * n_modes, 2 * n_modes)) - F = np.zeros((2 * n_modes, n_modes)) - E_approx = np.zeros((n_gp, strain_dof, n_modes)) +# @jit(nopython=True, cache=True, parallel=True, nogil=True) +def interpolate_fluctuation_modes(E01, mat_stiffness, mat_thermal_strain, plastic_modes, mat_id, n_gauss, strain_dof, n_modes, + n_gp): + K = np.zeros((2 * (n_modes + 7), 2 * (n_modes + 7))) + F = np.zeros((2 * (n_modes + 7), (n_modes + 7))) + E_approx = np.zeros((n_gp, strain_dof, n_modes + 7)) I = np.eye(strain_dof) - S_average = np.zeros((n_modes, n_modes)) + S_average = np.zeros((n_modes + 7, n_modes + 7)) for gp_id in prange(n_gp): phase_id = mat_id[gp_id // n_gauss] - P = np.hstack((-I, mat_thermal_strain[phase_id])) + P = np.hstack((-I, mat_thermal_strain[phase_id], plastic_modes[gp_id])) E01t_C = E01[gp_id].T @ mat_stiffness[phase_id] @@ -29,21 +30,22 @@ def interpolate_fluctuation_modes(E01, mat_stiffness, mat_thermal_strain, mat_id return E_approx, S_average[:6, :] -def update_affine_decomposition(E01, sampling_C, sampling_eps, n_modes, n_phases, n_gp, strain_dof, mat_id, n_gauss): +def update_affine_decomposition(E01, sampling_C, sampling_eps, plastic_modes, n_modes, n_phases, n_gp, strain_dof, mat_id, + n_gauss): I = np.eye(strain_dof) - K0 = np.zeros((2 * n_modes, 2 * n_modes)) - K1 = np.zeros((n_phases, 2 * n_modes, 2 * n_modes)) - F0 = np.zeros((2 * n_modes, n_modes)) - F1 = np.zeros((n_phases, 2 * n_modes, n_modes)) - F2, F3 = [np.zeros((n_phases, 2 * n_modes, 1)) for _ in range(2)] + K0 = np.zeros((2 * (n_modes + 7), 2 * (n_modes + 7))) + K1 = 
np.zeros((n_phases, 2 * (n_modes + 7), 2 * (n_modes + 7))) + F0 = np.zeros((2 * (n_modes + 7), (n_modes + 7))) + F1 = np.zeros((n_phases, 2 * (n_modes + 7), (n_modes + 7))) + F2, F3 = [np.zeros((n_phases, 2 * (n_modes + 7), 1)) for _ in range(2)] quick = True dim0 = 1 if quick else n_gp - S001 = np.zeros((dim0, strain_dof, 2 * n_modes)) - S002 = np.zeros((dim0, n_phases, strain_dof, 2 * n_modes)) - S101 = np.zeros((dim0, strain_dof, n_modes)) - S102 = np.zeros((dim0, n_phases, strain_dof, n_modes)) + S001 = np.zeros((dim0, strain_dof, 2 * (n_modes + 7))) + S002 = np.zeros((dim0, n_phases, strain_dof, 2 * (n_modes + 7))) + S101 = np.zeros((dim0, strain_dof, n_modes + 7)) + S102 = np.zeros((dim0, n_phases, strain_dof, n_modes + 7)) S103 = np.zeros((dim0, n_phases, strain_dof, 1)) S104 = np.zeros((dim0, n_phases, strain_dof, 1)) @@ -59,7 +61,7 @@ def update_affine_decomposition(E01, sampling_C, sampling_eps, n_modes, n_phases eps0phase = eps0[phase_id] deltaCphase = deltaC[phase_id] deltaEPSphase = delta_eps[phase_id] - P0_phase = np.hstack((-I, eps0phase)) + P0_phase = np.hstack((-I, eps0phase, plastic_modes[gp_id])) E01_transposed = E01[gp_id].T # essential terms that construct all other precomputed terms @@ -93,7 +95,7 @@ def update_affine_decomposition(E01, sampling_C, sampling_eps, n_modes, n_phases else: return K0, K1, F0, F1, F2, F3, S001, S101, S103, S002, S102, S104 -# @jit(nopython=True, cache=True, parallel=True, nogil=True) +@jit(nopython=True, cache=True, parallel=True, nogil=True) def get_phi(K0, K1, F0, F1, F2, F3, alpha_C, alpha_eps, alpha_C_eps): K = K0 + np.sum(K1 * alpha_C, 0) # sum over the phases F = F0 + np.sum(F1 * alpha_C, 0) @@ -101,7 +103,7 @@ def get_phi(K0, K1, F0, F1, F2, F3, alpha_C, alpha_eps, alpha_C_eps): return np.linalg.lstsq(K, F, rcond=-1)[0] -# @jit(nopython=True, cache=True, parallel=True, nogil=True) +@jit(nopython=True, cache=True, parallel=True, nogil=True) def effective_S(phi, S001, S101, S103, S002, S102, S104, alpha_C, alpha_eps2D, alpha_C_eps2D): S00 = S001 + np.sum(S002 * alpha_C, 0) # sum over the phases S11 = S101 + np.sum(S102 * alpha_C, 0) @@ -110,17 +112,18 @@ def effective_S(phi, S001, S101, S103, S002, S102, S104, alpha_C, alpha_eps2D, a @jit(nopython=True, cache=True, parallel=True, fastmath=True, nogil=True) def transform_strain_localization(E01, phi, n_gp, strain_dof, n_modes): - E_approx = np.zeros((n_gp, strain_dof, n_modes)) + E_approx = np.zeros((n_gp, strain_dof, n_modes + 7)) for gp_id in prange(n_gp): E_approx[gp_id] = E01[gp_id] @ phi return E_approx @jit(nopython=True, cache=True, parallel=True, nogil=True) -def effective_stress_localization(E01, phi, mat_stiffness, mat_thermal_strain, mat_id, n_gauss, n_gp, strain_dof, n_modes): - S_average = np.zeros((strain_dof, n_modes)) +def effective_stress_localization(E01, phi, mat_stiffness, mat_thermal_strain, plastic_modes, mat_id, n_gauss, n_gp, strain_dof, + n_modes): + S_average = np.zeros((strain_dof, n_modes + 7)) I = np.eye(strain_dof) for gp_id in prange(n_gp): phase_id = mat_id[gp_id // n_gauss] - P = np.hstack((-I, mat_thermal_strain[phase_id])) + P = np.hstack((-I, mat_thermal_strain[phase_id], plastic_modes[gp_id])) S_average += mat_stiffness[phase_id] @ (E01[gp_id] @ phi - P) return S_average / n_gp diff --git a/material_parameters.py b/material_parameters.py old mode 100644 new mode 100755 index 8dcb4d5..0128d6d --- a/material_parameters.py +++ b/material_parameters.py @@ -7,8 +7,9 @@ import matplotlib.pyplot as plt import numpy as np import scipy.integrate as integrate 
+import utilities -from utilities import cm +cm = 1 / 2.54 # centimeters in inches I2 = np.asarray([1., 1., 1., 0, 0, 0]) I4 = np.eye(6) @@ -24,8 +25,10 @@ heat_capacity_cu = lambda x: 2.94929e+03 * x ** 0 + 2.30217e+00 * x ** 1 + -2.95302e-03 * x ** 2 + 1.47057e-06 * x ** 3 cte_cu = lambda x: 1.28170e-05 * x ** 0 + 8.23091e-09 * x ** 1 elastic_modulus_cu = lambda x: 1.35742e+08 * x ** 0 + 5.85757e+03 * x ** 1 + -8.16134e+01 * x ** 2 +hardening_cu = lambda x: 20e+06 * x ** 0 -thermal_strain_cu = lambda x: integrate.quad(cte_cu, min_temperature, x)[0] * I2 +# thermal_strain_cu = lambda x: integrate.quad(cte_cu, min_temperature, x)[0] * I2 +thermal_strain_cu = lambda x: (1.28170e-05 * (x - min_temperature) + 8.23091e-09 * 0.5 * (x ** 2 - min_temperature ** 2)) * I2 shear_modulus_cu = lambda x: elastic_modulus_cu(x) / (2. * (1. + poisson_ratio_cu(x))) bulk_modulus_cu = lambda x: elastic_modulus_cu(x) / (3. * (1. - 2. * poisson_ratio_cu(x))) stiffness_cu = lambda x: bulk_modulus_cu(x) * IxI + 2. * shear_modulus_cu(x) * P2 @@ -36,7 +39,8 @@ cte_wsc = lambda x: 5.07893e-06 * x ** 0 + 5.67524e-10 * x ** 1 elastic_modulus_wsc = lambda x: 4.13295e+08 * x ** 0 + -7.83159e+03 * x ** 1 + -3.65909e+01 * x ** 2 + 5.48782e-03 * x ** 3 -thermal_strain_wsc = lambda x: integrate.quad(cte_wsc, min_temperature, x)[0] * I2 +# thermal_strain_wsc = lambda x: integrate.quad(cte_wsc, min_temperature, x)[0] * I2 +thermal_strain_wsc = lambda x: (5.07893e-06 * (x - min_temperature) + 5.67524e-10 * 0.5 * (x ** 2 - min_temperature ** 2)) * I2 shear_modulus_wsc = lambda x: elastic_modulus_wsc(x) / (2. * (1. + poisson_ratio_wsc(x))) bulk_modulus_wsc = lambda x: elastic_modulus_wsc(x) / (3. * (1. - 2. * poisson_ratio_wsc(x))) stiffness_wsc = lambda x: bulk_modulus_wsc(x) * IxI + 2. 
* shear_modulus_wsc(x) * P2 diff --git a/microstructures.py b/microstructures.py old mode 100644 new mode 100755 index 2447e07..5d20ae8 --- a/microstructures.py +++ b/microstructures.py @@ -2,122 +2,21 @@ import numpy as np -microstructures = [{ - 'data_path': '/ms_1p/dset0_sim', - 'file_name': path("input/striped_normal_4x4x4.h5"), - 'temp1': 300, - 'temp2': 1300, - 'n_tests': 100, - 'sampling_alphas': None -}, { - 'data_path': '/ms_1p/dset0_sim', - 'file_name': path("input/sphere_normal_16x16x16_10samples.h5"), - 'temp1': 300, - 'temp2': 1300, - 'n_tests': 10, - 'sampling_alphas': None -}, { - 'data_path': '/ms_1p/dset0_sim', - 'file_name': path("input/sphere_normal_32x32x32_10samples.h5"), - 'temp1': 300, - 'temp2': 1300, - 'n_tests': 10, - 'sampling_alphas': None -}, { - 'data_path': '/ms_1p/dset0_sim', - 'file_name': path("input/sphere_combo_16x16x16_10samples.h5"), +microstructures = [ +{ + 'data_path': '/ms_9p/dset0_sim', + 'file_name': path("input/rve_thermoplastic_6loadings_10samples.h5"), 'temp1': 300, 'temp2': 1300, 'n_tests': 10, 'sampling_alphas': None -}, { - 'data_path': '/ms_1p/dset0_sim', - 'file_name': path("input/octahedron_normal_16x16x16_10samples.h5"), +}, +{ + 'data_path': '/ms_9p/dset0_sim', + 'file_name': path("input/simple_3d_rve_B1-B6_16x16x16_100samples_fix.h5"), 'temp1': 300, 'temp2': 1300, - 'n_tests': 10, - 'sampling_alphas': None -}, { - 'data_path': '/ms_1p/dset0_sim', - 'file_name': path("input/octahedron_combo_16x16x16_10samples.h5"), - 'temp1': 300, - 'temp2': 1300, - 'n_tests': 10, + 'n_tests': 100, 'sampling_alphas': None -}, { - 'data_path': - '/ms_1p/dset0_sim', - 'file_name': - path("input/octahedron_combo_32x32x32.h5"), - 'temp1': - 300, - 'temp2': - 1300, - 'n_tests': - 100, - 'sampling_alphas': - np.asarray([ - np.asarray([0., 1.]), - np.asarray([0., 0.82828283, 1.]), - np.asarray([0., 0.82828283, 0.93939394, 1.]), - np.asarray([0., 0.60606061, 0.82828283, 0.93939394, 1.]), - np.asarray([0., 0.60606061, 0.82828283, 0.93939394, 0.97979798, 1.]) - ], dtype=object) -}, { - 'data_path': - '/image_data/dset_0_sim', - 'file_name': - path("input/random_rve_vol20.h5"), - 'temp1': - 300, - 'temp2': - 1300, - 'n_tests': - 100, - 'sampling_alphas': - np.asarray([ - np.asarray([0., 1.]), - np.asarray([0., 0.85858586, 1.]), - np.asarray([0., 0.85858586, 0.94949495, 1.]), - np.asarray([0., 0.66666667, 0.85858586, 0.94949495, 1.]), - np.asarray([0., 0.66666667, 0.85858586, 0.94949495, 0.97979798, 1.]) - ], dtype=object) -}, { - 'data_path': - '/image_data/dset_0_sim', - 'file_name': - path("input/random_rve_vol40.h5"), - 'temp1': - 300, - 'temp2': - 1300, - 'n_tests': - 100, - 'sampling_alphas': - np.asarray([ - np.asarray([0., 1.]), - np.asarray([0., 0.8989899, 1.]), - np.asarray([0., 0.8989899, 0.96969697, 1.]), - np.asarray([0., 0.71717172, 0.8989899, 0.96969697, 1.]), - np.asarray([0., 0.51515152, 0.71717172, 0.8989899, 0.96969697, 1.]) - ], dtype=object) -}, { - 'data_path': - '/image_data/dset_0_sim', - 'file_name': - path('input/random_rve_vol60.h5'), - 'temp1': - 300, - 'temp2': - 1300, - 'n_tests': - 100, - 'sampling_alphas': - np.asarray([ - np.asarray([0., 1.]), - np.asarray([0., 0.8989899, 1.]), - np.asarray([0., 0.72727273, 0.8989899, 1.]), - np.asarray([0., 0.72727273, 0.8989899, 0.96969697, 1.]), - np.asarray([0., 0.52525253, 0.72727273, 0.8989899, 0.96969697, 1.]) - ], dtype=object) -}] +}, +] diff --git a/ntfa.py b/ntfa.py new file mode 100755 index 0000000..410c904 --- /dev/null +++ b/ntfa.py @@ -0,0 +1,341 @@ +""" +Extension of the 
AdaptiveThermoMechROM for plastic models using the Nonuniform Transformation Field Analysis (NTFA) +For further information, see https://github.com/DataAnalyticsEngineering/ThermoNTFA +""" +import numpy as np +import h5py +import re +from operator import itemgetter +from utilities import read_h5, construct_stress_localization, construct_stress_localization_phases, \ + volume_average, volume_average_phases, norm_2 +from material_parameters import stiffness_cu, stiffness_wsc, thermal_strain_cu, thermal_strain_wsc +from interpolate_fluctuation_modes import interpolate_fluctuation_modes +from optimize_alpha import opt4 +from microstructures import microstructures +from tqdm import tqdm + + +def read_snapshots(file_name, data_path): + """ + Read an H5 file that contains responses of simulated microstructures + + :param file_name: e.g. "input/simple_3d_rve_combo.h5" + :param data_path: the path to the simulation results within the h5 file, e.g. '/ms_1p/dset0_sim' + :return: + strain_snapshots: plastic strain snapshots eps_p + with shape (n_integration_points, strain_dof, n_frames) + """ + plastic_snapshots = None + with h5py.File(file_name, 'r') as file: + plastic_snapshots = np.transpose(file[f'{data_path}/plastic_strains'][:], (0, 2, 1)) + return plastic_snapshots + + +def mode_identification(plastic_snapshots, vol_frac, r_min=1e-8): + """ + Identification of plastic strain modes µ using POD and renormalization + + :param strain_snapshots: plastic strain snapshots eps_p (ordered as described in `read_snapshots`) + with shape (n_integration_points, strain_dof, n_frames) + :param r_min: stop criterion + :return: + plastic_modes: plastic strain modes µ with shape (n_integration_points, strain_dof, n_modes) + """ + n_integration_points, strain_dof, n_frames = plastic_snapshots.shape + plastic_snapshots_rs = plastic_snapshots.transpose(1, 0, 2).reshape((strain_dof * n_integration_points, n_frames)) + u, s, v = np.linalg.svd(plastic_snapshots_rs, full_matrices=False) + s = s / s[0] + n_modes = np.argwhere(s > r_min).size + plastic_modes = u[:, :n_modes].reshape((strain_dof, n_integration_points, n_modes)).transpose(1, 0, 2) + + # Renormalize plastic modes (sign convention can differ between different implementations) + for i in range(n_modes): + weighting_factor = vol_frac / volume_average(norm_2(plastic_modes[:, :, i])) + plastic_modes[:, :, i] = plastic_modes[:, :, i] * weighting_factor * np.sign(plastic_modes[0, 0, i]) + return plastic_modes + + +def compute_ntfa_matrices(strain_localization, stress_localization, plastic_modes, mat_thermal_strain, mesh): + """ + Compute the ntfa matrices C_bar, A_bar, D_xi, the vectors tau_theta, tau_xi and the scalar D_theta + for given strain_localization, stress_localization, plastic_modes and mat_thermal_strain + + :param strain_localization: strain localization 3D array + with shape (n_integration_points, strain_dof, 7 + n_modes) + :param stress_localization: stress localization 3D array + with shape (n_integration_points, strain_dof, 7 + n_modes) + :param plastic_modes: plastic strain modes + with shape (n_integration_points, strain_dof, n_modes) + :param mat_thermal_strain: thermal strains of the phases + with shape (n_phases, strain_dof, 1) + :param mesh: dict with mesh information + :return: + C_bar with shape (strain_dof, strain_dof) + tau_theta with shape (strain_dof,) + A_bar with shape (strain_dof, strain_dof) + tau_xi with shape (n_modes,) + D_xi with shape (strain_dof, n_modes) + D_theta with shape (1,) + """ + strain_dof = mesh['strain_dof'] + 
mat_id = mesh['mat_id'] + n_modes = plastic_modes.shape[2] + n_gp = mesh['n_integration_points'] + n_gauss = mesh['n_gauss'] + I2 = np.eye(6) + + # slice strain localization operator E into E_eps, E_theta, E_xi + E_eps = strain_localization[:, :, :strain_dof] + E_theta = strain_localization[:, :, strain_dof] + E_xi = strain_localization[:, :, strain_dof + 1:] + + # slice stress localization operator S into S_eps, S_theta, S_xi + S_eps = stress_localization[:, :, :strain_dof] + S_theta = stress_localization[:, :, strain_dof] + S_xi = stress_localization[:, :, strain_dof + 1:] + + # Compute C_bar via < (E_eps + I).T @ S_eps > + C_bar = volume_average((E_eps + I2).transpose((0, 2, 1)) @ S_eps) + + # Compute tau_theta via < (E_eps + I).T @ S_theta > + tau_theta = volume_average(np.einsum('nij,nj->ni', (E_eps + I2).transpose((0, 2, 1)), S_theta)) + + # Compute A_bar via < (E_eps + I).T @ S_eps > + A_bar = volume_average((E_eps + I2).transpose((0, 2, 1)) @ S_xi) + + # Compute tau_xi via < (E_theta - P_theta).T @ S_xi > + # Account for the phase-wise thermal strain by an explicit summation over all integration points + tau_xi = np.zeros((1, n_modes)) + for gp_id in range(n_gp): + phase_id = mat_id[gp_id // n_gauss] + tau_xi += (np.expand_dims(E_theta[gp_id], axis=1) - mat_thermal_strain[phase_id]).T @ S_xi[gp_id] / n_gp + + # Compute D_xi via < (E_xi - P_xi).T @ S_xi > + D_xi = volume_average((E_xi - plastic_modes).transpose((0, 2, 1)) @ S_xi) + + # Compute D_theta via < (E_theta - P_theta).T @ S_theta > + # Account for the phase-wise thermal strain by an explicit summation over all integration points + D_theta = 0. + for gp_id in range(n_gp): + phase_id = mat_id[gp_id // n_gauss] + D_theta += (np.expand_dims(E_theta[gp_id], axis=1) - mat_thermal_strain[phase_id]).T @ S_theta[gp_id] / n_gp + + return C_bar, tau_theta.ravel(), A_bar, tau_xi.ravel(), D_xi, D_theta + + +def compute_phase_average_stresses(strain_localization, mat_stiffness, mat_thermal_strain, plastic_modes, zeta, mesh): + """ + Compute the phase-wise average of the stresses in the individual phases of the composite material + + :param strain_localization: strain localization 3D array + with shape (n_integration_points, strain_dof, 7 + n_modes) + :param mat_stiffness: stiffness tensor of the phases + with shape (n_phases, 6, 6) + :param mat_thermal_strain: thermal strains of the phases + with shape (n_phases, strain_dof, 1) + :param plastic_modes: plastic strain modes + with shape (n_integration_points, strain_dof, n_modes) + :param zeta: mode activation coefficients + :param mesh: dict with mesh information + """ + combo_strain_loc0, combo_strain_loc1 = None, None + stress_localization0, stress_localization1, combo_stress_loc0, combo_stress_loc1 = construct_stress_localization_phases( + strain_localization, mat_stiffness, mat_thermal_strain, plastic_modes, combo_strain_loc0, combo_strain_loc1, mesh) + + combo_stress0 = np.einsum('ijk,k', combo_stress_loc0, zeta, optimize='optimal') if combo_stress_loc0 is not None else None + combo_stress1 = np.einsum('ijk,k', combo_stress_loc1, zeta, optimize='optimal') if combo_stress_loc1 is not None else None + + stress0 = np.einsum('ijk,k', stress_localization0, zeta, optimize='optimal') + stress1 = np.einsum('ijk,k', stress_localization1, zeta, optimize='optimal') + average_stress_0, average_stress_1 = volume_average_phases(stress0, stress1, combo_stress0, combo_stress1, mesh) + return average_stress_0, average_stress_1 + + +def compute_phase_average_stress_localizations(strain_localization, 
mat_stiffness, mat_thermal_strain, plastic_modes, mesh): + """ + Compute the phase-wise average of the stress localizations in the individual phases of the composite material + + :param strain_localization: strain localization 3D array + with shape (n_integration_points, strain_dof, 7 + n_modes) + :param mat_stiffness: stiffness tensor of the phases + with shape (n_phases, 6, 6) + :param mat_thermal_strain: thermal strains of the phases + with shape (n_phases, strain_dof, 1) + :param plastic_modes: plastic strain modes + with shape (n_integration_points, strain_dof, n_modes) + :param mesh: dict with mesh information + """ + assert mesh['vox_type'] == 'normal', 'For now, only normal voxels are supported' + combo_strain_loc0, combo_strain_loc1 = None, None + stress_localization0, stress_localization1, _, _ = construct_stress_localization_phases( + strain_localization, mat_stiffness, mat_thermal_strain, plastic_modes, combo_strain_loc0, combo_strain_loc1, mesh) + average_stress_localization0, average_stress_localization1 = \ + volume_average(stress_localization0), volume_average(stress_localization1) + return average_stress_localization0, average_stress_localization1 + + +def compute_tabular_data_for_ms(ms_id, temperatures): + """ + Perform `compute_tabular_data` for the microstructure with id `ms_id` + + :param ms_id: id of the microstructure + :param temperatures: list of sampling temperatures + """ + file_name, data_path, temp1, temp2, n_tests, sampling_alphas = \ + itemgetter('file_name', 'data_path', 'temp1', 'temp2', 'n_tests', 'sampling_alphas')(microstructures[ms_id]) + sample_temperatures = np.linspace(temp1, temp2, num=n_tests) + mesh, samples = read_h5(file_name, data_path, sample_temperatures) + return compute_tabular_data(samples, mesh, temperatures) + + +def compute_tabular_data(samples, mesh, temperatures): + """ + Compute tabular data for a given list of temperatures + + :param samples: + :param mesh: dict with mesh information + :param temperatures: list of sampling temperatures + :return: + C_bar, tau_theta, A_bar, tau_xi, D_xi, D_theta, A0, A1, C0, C1 + """ + assert mesh['vox_type'] == 'normal', 'For now, only normal voxels are supported' + mat_id = mesh['mat_id'] + n_gauss = mesh['n_gauss'] + strain_dof = mesh['strain_dof'] + n_gp = mesh['n_integration_points'] + n_modes = samples[0]['plastic_modes'].shape[-1] + n_temps = len(temperatures) + C_bar = np.zeros((strain_dof, strain_dof, n_temps)) + tau_theta = np.zeros((strain_dof, n_temps)) + A_bar = np.zeros((strain_dof, n_modes, n_temps)) + tau_xi = np.zeros((n_modes, n_temps)) + D_xi = np.zeros((n_modes, n_modes, n_temps)) + D_theta = np.zeros((n_temps)) + C0 = np.zeros((strain_dof, strain_dof, n_temps)) + C1 = np.zeros((strain_dof, strain_dof, n_temps)) + A0 = np.zeros((strain_dof, 7 + n_modes, n_temps)) + A1 = np.zeros((strain_dof, 7 + n_modes, n_temps)) + plastic_modes = samples[0]['plastic_modes'] + sample_temperatures = np.array([sample['temperature'] for sample in samples]) + temp1, temp2 = min(sample_temperatures), max(sample_temperatures) + sample_alphas = (sample_temperatures - temp1) / (temp2 - temp1) + for idx in tqdm(range(n_temps)): + temperature = temperatures[idx] + ref_C = np.stack(([stiffness_cu(temperature), stiffness_wsc(temperature)])) + ref_eps = np.expand_dims(np.stack(([thermal_strain_cu(temperature), thermal_strain_wsc(temperature)])), axis=2) + alpha = (temperature - temp1) / (temp2 - temp1) + upper_bound = np.searchsorted(sample_alphas, alpha) + if np.floor(alpha) == alpha: + # sample for given 
temperature exists, no need for interpolation + id = upper_bound + approx_C, approx_eps = ref_C, ref_eps + E = samples[id]['strain_localization'] + S = construct_stress_localization(E, ref_C, ref_eps, plastic_modes, mat_id, n_gauss, strain_dof) + else: + id1 = upper_bound if upper_bound > 0 else 1 + id0 = id1 - 1 + + E0 = samples[id0]['strain_localization'] + E1 = samples[id1]['strain_localization'] + E01 = np.ascontiguousarray(np.concatenate((E0, E1), axis=-1)) + + sampling_C = np.stack((samples[id0]['mat_stiffness'], samples[id1]['mat_stiffness'])).transpose([1, 0, 2, 3]) + sampling_eps = np.stack((samples[id0]['mat_thermal_strain'], + samples[id1]['mat_thermal_strain'])).transpose([1, 0, 2, 3]) + + # interpolated quantities using an implicit interpolation scheme with four DOF + approx_C, approx_eps = opt4(sampling_C, sampling_eps, ref_C, ref_eps) + E, _ = interpolate_fluctuation_modes(E01, approx_C, approx_eps, plastic_modes, mat_id, + n_gauss, strain_dof, n_modes, n_gp) + S = construct_stress_localization(E, ref_C, ref_eps, plastic_modes, mat_id, n_gauss, strain_dof) + + # Compute NTFA matrices + C_bar[:, :, idx], tau_theta[:, idx], A_bar[:, :, idx], tau_xi[:, idx], D_xi[:, :, idx], D_theta[idx] = \ + compute_ntfa_matrices(E, S, plastic_modes, ref_eps, mesh) + + # Compute phase average stresses + A0_full, A1_full = compute_phase_average_stress_localizations(E, ref_C, ref_eps, plastic_modes, mesh) + A0[:, :, idx], A1[:, :, idx] = A0_full, A1_full + + # Save phase-wise stiffness tensors + C0[:, :, idx], C1[:, :, idx] = ref_C[0, :, :], ref_C[1, :, :] + return C_bar, tau_theta, A_bar, tau_xi, D_xi, D_theta, A0, A1, C0, C1 + + +def save_tabular_data(file_name, data_path, temperatures, C_bar, tau_theta, A_bar, tau_xi, D_xi, D_theta, A0, A1, C0, C1): + """ + Save tabular data + + :param file_name: e.g. 
"input/simple_3d_rve_combo.h5" + :param data_path: + :param temperatures: + :param C_bar: tabular data for C_bar with shape (strain_dof, strain_dof, n_temp) + :param tau_theta: tabular data for tau_theta with shape (strain_dof, n_temp) + :param A_bar: tabular data for A_bar with shape (strain_dof, n_modes, n_temp) + :param tau_xi: tabular data for tau_xi with shape (n_modes, n_temp) + :param D_xi: tabular data for D_xi with shape (n_modes, n_modes, n_temp) + :param D_theta: tabular data for D_theta with shape (n_temp) + :param A0: tabular data for A0 with shape (strain_dof, n_modes, n_temp) + :param A1: tabular data for A1 with shape (strain_dof, n_modes, n_temp) + :param C0: tabular data for C0 with shape (strain_dof, strain_dof, n_temp) + :param C1: tabular data for C1 with shape (strain_dof, strain_dof, n_temp) + """ + with h5py.File(file_name, 'a') as file: + dset_sim = file[data_path] + dset = dset_sim.parent + ntfa_path = re.sub('_sim$', '_ntfa', dset_sim.name) + if ntfa_path in file: + # Delete h5 group with tabular data if it already exists + del file[ntfa_path] + dset_ntfa = dset.create_group(ntfa_path) + [dset_ntfa.attrs.create(key, value) for key, value in dset_sim.attrs.items()] + dset_ntfa.create_dataset('temperatures', data=temperatures) + dset_ntfa.create_dataset('C_bar', data=C_bar) + dset_ntfa.create_dataset('tau_theta', data=tau_theta) + dset_ntfa.create_dataset('A_bar', data=A_bar) + dset_ntfa.create_dataset('tau_xi', data=tau_xi) + dset_ntfa.create_dataset('D_xi', data=D_xi) + dset_ntfa.create_dataset('D_theta', data=D_theta) + dset_ntfa.create_dataset('A0', data=A0) + dset_ntfa.create_dataset('A1', data=A1) + dset_ntfa.create_dataset('C0', data=C0) + dset_ntfa.create_dataset('C1', data=C1) + + +def read_tabular_data(file_name, data_path): + """ + Read tabular data + + :param file_name: e.g. 
"input/simple_3d_rve_combo.h5" + :param data_path: + :return: + temperatures: + C_bar: tabular data for C_bar with shape (strain_dof, strain_dof, n_temp) + tau_theta: tabular data for tau_theta with shape (strain_dof, n_temp) + A_bar: tabular data for A_bar with shape (strain_dof, n_modes, n_temp) + tau_xi: tabular data for tau_xi with shape (n_modes, n_temp) + D_xi: tabular data for D_xi with shape (n_modes, n_modes, n_temp) + D_theta: tabular data for D_theta with shape (n_temp) + A0: tabular data for A0 with shape (strain_dof, n_modes, n_temp) + A1: tabular data for A1 with shape (strain_dof, n_modes, n_temp) + C0: tabular data for C0 with shape (strain_dof, strain_dof, n_temp) + C1: tabular data for C1 with shape (strain_dof, strain_dof, n_temp) + """ + with h5py.File(file_name, 'r') as file: + if '_ntfa' in data_path: + ntfa_path = data_path + else: + ntfa_path = re.sub('_sim$', '_ntfa', data_path) + dset_ntfa = file[ntfa_path] + temperatures = dset_ntfa['temperatures'][:] + C_bar = dset_ntfa['C_bar'][:] + tau_theta = dset_ntfa['tau_theta'][:] + A_bar = dset_ntfa['A_bar'][:] + tau_xi = dset_ntfa['tau_xi'][:] + D_xi = dset_ntfa['D_xi'][:] + D_theta = dset_ntfa['D_theta'][:] + A0 = dset_ntfa['A0'][:] + A1 = dset_ntfa['A1'][:] + C0 = dset_ntfa['C0'][:] + C1 = dset_ntfa['C1'][:] + return temperatures, C_bar, tau_theta, A_bar, tau_xi, D_xi, D_theta, A0, A1, C0, C1 diff --git a/optimize_alpha.py b/optimize_alpha.py old mode 100644 new mode 100755 diff --git a/requirements.txt b/requirements.txt index 9fc7e79..9a59936 100644 --- a/requirements.txt +++ b/requirements.txt @@ -8,4 +8,5 @@ torch~=1.13.1 sympy~=1.11.1 numba~=0.56.4 pytest~=7.2.1 -latex~=0.7.0 \ No newline at end of file +latex~=0.7.0 +tqdm~=4.66.5 \ No newline at end of file diff --git a/tests/test_approximations.py b/tests/test_approximations.py index 533fc79..b3f5595 100644 --- a/tests/test_approximations.py +++ b/tests/test_approximations.py @@ -32,7 +32,7 @@ def test_approximations(): _, samples = read_h5(file_name, data_path, [temp1, temp2], get_mesh=False) - strains = np.random.normal(size=(n_loading_directions, mesh['strain_dof'])) + strains = np.random.normal(size=(n_loading_directions, strain_dof)) strains /= la.norm(strains, axis=1)[:, None] n_approaches = 5 @@ -56,38 +56,43 @@ def test_approximations(): Eref = ref[idx]['strain_localization'] ref_C = ref[idx]['mat_stiffness'] ref_eps = ref[idx]['mat_thermal_strain'] + plastic_modes = ref[idx]['plastic_modes'] normalization_factor_mech = ref[idx]['normalization_factor_mech'] - Sref = construct_stress_localization(Eref, ref_C, ref_eps, mat_id, n_gauss, strain_dof) + Sref = construct_stress_localization(Eref, ref_C, ref_eps, plastic_modes, mat_id, n_gauss, strain_dof) effSref = volume_average(Sref) # interpolated quantities using an explicit interpolation scheme with one DOF approx_C, approx_eps = naive(alpha, sampling_C, sampling_eps, ref_C, ref_eps) Enaive = interpolate_temp(E0, E1) - Snaive = construct_stress_localization(Enaive, ref_C, ref_eps, mat_id, n_gauss, strain_dof) + Snaive = construct_stress_localization(Enaive, ref_C, ref_eps, plastic_modes, mat_id, n_gauss, strain_dof) effSnaive = volume_average(Snaive) # interpolated quantities using an explicit interpolation scheme with one DOF - Eopt0, _ = interpolate_fluctuation_modes(E01, approx_C, approx_eps, mat_id, n_gauss, strain_dof, n_modes, n_gp) - Sopt0 = construct_stress_localization(Eopt0, ref_C, ref_eps, mat_id, n_gauss, strain_dof) + Eopt0, _ = interpolate_fluctuation_modes(E01, approx_C, approx_eps, 
plastic_modes, mat_id, n_gauss, strain_dof, n_modes, + n_gp) + Sopt0 = construct_stress_localization(Eopt0, ref_C, ref_eps, plastic_modes, mat_id, n_gauss, strain_dof) effSopt0 = volume_average(Sopt0) # interpolated quantities using an implicit interpolation scheme with one DOF approx_C, approx_eps = opt1(sampling_C, sampling_eps, ref_C, ref_eps) - Eopt1, _ = interpolate_fluctuation_modes(E01, approx_C, approx_eps, mat_id, n_gauss, strain_dof, n_modes, n_gp) - Sopt1 = construct_stress_localization(Eopt1, ref_C, ref_eps, mat_id, n_gauss, strain_dof) + Eopt1, _ = interpolate_fluctuation_modes(E01, approx_C, approx_eps, plastic_modes, mat_id, n_gauss, strain_dof, n_modes, + n_gp) + Sopt1 = construct_stress_localization(Eopt1, ref_C, ref_eps, plastic_modes, mat_id, n_gauss, strain_dof) effSopt1 = volume_average(Sopt1) # interpolated quantities using an implicit interpolation scheme with two DOF approx_C, approx_eps = opt2(sampling_C, sampling_eps, ref_C, ref_eps) - Eopt2, _ = interpolate_fluctuation_modes(E01, approx_C, approx_eps, mat_id, n_gauss, strain_dof, n_modes, n_gp) - Sopt2 = construct_stress_localization(Eopt2, ref_C, ref_eps, mat_id, n_gauss, strain_dof) + Eopt2, _ = interpolate_fluctuation_modes(E01, approx_C, approx_eps, plastic_modes, mat_id, n_gauss, strain_dof, n_modes, + n_gp) + Sopt2 = construct_stress_localization(Eopt2, ref_C, ref_eps, plastic_modes, mat_id, n_gauss, strain_dof) effSopt2 = volume_average(Sopt2) # interpolated quantities using an implicit interpolation scheme with four DOF approx_C, approx_eps = opt4(sampling_C, sampling_eps, ref_C, ref_eps) - Eopt4, _ = interpolate_fluctuation_modes(E01, approx_C, approx_eps, mat_id, n_gauss, strain_dof, n_modes, n_gp) - Sopt4 = construct_stress_localization(Eopt4, ref_C, ref_eps, mat_id, n_gauss, strain_dof) + Eopt4, _ = interpolate_fluctuation_modes(E01, approx_C, approx_eps, plastic_modes, mat_id, n_gauss, strain_dof, n_modes, + n_gp) + Sopt4 = construct_stress_localization(Eopt4, ref_C, ref_eps, plastic_modes, mat_id, n_gauss, strain_dof) effSopt4 = volume_average(Sopt4) err = lambda x, y: np.mean(la.norm(x - y, axis=(-1, -2)) / la.norm(y, axis=(-1, -2))) * 100 @@ -99,7 +104,7 @@ def test_approximations(): err(effSopt2, effSref), err(effSopt4, effSref)] for strain_idx, strain in enumerate(strains): - zeta = np.hstack((strain, 1)) + zeta = np.hstack((strain, 1, np.ones(plastic_modes.shape[-1]))) eff_stress_ref = effSref @ zeta err_eff_stress[:, idx * n_loading_directions + strain_idx] = \ @@ -114,7 +119,7 @@ def test_approximations(): stress_opt4 = np.einsum('ijk,k', Sopt4, zeta, optimize='optimal') residuals = compute_residual_efficient([stress_naive, stress_opt0, stress_opt1, stress_opt2, stress_opt4], - mesh['global_gradient']) + global_gradient) err_f[:, idx * n_loading_directions + strain_idx] = la.norm(residuals, np.inf, axis=0) / normalization_factor_mech * 100 diff --git a/utilities.py b/utilities.py old mode 100644 new mode 100755 index 9a71d72..b9c56fc --- a/utilities.py +++ b/utilities.py @@ -1,5 +1,4 @@ import contextlib - import h5py import matplotlib.pyplot as plt import numpy as np @@ -7,6 +6,7 @@ import scipy.sparse from numba import jit, prange from sympy import symbols, lambdify, Array +from microstructures import * plt.rcParams.update({ 'font.size': 8, @@ -21,6 +21,7 @@ cm = 1 / 2.54 # centimeters in inches + def plot_and_save(xlabel, ylabel, fig_name, xlim=None, ylim=None, loc='best'): gca = plt.gca() gca.set_xlim(xlim) @@ -33,12 +34,14 @@ def plot_and_save(xlabel, ylabel, fig_name, xlim=None, 
ylim=None, loc='best'): plt.savefig(f'output/{fig_name}.png') plt.show() + def ecdf(x): """empirical cumulative distribution function""" # plt.step(*ecdf(np.asarray([1, 1, 2, 3, 4])), where='post') n = len(x) return [np.hstack((np.min(x), np.sort(np.squeeze(np.asarray(x))))), np.hstack((0, np.linspace(1 / n, 1, n)))] + def voxel_quadrature(discretisation, strain_dof=6, nodal_dof=3): """ Voxel formulation with full integration, works with structured grids only @@ -90,7 +93,8 @@ def voxel_quadrature(discretisation, strain_dof=6, nodal_dof=3): return gradient_operators, integration_weights -def read_h5(file_name, data_path, temperatures, get_mesh=True): + +def read_h5(file_name, data_path, temperatures, get_mesh=True, dummy_plastic_data=True): """ Read an H5 file that contains responses of simulated microstructures :param file_name: e.g. "input/simple_3d_rve_combo.h5" @@ -100,7 +104,7 @@ def read_h5(file_name, data_path, temperatures, get_mesh=True): mesh: dictionary that contains microstructural details such as volume fraction, voxel type, ... samples: list of simulation results at each temperature """ - axis_order = [0, 2, 1] # n_gauss x strain_dof x 7 (7=6 mechanical + 1 thermal expansion) + axis_order = [0, 2, 1] # n_gauss x strain_dof x (n_modes + 7) (n_modes + 7 = 6 mechanical + 1 thermal expansion + n_modes plastic) samples = [] with h5py.File(file_name, 'r') as file: @@ -131,7 +135,7 @@ def read_h5(file_name, data_path, temperatures, get_mesh=True): sample['combo_strain_loc0'] = None sample['combo_strain_loc1'] = None - with contextlib.suppress(Exception): + with contextlib.suppress(Exception): # used because some strain localizations are deleted to make h5 files smaller sample['strain_localization'] = file[f'{data_path}/localization_strain_{temperature:07.2f}'][:].transpose( axis_order) @@ -141,6 +145,17 @@ def read_h5(file_name, data_path, temperatures, get_mesh=True): sample['combo_strain_loc1'] = \ file[f'{data_path}/localization_strain1_{temperature:07.2f}'][:].transpose(axis_order) + + if 'plastic_modes' in file[f'{data_path}'].keys(): + sample['plastic_modes'] = file[f'{data_path}/plastic_modes'][:].transpose(axis_order) + else: + sample['plastic_modes'] = np.zeros((*sample['strain_localization'].shape[:2], 0)) + # sample['plastic_modes'] = create_dummy_plastic_modes(*sample['strain_localization'].shape[:2], N_modes=13) + # sample['strain_localization'] = create_dummy_plastic_strain_localization(sample['strain_localization'], N_modes=13) + # if mesh['vox_type'] == 'combo': + # sample['combo_strain_loc0'] = create_dummy_plastic_strain_localization(sample['combo_strain_loc0'], N_modes=13) + # sample['combo_strain_loc1'] = create_dummy_plastic_strain_localization(sample['combo_strain_loc1'], N_modes=13) + if get_mesh: mesh['volume_fraction'] = file[f'{data_path}'].attrs['combo_volume_fraction'] mesh['combo_discretisation'] = np.int64(file[f'{data_path}'].attrs['combo_discretisation']) @@ -198,6 +213,7 @@ def read_h5(file_name, data_path, temperatures, get_mesh=True): return mesh, samples + def verify_data(mesh, sample): """ Sanity check to see if it is possible to reconstruct stress, strain fields and effective properties @@ -210,25 +226,31 @@ def verify_data(mesh, sample): eff_stiffness, eff_thermal_strain = sample['eff_stiffness'], sample['eff_thermal_strain'] mat_thermal_strain, mat_stiffness = sample['mat_thermal_strain'], sample['mat_stiffness'] combo_strain_loc0, combo_strain_loc1 = sample['combo_strain_loc0'], sample['combo_strain_loc1'] + plastic_modes = 
@@ -198,6 +213,7 @@ def read_h5(file_name, data_path, temperatures, get_mesh=True):
 
     return mesh, samples
 
+
 def verify_data(mesh, sample):
     """
     Sanity check to see if it is possible to reconstruct stress, strain fields and effective properties
@@ -210,25 +226,31 @@ def verify_data(mesh, sample):
     eff_stiffness, eff_thermal_strain = sample['eff_stiffness'], sample['eff_thermal_strain']
     mat_thermal_strain, mat_stiffness = sample['mat_thermal_strain'], sample['mat_stiffness']
     combo_strain_loc0, combo_strain_loc1 = sample['combo_strain_loc0'], sample['combo_strain_loc1']
+    plastic_modes = sample['plastic_modes']
 
     macro_strain = np.asarray([3, .7, 1.5, 0.5, 2, 1])
-    zeta = np.hstack((macro_strain, 1))  # 1 accounts for thermoelastic strain, more details in the paper cited in readme.md
+    xi = np.ones(plastic_modes.shape[-1])
+    # 1 accounts for thermoelastic strain, more details in the paper cited in readme.md
+    # xi accounts for plastic mode activations
+    zeta = np.hstack((macro_strain, 1, xi))
 
     strain = macro_strain + np.einsum('ijk,k', strain_localization, zeta, optimize='optimal')
-    stress_localization = construct_stress_localization(strain_localization, mat_stiffness, mat_thermal_strain, mesh['mat_id'],
-                                                        mesh['n_gauss'], mesh['strain_dof'])
+    stress_localization = construct_stress_localization(strain_localization, mat_stiffness, mat_thermal_strain, plastic_modes,
+                                                        mesh['mat_id'], mesh['n_gauss'], mesh['strain_dof'])
     eff_stiffness_from_localization = volume_average(stress_localization)
 
     stress = np.einsum('ijk,k', stress_localization, zeta, optimize='optimal')
     residual = compute_residual(stress, mesh['dof'], mesh['n_elements'], mesh['element_dof'], mesh['n_gauss'],
                                 mesh['assembly_idx'], mesh['gradient_operators_times_w'])
 
-    abs_err = eff_stiffness_from_localization - np.hstack((eff_stiffness, -np.reshape(eff_stiffness @ eff_thermal_strain, (-1, 1))))
+    eff_thermoelastic_stiffness = eff_stiffness_from_localization[:, :7]
+    abs_err = eff_thermoelastic_stiffness - np.hstack((eff_stiffness, -np.reshape(eff_stiffness @ eff_thermal_strain, (-1, 1))))
+    # - C @ eff_plastic_strain  # eff_plastic_strain is not stored because it depends on the macroscopic strain
 
     err = lambda x, y: np.mean(la.norm(x - y) / la.norm(y))
-    assert err(eff_stiffness_from_localization,
+    assert err(eff_thermoelastic_stiffness,
                np.vstack((eff_stiffness, -eff_stiffness @ eff_thermal_strain)).T) < convergence_tolerance, \
         'incompatibility between stress_localization and effective quantities'
 
@@ -239,7 +261,7 @@ def verify_data(mesh, sample):
         'stress field is not statically admissible'
 
     stress_localization0, stress_localization1, combo_stress_loc0, combo_stress_loc1 = construct_stress_localization_phases(
-        strain_localization, mat_stiffness, mat_thermal_strain, combo_strain_loc0, combo_strain_loc1, mesh)
+        strain_localization, mat_stiffness, mat_thermal_strain, plastic_modes, combo_strain_loc0, combo_strain_loc1, mesh)
 
     combo_stress0 = np.einsum('ijk,k', combo_stress_loc0, zeta, optimize='optimal') if combo_stress_loc0 is not None else None
     combo_stress1 = np.einsum('ijk,k', combo_stress_loc1, zeta, optimize='optimal') if combo_stress_loc1 is not None else None
@@ -256,6 +278,13 @@ def verify_data(mesh, sample):
                       vol_frac0 * average_stress_0 + vol_frac1 * average_stress_1) < convergence_tolerance, \
         'phasewise volume average is not admissible'
 
+    plastic_modes = sample['plastic_modes']
+    n_modes = plastic_modes.shape[-1]
+    gramian = volume_average(np.einsum('ijk,ijl->ikl', plastic_modes, plastic_modes))
+    assert np.allclose(gramian, np.diag(np.diag(gramian))), 'plastic modes are not orthogonal'
+    assert np.allclose([volume_average(norm_2(plastic_modes[:, :, i])) for i in range(n_modes)], vol_frac0), 'plastic modes are not normalized correctly'
+
+
 def compute_residual(stress, dof, n_elements, element_dof, n_gauss, assembly_idx, gradient_operators_times_w):
     """
     Compute FEM residual given a stress field
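# Illustration (not part of the patch): the new checks in verify_data treat each
# plastic mode as a field over the integration points and require mutual
# orthogonality with respect to the volume-averaged inner product, i.e. the
# Gramian G[k, l] = <mode_k, mode_l> must be diagonal. A self-contained sketch
# of that check with random, explicitly orthogonalized modes; sizes are made up.
import numpy as np

n_gp, strain_dof, n_modes = 16, 6, 3
modes = np.random.normal(size=(n_gp, strain_dof, n_modes))
q, _ = np.linalg.qr(modes.reshape(n_gp * strain_dof, n_modes))  # orthonormal columns
modes = q.reshape(n_gp, strain_dof, n_modes)
gramian = np.mean(np.einsum('ijk,ijl->ikl', modes, modes), axis=0)  # volume-averaged Gramian
assert np.allclose(gramian, np.diag(np.diag(gramian)))             # off-diagonal entries vanish
# End of illustration.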
@@ -279,6 +308,7 @@ def compute_residual(stress, dof, n_elements, element_dof, n_gauss, assembly_idx
             residuals[assembly_idx[element_id], idx] += elmental_force
     return residuals
 
+
 def compute_residual_efficient(stress, global_gradient):
     stresses = stress
     if not isinstance(stress, list):
@@ -286,6 +316,7 @@ def compute_residual_efficient(stress, global_gradient):
 
     return (np.vstack([x.flatten() for x in stresses]) @ global_gradient).T
 
+
 @jit(nopython=True, cache=True, parallel=True, nogil=True)
 def compute_err_indicator(stress_loc, gradient_operators_times_w, dof, n_gauss, assembly_idx):
     """
@@ -309,15 +340,19 @@ def compute_err_indicator(stress_loc, gradient_operators_times_w, dof, n_gauss,
     # return la.norm(err_indicator)
     return err_indicator
 
+
 def compute_err_indicator_efficient(stress_loc, global_gradient):
     return global_gradient.T @ stress_loc.reshape(global_gradient.shape[0], -1)
 
-# @jit(nopython=True, cache=True, parallel=True, nogil=True)
+
+@jit(nopython=True, cache=True, parallel=True, nogil=True)
 def cheap_err_indicator(stress_loc, global_gradient):
     return la.norm(global_gradient.T @ np.sum(stress_loc, -1).flatten())
 
+
 @jit(nopython=True, cache=True, parallel=True, nogil=True)
-def construct_stress_localization(strain_localization, mat_stiffness, mat_thermal_strain, mat_id, n_gauss, strain_dof):
+def construct_stress_localization(strain_localization, mat_stiffness, mat_thermal_strain, plastic_modes, mat_id, n_gauss,
+                                  strain_dof):
     """
     Construct stress localization operator out of the strain localization one.
    :param strain_localization: strain localization 3D array
@@ -331,11 +366,12 @@ def construct_stress_localization(strain_localization, mat_stiffness, mat_therma
     I = np.eye(strain_dof)
     for gp_id in prange(strain_localization.shape[0]):
         phase_id = mat_id[gp_id // n_gauss]
-        P = np.hstack((-I, mat_thermal_strain[phase_id]))
+        P = np.hstack((-I, mat_thermal_strain[phase_id], plastic_modes[gp_id]))
         stress_localization[gp_id] = mat_stiffness[phase_id] @ (strain_localization[gp_id] - P)
     return stress_localization
 
-def construct_stress_localization_phases(strain_localization, mat_stiffness, mat_thermal_strain, combo_strain_loc0,
+
+def construct_stress_localization_phases(strain_localization, mat_stiffness, mat_thermal_strain, plastic_modes, combo_strain_loc0,
                                          combo_strain_loc1, mesh):
     """
     Same as construct_stress_localization() but it returns stress localization operators for each material phase
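# Illustration (not part of the patch): in the updated construct_stress_localization
# the per-point matrix P is the horizontal block [-I | thermal_strain | plastic_modes],
# so the stress localization S = C @ (E - P) keeps the 6 mechanical columns, the
# thermal column and the n_modes plastic columns side by side. A single-point
# sketch with made-up data:
import numpy as np

strain_dof, n_modes = 6, 2
I = np.eye(strain_dof)
C = np.eye(strain_dof)                                  # hypothetical material stiffness
thermal_strain = 1e-3 * np.ones((strain_dof, 1))        # hypothetical thermal strain column
modes = np.random.normal(size=(strain_dof, n_modes))    # plastic modes at one integration point
E = np.zeros((strain_dof, strain_dof + 1 + n_modes))    # strain localization at one integration point
P = np.hstack((-I, thermal_strain, modes))
S = C @ (E - P)
assert S.shape == (strain_dof, strain_dof + 1 + n_modes)
# End of illustration.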
@@ -354,7 +390,8 @@ def construct_stress_localization_phases(strain_localization, mat_stiffness, mat
     n_gauss = mesh['n_gauss']
     idx0 = np.repeat(mesh['mat_id'] == 0, n_gauss)
     idx1 = np.repeat(mesh['mat_id'] == 1, n_gauss)
-    stress_localization = construct_stress_localization(strain_localization, mat_stiffness, mat_thermal_strain, mesh['mat_id'],
+    stress_localization = construct_stress_localization(strain_localization, mat_stiffness, mat_thermal_strain, plastic_modes,
+                                                        mesh['mat_id'],
                                                         mesh['n_gauss'], mesh['strain_dof'])
     stress_localization0 = stress_localization[idx0] if np.any(idx0) else np.zeros((1, *stress_localization.shape[1:]))
     stress_localization1 = stress_localization[idx1] if np.any(idx1) else np.zeros((1, *stress_localization.shape[1:]))
@@ -368,10 +405,13 @@ def construct_stress_localization_phases(strain_localization, mat_stiffness, mat
     for idx in range(len(mesh['combo_idx'])):
         stiffness_idx = idx + 2
         P = np.hstack([-I, mat_thermal_strain[stiffness_idx]])
+        # TODO: do we need separate plastic modes for each material phase?
+        P = np.hstack((-I, mat_thermal_strain[stiffness_idx], plastic_modes[stiffness_idx // 2]))
         combo_stress_loc0[idx] = mat_stiffness[0] @ combo_strain_loc0[idx] - mat_stiffness[stiffness_idx] @ P
         combo_stress_loc1[idx] = mat_stiffness[1] @ combo_strain_loc1[idx] - mat_stiffness[stiffness_idx] @ P
     return stress_localization0, stress_localization1, combo_stress_loc0, combo_stress_loc1
 
+
 def volume_average_phases(stress0, stress1, combo_stress0, combo_stress1, mesh):
     """
     Volume average of stress field in each phase
@@ -400,6 +440,30 @@ def volume_average_phases(stress0, stress1, combo_stress0, combo_stress1, mesh):
 
     return average_stress_0, average_stress_1
 
+
 def volume_average(field):
-    """ Volume average of a given field in case of identical weights that sum to one """
+    """
+    Volume average of a given field in case of identical weights that sum to one
+    :param field: array with shape (n_integration_points, ...)
+    """
     return np.mean(field, axis=0)
+
+
+def inner_product(a, b):
+    """
+    Compute inner product between tensor fields a and b in case of identical weights that sum to one
+    :param a: array with shape (n_integration_points, ...)
+    :param b: array with the same shape as a
+    """
+    assert a.shape == b.shape
+    summation_axes = tuple(ax for ax in range(1, a.ndim))
+    return np.sum(a * b, axis=summation_axes)
+
+
+def norm_2(a):
+    """
+    Compute Euclidean norm of tensor field a in case of identical weights that sum to one
+    :param a: array with shape (n_integration_points, ...)
+    :return: array with shape (n_integration_points,)
+    """
+    return np.sqrt(inner_product(a, a))
diff --git a/utilities_ann.py b/utilities_ann.py
old mode 100644
new mode 100755
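# Illustration (not part of the patch): the helpers volume_average, inner_product
# and norm_2 added to utilities.py above all assume identical integration weights
# that sum to one, i.e. plain means over the first (integration point) axis.
# A short usage sketch with made-up fields, reproducing what the three helpers
# compute without importing them:
import numpy as np

n_gp, strain_dof = 10, 6
a = np.random.normal(size=(n_gp, strain_dof))
b = np.random.normal(size=(n_gp, strain_dof))
pointwise_dot = np.sum(a * b, axis=tuple(range(1, a.ndim)))   # inner_product(a, b), shape (n_gp,)
pointwise_norm = np.sqrt(np.sum(a * a, axis=1))               # norm_2(a), shape (n_gp,)
volume_averaged_norm = np.mean(pointwise_norm, axis=0)        # volume_average(norm_2(a)), a scalar
# End of illustration.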