From fc5e61d99022e33c4edc3481ab9f81199556705a Mon Sep 17 00:00:00 2001
From: Christopher Sherman
Date: Fri, 16 Aug 2024 10:14:25 -0700
Subject: [PATCH 1/4] Resolving merge conflict

---
 .../geos/pygeos_tools/geophysics/__init__.py  |   0
 .../src/geos/pygeos_tools/geophysics/fiber.py |  27 +
 .../src/geos/pygeos_tools/geophysics/insar.py | 297 ++++++++++
 .../pygeos_tools/geophysics/microseismic.py   | 556 ++++++++++++++++++
 4 files changed, 880 insertions(+)
 create mode 100644 pygeos-tools/src/geos/pygeos_tools/geophysics/__init__.py
 create mode 100644 pygeos-tools/src/geos/pygeos_tools/geophysics/fiber.py
 create mode 100644 pygeos-tools/src/geos/pygeos_tools/geophysics/insar.py
 create mode 100644 pygeos-tools/src/geos/pygeos_tools/geophysics/microseismic.py

diff --git a/pygeos-tools/src/geos/pygeos_tools/geophysics/__init__.py b/pygeos-tools/src/geos/pygeos_tools/geophysics/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/pygeos-tools/src/geos/pygeos_tools/geophysics/fiber.py b/pygeos-tools/src/geos/pygeos_tools/geophysics/fiber.py
new file mode 100644
index 0000000..ac69db7
--- /dev/null
+++ b/pygeos-tools/src/geos/pygeos_tools/geophysics/fiber.py
@@ -0,0 +1,27 @@
+import os
+import numpy as np
+from pygeosx_tools import wrapper, parallel_io, plot_tools
+import matplotlib.pyplot as plt
+from matplotlib import cm
+from pyevtk.hl import gridToVTK
+
+
+class Fiber():
+    def __init__(self):
+        self.time = []
+        self.channel_position = []
+        self.gage_length = -1.0
+
+        self.x = []
+        self.dudx = []
+        self.dudt = []
+        self.dudxdt = []
+
+
+class FiberAnalysis():
+    def __init__(self):
+        """
+        Fiber Analysis class
+        """
+        self.set_names = []
+
diff --git a/pygeos-tools/src/geos/pygeos_tools/geophysics/insar.py b/pygeos-tools/src/geos/pygeos_tools/geophysics/insar.py
new file mode 100644
index 0000000..9723b6b
--- /dev/null
+++ b/pygeos-tools/src/geos/pygeos_tools/geophysics/insar.py
@@ -0,0 +1,297 @@
+
+import os
+import numpy as np
+from pygeosx_tools import wrapper, parallel_io, plot_tools
+import hdf5_wrapper
+import matplotlib.pyplot as plt
+from matplotlib import cm
+from pyevtk.hl import gridToVTK
+from scipy import interpolate
+
+
+class InSAR_Analysis():
+    def __init__(self, restart_fname=''):
+        """
+        InSAR Analysis class
+        """
+        self.set_names = []
+        self.set_keys = []
+        self.local_insar_map = []
+
+        self.node_position_key = ''
+        self.node_displacement_key = ''
+        self.node_ghost_key = ''
+
+        self.satellite_vector = [0.0, 0.0, 1.0]
+        self.wavelength = 1.0
+
+        self.x_grid = []
+        self.y_grid = []
+        self.elevation = 0.0
+        self.times = []
+        self.mask = []
+        self.displacement = []
+        self.range_change = []
+        self.phase = []
+
+        if restart_fname:
+            self.resume_from_restart(restart_fname)
+
+    def resume_from_restart(self, fname):
+        with hdf5_wrapper.hdf5_wrapper(fname) as r:
+            for k in dir(self):
+                if ('_' not in k):
+                    setattr(self, k, r[k])
+
+    def write_restart(self, fname):
+        with hdf5_wrapper.hdf5_wrapper(fname, mode='w') as r:
+            for k in dir(self):
+                if ('_' not in k):
+                    r[k] = getattr(self, k)
+
+    def setup_grid(self, problem, set_names=[], x_range=[], y_range=[], dx=1.0, dy=1.0):
+        """
+        Setup the InSAR grid
+
+        Args:
+            problem (pygeosx.group): GEOSX problem handle
+            set_names (list): List of set names on which to apply the analysis
+            x_range (list): Extents of the InSAR image in the x-direction (optional)
+            y_range (list): Extents of the InSAR image in the y-direction (optional)
+            dx (float): Resolution of the InSAR image in the x-direction (default=1)
+            dy (float): Resolution of the
InSAR image in the y-direction (default=1)
+        """
+        # Determine pygeosx keys
+        self.set_names = set_names
+        self.set_keys = [wrapper.get_matching_wrapper_path(problem, ['nodeManager', s]) for s in set_names]
+        self.node_position_key = wrapper.get_matching_wrapper_path(problem, ['nodeManager', 'ReferencePosition'])
+        self.node_displacement_key = wrapper.get_matching_wrapper_path(problem, ['nodeManager', 'TotalDisplacement'])
+        self.node_ghost_key = wrapper.get_matching_wrapper_path(problem, ['nodeManager', 'ghostRank'])
+
+        # If not specified, then setup the grid extents
+        ghost_rank = wrapper.get_wrapper(problem, self.node_ghost_key)
+        x = wrapper.get_wrapper(problem, self.node_position_key)
+        set_ids = np.concatenate([wrapper.get_wrapper(problem, k) for k in self.set_keys], axis=0)
+
+        # Choose non-ghost set members
+        xb = x[set_ids, :]
+        gb = ghost_rank[set_ids]
+        xc = xb[gb < 0, :]
+        global_min, global_max = parallel_io.get_global_array_range(xc)
+
+        if (len(x_range) == 0):
+            x_range = [global_min[0], global_max[0]]
+        if (len(y_range) == 0):
+            y_range = [global_min[1], global_max[1]]
+
+        # Choose the grid
+        Nx = int(np.ceil((x_range[1] - x_range[0]) / dx))
+        Ny = int(np.ceil((y_range[1] - y_range[0]) / dy))
+        self.x_grid = np.linspace(x_range[0], x_range[1], Nx + 1)
+        self.y_grid = np.linspace(y_range[0], y_range[1], Ny + 1)
+
+        # Save the average elevation for vtk outputs
+        self.elevation = global_min[2]
+
+        # Trigger the map build
+        self.build_map(problem)
+
+    def build_map(self, problem):
+        """
+        Build the map between the mesh and the InSAR image.
+        Note: this method can be used to update the map after
+        significant changes to the mesh.
+
+        Args:
+            problem (pygeosx.group): GEOSX problem handle
+        """
+        # Load the data
+        ghost_rank = wrapper.get_wrapper(problem, self.node_ghost_key)
+        x = wrapper.get_wrapper(problem, self.node_position_key)
+        set_ids = np.concatenate([wrapper.get_wrapper(problem, k) for k in self.set_keys], axis=0)
+
+        # Choose non-ghost set members
+        xb = x[set_ids, :]
+        gb = ghost_rank[set_ids]
+        xc = xb[gb < 0, :]
+
+        # Setup the node to insar map
+        self.local_insar_map = []
+        dx = self.x_grid[1] - self.x_grid[0]
+        dy = self.y_grid[1] - self.y_grid[0]
+        x_bins = np.concatenate([[self.x_grid[0] - 0.5 * dx],
+                                 0.5*(self.x_grid[1:] + self.x_grid[:-1]),
+                                 [self.x_grid[-1] + 0.5 * dx]])
+        y_bins = np.concatenate([[self.y_grid[0] - 0.5 * dy],
+                                 0.5*(self.y_grid[1:] + self.y_grid[:-1]),
+                                 [self.y_grid[-1] + 0.5 * dy]])
+        if len(xc):
+            Ix = np.digitize(np.squeeze(xc[:, 0]), x_bins) - 1
+            Iy = np.digitize(np.squeeze(xc[:, 1]), y_bins) - 1
+            for ii in range(len(self.x_grid)):
+                for jj in range(len(self.y_grid)):
+                    tmp = np.where((Ix == ii) & (Iy == jj))[0]
+                    if len(tmp):
+                        self.local_insar_map.append([ii, jj, tmp])
+
+    def extract_insar(self, problem):
+        """
+        Extract InSAR image for current step
+
+        Args:
+            problem (pygeosx.group): GEOSX problem handle
+        """
+        # Load values
+        time = wrapper.get_wrapper(problem, 'Events/time')[0]
+        ghost_rank = wrapper.get_wrapper(problem, self.node_ghost_key)
+        x = wrapper.get_wrapper(problem, self.node_displacement_key)
+        set_ids = np.concatenate([wrapper.get_wrapper(problem, k) for k in self.set_keys], axis=0)
+
+        # Choose non-ghost set members
+        xb = x[set_ids, :]
+        gb = ghost_rank[set_ids]
+        xc = xb[gb < 0, :]
+
+        # Find local displacements
+        Nx = len(self.x_grid)
+        Ny = len(self.y_grid)
+        local_displacement_sum = np.zeros((Nx, Ny, 3))
+        local_N = np.zeros((Nx, Ny), dtype='int')
+        for m in self.local_insar_map:
+            local_N[m[0], m[1]] += len(m[2])
+ for ii in m[2]: + local_displacement_sum[m[0], m[1], :] += xc[ii, :] + + # Communicate values + global_displacement_sum = np.sum(np.array(parallel_io.gather_array(local_displacement_sum, concatenate=False)), axis=0) + global_N = np.sum(np.array(parallel_io.gather_array(local_N, concatenate=False)), axis=0) + + # Find final 3D displacement + global_displacement = np.zeros((Nx, Ny, 3)) + if (parallel_io.rank == 0): + range_change = np.zeros((Nx, Ny)) + for ii in range(3): + d = np.squeeze(global_displacement_sum[:, :, ii]) / (global_N + 1e-10) + d[global_N == 0] = np.NaN + global_displacement[:, :, ii] = self.fill_nan_gaps(d) + range_change += global_displacement[:, :, ii] * self.satellite_vector[ii] + + # Filter nans + self.displacement.append(global_displacement) + self.range_change.append(range_change) + self.phase.append(np.angle(np.exp(2j * np.pi * range_change / self.wavelength))) + self.mask.append(global_N > 0) + self.times.append(time) + + def fill_nan_gaps(self, values): + """ + Fill gaps in the insar data which are specified via NaN's + """ + z = np.isnan(values) + if np.sum(z): + N = np.shape(values) + grid = np.meshgrid(self.x_grid, self.y_grid, indexing='ij') + + # Filter out values in flattened arrays + x_flat = np.reshape(grid[0], (-1)) + y_flat = np.reshape(grid[1], (-1)) + v_flat = np.reshape(values, (-1)) + t_flat = np.reshape(z, (-1)) + I_valid = np.where(~t_flat)[0] + x_valid = x_flat[I_valid] + y_valid = y_flat[I_valid] + v_valid = v_flat[I_valid] + + # Re-interpolate values + vb = interpolate.griddata((x_valid, y_valid), v_valid, tuple(grid), method='linear') + values = np.reshape(vb, N) + return values + + def save_hdf5(self, header='insar', output_root='./results'): + if (parallel_io.rank == 0): + os.makedirs(output_root, exist_ok=True) + with hdf5_wrapper.hdf5_wrapper('%s/%s.hdf5' % (output_root, header), mode='w') as data: + data['x'] = self.x_grid + data['y'] = self.y_grid + data['time'] = self.times + data['displacement'] = np.array(self.displacement) + data['range_change'] = np.array(self.range_change) + data['phase'] = np.array(self.phase) + + def save_csv(self, header='insar', output_root='./results'): + if (parallel_io.rank == 0): + os.makedirs(output_root, exist_ok=True) + np.savetxt('%s/%s_x_grid.csv' % (output_root, header), + self.x_grid, + delimiter=', ') + np.savetxt('%s/%s_y_grid.csv' % (output_root, header), + self.y_grid, + delimiter=', ') + for ii, t in enumerate(self.times): + comments = 'T (days), %1.8e' % (t / (60 * 60 * 24)) + np.savetxt('%s/%s_range_change_%03d.csv' % (output_root, header, ii), + self.range_change[ii], + delimiter=', ', + header=comments) + np.savetxt('%s/%s_phase_%03d.csv' % (output_root, header, ii), + self.phase[ii], + delimiter=', ', + header=comments) + + def save_vtk(self, header='insar', output_root='./results'): + if (parallel_io.rank == 0): + os.makedirs(output_root, exist_ok=True) + x = np.ascontiguousarray(self.x_grid) + y = np.ascontiguousarray(self.y_grid) + z = np.array([self.elevation]) + + for ii, t in enumerate(self.times): + data = {'range_change': np.ascontiguousarray(np.expand_dims(self.range_change[ii], -1)), + 'phase': np.ascontiguousarray(np.expand_dims(self.phase[ii], -1)), + 'dx': np.ascontiguousarray(np.expand_dims(self.displacement[ii][:, :, 0], -1)), + 'dy': np.ascontiguousarray(np.expand_dims(self.displacement[ii][:, :, 1], -1)), + 'dz': np.ascontiguousarray(np.expand_dims(self.displacement[ii][:, :, 2], -1))} + + gridToVTK('%s/%s_%03d' % (output_root, header, ii), + x, + y, + z, + pointData=data) 
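
For orientation, the InSAR workflow above is typically driven from a pygeosx script: build the grid once after initialization, sample the displacement field at each solver pause, then write outputs on rank 0. A minimal sketch follows; it is not part of this patch, and the set name 'insar_nodes', the 10 m grid resolution, and the event-loop details are illustrative assumptions.

```python
# Hedged usage sketch for the InSAR_Analysis class above (not part of this
# patch). Assumes a working pygeosx build; 'insar_nodes' is a hypothetical
# node set name, and the grid resolution is arbitrary.
import sys
from mpi4py import MPI
import pygeosx
from geos.pygeos_tools.geophysics.insar import InSAR_Analysis

# Standard pygeosx startup: parse the XML given on the command line
problem = pygeosx.initialize(MPI.COMM_WORLD.Get_rank(), sys.argv)
pygeosx.apply_initial_conditions()

insar = InSAR_Analysis()
insar.setup_grid(problem, set_names=['insar_nodes'], dx=10.0, dy=10.0)

# Sample the displacement field each time GEOSX pauses between events
while pygeosx.run() != pygeosx.COMPLETED:
    insar.extract_insar(problem)

# Rank 0 holds the gathered images; the writers below are no-ops elsewhere
insar.save_hdf5(header='insar', output_root='./results')
insar.save_vtk(header='insar', output_root='./results')
```
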
+
+    def save_image(self, header='insar', output_root='./results', interp_method='quadric'):
+        if (parallel_io.rank == 0):
+            os.makedirs(output_root, exist_ok=True)
+            fig = plot_tools.HighResPlot()
+
+            for ii, t in enumerate(self.times):
+                # Range change
+                fig.reset()
+                extents = [self.x_grid[0], self.x_grid[-1], self.y_grid[0], self.y_grid[-1]]
+                ca = plt.imshow(np.transpose(np.flipud(self.range_change[ii])),
+                                extent=extents,
+                                cmap=cm.jet,
+                                aspect='auto',
+                                interpolation=interp_method)
+                plt.title('T = %1.4e (days)' % (t / (60 * 60 * 24)))
+                plt.xlabel('X (m)')
+                plt.ylabel('Y (m)')
+                cb = plt.colorbar(ca)
+                cb.set_label('Range Change (m)')
+                fig.save('%s/%s_range_change_%03d' % (output_root, header, ii))
+
+                # Wrapped phase
+                fig.reset()
+                extents = [self.x_grid[0], self.x_grid[-1], self.y_grid[0], self.y_grid[-1]]
+                ca = plt.imshow(np.transpose(np.flipud(self.phase[ii])),
+                                extent=extents,
+                                cmap=cm.jet,
+                                aspect='auto',
+                                interpolation=interp_method)
+                plt.title('T = %1.4e (days)' % (t / (60 * 60 * 24)))
+                plt.xlabel('X (m)')
+                plt.ylabel('Y (m)')
+                cb = plt.colorbar(ca)
+                cb.set_label('Phase (radians)')
+                fig.save('%s/%s_wrapped_phase_%03d' % (output_root, header, ii))
+
+
diff --git a/pygeos-tools/src/geos/pygeos_tools/geophysics/microseismic.py b/pygeos-tools/src/geos/pygeos_tools/geophysics/microseismic.py
new file mode 100644
index 0000000..9ccb36f
--- /dev/null
+++ b/pygeos-tools/src/geos/pygeos_tools/geophysics/microseismic.py
@@ -0,0 +1,556 @@
+import numpy as np
+from pygeosx_tools import wrapper, parallel_io
+from scipy.spatial import Delaunay
+from pyevtk.hl import pointsToVTK
+
+
+class JointSet():
+    """
+    Joint set used for microseismic analysis.
+    Note: values that are in the strike-dip-normal reference
+    frame are indicated by 'sdn'
+
+    Attributes:
+        region_name (str): GEOSX mesh region name
+        solid_name (str): GEOSX solid material name
+        fluid_name (str): GEOSX fluid material name
+        sigma_key (str): Key pointing to the GEOSX stress wrapper
+        pressure_key (str): Key pointing to the GEOSX pressure wrapper
+        allow_repeating (bool): Flag to allow repeating events on joints
+        shear_modulus (float): Shear modulus of host rock (Pa)
+        N (int): Number of joints in the set
+        strike (list): Strike (counter-clockwise from x-axis, radians)
+        dip (list): True dip referring to the strike (radians)
+        mu (list): Coefficient of friction (unitless)
+        cohesion (list): Cohesion (Pa)
+        element_id (list): Parent element index
+        location (list): Joint locations (m)
+        sigma_sdn_offset (list): Stress offset in sdn frame (Pa)
+        fail_count (list): Cumulative number of microseismic events on the joint
+        rotation (list): Rotation matrices to map from xyz to sdn frames
+    """
+
+    def __init__(self,
+                 region_name='Region',
+                 solid_name='rock',
+                 fluid_name='',
+                 mesh_level='FE1',
+                 allow_repeating=False,
+                 shear_modulus=1.0e9):
+        # GEOSX object names / keys
+        self.region_name = region_name
+        self.solid_name = solid_name
+        self.fluid_name = fluid_name
+        self.mesh_level = mesh_level
+        self.sigma_key = ''
+        self.pressure_key = ''
+
+        # Joint behavior
+        self.allow_repeating = allow_repeating
+        self.shear_modulus = shear_modulus
+
+        # Joint values
+        self.N = 0
+        self.strike = []
+        self.dip = []
+        self.mu = []
+        self.cohesion = []
+        self.element_id = []
+        self.location = []
+        self.sigma_sdn_offset = []
+        self.fail_count = []
+        self.rotation = []
+        self.sigma_origin = []
+        self.sigma_new = []
+
+    def build_joint_set(self,
+                        problem,
+                        random_location=True,
+                        density=1.0,
+                        uniform_orientation=True,
+                        strike_mean=0.0,
+                        strike_std=0.0,
+                        dip_mean=0.0,
+                        dip_std=0.0,
+                        mu_mean=0.6,
+                        mu_std=0.0,
+                        cohesion_mean=1.0e6,
+                        cohesion_std=0.0):
+        """
+        Add a joint set to a given region
+
+        Args:
+            problem (pygeosx.group): GEOSX problem handle
+            random_location (bool): Choose random locations within the region (default=True)
+            density (float): Joint density (#/m3)
+            uniform_orientation (bool): Use randomly oriented joints (default=True)
+            strike_mean (float): Strike angle mean (degrees)
+            strike_std (float): Strike angle std deviation (degrees)
+            dip_mean (float): Dip angle mean (degrees)
+            dip_std (float): Dip angle std deviation (degrees)
+            mu_mean (float): Friction coefficient mean (unitless)
+            mu_std (float): Friction coefficient std deviation (unitless)
+            cohesion_mean (float): Cohesion mean (Pa)
+            cohesion_std (float): Cohesion std deviation (Pa)
+        """
+        # Choose locations
+        if random_location:
+            self.choose_random_locations(problem, density)
+        else:
+            # TODO: Add structured joint set
+            raise Exception('Option not available: random_location=False')
+
+        # Choose orientations
+        if uniform_orientation:
+            self.choose_random_uniform_orientations()
+        else:
+            self.choose_random_normal_orientations(strike_mean=strike_mean,
+                                                   strike_std=strike_std,
+                                                   dip_mean=dip_mean,
+                                                   dip_std=dip_std)
+
+        # Setup other values
+        self.mu = (np.random.randn(self.N) * mu_std) + mu_mean
+        self.cohesion = (np.random.randn(self.N) * cohesion_std) + cohesion_mean
+        self.rotation = [self.get_rotation_matrix(s, d) for s, d in zip(self.strike, self.dip)]
+        self.sigma_sdn_offset = np.zeros((self.N, 3, 3))
+        self.fail_count = np.zeros(self.N, dtype=int)
+        self.sigma_key = wrapper.get_matching_wrapper_path(problem, [self.mesh_level, self.region_name, self.solid_name, 'stress'])
+
+        self.sigma_n = np.zeros(self.N)
+        self.tau = np.zeros(self.N)
+        self.pressure = np.zeros(self.N)
+        ################################################
+        self.sigma_origin = np.zeros((self.N, 3, 3))
+        self.sigma_new = np.zeros((self.N, 3, 3))
+        ################################################
+        if self.fluid_name:
+            self.pressure_key = wrapper.get_matching_wrapper_path(problem, [self.mesh_level, self.region_name, 'pressure'])
+
+    def choose_random_locations(self, problem, density):
+        """
+        Choose random joint locations within the target region
+
+        Args:
+            problem (pygeosx.group): GEOSX problem handle
+            density (float): Joint density (#/m3)
+        """
+        # Find key values
+        ghost_key_node = wrapper.get_matching_wrapper_path(problem, [self.mesh_level, 'nodeManager', 'ghostRank'])
+        node_position_key = wrapper.get_matching_wrapper_path(problem, [self.mesh_level, 'nodeManager', 'ReferencePosition'])
+        node_element_key = wrapper.get_matching_wrapper_path(problem, [self.mesh_level, 'nodeManager', 'elemList'])
+        element_node_key = wrapper.get_matching_wrapper_path(problem, [self.mesh_level, 'ElementRegions', self.region_name, 'nodeList'])
+
+        # Find the region extents
+        ghost_rank = wrapper.get_wrapper(problem, ghost_key_node)
+        x = wrapper.get_wrapper(problem, node_position_key)
+        xb = x[ghost_rank < 0, :]
+        xmin = np.amin(xb, axis=0)
+        xmax = np.amax(xb, axis=0)
+
+        # Choose the initial number of elements to test connectivity
+        Ntest = int(np.prod(xmax - xmin) * density)
+        tmp_index = np.zeros(Ntest, dtype=int)
+        tmp_location = np.zeros((Ntest, 3))
+        test_locations = np.random.uniform(size=(Ntest, 3))
+        for ii in range(3):
+            test_locations[:, ii] *= (xmax[ii] - xmin[ii])
+            test_locations[:, ii] += xmin[ii]
+
+        # Map joint location to elements
+        node_element_map = wrapper.get_wrapper(problem,
node_element_key) + element_node_map = wrapper.get_wrapper(problem, element_node_key) + self.N = 0 + print('Attempting to add %i microseismic elements' % (Ntest), flush=True) + for ii in range(Ntest): + # Find the closest node + R2 = (x[:, 0] - test_locations[ii, 0])**2 + (x[:, 1] - test_locations[ii, 1])**2 + (x[:, 2] - test_locations[ii, 2])**2 + Ia = np.argmin(R2) + + # Test to see if the joint is within an adjoining element + test_elements = node_element_map[Ia] + for element in test_elements: + test_nodes = element_node_map[element] + element_corners = x[test_nodes, :] + if self.point_in_convex_element(test_locations[ii, :], element_corners): + tmp_index[self.N] = element + tmp_location[self.N, :] = test_locations[ii, :] + self.N += 1 + break + + # Finalize location selection + print('%i joints generated' % (self.N)) + self.element_id = tmp_index[:self.N] + self.location = tmp_location[:self.N, :] + + def point_in_convex_element(self, point, vertices): + """ + Check whether a point is within a convex element + + Args: + point (np.array): target location (N) + vertices (np.array): element vertices (MxN) + """ + hull = Delaunay(vertices) + return hull.find_simplex(point) >= 0 + + def get_joint_stress_sdn(self, sigma, index): + """ + Check the critical stress + + Args: + sigma (np.array): Stress values + index (int): Joint index + """ + s = np.zeros((3, 3)) + sigma_ori = np.zeros((3,3)) + jj = self.element_id[index] + s[0, 0] = sigma[jj, 0, 0] + s[1, 1] = sigma[jj, 0, 1] + s[2, 2] = sigma[jj, 0, 2] + s[0, 1] = sigma[jj, 0, 5] + s[0, 2] = sigma[jj, 0, 4] + s[1, 2] = sigma[jj, 0, 3] + s[1, 0] = s[0, 1] + s[2, 0] = s[0, 2] + s[2, 1] = s[1, 2] + ######################### + sigma_ori[0, 0] = sigma[jj, 0, 0] + sigma_ori[1, 1] = sigma[jj, 0, 1] + sigma_ori[2, 2] = sigma[jj, 0, 2] + sigma_ori[0, 1] = sigma[jj, 0, 5] + sigma_ori[0, 2] = sigma[jj, 0, 4] + sigma_ori[1, 2] = sigma[jj, 0, 3] + sigma_ori[1, 0] = s[0, 1] + sigma_ori[2, 0] = s[0, 2] + sigma_ori[2, 1] = s[1, 2] + ######################## + R = self.rotation[index] + #print('Rotation matrix: \n{}'.format(R)) + s_sdn = np.matmul(np.matmul(R, s), np.transpose(R)) + #print('Original stress tensor: \n{}'.format(sigma_ori)) + #print('Result stress tensor: \n{}'.format(s_sdn)) + return sigma_ori, s_sdn + + def check_critical_stress(self, problem): + """ + Check the critical stress on each joint + + Args: + problem (pygeosx.group): GEOSX problem handle + """ + sigma = wrapper.get_wrapper(problem, self.sigma_key) + pressure = [] + if self.pressure_key: + pressure = wrapper.get_wrapper(problem, self.pressure_key) + + for ii in range(self.N): + jj = self.element_id[ii] + sigma_ori, s_sdn = self.get_joint_stress_sdn(sigma, ii) + #print('Original stress tensor: \n{}'.format(sigma_ori)) + #print("Processed rotated tensor is \n{}".format(s_sdn)) + ################################# + self.sigma_origin[ii] = sigma_ori + self.sigma_new[ii] = s_sdn + ################################# + # Store the current stress values + self.tau[ii] = np.sqrt(s_sdn[0, 2]**2 + s_sdn[1, 2]**2) + self.sigma_n[ii] = -s_sdn[2, 2] + if self.pressure_key: + self.pressure[ii] = pressure[jj] + + # Check failure criteria + dT = self.tau[ii] - (self.cohesion[ii] + self.mu[ii] * (self.sigma_n[ii])) + + # If the failure criteria are met, then set the sdn_offset + # to match a state between 0 and 1 events to the next failure + if (dT > 0.0): + dT += np.random.uniform() * 0.001 * self.shear_modulus + rake = np.arctan2(s_sdn[1, 2], s_sdn[0, 2]) + self.sigma_sdn_offset[ii, 0, 2] = dT * 
np.cos(rake)
+                self.sigma_sdn_offset[ii, 1, 2] = dT * np.sin(rake)
+
+    def check_failure_criteria(self, problem):
+        """
+        Check the failure criteria on each joint
+
+        Args:
+            problem (pygeosx.group): GEOSX problem handle
+        """
+        events = []
+        sigma = wrapper.get_wrapper(problem, self.sigma_key)
+        pressure = []
+        if self.pressure_key:
+            pressure = wrapper.get_wrapper(problem, self.pressure_key)
+
+        for ii in range(self.N):
+            if ((self.fail_count[ii] == 0) | self.allow_repeating):
+                jj = self.element_id[ii]
+                sigma_ori, s_sdn = self.get_joint_stress_sdn(sigma, ii)
+                s_sdn -= self.sigma_sdn_offset[ii, ...]
+
+                # Store the current stress values
+                self.tau[ii] = np.sqrt(s_sdn[0, 2]**2 + s_sdn[1, 2]**2)
+                self.sigma_n[ii] = -s_sdn[2, 2]
+                if self.pressure_key:
+                    self.pressure[ii] = pressure[jj]
+
+                # Check failure criteria
+                fail_criteria = self.tau[ii] - (self.cohesion[ii] + self.mu[ii] * (self.sigma_n[ii]))
+                # If the failure criteria are met, then set the sdn_offset
+                # to match a state between 0 and 1 events to the next failure
+                if (fail_criteria > 0):
+                    self.fail_count[ii] += 1
+                    rake = np.arctan2(s_sdn[1, 2], s_sdn[0, 2])
+                    dT = np.random.uniform()*0.001 * self.shear_modulus
+                    self.sigma_sdn_offset[ii, 0, 2] += dT * np.cos(rake)
+                    self.sigma_sdn_offset[ii, 1, 2] += dT * np.sin(rake)
+                    events.append([self.location[ii, ...],
+                                   self.strike[ii],
+                                   self.dip[ii],
+                                   rake])
+        return events
+
+    def choose_random_uniform_orientations(self):
+        """
+        Choose random orientations for joints that
+        are uniform across the focal sphere
+        """
+        self.strike = np.random.uniform(low=0.0,
+                                        high=2.0*np.pi,
+                                        size=self.N)
+        self.dip = np.arccos(np.random.uniform(low=-1.0,
+                                               high=1.0,
+                                               size=self.N))
+
+    def choose_random_normal_orientations(self, strike_mean, strike_std, dip_mean, dip_std):
+        """
+        Choose random normal orientations for joints
+
+        Args:
+            strike_mean (float): Strike angle mean (degrees)
+            strike_std (float): Strike angle std deviation (degrees)
+            dip_mean (float): Dip angle mean (degrees)
+            dip_std (float): Dip angle std deviation (degrees)
+        """
+        strike_mean_rd = strike_mean / 180.0 * np.pi
+        strike_std_rd = strike_std / 180.0 * np.pi
+        dip_mean_rd = dip_mean / 180.0 * np.pi
+        dip_std_rd = dip_std / 180.0 * np.pi
+        self.strike = (np.random.randn(self.N) * strike_std_rd) + strike_mean_rd
+        self.dip = (np.random.randn(self.N) * dip_std_rd) + dip_mean_rd
+
+    def get_rotation_matrix(self, strike, dip):
+        """
+        Get the rotation matrix to convert from xyz to sdn frames
+
+        Args:
+            strike (float): Strike (counter-clockwise from x-axis, radians)
+            dip (float): Dip (radians)
+
+        Returns:
+            np.array: Rotation matrix
+        """
+        #print("strike is {}, dip is {}\n".format(strike, dip))
+        stheta = np.sin(strike)
+        ctheta = np.cos(strike)
+        sdelta = np.sin(dip)
+        cdelta = np.cos(dip)
+
+        rotation_matrix_strike = np.array([[ctheta,-stheta,0],[stheta,ctheta,0],[0,0,1]])
+        rotation_matrix_dip = np.array([[cdelta,0,-sdelta],[0,1,0],[sdelta,0,cdelta]])
+        rotation_matrix = np.matmul(rotation_matrix_dip,rotation_matrix_strike)
+        #print(rotation_matrix)
+        return rotation_matrix
+
+
+class MicroseismicAnalysis():
+    def __init__(self):
+        """
+        Microseismic Analysis class
+        """
+        self.joint_sets = []
+        self.set_names = []
+        self.catalog_order = ['t',
+                              'x',
+                              'y',
+                              'z',
+                              'strike',
+                              'dip',
+                              'rake',
+                              'set_id']
+        self.catalog = {k: [] for k in self.catalog_order}
+        self.catalog_requires_sync = False
+        self.catalog_all = {}
+
+    def __len__(self):
+        return len(self.catalog_all['t'])
+
+    def add_joint_set(self, problem, set_name,
**xargs):
+        """
+        Add a joint set to a given region
+
+        Args:
+            problem (pygeosx.group): GEOSX problem handle
+            set_name (str): Joint set name
+            xargs: See JointSet.__init__ and JointSet.build_joint_set
+        """
+        init_args = ['region_name', 'solid_name', 'fluid_name', 'mesh_level', 'allow_repeating', 'shear_modulus']
+        joint_xargs = {k: xargs[k] for k in xargs.keys() if k in init_args}
+        build_xargs = {k: xargs[k] for k in xargs.keys() if k not in init_args}
+
+        self.joint_sets.append(JointSet(**joint_xargs))
+        self.set_names.append(set_name)
+        self.joint_sets[-1].build_joint_set(problem, **build_xargs)
+
+    def check_critical_stress(self, problem):
+        """
+        Check critical stress for each joint set, updating
+        the stress offset values
+
+        Args:
+            problem (pygeosx.group): GEOSX problem handle
+        """
+        for joint_set in self.joint_sets:
+            joint_set.check_critical_stress(problem)
+
+    def check_microseismic_activity(self, problem):
+        """
+        Check microseismic activity for each joint set,
+        and save any events to the catalog
+
+        Args:
+            problem (pygeosx.group): GEOSX problem handle
+        """
+        time = wrapper.get_wrapper(problem, 'Events/time')[0]
+        self.catalog_requires_sync = True
+        for ii, joint_set in enumerate(self.joint_sets):
+            events = joint_set.check_failure_criteria(problem)
+            for event in events:
+                self.catalog['t'].append(time)
+                self.catalog['x'].append(event[0][0])
+                self.catalog['y'].append(event[0][1])
+                self.catalog['z'].append(event[0][2])
+                self.catalog['strike'].append(event[1] * 180.0 / np.pi)
+                self.catalog['dip'].append(event[2] * 180.0 / np.pi)
+                self.catalog['rake'].append(event[3] * 180.0 / np.pi)
+                self.catalog['set_id'].append(ii)
+
+    def sync_catalog(self):
+        if self.catalog_requires_sync:
+            self.catalog_requires_sync = False
+            if (parallel_io.comm.size == 1):
+                # This is a serial run
+                self.catalog_all = {k: np.ascontiguousarray(v) for k, v in self.catalog.items()}
+            else:
+                for k, v in self.catalog.items():
+                    self.catalog_all[k] = np.ascontiguousarray(parallel_io.gather_array(v))
+
+    def save_catalog_csv(self, fname):
+        """
+        Save the catalog to a csv format file
+
+        Args:
+            fname (str): Name of the file (with no extension)
+        """
+        self.sync_catalog()
+        if (parallel_io.rank == 0):
+            catalog_header = ', '.join(self.catalog_order)
+            catalog_data = np.transpose(np.array([self.catalog_all[k] for k in self.catalog_order]))
+            np.savetxt('%s.csv' % (fname),
+                       catalog_data,
+                       fmt='%1.4f',
+                       header=catalog_header,
+                       delimiter=', ')
+
+    def save_catalog_vtk(self, fname):
+        """
+        Save the catalog to a vtk format file
+
+        Args:
+            fname (str): Name of the file (with no extension)
+        """
+        self.sync_catalog()
+        if (parallel_io.rank == 0):
+            if len(self.catalog_all['x']):
+                pointsToVTK(fname,
+                            self.catalog_all['x'],
+                            self.catalog_all['y'],
+                            self.catalog_all['z'],
+                            data=self.catalog_all)
+            else:
+                empty_catalog = {k: np.zeros(1) for k in self.catalog_order}
+                pointsToVTK(fname,
+                            np.zeros(1),
+                            np.zeros(1) - 1e-6,
+                            np.zeros(1) + 1e-6,
+                            data=empty_catalog)
+
+    def save_stereonet(self, fname, density=True, poles=True):
+        import matplotlib.pyplot as plt
+        import mplstereonet
+
+        self.sync_catalog()
+        if len(self):
+            f = plt.figure()
+            ax = f.add_subplot(111, projection='equal_angle_stereonet')
+            if density:
+                cax = ax.density_contourf(self.catalog_all['strike'], self.catalog_all['dip'], measurement='poles')
+                f.colorbar(cax)
+            if poles:
+                ax.pole(self.catalog_all['strike'], self.catalog_all['dip'])
+            ax.grid(True)
+            plt.savefig(fname)
+
+
+    def save_all_joints(self, fname):
+        """
+        Save all joint data to a vtk format file
+
+        Args:
+            fname (str): Name of the file (with no extension)
+        """
+        targets = ['strike', 'dip', 'mu', 'cohesion',
+                   'location', 'fail_count', 'sigma_sdn_offset',
+                   'sigma_n', 'pressure', 'tau', 'sigma_origin', 'sigma_new']
+
+        # Grab all local values
+        local_values = {k: [] for k in targets}
+        for j in self.joint_sets:
+            for k in targets:
+                local_values[k].append(getattr(j, k))
+
+        # Format local_values
+        for k in targets:
+            local_values[k] = np.concatenate(local_values[k], axis=0)
+        local_values['strike'] *= 180.0 / np.pi
+        local_values['dip'] *= 180.0 / np.pi
+        local_values['rank'] = np.zeros(len(local_values['strike'])) + parallel_io.rank
+
+        # Handle parallelism, non-scalar values
+        all_values = {}
+        for k, v in local_values.items():
+            if (parallel_io.comm.size > 1):
+                v = parallel_io.gather_array(v)
+
+            N = np.shape(v)
+            if (len(N) == 1):
+                all_values[k] = np.ascontiguousarray(v)
+            elif (len(N) == 2):
+                for ii in range(N[1]):
+                    all_values['%s_%i' % (k, ii)] = np.ascontiguousarray(np.squeeze(v[:, ii]))
+            elif (len(N) == 3):
+                for ii in range(N[1]):
+                    for jj in range(N[2]):
+                        all_values['%s_%i_%i' % (k, ii, jj)] = np.ascontiguousarray(np.squeeze(v[:, ii, jj]))
+            else:
+                print('Unrecognized shape!')
+
+        # Write the vtk file
+        if (parallel_io.rank == 0):
+            pointsToVTK(fname,
+                        all_values['location_0'],
+                        all_values['location_1'],
+                        all_values['location_2'],
+                        data=all_values)
+

From 1e0b734ea82bb00366202ca41f6291b38221358e Mon Sep 17 00:00:00 2001
From: Christopher Sherman
Date: Fri, 16 Aug 2024 10:17:55 -0700
Subject: [PATCH 2/4] Resolving merge conflicts

---
 .../src/geos/pygeos_tools/geophysics/fiber.py |   7 +-
 .../src/geos/pygeos_tools/geophysics/insar.py | 365 ++++++------
 .../pygeos_tools/geophysics/microseismic.py   | 508 +++++++++---------
 .../src/geos/pygeos_tools/parallel_io.py      |  86 +++
 pygeos-tools/src/geos/pygeos_tools/patches.py |  32 ++
 .../src/geos/pygeos_tools/plot_tools.py       | 115 ++++
 pygeos-tools/src/geos/pygeos_tools/wrapper.py |  85 +--
 pygeos-tools/src/geos/pygeos_tools/xml.py     |  23 +
 8 files changed, 693 insertions(+), 528 deletions(-)
 create mode 100644 pygeos-tools/src/geos/pygeos_tools/parallel_io.py
 create mode 100644 pygeos-tools/src/geos/pygeos_tools/patches.py
 create mode 100644 pygeos-tools/src/geos/pygeos_tools/plot_tools.py
 create mode 100644 pygeos-tools/src/geos/pygeos_tools/xml.py

diff --git a/pygeos-tools/src/geos/pygeos_tools/geophysics/fiber.py b/pygeos-tools/src/geos/pygeos_tools/geophysics/fiber.py
index ac69db7..428035a 100644
--- a/pygeos-tools/src/geos/pygeos_tools/geophysics/fiber.py
+++ b/pygeos-tools/src/geos/pygeos_tools/geophysics/fiber.py
@@ -7,7 +7,8 @@

 class Fiber():
-    def __init__(self):
+
+    def __init__( self ):
         self.time = []
         self.channel_position = []
         self.gage_length = -1.0
@@ -19,9 +20,9 @@ def __init__(self):

 class FiberAnalysis():
-    def __init__(self):
+
+    def __init__( self ):
         """
         Fiber Analysis class
         """
         self.set_names = []
-
diff --git a/pygeos-tools/src/geos/pygeos_tools/geophysics/insar.py b/pygeos-tools/src/geos/pygeos_tools/geophysics/insar.py
index 9723b6b..40d8aff 100644
--- a/pygeos-tools/src/geos/pygeos_tools/geophysics/insar.py
+++ b/pygeos-tools/src/geos/pygeos_tools/geophysics/insar.py
@@ -1,4 +1,3 @@
-
 import os
 import numpy as np
 from pygeosx_tools import wrapper, parallel_io, plot_tools
@@ -10,7 +9,8 @@

 class InSAR_Analysis():
-    def __init__(self, restart_fname=''):
+
+    def __init__( self, restart_fname='' ):
         """
         InSAR Analysis class
         """
@@ -22,7 +22,7 @@ def __init__(self, restart_fname=''):
         self.node_displacement_key =
'' self.node_ghost_key = '' - self.satellite_vector = [0.0, 0.0, 1.0] + self.satellite_vector = [ 0.0, 0.0, 1.0 ] self.wavelength = 1.0 self.x_grid = [] @@ -35,21 +35,21 @@ def __init__(self, restart_fname=''): self.phase = [] if restart_fname: - self.resume_from_restart(restart_fname) + self.resume_from_restart( restart_fname ) - def resume_from_restart(self, fname): - with hdf5_wrapper.hdf5_wrapper(fname) as r: - for k in dir(self): - if ('_' not in k): - setattr(self, k, r[k]) + def resume_from_restart( self, fname ): + with hdf5_wrapper.hdf5_wrapper( fname ) as r: + for k in dir( self ): + if ( '_' not in k ): + setattr( self, k, r[ k ] ) - def write_restart(self, fname): - with hdf5_wrapper.hdf5_wrapper(fname, mode='w') as r: - for k in dir(self): - if ('_' not in k): - r[k] = getattr(self, k) + def write_restart( self, fname ): + with hdf5_wrapper.hdf5_wrapper( fname, mode='w' ) as r: + for k in dir( self ): + if ( '_' not in k ): + r[ k ] = getattr( self, k ) - def setup_grid(self, problem, set_names=[], x_range=[], y_range=[], dx=1.0, dy=1.0): + def setup_grid( self, problem, set_names=[], x_range=[], y_range=[], dx=1.0, dy=1.0 ): """ Setup the InSAR grid @@ -63,40 +63,41 @@ def setup_grid(self, problem, set_names=[], x_range=[], y_range=[], dx=1.0, dy=1 """ # Determine pygeosx keys self.set_names = set_names - self.set_keys = [wrapper.get_matching_wrapper_path(problem, ['nodeManager', s]) for s in set_names] - self.node_position_key = wrapper.get_matching_wrapper_path(problem, ['nodeManager', 'ReferencePosition']) - self.node_displacement_key = wrapper.get_matching_wrapper_path(problem, ['nodeManager', 'TotalDisplacement']) - self.node_ghost_key = wrapper.get_matching_wrapper_path(problem, ['nodeManager', 'ghostRank']) + self.set_keys = [ wrapper.get_matching_wrapper_path( problem, [ 'nodeManager', s ] ) for s in set_names ] + self.node_position_key = wrapper.get_matching_wrapper_path( problem, [ 'nodeManager', 'ReferencePosition' ] ) + self.node_displacement_key = wrapper.get_matching_wrapper_path( problem, + [ 'nodeManager', 'TotalDisplacement' ] ) + self.node_ghost_key = wrapper.get_matching_wrapper_path( problem, [ 'nodeManager', 'ghostRank' ] ) # If not specified, then setup the grid extents - ghost_rank = wrapper.get_wrapper(problem, self.node_ghost_key) - x = wrapper.get_wrapper(problem, self.node_position_key) - set_ids = np.concatenate([wrapper.get_wrapper(problem, k) for k in self.set_keys], axis=0) + ghost_rank = wrapper.get_wrapper( problem, self.node_ghost_key ) + x = wrapper.get_wrapper( problem, self.node_position_key ) + set_ids = np.concatenate( [ wrapper.get_wrapper( problem, k ) for k in self.set_keys ], axis=0 ) # Choose non-ghost set members - xb = x[set_ids, :] - gb = ghost_rank[set_ids] - xc = xb[gb < 0, :] - global_min, global_max = parallel_io.get_global_array_range(xc) + xb = x[ set_ids, : ] + gb = ghost_rank[ set_ids ] + xc = xb[ gb < 0, : ] + global_min, global_max = parallel_io.get_global_array_range( xc ) - if (len(x_range) == 0): - x_range = [global_min[0], global_max[0]] - if (len(y_range) == 0): - y_range = [global_min[1], global_max[1]] + if ( len( x_range ) == 0 ): + x_range = [ global_min[ 0 ], global_max[ 0 ] ] + if ( len( y_range ) == 0 ): + y_range = [ global_min[ 1 ], global_max[ 1 ] ] # Choose the grid - Nx = int(np.ceil((x_range[1] - x_range[0]) / dx)) - Ny = int(np.ceil((y_range[1] - y_range[0]) / dy)) - self.x_grid = np.linspace(x_range[0], x_range[1], Nx + 1) - self.y_grid = np.linspace(y_range[0], y_range[1], Ny + 1) + Nx = int( 
np.ceil( ( x_range[ 1 ] - x_range[ 0 ] ) / dx ) )
+        Ny = int( np.ceil( ( y_range[ 1 ] - y_range[ 0 ] ) / dy ) )
+        self.x_grid = np.linspace( x_range[ 0 ], x_range[ 1 ], Nx + 1 )
+        self.y_grid = np.linspace( y_range[ 0 ], y_range[ 1 ], Ny + 1 )

         # Save the average elevation for vtk outputs
-        self.elevation = global_min[2]
+        self.elevation = global_min[ 2 ]

         # Trigger the map build
-        self.build_map(problem)
+        self.build_map( problem )

-    def build_map(self, problem):
+    def build_map( self, problem ):
         """
         Build the map between the mesh and the InSAR image.
         Note: this method can be used to update the map after
@@ -106,35 +107,33 @@ def build_map(self, problem):
             problem (pygeosx.group): GEOSX problem handle
         """
         # Load the data
-        ghost_rank = wrapper.get_wrapper(problem, self.node_ghost_key)
-        x = wrapper.get_wrapper(problem, self.node_position_key)
-        set_ids = np.concatenate([wrapper.get_wrapper(problem, k) for k in self.set_keys], axis=0)
+        ghost_rank = wrapper.get_wrapper( problem, self.node_ghost_key )
+        x = wrapper.get_wrapper( problem, self.node_position_key )
+        set_ids = np.concatenate( [ wrapper.get_wrapper( problem, k ) for k in self.set_keys ], axis=0 )

         # Choose non-ghost set members
-        xb = x[set_ids, :]
-        gb = ghost_rank[set_ids]
-        xc = xb[gb < 0, :]
+        xb = x[ set_ids, : ]
+        gb = ghost_rank[ set_ids ]
+        xc = xb[ gb < 0, : ]

         # Setup the node to insar map
         self.local_insar_map = []
-        dx = self.x_grid[1] - self.x_grid[0]
-        dy = self.y_grid[1] - self.y_grid[0]
-        x_bins = np.concatenate([[self.x_grid[0] - 0.5 * dx],
-                                 0.5*(self.x_grid[1:] + self.x_grid[:-1]),
-                                 [self.x_grid[-1] + 0.5 * dx]])
-        y_bins = np.concatenate([[self.y_grid[0] - 0.5 * dy],
-                                 0.5*(self.y_grid[1:] + self.y_grid[:-1]),
-                                 [self.y_grid[-1] + 0.5 * dy]])
-        if len(xc):
-            Ix = np.digitize(np.squeeze(xc[:, 0]), x_bins) - 1
-            Iy = np.digitize(np.squeeze(xc[:, 1]), y_bins) - 1
-            for ii in range(len(self.x_grid)):
-                for jj in range(len(self.y_grid)):
-                    tmp = np.where((Ix == ii) & (Iy == jj))[0]
-                    if len(tmp):
-                        self.local_insar_map.append([ii, jj, tmp])
-
-    def extract_insar(self, problem):
+        dx = self.x_grid[ 1 ] - self.x_grid[ 0 ]
+        dy = self.y_grid[ 1 ] - self.y_grid[ 0 ]
+        x_bins = np.concatenate( [ [ self.x_grid[ 0 ] - 0.5 * dx ], 0.5 * ( self.x_grid[ 1: ] + self.x_grid[ :-1 ] ),
+                                   [ self.x_grid[ -1 ] + 0.5 * dx ] ] )
+        y_bins = np.concatenate( [ [ self.y_grid[ 0 ] - 0.5 * dy ], 0.5 * ( self.y_grid[ 1: ] + self.y_grid[ :-1 ] ),
+                                   [ self.y_grid[ -1 ] + 0.5 * dy ] ] )
+        if len( xc ):
+            Ix = np.digitize( np.squeeze( xc[ :, 0 ] ), x_bins ) - 1
+            Iy = np.digitize( np.squeeze( xc[ :, 1 ] ), y_bins ) - 1
+            for ii in range( len( self.x_grid ) ):
+                for jj in range( len( self.y_grid ) ):
+                    tmp = np.where( ( Ix == ii ) & ( Iy == jj ) )[ 0 ]
+                    if len( tmp ):
+                        self.local_insar_map.append( [ ii, jj, tmp ] )
+
+    def extract_insar( self, problem ):
         """
         Extract InSAR image for current step

@@ -142,156 +141,150 @@ def extract_insar(self, problem):
             problem (pygeosx.group): GEOSX problem handle
         """
         # Load values
-        time = wrapper.get_wrapper(problem, 'Events/time')[0]
-        ghost_rank = wrapper.get_wrapper(problem, self.node_ghost_key)
-        x = wrapper.get_wrapper(problem, self.node_displacement_key)
-        set_ids = np.concatenate([wrapper.get_wrapper(problem, k) for k in self.set_keys], axis=0)
+        time = wrapper.get_wrapper( problem, 'Events/time' )[ 0 ]
+        ghost_rank = wrapper.get_wrapper( problem, self.node_ghost_key )
+        x = wrapper.get_wrapper( problem, self.node_displacement_key )
+        set_ids = np.concatenate( [ wrapper.get_wrapper( problem, k ) for k in
self.set_keys ], axis=0 ) # Choose non-ghost set members - xb = x[set_ids, :] - gb = ghost_rank[set_ids] - xc = xb[gb < 0, :] + xb = x[ set_ids, : ] + gb = ghost_rank[ set_ids ] + xc = xb[ gb < 0, : ] # Find local displacements - Nx = len(self.x_grid) - Ny = len(self.y_grid) - local_displacement_sum = np.zeros((Nx, Ny, 3)) - local_N = np.zeros((Nx, Ny), dtype='int') + Nx = len( self.x_grid ) + Ny = len( self.y_grid ) + local_displacement_sum = np.zeros( ( Nx, Ny, 3 ) ) + local_N = np.zeros( ( Nx, Ny ), dtype='int' ) for m in self.local_insar_map: - local_N[m[0], m[1]] += len(m[2]) - for ii in m[2]: - local_displacement_sum[m[0], m[1], :] += xc[ii, :] + local_N[ m[ 0 ], m[ 1 ] ] += len( m[ 2 ] ) + for ii in m[ 2 ]: + local_displacement_sum[ m[ 0 ], m[ 1 ], : ] += xc[ ii, : ] # Communicate values - global_displacement_sum = np.sum(np.array(parallel_io.gather_array(local_displacement_sum, concatenate=False)), axis=0) - global_N = np.sum(np.array(parallel_io.gather_array(local_N, concatenate=False)), axis=0) + global_displacement_sum = np.sum( np.array( parallel_io.gather_array( local_displacement_sum, + concatenate=False ) ), + axis=0 ) + global_N = np.sum( np.array( parallel_io.gather_array( local_N, concatenate=False ) ), axis=0 ) # Find final 3D displacement - global_displacement = np.zeros((Nx, Ny, 3)) - if (parallel_io.rank == 0): - range_change = np.zeros((Nx, Ny)) - for ii in range(3): - d = np.squeeze(global_displacement_sum[:, :, ii]) / (global_N + 1e-10) - d[global_N == 0] = np.NaN - global_displacement[:, :, ii] = self.fill_nan_gaps(d) - range_change += global_displacement[:, :, ii] * self.satellite_vector[ii] + global_displacement = np.zeros( ( Nx, Ny, 3 ) ) + if ( parallel_io.rank == 0 ): + range_change = np.zeros( ( Nx, Ny ) ) + for ii in range( 3 ): + d = np.squeeze( global_displacement_sum[ :, :, ii ] ) / ( global_N + 1e-10 ) + d[ global_N == 0 ] = np.NaN + global_displacement[ :, :, ii ] = self.fill_nan_gaps( d ) + range_change += global_displacement[ :, :, ii ] * self.satellite_vector[ ii ] # Filter nans - self.displacement.append(global_displacement) - self.range_change.append(range_change) - self.phase.append(np.angle(np.exp(2j * np.pi * range_change / self.wavelength))) - self.mask.append(global_N > 0) - self.times.append(time) + self.displacement.append( global_displacement ) + self.range_change.append( range_change ) + self.phase.append( np.angle( np.exp( 2j * np.pi * range_change / self.wavelength ) ) ) + self.mask.append( global_N > 0 ) + self.times.append( time ) - def fill_nan_gaps(self, values): + def fill_nan_gaps( self, values ): """ Fill gaps in the insar data which are specified via NaN's """ - z = np.isnan(values) - if np.sum(z): - N = np.shape(values) - grid = np.meshgrid(self.x_grid, self.y_grid, indexing='ij') + z = np.isnan( values ) + if np.sum( z ): + N = np.shape( values ) + grid = np.meshgrid( self.x_grid, self.y_grid, indexing='ij' ) # Filter out values in flattened arrays - x_flat = np.reshape(grid[0], (-1)) - y_flat = np.reshape(grid[1], (-1)) - v_flat = np.reshape(values, (-1)) - t_flat = np.reshape(z, (-1)) - I_valid = np.where(~t_flat)[0] - x_valid = x_flat[I_valid] - y_valid = y_flat[I_valid] - v_valid = v_flat[I_valid] + x_flat = np.reshape( grid[ 0 ], ( -1 ) ) + y_flat = np.reshape( grid[ 1 ], ( -1 ) ) + v_flat = np.reshape( values, ( -1 ) ) + t_flat = np.reshape( z, ( -1 ) ) + I_valid = np.where( ~t_flat )[ 0 ] + x_valid = x_flat[ I_valid ] + y_valid = y_flat[ I_valid ] + v_valid = v_flat[ I_valid ] # Re-interpolate values - vb = 
interpolate.griddata((x_valid, y_valid), v_valid, tuple(grid), method='linear') - values = np.reshape(vb, N) + vb = interpolate.griddata( ( x_valid, y_valid ), v_valid, tuple( grid ), method='linear' ) + values = np.reshape( vb, N ) return values - def save_hdf5(self, header='insar', output_root='./results'): - if (parallel_io.rank == 0): - os.makedirs(output_root, exist_ok=True) - with hdf5_wrapper.hdf5_wrapper('%s/%s.hdf5' % (output_root, header), mode='w') as data: - data['x'] = self.x_grid - data['y'] = self.y_grid - data['time'] = self.times - data['displacement'] = np.array(self.displacement) - data['range_change'] = np.array(self.range_change) - data['phase'] = np.array(self.phase) - - def save_csv(self, header='insar', output_root='./results'): - if (parallel_io.rank == 0): - os.makedirs(output_root, exist_ok=True) - np.savetxt('%s/%s_x_grid.csv' % (output_root, header), - self.x_grid, - delimiter=', ') - np.savetxt('%s/%s_y_grid.csv' % (output_root, header), - self.y_grid, - delimiter=', ') - for ii, t in enumerate(self.times): - comments = 'T (days), %1.8e' % (t / (60 * 60 * 24)) - np.savetxt('%s/%s_range_change_%03d.csv' % (output_root, header, ii), - self.range_change[ii], - delimiter=', ', - header=comments) - np.savetxt('%s/%s_phase_%03d.csv' % (output_root, header, ii), - self.phase[ii], - delimiter=', ', - header=comments) - - def save_vtk(self, header='insar', output_root='./results'): - if (parallel_io.rank == 0): - os.makedirs(output_root, exist_ok=True) - x = np.ascontiguousarray(self.x_grid) - y = np.ascontiguousarray(self.y_grid) - z = np.array([self.elevation]) - - for ii, t in enumerate(self.times): - data = {'range_change': np.ascontiguousarray(np.expand_dims(self.range_change[ii], -1)), - 'phase': np.ascontiguousarray(np.expand_dims(self.phase[ii], -1)), - 'dx': np.ascontiguousarray(np.expand_dims(self.displacement[ii][:, :, 0], -1)), - 'dy': np.ascontiguousarray(np.expand_dims(self.displacement[ii][:, :, 1], -1)), - 'dz': np.ascontiguousarray(np.expand_dims(self.displacement[ii][:, :, 2], -1))} - - gridToVTK('%s/%s_%03d' % (output_root, header, ii), - x, - y, - z, - pointData=data) - - def save_image(self, header='insar', output_root='./results', interp_method='quadric'): - if (parallel_io.rank == 0): - os.makedirs(output_root, exist_ok=True) + def save_hdf5( self, header='insar', output_root='./results' ): + if ( parallel_io.rank == 0 ): + os.makedirs( output_root, exist_ok=True ) + with hdf5_wrapper.hdf5_wrapper( '%s/%s.hdf5' % ( output_root, header ), mode='w' ) as data: + data[ 'x' ] = self.x_grid + data[ 'y' ] = self.y_grid + data[ 'time' ] = self.times + data[ 'displacement' ] = np.array( self.displacement ) + data[ 'range_change' ] = np.array( self.range_change ) + data[ 'phase' ] = np.array( self.phase ) + + def save_csv( self, header='insar', output_root='./results' ): + if ( parallel_io.rank == 0 ): + os.makedirs( output_root, exist_ok=True ) + np.savetxt( '%s/%s_x_grid.csv' % ( output_root, header ), self.x_grid, delimiter=', ' ) + np.savetxt( '%s/%s_y_grid.csv' % ( output_root, header ), self.y_grid, delimiter=', ' ) + for ii, t in enumerate( self.times ): + comments = 'T (days), %1.8e' % ( t / ( 60 * 60 * 24 ) ) + np.savetxt( '%s/%s_range_change_%03d.csv' % ( output_root, header, ii ), + self.range_change[ ii ], + delimiter=', ', + header=comments ) + np.savetxt( '%s/%s_phase_%03d.csv' % ( output_root, header, ii ), + self.phase[ ii ], + delimiter=', ', + header=comments ) + + def save_vtk( self, header='insar', output_root='./results' ): + if ( 
parallel_io.rank == 0 ): + os.makedirs( output_root, exist_ok=True ) + x = np.ascontiguousarray( self.x_grid ) + y = np.ascontiguousarray( self.y_grid ) + z = np.array( [ self.elevation ] ) + + for ii, t in enumerate( self.times ): + data = { + 'range_change': np.ascontiguousarray( np.expand_dims( self.range_change[ ii ], -1 ) ), + 'phase': np.ascontiguousarray( np.expand_dims( self.phase[ ii ], -1 ) ), + 'dx': np.ascontiguousarray( np.expand_dims( self.displacement[ ii ][ :, :, 0 ], -1 ) ), + 'dy': np.ascontiguousarray( np.expand_dims( self.displacement[ ii ][ :, :, 1 ], -1 ) ), + 'dz': np.ascontiguousarray( np.expand_dims( self.displacement[ ii ][ :, :, 2 ], -1 ) ) + } + + gridToVTK( '%s/%s_%03d' % ( output_root, header, ii ), x, y, z, pointData=data ) + + def save_image( self, header='insar', output_root='./results', interp_method='quadric' ): + if ( parallel_io.rank == 0 ): + os.makedirs( output_root, exist_ok=True ) fig = plot_tools.HighResPlot() - for ii, t in enumerate(self.times): + for ii, t in enumerate( self.times ): # Range change fig.reset() - extents = [self.x_grid[0], self.x_grid[-1], self.y_grid[0], self.y_grid[-1]] - ca = plt.imshow(np.transpose(np.flipud(self.range_change[ii])), - extent=extents, - cmap=cm.jet, - aspect='auto', - interpolation=interp_method) - plt.title('T = %1.4e (days)' % (t / (60 * 60 * 24))) - plt.xlabel('X (m)') - plt.ylabel('Y (m)') - cb = plt.colorbar(ca) - cb.set_label('Range Change (m)') - fig.save('%s/%s_range_change_%03d' % (output_root, header, ii)) + extents = [ self.x_grid[ 0 ], self.x_grid[ -1 ], self.y_grid[ 0 ], self.y_grid[ -1 ] ] + ca = plt.imshow( np.transpose( np.flipud( self.range_change[ ii ] ) ), + extent=extents, + cmap=cm.jet, + aspect='auto', + interpolation=interp_method ) + plt.title( 'T = %1.4e (days)' % ( t / ( 60 * 60 * 24 ) ) ) + plt.xlabel( 'X (m)' ) + plt.ylabel( 'Y (m)' ) + cb = plt.colorbar( ca ) + cb.set_label( 'Range Change (m)' ) + fig.save( '%s/%s_range_change_%03d' % ( output_root, header, ii ) ) # Wrapped phase fig.reset() - extents = [self.x_grid[0], self.x_grid[-1], self.y_grid[0], self.y_grid[-1]] - ca = plt.imshow(np.transpose(np.flipud(self.phase[ii])), - extent=extents, - cmap=cm.jet, - aspect='auto', - interpolation=interp_method) - plt.title('T = %1.4e (days)' % (t / (60 * 60 * 24))) - plt.xlabel('X (m)') - plt.ylabel('Y (m)') - cb = plt.colorbar(ca) - cb.set_label('Phase (radians)') - fig.save('%s/%s_wrapped_phase_%03d' % (output_root, header, ii)) - - + extents = [ self.x_grid[ 0 ], self.x_grid[ -1 ], self.y_grid[ 0 ], self.y_grid[ -1 ] ] + ca = plt.imshow( np.transpose( np.flipud( self.phase[ ii ] ) ), + extent=extents, + cmap=cm.jet, + aspect='auto', + interpolation=interp_method ) + plt.title( 'T = %1.4e (days)' % ( t / ( 60 * 60 * 24 ) ) ) + plt.xlabel( 'X (m)' ) + plt.ylabel( 'Y (m)' ) + cb = plt.colorbar( ca ) + cb.set_label( 'Phase (radians)' ) + fig.save( '%s/%s_wrapped_phase_%03d' % ( output_root, header, ii ) ) diff --git a/pygeos-tools/src/geos/pygeos_tools/geophysics/microseismic.py b/pygeos-tools/src/geos/pygeos_tools/geophysics/microseismic.py index 9ccb36f..fcfa990 100644 --- a/pygeos-tools/src/geos/pygeos_tools/geophysics/microseismic.py +++ b/pygeos-tools/src/geos/pygeos_tools/geophysics/microseismic.py @@ -30,13 +30,13 @@ class JointSet(): rotation (list): Rotation matrices to map from xyz to sdn frames """ - def __init__(self, - region_name='Region', - solid_name='rock', - fluid_name='', - mesh_level='FE1', - allow_repeating=False, - shear_modulus=1.0e9): + def __init__( self, + 
region_name='Region', + solid_name='rock', + fluid_name='', + mesh_level='FE1', + allow_repeating=False, + shear_modulus=1.0e9 ): # GEOSX object names / keys self.region_name = region_name self.solid_name = solid_name @@ -63,19 +63,19 @@ def __init__(self, self.sigma_origin = [] self.sigma_new = [] - def build_joint_set(self, - problem, - random_location=True, - density=1.0, - uniform_orientation=True, - strike_mean=0.0, - strike_std=0.0, - dip_mean=0.0, - dip_std=0.0, - mu_mean=0.6, - mu_std=0.0, - cohesion_mean=1.0e6, - cohesion_std=0.0): + def build_joint_set( self, + problem, + random_location=True, + density=1.0, + uniform_orientation=True, + strike_mean=0.0, + strike_std=0.0, + dip_mean=0.0, + dip_std=0.0, + mu_mean=0.6, + mu_std=0.0, + cohesion_mean=1.0e6, + cohesion_std=0.0 ): """ Add a joint set to a given region @@ -95,39 +95,41 @@ def build_joint_set(self, """ # Choose locations if random_location: - self.choose_random_locations(problem, density) + self.choose_random_locations( problem, density ) else: # TODO: Add structured joint set - raise Exception('Option not available: random_location=False') + raise Exception( 'Option not available: random_location=False' ) # Choose orientations if uniform_orientation: self.choose_random_uniform_orientations() else: - self.choose_random_normal_orientations(strike_mean=strike_mean, - strike_std=strike_std, - dip_mean=dip_mean, - dip_std=dip_std) + self.choose_random_normal_orientations( strike_mean=strike_mean, + strike_std=strike_std, + dip_mean=dip_mean, + dip_std=dip_std ) # Setup other values - self.mu = (np.random.randn(self.N) * mu_std) + mu_mean - self.cohesion = (np.random.randn(self.N) * cohesion_std) + cohesion_mean - self.rotation = [self.get_rotation_matrix(s, d) for s, d in zip(self.strike, self.dip)] - self.sigma_sdn_offset = np.zeros((self.N, 3, 3)) - self.fail_count = np.zeros(self.N, dtype=int) - self.sigma_key = wrapper.get_matching_wrapper_path(problem, [self.mesh_level, self.region_name, self.solid_name, 'stress']) - - self.sigma_n = np.zeros(self.N) - self.tau = np.zeros(self.N) - self.pressure = np.zeros(self.N) + self.mu = ( np.random.randn( self.N ) * mu_std ) + mu_mean + self.cohesion = ( np.random.randn( self.N ) * cohesion_std ) + cohesion_mean + self.rotation = [ self.get_rotation_matrix( s, d ) for s, d in zip( self.strike, self.dip ) ] + self.sigma_sdn_offset = np.zeros( ( self.N, 3, 3 ) ) + self.fail_count = np.zeros( self.N, dtype=int ) + self.sigma_key = wrapper.get_matching_wrapper_path( + problem, [ self.mesh_level, self.region_name, self.solid_name, 'stress' ] ) + + self.sigma_n = np.zeros( self.N ) + self.tau = np.zeros( self.N ) + self.pressure = np.zeros( self.N ) ################################################ - self.sigma_origin = np.zeros((self.N, 3, 3)) - self.sigma_new = np.zeros((self.N, 3, 3)) + self.sigma_origin = np.zeros( ( self.N, 3, 3 ) ) + self.sigma_new = np.zeros( ( self.N, 3, 3 ) ) ################################################ if self.fluid_name: - self.pressure_key = wrapper.get_matching_wrapper_path(problem, [self.mesh_level, self.region_name, 'pressure']) + self.pressure_key = wrapper.get_matching_wrapper_path( problem, + [ self.mesh_level, self.region_name, 'pressure' ] ) - def choose_random_locations(self, problem, density): + def choose_random_locations( self, problem, density ): """ Choose random joint locations within the target region @@ -136,54 +138,57 @@ def choose_random_locations(self, problem, density): density (float): Joint density (#/m3) """ # Find key values - 
ghost_key_node = wrapper.get_matching_wrapper_path(problem, [self.mesh_level, 'nodeManager', 'ghostRank'])
-        node_position_key = wrapper.get_matching_wrapper_path(problem, [self.mesh_level, 'nodeManager', 'ReferencePosition'])
-        node_element_key = wrapper.get_matching_wrapper_path(problem, [self.mesh_level, 'nodeManager', 'elemList'])
-        element_node_key = wrapper.get_matching_wrapper_path(problem, [self.mesh_level, 'ElementRegions', self.region_name, 'nodeList'])
+        ghost_key_node = wrapper.get_matching_wrapper_path( problem, [ self.mesh_level, 'nodeManager', 'ghostRank' ] )
+        node_position_key = wrapper.get_matching_wrapper_path( problem,
+                                                               [ self.mesh_level, 'nodeManager', 'ReferencePosition' ] )
+        node_element_key = wrapper.get_matching_wrapper_path( problem, [ self.mesh_level, 'nodeManager', 'elemList' ] )
+        element_node_key = wrapper.get_matching_wrapper_path(
+            problem, [ self.mesh_level, 'ElementRegions', self.region_name, 'nodeList' ] )

         # Find the region extents
-        ghost_rank = wrapper.get_wrapper(problem, ghost_key_node)
-        x = wrapper.get_wrapper(problem, node_position_key)
-        xb = x[ghost_rank < 0, :]
-        xmin = np.amin(xb, axis=0)
-        xmax = np.amax(xb, axis=0)
+        ghost_rank = wrapper.get_wrapper( problem, ghost_key_node )
+        x = wrapper.get_wrapper( problem, node_position_key )
+        xb = x[ ghost_rank < 0, : ]
+        xmin = np.amin( xb, axis=0 )
+        xmax = np.amax( xb, axis=0 )

         # Choose the initial number of elements to test connectivity
-        Ntest = int(np.prod(xmax - xmin) * density)
-        tmp_index = np.zeros(Ntest, dtype=int)
-        tmp_location = np.zeros((Ntest, 3))
-        test_locations = np.random.uniform(size=(Ntest, 3))
-        for ii in range(3):
-            test_locations[:, ii] *= (xmax[ii] - xmin[ii])
-            test_locations[:, ii] += xmin[ii]
+        Ntest = int( np.prod( xmax - xmin ) * density )
+        tmp_index = np.zeros( Ntest, dtype=int )
+        tmp_location = np.zeros( ( Ntest, 3 ) )
+        test_locations = np.random.uniform( size=( Ntest, 3 ) )
+        for ii in range( 3 ):
+            test_locations[ :, ii ] *= ( xmax[ ii ] - xmin[ ii ] )
+            test_locations[ :, ii ] += xmin[ ii ]

         # Map joint location to elements
-        node_element_map = wrapper.get_wrapper(problem, node_element_key)
-        element_node_map = wrapper.get_wrapper(problem, element_node_key)
+        node_element_map = wrapper.get_wrapper( problem, node_element_key )
+        element_node_map = wrapper.get_wrapper( problem, element_node_key )
         self.N = 0
-        print('Attempting to add %i microseismic elements' % (Ntest), flush=True)
-        for ii in range(Ntest):
+        print( 'Attempting to add %i microseismic elements' % ( Ntest ), flush=True )
+        for ii in range( Ntest ):
             # Find the closest node
-            R2 = (x[:, 0] - test_locations[ii, 0])**2 + (x[:, 1] - test_locations[ii, 1])**2 + (x[:, 2] - test_locations[ii, 2])**2
-            Ia = np.argmin(R2)
+            R2 = ( x[ :, 0 ] - test_locations[ ii, 0 ] )**2 + ( x[ :, 1 ] - test_locations[ ii, 1 ] )**2 + (
+                x[ :, 2 ] - test_locations[ ii, 2 ] )**2
+            Ia = np.argmin( R2 )

             # Test to see if the joint is within an adjoining element
-            test_elements = node_element_map[Ia]
+            test_elements = node_element_map[ Ia ]
             for element in test_elements:
-                test_nodes = element_node_map[element]
-                element_corners = x[test_nodes, :]
-                if self.point_in_convex_element(test_locations[ii, :], element_corners):
-                    tmp_index[self.N] = element
-                    tmp_location[self.N, :] = test_locations[ii, :]
+                test_nodes = element_node_map[ element ]
+                element_corners = x[ test_nodes, : ]
+                if self.point_in_convex_element( test_locations[ ii, : ], element_corners ):
+                    tmp_index[ self.N ] = element
+                    tmp_location[ self.N, : ] = test_locations[
ii, : ] self.N += 1 break # Finalize location selection - print('%i joints generated' % (self.N)) - self.element_id = tmp_index[:self.N] - self.location = tmp_location[:self.N, :] + print( '%i joints generated' % ( self.N ) ) + self.element_id = tmp_index[ :self.N ] + self.location = tmp_location[ :self.N, : ] - def point_in_convex_element(self, point, vertices): + def point_in_convex_element( self, point, vertices ): """ Check whether a point is within a convex element @@ -191,10 +196,10 @@ def point_in_convex_element(self, point, vertices): point (np.array): target location (N) vertices (np.array): element vertices (MxN) """ - hull = Delaunay(vertices) - return hull.find_simplex(point) >= 0 + hull = Delaunay( vertices ) + return hull.find_simplex( point ) >= 0 - def get_joint_stress_sdn(self, sigma, index): + def get_joint_stress_sdn( self, sigma, index ): """ Check the critical stress @@ -202,75 +207,75 @@ def get_joint_stress_sdn(self, sigma, index): sigma (np.array): Stress values index (int): Joint index """ - s = np.zeros((3, 3)) - sigma_ori = np.zeros((3,3)) - jj = self.element_id[index] - s[0, 0] = sigma[jj, 0, 0] - s[1, 1] = sigma[jj, 0, 1] - s[2, 2] = sigma[jj, 0, 2] - s[0, 1] = sigma[jj, 0, 5] - s[0, 2] = sigma[jj, 0, 4] - s[1, 2] = sigma[jj, 0, 3] - s[1, 0] = s[0, 1] - s[2, 0] = s[0, 2] - s[2, 1] = s[1, 2] + s = np.zeros( ( 3, 3 ) ) + sigma_ori = np.zeros( ( 3, 3 ) ) + jj = self.element_id[ index ] + s[ 0, 0 ] = sigma[ jj, 0, 0 ] + s[ 1, 1 ] = sigma[ jj, 0, 1 ] + s[ 2, 2 ] = sigma[ jj, 0, 2 ] + s[ 0, 1 ] = sigma[ jj, 0, 5 ] + s[ 0, 2 ] = sigma[ jj, 0, 4 ] + s[ 1, 2 ] = sigma[ jj, 0, 3 ] + s[ 1, 0 ] = s[ 0, 1 ] + s[ 2, 0 ] = s[ 0, 2 ] + s[ 2, 1 ] = s[ 1, 2 ] ######################### - sigma_ori[0, 0] = sigma[jj, 0, 0] - sigma_ori[1, 1] = sigma[jj, 0, 1] - sigma_ori[2, 2] = sigma[jj, 0, 2] - sigma_ori[0, 1] = sigma[jj, 0, 5] - sigma_ori[0, 2] = sigma[jj, 0, 4] - sigma_ori[1, 2] = sigma[jj, 0, 3] - sigma_ori[1, 0] = s[0, 1] - sigma_ori[2, 0] = s[0, 2] - sigma_ori[2, 1] = s[1, 2] + sigma_ori[ 0, 0 ] = sigma[ jj, 0, 0 ] + sigma_ori[ 1, 1 ] = sigma[ jj, 0, 1 ] + sigma_ori[ 2, 2 ] = sigma[ jj, 0, 2 ] + sigma_ori[ 0, 1 ] = sigma[ jj, 0, 5 ] + sigma_ori[ 0, 2 ] = sigma[ jj, 0, 4 ] + sigma_ori[ 1, 2 ] = sigma[ jj, 0, 3 ] + sigma_ori[ 1, 0 ] = s[ 0, 1 ] + sigma_ori[ 2, 0 ] = s[ 0, 2 ] + sigma_ori[ 2, 1 ] = s[ 1, 2 ] ######################## - R = self.rotation[index] + R = self.rotation[ index ] #print('Rotation matrix: \n{}'.format(R)) - s_sdn = np.matmul(np.matmul(R, s), np.transpose(R)) + s_sdn = np.matmul( np.matmul( R, s ), np.transpose( R ) ) #print('Original stress tensor: \n{}'.format(sigma_ori)) #print('Result stress tensor: \n{}'.format(s_sdn)) return sigma_ori, s_sdn - def check_critical_stress(self, problem): + def check_critical_stress( self, problem ): """ Check the critical stress on each joint Args: problem (pygeosx.group): GEOSX problem handle """ - sigma = wrapper.get_wrapper(problem, self.sigma_key) + sigma = wrapper.get_wrapper( problem, self.sigma_key ) pressure = [] if self.pressure_key: - pressure = wrapper.get_wrapper(problem, self.pressure_key) + pressure = wrapper.get_wrapper( problem, self.pressure_key ) - for ii in range(self.N): - jj = self.element_id[ii] - sigma_ori, s_sdn = self.get_joint_stress_sdn(sigma, ii) + for ii in range( self.N ): + jj = self.element_id[ ii ] + sigma_ori, s_sdn = self.get_joint_stress_sdn( sigma, ii ) #print('Original stress tensor: \n{}'.format(sigma_ori)) #print("Processed rotated tensor is \n{}".format(s_sdn)) 
-    def check_critical_stress(self, problem):
+    def check_critical_stress( self, problem ):
         """
         Check the critical stress on each joint

         Args:
             problem (pygeosx.group): GEOSX problem handle
         """
-        sigma = wrapper.get_wrapper(problem, self.sigma_key)
+        sigma = wrapper.get_wrapper( problem, self.sigma_key )
         pressure = []
         if self.pressure_key:
-            pressure = wrapper.get_wrapper(problem, self.pressure_key)
+            pressure = wrapper.get_wrapper( problem, self.pressure_key )

-        for ii in range(self.N):
-            jj = self.element_id[ii]
-            sigma_ori, s_sdn = self.get_joint_stress_sdn(sigma, ii)
+        for ii in range( self.N ):
+            jj = self.element_id[ ii ]
+            sigma_ori, s_sdn = self.get_joint_stress_sdn( sigma, ii )
             #print('Original stress tensor: \n{}'.format(sigma_ori))
             #print("Processed rotated tensor is \n{}".format(s_sdn))
             #################################
-            self.sigma_origin[ii] = sigma_ori
-            self.sigma_new[ii] = s_sdn
+            self.sigma_origin[ ii ] = sigma_ori
+            self.sigma_new[ ii ] = s_sdn
             #################################
             # Store the current stress values
-            self.tau[ii] = np.sqrt(s_sdn[0, 2]**2 + s_sdn[1, 2]**2)
-            self.sigma_n[ii] = -s_sdn[2, 2]
+            self.tau[ ii ] = np.sqrt( s_sdn[ 0, 2 ]**2 + s_sdn[ 1, 2 ]**2 )
+            self.sigma_n[ ii ] = -s_sdn[ 2, 2 ]
             if self.pressure_key:
-                self.pressure[ii] = pressure[jj]
+                self.pressure[ ii ] = pressure[ jj ]

             # Check failure criteria
-            dT = self.tau[ii] - (self.cohesion[ii] + self.mu[ii] * (self.sigma_n[ii]))
+            dT = self.tau[ ii ] - ( self.cohesion[ ii ] + self.mu[ ii ] * ( self.sigma_n[ ii ] ) )

             # If the failure criteria are met, then set the sdn_offset
             # to match a state between 0 and 1 events to the next failure
-            if (dT > 0.0):
+            if ( dT > 0.0 ):
                 dT += np.random.uniform() * 0.001 * self.shear_modulus
-                rake = np.arctan2(s_sdn[1, 2], s_sdn[0, 2])
-                self.sigma_sdn_offset[ii, 0, 2] = dT * np.cos(rake)
-                self.sigma_sdn_offset[ii, 1, 2] = dT * np.sin(rake)
-
-    def check_failure_criteria(self, problem):
+                rake = np.arctan2( s_sdn[ 1, 2 ], s_sdn[ 0, 2 ] )
+                self.sigma_sdn_offset[ ii, 0, 2 ] = dT * np.cos( rake )
+                self.sigma_sdn_offset[ ii, 1, 2 ] = dT * np.sin( rake )
+
+    def check_failure_criteria( self, problem ):
         """
         Check the failure criteria on each joint

@@ -278,52 +283,45 @@ def check_failure_criteria(self, problem):
             problem (pygeosx.group): GEOSX problem handle
         """
         events = []
-        sigma = wrapper.get_wrapper(problem, self.sigma_key)
+        sigma = wrapper.get_wrapper( problem, self.sigma_key )
         pressure = []
         if self.pressure_key:
-            pressure = wrapper.get_wrapper(problem, self.pressure_key)
+            pressure = wrapper.get_wrapper( problem, self.pressure_key )

-        for ii in range(self.N):
-            if ((self.fail_count[ii] == 0) | self.allow_repeating):
-                jj = self.element_id[ii]
-                sigma_ori, s_sdn = self.get_joint_stress_sdn(sigma, ii)
-                s_sdn -= self.sigma_sdn_offset[ii, ...]
+        for ii in range( self.N ):
+            if ( ( self.fail_count[ ii ] == 0 ) | self.allow_repeating ):
+                jj = self.element_id[ ii ]
+                sigma_ori, s_sdn = self.get_joint_stress_sdn( sigma, ii )
+                s_sdn -= self.sigma_sdn_offset[ ii, ...]
                 # Store the current stress values
-                self.tau[ii] = np.sqrt(s_sdn[0, 2]**2 + s_sdn[1, 2]**2)
-                self.sigma_n[ii] = -s_sdn[2, 2]
+                self.tau[ ii ] = np.sqrt( s_sdn[ 0, 2 ]**2 + s_sdn[ 1, 2 ]**2 )
+                self.sigma_n[ ii ] = -s_sdn[ 2, 2 ]
                 if self.pressure_key:
-                    self.pressure[ii] = pressure[jj]
+                    self.pressure[ ii ] = pressure[ jj ]

                 # Check failure criteria
-                fail_criteria = self.tau[ii] - (self.cohesion[ii] + self.mu[ii] * (self.sigma_n[ii]))
-                # If the failure criteria are met, then set the sdn_offset
-                # to match a state between 0 and 1 events to the next failure
-                if (fail_criteria > 0):
-                    self.fail_count[ii] += 1
-                    rake = np.arctan2(s_sdn[1, 2], s_sdn[0, 2])
-                    dT = np.random.uniform()*0.001 * self.shear_modulus
-                    self.sigma_sdn_offset[ii, 0, 2] += dT * np.cos(rake)
-                    self.sigma_sdn_offset[ii, 1, 2] += dT * np.sin(rake)
-                    events.append([self.location[ii, ...],
-                                   self.strike[ii],
-                                   self.dip[ii],
-                                   rake])
+                fail_criteria = self.tau[ ii ] - ( self.cohesion[ ii ] + self.mu[ ii ] * ( self.sigma_n[ ii ] ) )
+                # If the failure criteria are met, then set the sdn_offset
+                # to match a state between 0 and 1 events to the next failure
+                if ( fail_criteria > 0 ):
+                    self.fail_count[ ii ] += 1
+                    rake = np.arctan2( s_sdn[ 1, 2 ], s_sdn[ 0, 2 ] )
+                    dT = np.random.uniform() * 0.001 * self.shear_modulus
+                    self.sigma_sdn_offset[ ii, 0, 2 ] += dT * np.cos( rake )
+                    self.sigma_sdn_offset[ ii, 1, 2 ] += dT * np.sin( rake )
+                    events.append( [ self.location[ ii, ...], self.strike[ ii ], self.dip[ ii ], rake ] )

         return events

-    def choose_random_uniform_orientations(self):
+    def choose_random_uniform_orientations( self ):
         """
         Choose random orientations for joints that are uniform across the focal sphere
         """
-        self.strike = np.random.uniform(low=0.0,
-                                        high=2.0*np.pi,
-                                        size=self.N)
-        self.dip = np.arccos(np.random.uniform(low=-1.0,
-                                               high=1.0,
-                                               size=self.N))
+        self.strike = np.random.uniform( low=0.0, high=2.0 * np.pi, size=self.N )
+        self.dip = np.arccos( np.random.uniform( low=-1.0, high=1.0, size=self.N ) )

-    def choose_random_normal_orientations(self, strike_mean, strike_std, dip_mean, dip_std):
+    def choose_random_normal_orientations( self, strike_mean, strike_std, dip_mean, dip_std ):
         """
         Choose random normal orientations for joints

@@ -335,12 +333,12 @@ def choose_random_normal_orientations(self, strike_mean, strike_std, dip_mean, d
         """
         strike_mean_rd = strike_mean / 180.0 * np.pi
         strike_std_rd = strike_std / 180.0 * np.pi
-        dip_mean_rd = dip_mean / 180.0 * np.pi
+        dip_mean_rd = dip_mean / 180.0 * np.pi
         dip_std_rd = dip_std / 180.0 * np.pi
-        self.strike = (np.random.randn(self.N) * strike_std_rd) + strike_mean_rd
-        self.dip = (np.random.randn(self.N) * dip_std_rd) + dip_mean_rd
+        self.strike = ( np.random.randn( self.N ) * strike_std_rd ) + strike_mean_rd
+        self.dip = ( np.random.randn( self.N ) * dip_std_rd ) + dip_mean_rd

-    def get_rotation_matrix(self, strike, dip):
+    def get_rotation_matrix( self, strike, dip ):
         """
         Get the rotation matrix to convert from xyz to sdn frames

@@ -352,41 +350,35 @@
         Returns:
             np.array: Rotation matrix
         """
         #print("strike is {}, dip is {}\n".format(strike, dip))
-        stheta = np.sin(strike)
-        ctheta = np.cos(strike)
-        sdelta = np.sin(dip)
-        cdelta = np.cos(dip)
-
-        rotation_matrix_strike = np.array([[ctheta,-stheta,0],[stheta,ctheta,0],[0,0,1]])
-        rotation_matrix_dip = np.array([[cdelta,0,-sdelta],[0,1,0],[sdelta,0,cdelta]])
-        rotation_matrix = np.matmul(rotation_matrix_dip,rotation_matrix_strike)
+        stheta = np.sin( strike )
+        ctheta = np.cos( strike )
+        sdelta = np.sin( dip )
+        cdelta = np.cos( dip )
+
+        rotation_matrix_strike = np.array( [ [ ctheta, -stheta, 0 ], [ stheta, ctheta, 0 ], [ 0, 0, 1 ] ] )
+        rotation_matrix_dip = np.array( [ [ cdelta, 0, -sdelta ], [ 0, 1, 0 ], [ sdelta, 0, cdelta ] ] )
+        rotation_matrix = np.matmul( rotation_matrix_dip, rotation_matrix_strike )
         #print(rotation_matrix)
         return rotation_matrix
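A quick standalone check that this construction yields a proper rotation (R R^T = I), using hypothetical strike and dip values in radians:

import numpy as np

strike, dip = np.radians(30.0), np.radians(60.0)
st, ct = np.sin(strike), np.cos(strike)
sd, cd = np.sin(dip), np.cos(dip)
R_strike = np.array([[ct, -st, 0.0], [st, ct, 0.0], [0.0, 0.0, 1.0]])
R_dip = np.array([[cd, 0.0, -sd], [0.0, 1.0, 0.0], [sd, 0.0, cd]])
R = R_dip @ R_strike

print(np.allclose(R @ R.T, np.eye(3)))  # True: orthonormal rotation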


 class MicroseismicAnalysis():
-    def __init__(self):
+
+    def __init__( self ):
         """
         Microseismic Analysis class
         """
         self.joint_sets = []
         self.set_names = []
-        self.catalog_order = ['t',
-                              'x',
-                              'y',
-                              'z',
-                              'strike',
-                              'dip',
-                              'rake',
-                              'set_id']
-        self.catalog = {k: [] for k in self.catalog_order}
+        self.catalog_order = [ 't', 'x', 'y', 'z', 'strike', 'dip', 'rake', 'set_id' ]
+        self.catalog = { k: [] for k in self.catalog_order }
         self.catalog_requires_sync = False
         self.catalog_all = {}

-    def __len__(self):
-        return len(self.catalog_all['t'])
+    def __len__( self ):
+        return len( self.catalog_all[ 't' ] )

-    def add_joint_set(self, problem, set_name, **xargs):
+    def add_joint_set( self, problem, set_name, **xargs ):
         """
         Add a joint set to a given region

@@ -395,15 +387,15 @@ def add_joint_set(self, problem, set_name, **xargs):
             set_name (str): Joint set name
             xargs: See JointSet.__init__ and JointSet.build_joint_set
         """
-        init_args = ['region_name', 'solid_name', 'fluid_name', 'allow_repeating', 'shear_modulus']
-        joint_xargs = {k: xargs[k] for k in xargs.keys() if k in init_args}
-        build_xargs = {k: xargs[k] for k in xargs.keys() if k not in init_args}
+        init_args = [ 'region_name', 'solid_name', 'fluid_name', 'allow_repeating', 'shear_modulus' ]
+        joint_xargs = { k: xargs[ k ] for k in xargs.keys() if k in init_args }
+        build_xargs = { k: xargs[ k ] for k in xargs.keys() if k not in init_args }

-        self.joint_sets.append(JointSet(**joint_xargs))
-        self.set_names.append(set_name)
-        self.joint_sets[-1].build_joint_set(problem, **build_xargs)
+        self.joint_sets.append( JointSet( **joint_xargs ) )
+        self.set_names.append( set_name )
+        self.joint_sets[ -1 ].build_joint_set( problem, **build_xargs )

-    def check_critical_stress(self, problem):
+    def check_critical_stress( self, problem ):
         """
         Check critical stress for each joint set, updating the stress offset values

@@ -412,9 +404,9 @@
             problem (pygeosx.group): GEOSX problem handle
         """
         for joint_set in self.joint_sets:
-            joint_set.check_critical_stress(problem)
+            joint_set.check_critical_stress( problem )

-    def check_microseismic_activity(self, problem):
+    def check_microseismic_activity( self, problem ):
         """
         Check microseismic activity for each joint set, and save any events to the catalog

@@ -422,31 +414,31 @@
         Args:
             problem (pygeosx.group): GEOSX problem handle
         """
-        time = wrapper.get_wrapper(problem, 'Events/time')[0]
+        time = wrapper.get_wrapper( problem, 'Events/time' )[ 0 ]
         self.catalog_requires_sync = True
-        for ii, joint_set in enumerate(self.joint_sets):
-            events = joint_set.check_failure_criteria(problem)
+        for ii, joint_set in enumerate( self.joint_sets ):
+            events = joint_set.check_failure_criteria( problem )
             for event in events:
-                self.catalog['t'].append(time)
-                self.catalog['x'].append(event[0][0])
-                self.catalog['y'].append(event[0][1])
-                self.catalog['z'].append(event[0][2])
-                self.catalog['strike'].append(event[1] * 180.0 / np.pi)
-                self.catalog['dip'].append(event[2] * 180.0 / np.pi)
-                self.catalog['rake'].append(event[3] * 180.0 / np.pi)
-                self.catalog['set_id'].append(ii)
-
-    def sync_catalog(self):
+                self.catalog[ 't' ].append( time )
+                self.catalog[ 'x' ].append( event[ 0 ][ 0 ] )
+                self.catalog[ 'y' ].append( event[ 0 ][ 1 ] )
+                self.catalog[ 'z' ].append( event[ 0 ][ 2 ] )
+                self.catalog[ 'strike' ].append( event[ 1 ] * 180.0 / np.pi )
+                self.catalog[ 'dip' ].append( event[ 2 ] * 180.0 / np.pi )
+                self.catalog[ 'rake' ].append( event[ 3 ] * 180.0 / np.pi )
+                self.catalog[ 'set_id' ].append( ii )
+
+    def sync_catalog( self ):
         if self.catalog_requires_sync:
             self.catalog_requires_sync = False
-            if (parallel_io.comm.size == 1):
+            if ( parallel_io.comm.size == 1 ):
                 # This is a serial run
-                self.catalog_all = {k: np.ascontiguousarray(v) for k, v in self.catalog.items()}
+                self.catalog_all = { k: np.ascontiguousarray( v ) for k, v in self.catalog.items() }
             else:
                 for k, v in self.catalog.items():
-                    self.catalog_all[k] = np.ascontiguousarray(parallel_io.gather_array(v))
+                    self.catalog_all[ k ] = np.ascontiguousarray( parallel_io.gather_array( v ) )

-    def save_catalog_csv(self, fname):
+    def save_catalog_csv( self, fname ):
         """
         Save the catalog to a csv format file

@@ -454,16 +446,12 @@
             fname (str): Name of the file (with no extension)
         """
         self.sync_catalog()
-        if (parallel_io.rank == 0):
-            catalog_header = ', '.join(self.catalog_order)
-            catalog_data = np.transpose(np.array([self.catalog_all[k] for k in self.catalog_order]))
-            np.savetxt('%s.csv' % (fname),
-                       catalog_data,
-                       fmt='%1.4f',
-                       header=catalog_header,
-                       delimiter=', ')
+        if ( parallel_io.rank == 0 ):
+            catalog_header = ', '.join( self.catalog_order )
+            catalog_data = np.transpose( np.array( [ self.catalog_all[ k ] for k in self.catalog_order ] ) )
+            np.savetxt( '%s.csv' % ( fname ), catalog_data, fmt='%1.4f', header=catalog_header, delimiter=', ' )
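Usage note (not part of the patch): the resulting file can be read back with numpy, since the header is written as a '#' comment and the columns follow catalog_order; 'catalog.csv' is an assumed output name:

import numpy as np

# Columns: t, x, y, z, strike, dip, rake, set_id (angles in degrees)
data = np.loadtxt('catalog.csv', delimiter=',', ndmin=2)
t, x, y, z, strike, dip, rake, set_id = data.T
print('Number of events:', len(t))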
-    def save_catalog_vtk(self, fname):
+    def save_catalog_vtk( self, fname ):
         """
         Save the catalog to a vtk format file

@@ -471,86 +459,82 @@
             fname (str): Name of the file (with no extension)
         """
         self.sync_catalog()
-        if (parallel_io.rank == 0):
-            if len(self.catalog_all['x']):
-                pointsToVTK(fname,
-                            self.catalog_all['x'],
-                            self.catalog_all['y'],
-                            self.catalog_all['z'],
-                            data=self.catalog_all)
+        if ( parallel_io.rank == 0 ):
+            if len( self.catalog_all[ 'x' ] ):
+                pointsToVTK( fname,
+                             self.catalog_all[ 'x' ],
+                             self.catalog_all[ 'y' ],
+                             self.catalog_all[ 'z' ],
+                             data=self.catalog_all )
             else:
-                empty_catalog = {k: np.zeros(1) for k in self.catalog_order}
-                pointsToVTK(fname,
-                            np.zeros(1),
-                            np.zeros(1) - 1e-6,
-                            np.zeros(1) + 1e-6,
-                            data=empty_catalog)
-
-    def save_stereonet(self, fname, density=True, poles=True):
+                empty_catalog = { k: np.zeros( 1 ) for k in self.catalog_order }
+                pointsToVTK( fname, np.zeros( 1 ), np.zeros( 1 ) - 1e-6, np.zeros( 1 ) + 1e-6, data=empty_catalog )
+
+    def save_stereonet( self, fname, density=True, poles=True ):
         import matplotlib.pyplot as plt
         import mplstereonet
-        if len(self):
+        if len( self ):
             f = plt.figure()
-            ax = f.add_subplot(111, projection='equal_angle_stereonet')
+            ax = f.add_subplot( 111, projection='equal_angle_stereonet' )
             if density:
-                cax = ax.density_contourf(self.catalog_all['strike'], self.catalog_all['dip'], measurement='poles')
-                f.colorbar(cax)
+                cax = ax.density_contourf( self.catalog_all[ 'strike' ],
+                                           self.catalog_all[ 'dip' ],
+                                           measurement='poles' )
+                f.colorbar( cax )
             if poles:
-                ax.pole(self.catalog_all['strike'], self.catalog_all['dip'])
-            ax.grid(True)
-            plt.savefig(fname)
-
+                ax.pole( self.catalog_all[ 'strike' ], self.catalog_all[ 'dip' ] )
+            ax.grid( True )
+            plt.savefig( fname )
-
     def save_all_joints( self, fname ):
         """
         Save the current state of all joints to a vtk format file

         Args:
             fname (str): Name of the file (with no extension)
         """
-        targets = ['strike', 'dip', 'mu', 'cohesion',
-                   'location', 'fail_count', 'sigma_sdn_offset',
-                   'sigma_n', 'pressure', 'tau', 'sigma_origin', 'sigma_new']
+        targets = [
+            'strike', 'dip', 'mu', 'cohesion', 'location', 'fail_count', 'sigma_sdn_offset', 'sigma_n', 'pressure',
+            'tau', 'sigma_origin', 'sigma_new'
+        ]

         # Grab all local values
-        local_values = {k: [] for k in targets}
+        local_values = { k: [] for k in targets }
         for j in self.joint_sets:
             for k in targets:
-                local_values[k].append(getattr(j, k))
+                local_values[ k ].append( getattr( j, k ) )

         # Format local_values
         for k in targets:
-            local_values[k] = np.concatenate(local_values[k], axis=0)
-        local_values['strike'] *= 180.0 / np.pi
-        local_values['dip'] *= 180.0 / np.pi
-        local_values['rank'] = np.zeros(len(local_values['strike'])) + parallel_io.rank
+            local_values[ k ] = np.concatenate( local_values[ k ], axis=0 )
+        local_values[ 'strike' ] *= 180.0 / np.pi
+        local_values[ 'dip' ] *= 180.0 / np.pi
+        local_values[ 'rank' ] = np.zeros( len( local_values[ 'strike' ] ) ) + parallel_io.rank

         # Handle parallelism, non-scalar values
         all_values = {}
         for k, v in local_values.items():
-            if (parallel_io.comm.size > 1):
-                v = parallel_io.gather_array(v)
-
-            N = np.shape(v)
-            if (len(N) == 1):
-                all_values[k] = np.ascontiguousarray(v)
-            elif (len(N) == 2):
-                for ii in range(N[1]):
-                    all_values['%s_%i' % (k, ii)] = np.ascontiguousarray(np.squeeze(v[:, ii]))
-            elif (len(N) == 3):
-                for ii in range(N[1]):
-                    for jj in range(N[2]):
-                        all_values['%s_%i_%i' % (k, ii, jj)] = np.ascontiguousarray(np.squeeze(v[:, ii, jj]))
+            if ( parallel_io.comm.size > 1 ):
+                v = parallel_io.gather_array( v )
+
+            N = np.shape( v )
+            if ( len( N ) == 1 ):
+                all_values[ k ] = np.ascontiguousarray( v )
+            elif ( len( N ) == 2 ):
+                for ii in range( N[ 1 ] ):
+                    all_values[ '%s_%i' % ( k, ii ) ] = np.ascontiguousarray( np.squeeze( v[ :, ii ] ) )
+            elif ( len( N ) == 3 ):
+                for ii in range( N[ 1 ] ):
+                    for jj in range( N[ 2 ] ):
+                        all_values[ '%s_%i_%i' % ( k, ii, jj ) ] = np.ascontiguousarray( np.squeeze( v[ :, ii, jj ] ) )
             else:
-                print('Unrecognized shape!')
+                print( 'Unrecognized shape!' )

         # Write the vtk file
-        if (parallel_io.rank == 0):
-            pointsToVTK(fname,
-                        all_values['location_0'],
-                        all_values['location_1'],
-                        all_values['location_2'],
-                        data=all_values)
-
-
+        if ( parallel_io.rank == 0 ):
+            pointsToVTK( fname,
+                         all_values[ 'location_0' ],
+                         all_values[ 'location_1' ],
+                         all_values[ 'location_2' ],
+                         data=all_values )
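Usage note (not part of the patch): a rough sketch of how this class might be driven from a pygeosx time loop. The problem handle, set name, and keyword arguments below are illustrative assumptions, not a prescribed configuration:

import pygeosx
from geos.pygeos_tools.geophysics.microseismic import MicroseismicAnalysis

# problem = pygeosx.initialize(rank, sys.argv) in a real driver script
ms = MicroseismicAnalysis()
ms.add_joint_set(problem, 'set_a', region_name='Region1', shear_modulus=3.0e10)
ms.check_critical_stress(problem)  # set initial stress offsets

while pygeosx.run() != pygeosx.COMPLETED:
    ms.check_microseismic_activity(problem)

ms.save_catalog_csv('catalog')
ms.save_catalog_vtk('catalog')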
diff --git a/pygeos-tools/src/geos/pygeos_tools/parallel_io.py b/pygeos-tools/src/geos/pygeos_tools/parallel_io.py
new file mode 100644
index 0000000..fa7c488
--- /dev/null
+++ b/pygeos-tools/src/geos/pygeos_tools/parallel_io.py
@@ -0,0 +1,86 @@
+from mpi4py import MPI
+import numpy as np
+
+# Get the MPI rank
+comm = MPI.COMM_WORLD
+rank = comm.Get_rank()
+
+
+def get_global_array_range( local_values ):
+    # 1D arrays will return a scalar, ND arrays an array
+    N = np.shape( local_values )
+    local_min = 1e100
+    local_max = -1e100
+    if ( len( N ) > 1 ):
+        local_min = np.zeros( N[ 1 ] ) + 1e100
+        local_max = np.zeros( N[ 1 ] ) - 1e100
+
+    # For >1D arrays, keep the last dimension
+    query_axis = 0
+    if ( len( N ) > 2 ):
+        query_axis = tuple( [ ii for ii in range( 0, len( N ) - 1 ) ] )
+
+    # Ignore zero-length results
+    if len( local_values ):
+        local_min = np.amin( local_values, axis=query_axis )
+        local_max = np.amax( local_values, axis=query_axis )
+
+    # Gather the results onto rank 0
+    all_min = comm.gather( local_min, root=0 )
+    all_max = comm.gather( local_max, root=0 )
+    global_min = 1e100
+    global_max = -1e100
+    if ( rank == 0 ):
+        global_min = np.amin( np.array( all_min ), axis=0 )
+        global_max = np.amax( np.array( all_max ), axis=0 )
+
+    # Broadcast
+    global_min = comm.bcast( global_min, root=0 )
+    global_max = comm.bcast( global_max, root=0 )
+
+    return global_min, global_max
+
+
+def gather_array( local_values, allgather=False, concatenate=True ):
+    # Find buffer size
+    N = np.shape( local_values )
+    M = np.prod( N )
+    all_M = []
+    max_M = 0
+    if allgather:
+        all_M = comm.allgather( M )
+        max_M = np.amax( all_M )
+    else:
+        all_M = comm.gather( M, root=0 )
+        if ( rank == 0 ):
+            max_M = np.amax( all_M )
+        max_M = comm.bcast( max_M, root=0 )
+
+    # Pack the array into a buffer
+    send_buff = np.zeros( max_M )
+    send_buff[ :M ] = np.reshape( local_values, ( -1 ) )
+    receive_buff = np.zeros( ( comm.size, max_M ) )
+
+    # Gather the buffers
+    if allgather:
+        comm.Allgather( [ send_buff, MPI.DOUBLE ], [ receive_buff, MPI.DOUBLE ] )
+    else:
+        comm.Gather( [ send_buff, MPI.DOUBLE ], [ receive_buff, MPI.DOUBLE ], root=0 )
+
+    # Unpack the buffers
+    all_values = []
+    R = list( N )
+    R[ 0 ] = -1
+    if ( ( rank == 0 ) | allgather ):
+        # Reshape each rank's contribution
+        for ii in range( comm.size ):
+            if ( all_M[ ii ] > 0 ):
+                tmp = np.reshape( receive_buff[ ii, :all_M[ ii ] ], R )
+                all_values.append( tmp )
+
+        # Concatenate into a single array
+        if concatenate:
+            if ( len( all_values ) ):
+                all_values = np.concatenate( all_values, axis=0 )
+
+    return all_values
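For reference, a minimal sketch of gather_array in isolation (run under MPI, e.g. mpirun -n 4 python demo.py; the array contents are illustrative):

import numpy as np
from geos.pygeos_tools import parallel_io

# Each rank contributes a different number of rows of the same width
local = np.full((parallel_io.rank + 1, 3), float(parallel_io.rank))
gathered = parallel_io.gather_array(local)

# The concatenated result is only populated on rank 0 by default
if parallel_io.rank == 0:
    print(np.shape(gathered))  # (1 + 2 + ... + n_ranks, 3)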
diff --git a/pygeos-tools/src/geos/pygeos_tools/patches.py b/pygeos-tools/src/geos/pygeos_tools/patches.py
new file mode 100644
index 0000000..15fbb84
--- /dev/null
+++ b/pygeos-tools/src/geos/pygeos_tools/patches.py
@@ -0,0 +1,32 @@
+def patch_numpy_where():
+    """
+    Some packages (numpy, etc.) use np.where in a way that isn't
+    compliant with the API. In most cases this is fine, but when the
+    geosx environment is initialized this can cause critical errors with
+    nan inputs. This patch checks and updates the inputs to where
+    """
+    import numpy as np
+    np.where_original_fn = np.where
+    print( 'patching np.where', flush=True )
+
+    def variable_as_array_type( x, target_shape ):
+        if x is None:
+            return
+
+        if isinstance( x, ( np.ndarray, list ) ):
+            return x
+
+        y = np.empty( target_shape )
+        y[...] = x
+        return y
+
+    def flexible_where( condition, x=None, y=None ):
+        s = np.shape( condition )
+        x = variable_as_array_type( x, s )
+        y = variable_as_array_type( y, s )
+        return np.where_original_fn( condition, x, y )
+
+    np.where = flexible_where
+
+
+patch_numpy_where()
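The effect of the patch in isolation: scalar replacement values are expanded to the condition's shape before being handed to the original np.where. A sketch; importing the module applies the patch as a side effect:

import numpy as np
import geos.pygeos_tools.patches  # noqa: F401, applies the np.where patch on import

a = np.array([1.0, np.nan, 3.0])
# Scalar x/y are expanded to a's shape before dispatch to the original function
print(np.where(np.isnan(a), 0.0, a))  # [1. 0. 3.]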
diff --git a/pygeos-tools/src/geos/pygeos_tools/plot_tools.py b/pygeos-tools/src/geos/pygeos_tools/plot_tools.py
new file mode 100644
index 0000000..f630cc0
--- /dev/null
+++ b/pygeos-tools/src/geos/pygeos_tools/plot_tools.py
@@ -0,0 +1,115 @@
+import os
+import warnings
+import matplotlib.pyplot as plt
+from matplotlib import cm
+import matplotlib.colors as mcolors
+
+
+class HighResPlot():
+
+    def __init__( self ):
+        self.handle = plt.figure()
+        self.isAdjusted = False
+        self.colorspace = 'RGB'
+        self.compress = False
+        self.output_format = 'png'
+        self.font_weight = 'normal'
+        self.axis_font_size = 8
+        self.legend_font_size = 8
+        self.size = ( 3.5, 2.625 )
+        self.dpi = 200
+        self.apply_tight_layout = True
+        self.disable_antialiasing = False
+        self.use_imagemagick = False
+        self.apply_font_size()
+
+    def __del__( self ):
+        try:
+            self.handle.clf()
+            plt.close( self.handle.number )
+        except:
+            pass
+
+    def set_active( self ):
+        plt.figure( self.handle.number )
+
+    def reset( self ):
+        tmp = self.handle.number
+        self.set_active()
+        self.handle.clf()
+        del self.handle
+        self.handle = plt.figure( tmp )
+        self.apply_font_size()
+        self.isAdjusted = False
+
+    def apply_font_size( self ):
+        self.font = { 'weight': self.font_weight, 'size': self.axis_font_size }
+        plt.rc( 'font', **self.font )
+
+    def apply_format( self ):
+        self.set_active()
+        self.handle.set_size_inches( self.size )
+        self.apply_font_size()
+
+        for ax in self.handle.get_axes():
+            tmp = ax.get_legend()
+            if tmp:
+                ax.legend( loc=tmp._loc, fontsize=self.legend_font_size )
+
+        if self.apply_tight_layout:
+            with warnings.catch_warnings():
+                warnings.simplefilter( 'ignore' )
+                self.handle.tight_layout( pad=1.5 )
+            self.apply_tight_layout = False
+
+    def save( self, fname ):
+        self.apply_format()
+        if plt.isinteractive():
+            plt.draw()
+            plt.pause( 0.1 )
+
+        if self.use_imagemagick:
+            # Matplotlib's non-pdf layout is sometimes odd,
+            # so use the imagemagick convert as a back-up option
+            plt.savefig( '%s.pdf' % ( fname ), format='pdf', dpi=self.dpi )
+
+            if ( self.output_format != 'pdf' ):
+                cmd = 'convert -density %i %s.pdf' % ( self.dpi, fname )
+
+                if ( self.colorspace == 'CMYK' ):
+                    cmd += ' -colorspace CMYK'
+                if ( self.colorspace == 'gray' ):
+                    cmd += ' -colorspace gray'
+                if ( self.compress ):
+                    cmd += ' -compress lzw'
+                if ( self.disable_antialiasing ):
+                    cmd += ' +antialias'
+
+                cmd += ' %s.%s' % ( fname, self.output_format )
+                os.system( cmd )
+                os.system( 'rm %s.pdf' % fname )
+        else:
+            plt.savefig( '%s.%s' % ( fname, self.output_format ), format=self.output_format, dpi=self.dpi )
+
+
+def get_modified_jet_colormap():
+    # Add an updated colormap
+    cdict = cm.get_cmap( 'jet' ).__dict__[ '_segmentdata' ].copy()
+    for k in cdict.keys():
+        tmp_seq = list( cdict[ k ] )
+        tmp_final = list( tmp_seq[ 0 ] )
+        tmp_final[ 1 ] = 1.0
+        tmp_final[ 2 ] = 1.0
+        tmp_seq[ 0 ] = tuple( tmp_final )
+        cdict[ k ] = tuple( tmp_seq )
+    return mcolors.LinearSegmentedColormap( 'jet_mod', cdict )
+
+
+def get_periodic_line_style( ii ):
+    col = [ 'k', 'b', 'c', 'g', 'y', 'r', 'm' ]
+    mark = [ '-', '--', ':', '-.', 'o', '*', '^', '+' ]
+
+    jj = ii % ( len( col ) * len( mark ) )
+    style = col[ jj % len( col ) ] + mark[ int( jj / len( col ) ) ]
+
+    return style
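Usage note (not part of the patch): a typical HighResPlot round trip, drawing with pyplot and saving at print resolution ('example' is an arbitrary output name):

import matplotlib.pyplot as plt
from geos.pygeos_tools.plot_tools import HighResPlot, get_periodic_line_style

fig = HighResPlot()
fig.size = (4.0, 3.0)
for ii in range(3):
    # get_periodic_line_style cycles through color/marker combinations
    plt.plot([0.0, 1.0], [0.0, float(ii)], get_periodic_line_style(ii), label='series %i' % ii)
plt.legend()
fig.save('example')  # writes example.png at fig.dpi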
diff --git a/pygeos-tools/src/geos/pygeos_tools/wrapper.py b/pygeos-tools/src/geos/pygeos_tools/wrapper.py
index 0c154c9..4b2bc53 100644
--- a/pygeos-tools/src/geos/pygeos_tools/wrapper.py
+++ b/pygeos-tools/src/geos/pygeos_tools/wrapper.py
@@ -1,13 +1,9 @@
 import sys
 import numpy as np
-from mpi4py import MPI
 import matplotlib.pyplot as plt
 import pylvarray
 import pygeosx
-
-# Get the MPI rank
-comm = MPI.COMM_WORLD
-rank = comm.Get_rank()
+from pygeosx_tools import parallel_io


 def get_wrapper( problem, target_key, write_flag=False ):
@@ -52,7 +48,7 @@ def get_wrapper_par( problem, target_key, allgather=False, ghost_key='' ):
     Returns:
         np.ndarray: The wrapper as a numpy ndarray
     """
-    if ( comm.size == 1 ):
+    if ( parallel_io.comm.size == 1 ):
         # This is a serial problem
         return get_wrapper( problem, target_key )

@@ -66,45 +62,7 @@ def get_wrapper_par( problem, target_key, allgather=False, ghost_key='' ):
         ghost_values = get_wrapper( problem, ghost_key )
         local_values = local_values[ ghost_values < -0.5 ]

-    # Find buffer size
-    N = np.shape( local_values )
-    M = np.prod( N )
-    all_M = []
-    max_M = 0
-    if allgather:
-        all_M = comm.allgather( M )
-        max_M = np.amax( all_M )
-    else:
-        all_M = comm.gather( M, root=0 )
-        if ( rank == 0 ):
-            max_M = np.amax( all_M )
-        max_M = comm.bcast( max_M, root=0 )
-
-    # Pack the array into a buffer
-    send_buff = np.zeros( max_M )
-    send_buff[ :M ] = np.reshape( local_values, ( -1 ) )
-    receive_buff = np.zeros( ( comm.size, max_M ) )
-
-    # Gather the buffers
-    if allgather:
-        comm.Allgather( [ send_buff, MPI.DOUBLE ], [ receive_buff, MPI.DOUBLE ] )
-    else:
-        comm.Gather( [ send_buff, MPI.DOUBLE ], [ receive_buff, MPI.DOUBLE ], root=0 )
-
-    # Unpack the buffers
-    all_values = []
-    R = list( N )
-    R[ 0 ] = -1
-    if ( ( rank == 0 ) | allgather ):
-        # Reshape each rank's contribution
-        for ii in range( comm.size ):
-            if ( all_M[ ii ] > 0 ):
-                tmp = np.reshape( receive_buff[ ii, :all_M[ ii ] ], R )
-                all_values.append( tmp )
-
-    # Concatenate into a single array
-    all_values = np.concatenate( all_values, axis=0 )
-    return all_values
+    return parallel_io.gather_array( local_values, allgather=allgather )


 def gather_wrapper( problem, key, ghost_key='' ):
@@ -147,34 +105,7 @@ def get_global_value_range( problem, key ):
         tuple: The global min/max of the target
     """
     local_values = get_wrapper( problem, key )
-
-    # 1D arrays will return a scalar, ND arrays an array
-    N = np.shape( local_values )
-    local_min = 1e100
-    local_max = -1e100
-    if ( len( N ) > 1 ):
-        local_min = np.zeros( N[ 1 ] ) + 1e100
-        local_max = np.zeros( N[ 1 ] ) - 1e100
-
-    # For >1D arrays, keep the last dimension
-    query_axis = 0
-    if ( len( N ) > 2 ):
-        query_axis = tuple( [ ii for ii in range( 0, len( N ) - 1 ) ] )
-
-    # Ignore zero-length results
-    if len( local_values ):
-        local_min = np.amin( local_values, axis=query_axis )
-        local_max = np.amax( local_values, axis=query_axis )
-
-    # Gather the results onto rank 0
-    all_min = comm.gather( local_min, root=0 )
-    all_max = comm.gather( local_max, root=0 )
-    global_min = 1e100
-    global_max = -1e100
-    if ( rank == 0 ):
-        global_min = np.amin( np.array( all_min ), axis=0 )
-        global_max = np.amax( np.array( all_max ), axis=0 )
-    return global_min, global_max
+    return parallel_io.get_global_array_range( local_values )


 def print_global_value_range( problem, key, header, scale=1.0, precision='%1.4f' ):
@@ -195,7 +126,7 @@ def print_global_value_range( problem, key, header, scale=1.0, precision='%1.4f'
     global_min *= scale
     global_max *= scale

-    if ( rank == 0 ):
+    if ( parallel_io.rank == 0 ):
         if isinstance( global_min, np.ndarray ):
             min_str = ', '.join( [ precision % ( x ) for x in global_min ] )
             max_str = ', '.join( [ precision % ( x ) for x in global_max ] )
@@ -313,12 +244,12 @@ def get_matching_wrapper_path( problem, filters ):
     search_datastructure_wrappers_recursive( problem, filters, matching_paths )

     if ( len( matching_paths ) == 1 ):
-        if ( rank == 0 ):
+        if ( parallel_io.rank == 0 ):
             print( 'Found matching wrapper: %s' % ( matching_paths[ 0 ] ) )
         return matching_paths[ 0 ]
     else:
-        if ( rank == 0 ):
+        if ( parallel_io.rank == 0 ):
             print( 'Error occurred while looking for wrappers:' )
             print( 'Filters: [%s]' % ( ', '.join( filters ) ) )
             print( 'Matching wrappers: [%s]' % ( ', '.join( matching_paths ) ) )
@@ -360,7 +291,7 @@ def plot_history( records, output_root='.', save_figures=True, show_figures=True
         save_figures (bool): Flag to indicate whether figures should be saved (default = True)
         show_figures (bool): Flag to indicate whether figures should be drawn (default = False)
     """
-    if ( rank == 0 ):
+    if ( parallel_io.rank == 0 ):
         for k in records.keys():
             if ( k != 'time' ):
                 # Set the active figure
diff --git a/pygeos-tools/src/geos/pygeos_tools/xml.py b/pygeos-tools/src/geos/pygeos_tools/xml.py
new file mode 100644
index 0000000..a5d7f48
--- /dev/null
+++ b/pygeos-tools/src/geos/pygeos_tools/xml.py
@@ -0,0 +1,23 @@
+from geosx_xml_tools import xml_processor
+from pygeosx_tools import parallel_io
+
+
+def apply_xml_preprocessor( geosx_args ):
+    """
+    Applies the xml preprocessor to the input file
+    before handing it to GEOSX, and modifies the input
+    arguments to point to the new file
+
+    Args:
+        geosx_args (list): User arguments supplied to GEOSX
+    """
+    new_input_file = ''
+    file_index = geosx_args.index( '-i' ) + 1
+    if ( parallel_io.rank == 0 ):
+        print( 'Applying xml preprocessor...', flush=True )
+        new_input_file = xml_processor.process( geosx_args[ file_index ],
+                                                '%s.processed' % ( geosx_args[ file_index ] ) )
+        print( '    the compiled filename is: %s' % ( new_input_file ), flush=True )
+
+    # Broadcast and set the new input file name
+    geosx_args[ file_index ] = parallel_io.comm.bcast( new_input_file, root=0 )
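Usage note (not part of the patch): where apply_xml_preprocessor fits in a driver script. The '-i' argument convention matches the function above; the module path and the commented initialize call are shown only as hypothetical next steps:

import sys
from geos.pygeos_tools import parallel_io
from geos.pygeos_tools.xml import apply_xml_preprocessor

args = list(sys.argv)          # e.g. ['driver.py', '-i', 'model.xml']
apply_xml_preprocessor(args)   # args now points at model.xml.processed
# problem = pygeosx.initialize(parallel_io.rank, args)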
From 9317f2e1bc8a2e8ba44cf192099197adc833c356 Mon Sep 17 00:00:00 2001
From: Christopher Sherman
Date: Fri, 16 Aug 2024 10:23:47 -0700
Subject: [PATCH 3/4] Fixing pre-requisite of pygeos_tools

---
 pygeos-tools/pyproject.toml | 1 +
 1 file changed, 1 insertion(+)

diff --git a/pygeos-tools/pyproject.toml b/pygeos-tools/pyproject.toml
index 2a6f5de..df16ff5 100644
--- a/pygeos-tools/pyproject.toml
+++ b/pygeos-tools/pyproject.toml
@@ -22,6 +22,7 @@ dependencies = [
     "numpy",
     "scipy",
     "mpi4py",
+    "pyevtk"
 ]

 [tool.mypy]

From f146f3cc033407962e60190353e25dcd6a240a04 Mon Sep 17 00:00:00 2001
From: Chaoyi Wang
Date: Wed, 4 Sep 2024 13:32:45 -0700
Subject: [PATCH 4/4] update import path for microseismic analysis module

---
 pygeos-tools/src/geos/pygeos_tools/geophysics/microseismic.py | 2 +-
 pygeos-tools/src/geos/pygeos_tools/wrapper.py                 | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/pygeos-tools/src/geos/pygeos_tools/geophysics/microseismic.py b/pygeos-tools/src/geos/pygeos_tools/geophysics/microseismic.py
index fcfa990..0c45f79 100644
--- a/pygeos-tools/src/geos/pygeos_tools/geophysics/microseismic.py
+++ b/pygeos-tools/src/geos/pygeos_tools/geophysics/microseismic.py
@@ -1,5 +1,5 @@
 import numpy as np
-from pygeosx_tools import wrapper, parallel_io
+from geos.pygeos_tools import wrapper, parallel_io
 from scipy.spatial import Delaunay
 from pyevtk.hl import pointsToVTK

diff --git a/pygeos-tools/src/geos/pygeos_tools/wrapper.py b/pygeos-tools/src/geos/pygeos_tools/wrapper.py
index 4b2bc53..722c0f5 100644
--- a/pygeos-tools/src/geos/pygeos_tools/wrapper.py
+++ b/pygeos-tools/src/geos/pygeos_tools/wrapper.py
@@ -3,7 +3,7 @@
 import matplotlib.pyplot as plt
 import pylvarray
 import pygeosx
-from pygeosx_tools import parallel_io
+from geos.pygeos_tools import parallel_io


 def get_wrapper( problem, target_key, write_flag=False ):