Fix/docs and cleanup (#217)
* docs: add pyvista plot directives and update some documentation

* fix: InterpolatorBuilder syntax/method naming

* fix: bb global origin should be 0 if it is not being used.

Force origin/max to be numpy arrays.
Change default ordering of the regular grid to Fortran.
Rename centers to centres for English spelling.
Add cell centres to structured grid/nodes.

* fix: allow fitting of rotation to be disabled for the Euclidean transformation and ensure dimensionality is used

* update copy of points

Co-authored-by: Copilot <[email protected]>

* Add Optional back

Co-authored-by: Copilot <[email protected]>

* fix: geoh5 format for grids

* fix: adding spatially varying regularisation for fdi

* fix: interpolator builder use bounding box geometry if no other mesh details are given

* style: style fixes by ruff and autoformatting by black

* fix: change nelements for interpolator using builder kwargs

* style: black

* style: remove unused imports

* fix: allowing nelements to be updated for an interpolator

* fix: allow small structured grids (needed for tetra)

* fix: adding distance to bounding box

* style: style fixes by ruff and autoformatting by black

* style: style fixes by ruff and autoformatting by black

* fix: don't scale fdi regularisation

* fix: add helper to get interpolator support as vtk with solution as node values

* fix: if interpolation geometry changed, make sure build args are updated

* style: style fixes by ruff and autoformatting by black

* style: black autoformat

* style: style fixes by ruff and autoformatting by black

* tests: add fdi test for structural frame and make isclose less sensitive

* tests: reducing sensitivity further...

---------

Co-authored-by: Copilot <[email protected]>
Co-authored-by: lachlangrose <[email protected]>
3 people authored Feb 18, 2025
1 parent dc4158b commit 4b9f90f
Showing 39 changed files with 815 additions and 212 deletions.
1 change: 1 addition & 0 deletions LoopStructural/__init__.py
@@ -20,6 +20,7 @@
loggers = {}
from .modelling.core.geological_model import GeologicalModel
from .interpolators._api import LoopInterpolator
from .interpolators import InterpolatorBuilder
from .datatypes import BoundingBox
from .utils import log_to_console, log_to_file, getLogger, rng, get_levels

57 changes: 47 additions & 10 deletions LoopStructural/datatypes/_bounding_box.py
@@ -43,7 +43,7 @@ def __init__(
if maximum is None and nsteps is not None and step_vector is not None:
maximum = origin + nsteps * step_vector
if origin is not None and global_origin is None:
global_origin = origin
global_origin = np.zeros(3)
self._origin = np.array(origin)
self._maximum = np.array(maximum)
self.dimensions = dimensions
@@ -90,7 +90,7 @@ def global_origin(self, global_origin):

@property
def global_maximum(self):
return self.maximum - self.origin + self._global_origin
return self.maximum + self.global_origin

@property
def valid(self):
@@ -242,6 +242,8 @@ def fit(self, locations: np.ndarray, local_coordinate: bool = False) -> Bounding
)
origin = locations.min(axis=0)
maximum = locations.max(axis=0)
origin = np.array(origin)
maximum = np.array(maximum)
if local_coordinate:
self.global_origin = origin
self.origin = np.zeros(3)
@@ -273,15 +275,50 @@ def with_buffer(self, buffer: float = 0.2) -> BoundingBox:
if self.origin is None or self.maximum is None:
raise LoopValueError("Cannot create bounding box with buffer, no origin or maximum")
# local coordinates, rescale into the original bounding boxes global coordinates
origin = self.origin - buffer * (self.maximum - self.origin)
maximum = self.maximum + buffer * (self.maximum - self.origin)
origin = self.origin - buffer * np.max(self.maximum - self.origin)
maximum = self.maximum + buffer * np.max(self.maximum - self.origin)
return BoundingBox(
origin=origin,
maximum=maximum,
global_origin=self.global_origin + origin,
global_origin=self.global_origin,
dimensions=self.dimensions,
)

# def __call__(self, xyz):
# xyz = np.array(xyz)
# if len(xyz.shape) == 1:
# xyz = xyz.reshape((1, -1))

# distances = np.maximum(0,
# np.maximum(self.global_origin+self.origin - xyz,
# xyz - self.global_maximum))
# distance = np.linalg.norm(distances, axis=1)
# distance[self.is_inside(xyz)] = -1
# return distance

def __call__(self, xyz):
# Calculate center and half-extents of the box
center = (self.maximum + self.global_origin + self.origin) / 2
half_extents = (self.maximum - self.global_origin + self.origin) / 2

# Calculate the distance from point to center
offset = np.abs(xyz - center) - half_extents

# Inside distance: negative value based on the smallest penetration
inside_distance = np.min(half_extents - np.abs(xyz - center), axis=1)

# Outside distance: length of the positive components of offset
outside_distance = np.linalg.norm(np.maximum(offset, 0))

# If any component of offset is positive, we're outside
# Otherwise, we're inside and return the negative penetration distance
distance = np.zeros(xyz.shape[0])
mask = np.any(offset > 0, axis=1)
distance[mask] = outside_distance
distance[~mask] = -inside_distance[~mask]
return distance
# return outside_distance if np.any(offset > 0) else -inside_distance

def get_value(self, name):
ix, iy = self.name_map.get(name, (-1, -1))
if ix == -1 and iy == -1:
@@ -319,7 +356,7 @@ def regular_grid(
self,
nsteps: Optional[Union[list, np.ndarray]] = None,
shuffle: bool = False,
order: str = "C",
order: str = "F",
local: bool = True,
) -> np.ndarray:
"""Get the grid of points from the bounding box
@@ -361,8 +398,8 @@
rng.shuffle(locs)
return locs

def cell_centers(self, order: str = "F") -> np.ndarray:
"""Get the cell centers of a regular grid
def cell_centres(self, order: str = "F") -> np.ndarray:
"""Get the cell centres of a regular grid
Parameters
----------
@@ -372,7 +409,7 @@ def cell_centers(self, order: str = "F") -> np.ndarray:
Returns
-------
np.ndarray
array of cell centers
array of cell centres
"""
locs = self.regular_grid(order=order, nsteps=self.nsteps - 1)

@@ -434,7 +471,7 @@ def structured_grid(
_cell_data = copy.deepcopy(cell_data)
_vertex_data = copy.deepcopy(vertex_data)
return StructuredGrid(
origin=self.global_origin,
origin=self.global_origin + self.origin,
step_vector=self.step_vector,
nsteps=self.nsteps,
cell_properties=_cell_data,
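The changes above make BoundingBox usable as a signed-distance field (the new __call__ returns negative values inside the box and positive values outside), switch with_buffer to pad every side by a fraction of the longest axis, default regular_grid to Fortran ordering, and rename cell_centers to cell_centres. A minimal sketch of the updated API, assuming the origin, maximum and nsteps keyword arguments implied by the __init__ body shown above and that regular_grid falls back to the box's own nsteps when none are given (the sketch also keeps at most one query point outside the box, because the outside branch above reduces to a single norm over all offsets):

import numpy as np
from LoopStructural import BoundingBox  # re-exported in LoopStructural/__init__.py above

# unit cube in local coordinates; global_origin now defaults to zeros
bbox = BoundingBox(origin=np.zeros(3), maximum=np.ones(3), nsteps=np.array([10, 10, 10]))

# pad every side by 20% of the longest axis length
padded = bbox.with_buffer(0.2)

# signed distance: negative for the centre point, positive for the outside point
xyz = np.array([[0.5, 0.5, 0.5], [2.0, 0.5, 0.5]])
print(bbox(xyz))

# Fortran-ordered grid nodes and cell centres (one fewer step per axis)
nodes = bbox.regular_grid()
centres = bbox.cell_centres()
print(nodes.shape, centres.shape)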
32 changes: 30 additions & 2 deletions LoopStructural/datatypes/_structured_grid.py
@@ -44,9 +44,9 @@ def vtk(self):
z,
)
for name, data in self.properties.items():
grid[name] = data.flatten(order="F")
grid[name] = data.reshape((grid.n_points, -1), order="F")
for name, data in self.cell_properties.items():
grid.cell_data[name] = data.flatten(order="F")
grid.cell_data[name] = data.reshape((grid.n_cells, -1), order="F")
return grid

def plot(self, pyvista_kwargs={}):
@@ -63,6 +63,34 @@ def plot(self, pyvista_kwargs={}):
except ImportError:
logger.error("pyvista is required for vtk")

@property
def cell_centres(self):
x = np.linspace(
self.origin[0] + self.step_vector[0] * 0.5,
self.maximum[0] + self.step_vector[0] * 0.5,
self.nsteps[0] - 1,
)
y = np.linspace(
self.origin[1] + self.step_vector[1] * 0.5,
self.maximum[1] - self.step_vector[1] * 0.5,
self.nsteps[1] - 1,
)
z = np.linspace(
self.origin[2] + self.step_vector[2] * 0.5,
self.maximum[2] - self.step_vector[2] * 0.5,
self.nsteps[2] - 1,
)
x, y, z = np.meshgrid(x, y, z, indexing="ij")
return np.vstack([x.flatten(order='f'), y.flatten(order='f'), z.flatten(order='f')]).T

@property
def nodes(self):
x = np.linspace(self.origin[0], self.maximum[0], self.nsteps[0])
y = np.linspace(self.origin[1], self.maximum[1], self.nsteps[1])
z = np.linspace(self.origin[2], self.maximum[2], self.nsteps[2])
x, y, z = np.meshgrid(x, y, z, indexing="ij")
return np.vstack([x.flatten(order='f'), y.flatten(order='f'), z.flatten(order='f')]).T

def merge(self, other):
if not np.all(np.isclose(self.origin, other.origin)):
raise ValueError("Origin of grids must be the same")
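The reshape change in vtk() is what lets vector-valued properties survive the transfer to pyvista: flatten(order="F") produced a single 1-D array that pyvista could not map back to points, while reshape((grid.n_points, -1), order="F") yields one row per point (or per cell) with one column per component. The new nodes and cell_centres properties return Fortran-ordered (N, 3) coordinate arrays with prod(nsteps) and prod(nsteps - 1) rows respectively. A small numpy-only sketch of the reshape behaviour (the 2x2x2 grid and 3-component property are made up for illustration):

import numpy as np

# a 3-component property stored on a 2x2x2 structured grid, component as the last axis
n_points = 8
data = np.arange(n_points * 3).reshape((2, 2, 2, 3))

flat = data.flatten(order="F")                       # shape (24,): a single 1-D array with no per-point structure
per_point = data.reshape((n_points, -1), order="F")  # shape (8, 3): one 3-vector per point, Fortran point order
print(flat.shape, per_point.shape)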
18 changes: 18 additions & 0 deletions LoopStructural/interpolators/_discrete_interpolator.py
@@ -63,6 +63,20 @@ def __init__(self, support, data={}, c=None, up_to_date=False):
logger.info("Creating discrete interpolator with {} degrees of freedom".format(self.nx))
self.type = InterpolatorType.BASE_DISCRETE

def set_nelements(self, nelements: int) -> int:
return self.support.set_nelements(nelements)

@property
def n_elements(self) -> int:
"""Number of elements in the interpolator
Returns
-------
int
number of elements, positive
"""
return self.support.n_elements

@property
def nx(self) -> int:
"""Number of degrees of freedom for the interpolator
@@ -161,6 +175,7 @@ def reset(self):
"""
self.constraints = {}
self.c_ = 0
self.regularisation_scale = np.ones(self.nx)
logger.info("Resetting interpolation constraints")

def add_constraints_to_least_squares(self, A, B, idc, w=1.0, name="undefined"):
@@ -737,3 +752,6 @@ def to_dict(self):
**super().to_dict(),
# 'region_function':self.region_function,
}

def vtk(self):
return self.support.vtk({'c': self.c})
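set_nelements() forwards the request to the interpolator's support and returns the element count actually used, n_elements reads it back, and vtk() exports the support with the current solution attached under 'c'. A toy, self-contained mirror of that delegation contract (SupportLike, InterpolatorLike and DiscreteLike are made-up names for illustration, not LoopStructural classes):

from abc import ABC, abstractmethod

class SupportLike:
    # stands in for a discrete support that can be re-meshed to a target element count
    def __init__(self, n_elements: int):
        self._n = n_elements

    def set_nelements(self, nelements: int) -> int:
        self._n = int(nelements)
        return self._n

    @property
    def n_elements(self) -> int:
        return self._n

class InterpolatorLike(ABC):
    # mirrors the abstract methods added to GeologicalInterpolator below
    @abstractmethod
    def set_nelements(self, nelements: int) -> int: ...

    @property
    @abstractmethod
    def n_elements(self) -> int: ...

class DiscreteLike(InterpolatorLike):
    # mirrors DiscreteInterpolator: both members delegate to the support
    def __init__(self, support: SupportLike):
        self.support = support

    def set_nelements(self, nelements: int) -> int:
        return self.support.set_nelements(nelements)

    @property
    def n_elements(self) -> int:
        return self.support.n_elements

interp = DiscreteLike(SupportLike(1000))
print(interp.set_nelements(10000), interp.n_elements)  # 10000 10000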
75 changes: 64 additions & 11 deletions LoopStructural/interpolators/_finite_difference_interpolator.py
@@ -7,12 +7,37 @@
from ..utils import get_vectors
from ._discrete_interpolator import DiscreteInterpolator
from ..interpolators import InterpolatorType

from scipy.spatial import KDTree
from LoopStructural.utils import getLogger

logger = getLogger(__name__)


def compute_weighting(grid_points, gradient_constraint_points, alpha=10.0, sigma=1.0):
"""
Compute weights for second derivative regularization based on proximity to gradient constraints.
Parameters:
grid_points (ndarray): (N, 3) array of 3D coordinates for grid cells.
gradient_constraint_points (ndarray): (M, 3) array of 3D coordinates for gradient constraints.
alpha (float): Strength of weighting increase.
sigma (float): Decay parameter for Gaussian-like influence.
Returns:
weights (ndarray): (N,) array of weights for each grid point.
"""
# Build a KDTree with the gradient constraint locations
tree = KDTree(gradient_constraint_points)

# Find the distance from each grid point to the nearest gradient constraint
distances, _ = tree.query(grid_points, k=1)

# Compute weighting function (higher weight for nearby points)
weights = 1 + alpha * np.exp(-(distances**2) / (2 * sigma**2))

return weights


class FiniteDifferenceInterpolator(DiscreteInterpolator):
def __init__(self, grid, data={}):
"""
@@ -44,6 +69,7 @@ def __init__(self, grid, data={}):
)

self.type = InterpolatorType.FINITE_DIFFERENCE
self.use_regularisation_weight_scale = False

def setup_interpolator(self, **kwargs):
"""
@@ -76,20 +102,19 @@ def setup_interpolator(self, **kwargs):
for key in kwargs:
self.up_to_date = False
if "regularisation" in kwargs:
self.interpolation_weights["dxy"] = 0.1 * kwargs["regularisation"]
self.interpolation_weights["dyz"] = 0.1 * kwargs["regularisation"]
self.interpolation_weights["dxz"] = 0.1 * kwargs["regularisation"]
self.interpolation_weights["dxx"] = 0.1 * kwargs["regularisation"]
self.interpolation_weights["dyy"] = 0.1 * kwargs["regularisation"]
self.interpolation_weights["dzz"] = 0.1 * kwargs["regularisation"]
self.interpolation_weights["dxy"] = kwargs["regularisation"]
self.interpolation_weights["dyz"] = kwargs["regularisation"]
self.interpolation_weights["dxz"] = kwargs["regularisation"]
self.interpolation_weights["dxx"] = kwargs["regularisation"]
self.interpolation_weights["dyy"] = kwargs["regularisation"]
self.interpolation_weights["dzz"] = kwargs["regularisation"]
self.interpolation_weights[key] = kwargs[key]
# either use the default operators or the ones passed to the function
operators = kwargs.get(
"operators", self.support.get_operators(weights=self.interpolation_weights)
)
for k, o in operators.items():
self.assemble_inner(o[0], o[1], name=k)

self.use_regularisation_weight_scale = kwargs.get('use_regularisation_weight_scale', False)
self.add_norm_constraints(self.interpolation_weights["npw"])
self.add_gradient_constraints(self.interpolation_weights["gpw"])
self.add_value_constraints(self.interpolation_weights["cpw"])
Expand All @@ -101,6 +126,8 @@ def setup_interpolator(self, **kwargs):
upper_bound=kwargs.get('inequality_pair_upper_bound', np.finfo(float).eps),
lower_bound=kwargs.get('inequality_pair_lower_bound', -np.inf),
)
for k, o in operators.items():
self.assemble_inner(o[0], o[1], name=k)

def copy(self):
"""
@@ -271,6 +298,11 @@ def add_gradient_constraints(self, w=1.0):
self.add_constraints_to_least_squares(A, B, idc[inside, :], w=w, name="gradient")
A = np.einsum("ij,ijk->ik", dip_vector.T, T)
self.add_constraints_to_least_squares(A, B, idc[inside, :], w=w, name="gradient")
# self.regularisation_scale += compute_weighting(
# self.support.nodes,
# points[inside, : self.support.dimension],
# sigma=self.support.nsteps[0] * 10,
# )
if np.sum(inside) <= 0:
logger.warning(
f" {np.sum(~inside)} \
@@ -318,7 +350,24 @@ def add_norm_constraints(self, w=1.0):
)
# T*=np.product(self.support.step_vector)
# T/=self.support.step_vector[0]

# indexes, inside2 = self.support.position_to_nearby_cell_indexes(
# points[inside, : self.support.dimension]
# )
# indexes = indexes[inside2, :]

# corners = self.support.cell_corner_indexes(indexes)
# node_indexes = corners.reshape(-1, 3)
# indexes = self.support.global_node_indices(indexes)
# self.regularisation_scale[indexes] =10

self.regularisation_scale += compute_weighting(
self.support.nodes,
points[inside, : self.support.dimension],
sigma=self.support.nsteps[0] * 10,
)
# global_indexes = self.support.neighbour_global_indexes().T.astype(int)
# close_indexes =
# self.regularisation_scale[global_indexes[idc[inside,:].astype(int),]]=10
w /= 3
for d in range(self.support.dimension):

@@ -454,7 +503,11 @@ def assemble_inner(self, operator, w, name='regularisation'):
a[inside, :],
B[inside],
idc[inside, :],
w=w,
w=(
self.regularisation_scale[idc[inside, 13].astype(int)] * w
if self.use_regularisation_weight_scale
else w
),
name=name,
)
return
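The spatially varying regularisation added above works in two steps: add_norm_constraints accumulates a per-node scale of 1 + alpha * exp(-d**2 / (2 * sigma**2)), where d is the distance from each support node to the nearest norm-constraint point, and assemble_inner multiplies the smoothing (second-derivative) rows by that scale when setup_interpolator is called with use_regularisation_weight_scale=True. A standalone sketch of the weighting term, mirroring compute_weighting above on made-up coordinates:

import numpy as np
from scipy.spatial import KDTree

# a regular 11x11x11 grid of node coordinates spanning a 10x10x10 box
grid_points = np.stack(np.meshgrid(np.linspace(0, 10, 11),
                                   np.linspace(0, 10, 11),
                                   np.linspace(0, 10, 11),
                                   indexing="ij"), axis=-1).reshape(-1, 3)
constraints = np.array([[5.0, 5.0, 5.0]])  # one norm constraint in the middle

alpha, sigma = 10.0, 1.0
distances, _ = KDTree(constraints).query(grid_points, k=1)
weights = 1 + alpha * np.exp(-(distances**2) / (2 * sigma**2))

print(weights.min(), weights.max())  # ~1 far from the constraint, 1 + alpha at it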
9 changes: 9 additions & 0 deletions LoopStructural/interpolators/_geological_interpolator.py
@@ -46,6 +46,15 @@ def __init__(self, data={}, up_to_date=False):
self.dimensions = 3 # default to 3d
self.support = None

@abstractmethod
def set_nelements(self, nelements: int) -> int:
pass

@property
@abstractmethod
def n_elements(self) -> int:
pass

@property
def data(self):
return self._data