Skip to content
Merged
Show file tree
Hide file tree
Changes from 2 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
14 changes: 7 additions & 7 deletions LoopStructural/interpolators/_geological_interpolator.py
Original file line number Diff line number Diff line change
Expand Up @@ -252,7 +252,7 @@ def set_value_constraints(self, points: np.ndarray):
points = np.hstack([points, np.ones((points.shape[0], 1))])
if points.shape[1] < self.dimensions + 2:
raise ValueError("Value points must at least have X,Y,Z,val,w")
self.data["value"] = points
self.data["value"] = points.copy()
self.n_i = points.shape[0]
self.up_to_date = False

Expand Down Expand Up @@ -282,7 +282,7 @@ def set_gradient_constraints(self, points: np.ndarray):
if points.shape[1] < self.dimensions * 2 + 1:
raise ValueError("Gradient constraints must at least have X,Y,Z,gx,gy,gz")
self.n_g = points.shape[0]
self.data["gradient"] = points
self.data["gradient"] = points.copy()
self.up_to_date = False

def set_normal_constraints(self, points: np.ndarray):
Expand All @@ -308,7 +308,7 @@ def set_normal_constraints(self, points: np.ndarray):
if points.shape[1] < self.dimensions * 2 + 1:
raise ValueError("Normal constraints must at least have X,Y,Z,nx,ny,nz")
self.n_n = points.shape[0]
self.data["normal"] = points
self.data["normal"] = points.copy()
self.up_to_date = False

def set_tangent_constraints(self, points: np.ndarray):
Expand All @@ -328,24 +328,24 @@ def set_tangent_constraints(self, points: np.ndarray):
points = np.hstack([points, np.ones((points.shape[0], 1))])
if points.shape[1] < self.dimensions * 2 + 1:
raise ValueError("Tangent constraints must at least have X,Y,Z,tx,ty,tz")
self.data["tangent"] = points
self.data["tangent"] = points.copy()
self.up_to_date = False

def set_interface_constraints(self, points: np.ndarray):
    """Store interface constraint points for the interpolator.

    Parameters
    ----------
    points : np.ndarray
        Array of interface constraint rows; presumably columns are
        X, Y, Z plus interface identifier — TODO confirm against callers,
        as this setter (unlike the other constraint setters) performs no
        column-count validation.

    Notes
    -----
    A copy of ``points`` is stored so that later mutation of the caller's
    array cannot silently change the interpolator's data. Setting
    ``up_to_date`` to ``False`` flags that the interpolator must be
    re-solved before results are used.
    """
    self.data["interface"] = points.copy()
    self.up_to_date = False

def set_value_inequality_constraints(self, points: np.ndarray):
    """Store value inequality constraints for the interpolator.

    Parameters
    ----------
    points : np.ndarray
        Array with at least ``self.dimensions + 2`` columns: the point
        coordinates (e.g. X, Y, Z) followed by lower and upper bounds
        on the scalar field value at that point.

    Raises
    ------
    ValueError
        If ``points`` has fewer than ``self.dimensions + 2`` columns.

    Notes
    -----
    A copy of ``points`` is stored so the interpolator's data cannot be
    mutated through the caller's array; ``up_to_date`` is cleared so the
    interpolator is re-solved before the next evaluation.
    """
    # Coordinates + lower + upper bound is the minimum usable row width.
    required_columns = self.dimensions + 2
    if points.shape[1] < required_columns:
        raise ValueError("Inequality constraints must at least have X,Y,Z,lower,upper")
    self.data["inequality"] = points.copy()
    self.up_to_date = False

def set_inequality_pairs_constraints(self, points: np.ndarray):
    """Store inequality-pairs constraints for the interpolator.

    Parameters
    ----------
    points : np.ndarray
        Array with at least ``self.dimensions + 1`` columns: the point
        coordinates (e.g. X, Y, Z) followed by a rock/unit identifier
        used to form the inequality pairs.

    Raises
    ------
    ValueError
        If ``points`` has fewer than ``self.dimensions + 1`` columns.

    Notes
    -----
    A copy of ``points`` is stored so the interpolator's data cannot be
    mutated through the caller's array; ``up_to_date`` is cleared so the
    interpolator is re-solved before the next evaluation.
    """
    # Coordinates + rock id is the minimum usable row width.
    required_columns = self.dimensions + 1
    if points.shape[1] < required_columns:
        raise ValueError("Inequality pairs constraints must at least have X,Y,Z,rock_id")

    self.data["inequality_pairs"] = points.copy()
    self.up_to_date = False

def get_value_constraints(self):
Expand Down
70 changes: 55 additions & 15 deletions LoopStructural/modelling/features/builders/_fault_builder.py
Original file line number Diff line number Diff line change
Expand Up @@ -295,29 +295,69 @@ def create_data_from_geometry(
self.fault_dip = fault_dip
if fault_normal_vector is None:
if fault_frame_data.loc[
np.logical_and(fault_frame_data["coord"] == 0, fault_frame_data["gx"].notna())].shape[0]>0:
fault_normal_vector = fault_frame_data.loc[
np.logical_and(fault_frame_data["coord"] == 0, fault_frame_data["gx"].notna()),
["gx", "gy", "gz"],
].to_numpy().mean(axis=0)
elif fault_frame_data.loc[
np.logical_and(fault_frame_data["coord"] == 0, fault_frame_data["nx"].notna())].shape[0]>0:
fault_normal_vector = fault_frame_data.loc[
Copy link

Copilot AI Feb 11, 2026

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

The new .loc[...] existence checks are hard to read and currently formatted in a way that’s easy to misinterpret (line breaks inside the indexer, no spaces around operators, and the closing brackets are not visually aligned). Please reformat these conditions (e.g., assign the mask to a variable and use .any()/.empty checks) to improve maintainability and reduce the chance of subtle bracket/precedence mistakes later.

Copilot uses AI. Check for mistakes.
np.logical_and(fault_frame_data["coord"] == 0, fault_frame_data["nx"].notna()),
["nx", "ny", "nz"],
].to_numpy().mean(axis=0)

else:
fault_surface_pts = fault_frame_data.loc[
np.logical_and(
fault_frame_data["coord"] == 0,
fault_frame_data["val"] == 0,
),
["X", "Y", "Z"],
].to_numpy()
fault_surface_pts = fault_surface_pts[
~np.isnan(fault_surface_pts).any(axis=1)
]

# Calculate fault strike using eigenvectors
pts = fault_trace - fault_trace.mean(axis=0)
# Calculate covariance matrix
cov_matrix = pts.T @ pts
# Get eigenvectors and eigenvalues
eigenvalues, eigenvectors = np.linalg.eigh(cov_matrix)
# Use eigenvector with largest eigenvalue as strike direction
strike_vector = eigenvectors[:, np.argmax(eigenvalues)]
strike_vector = np.append(strike_vector, 0) # Add z component
strike_vector /= np.linalg.norm(strike_vector)

fault_normal_vector = np.cross(strike_vector, [0, 0, 1])
# Rotate the fault normal vector according to the fault dip
rotation_matrix = rotation(strike_vector[None, :], np.array([90 - fault_dip]))
fault_normal_vector = np.einsum("ijk,ik->ij", rotation_matrix, fault_normal_vector[None, :])[0]
if fault_surface_pts.shape[0] >= 3:
pts_3d = fault_surface_pts - fault_surface_pts.mean(axis=0)
cov_matrix = pts_3d.T @ pts_3d
eigenvalues, eigenvectors = np.linalg.eigh(cov_matrix)
# If points span a surface, use the smallest eigenvector as plane normal
if eigenvalues[-1] > 0 and (eigenvalues[1] / eigenvalues[-1]) > 0.05:
fault_normal_vector = eigenvectors[:, 0]
Comment on lines +325 to +327
Copy link

Copilot AI Feb 11, 2026

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

The planar-vs-line decision uses a hard-coded threshold (eigenvalues[1] / eigenvalues[-1]) > 0.05. Please either (a) define this as a named constant with a brief rationale, or (b) make it a parameter, since this value strongly controls whether 3D points are treated as a surface vs. a trace and will likely need tuning across datasets.

Copilot uses AI. Check for mistakes.
else:
Copy link

Copilot AI Feb 11, 2026

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

This introduces a new code path where fault_normal_vector is derived from 3D surface points via PCA (fault_surface_pts.shape[0] >= 3). There are existing unit tests for the trace-based fallback, but no test appears to cover the new plane-normal branch (including the planar/linear discrimination). Adding a focused unit test for this new behavior would help prevent regressions.

Copilot uses AI. Check for mistakes.
# Fall back to line-on-map strike logic
pts = fault_trace - fault_trace.mean(axis=0)
cov_matrix = pts.T @ pts
eigenvalues, eigenvectors = np.linalg.eigh(cov_matrix)
strike_vector = eigenvectors[:, np.argmax(eigenvalues)]
strike_vector = np.append(strike_vector, 0) # Add z component
strike_vector /= np.linalg.norm(strike_vector)

fault_normal_vector = np.cross(strike_vector, [0, 0, 1])
rotation_matrix = rotation(
strike_vector[None, :], np.array([90 - fault_dip])
)
fault_normal_vector = np.einsum(
"ijk,ik->ij", rotation_matrix, fault_normal_vector[None, :]
)[0]
Copy link

Copilot AI Feb 11, 2026

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

The line-on-map strike fallback logic is duplicated in both branches (fault_surface_pts planar/line decision and the <3 points case). Consider extracting that block into a small local helper (e.g., compute_normal_from_trace(fault_trace, fault_dip)) so future changes/fixes don’t have to be applied in two places.

Copilot uses AI. Check for mistakes.
else:
# Fall back to line-on-map strike logic
pts = fault_trace - fault_trace.mean(axis=0)
cov_matrix = pts.T @ pts
eigenvalues, eigenvectors = np.linalg.eigh(cov_matrix)
strike_vector = eigenvectors[:, np.argmax(eigenvalues)]
strike_vector = np.append(strike_vector, 0) # Add z component
strike_vector /= np.linalg.norm(strike_vector)

fault_normal_vector = np.cross(strike_vector, [0, 0, 1])
rotation_matrix = rotation(
strike_vector[None, :], np.array([90 - fault_dip])
)
fault_normal_vector = np.einsum(
"ijk,ik->ij", rotation_matrix, fault_normal_vector[None, :]
)[0]

if not isinstance(fault_normal_vector, np.ndarray):
fault_normal_vector = np.array(fault_normal_vector)
Expand Down
Loading