Merged
Changes from 1 commit
Commits
36 commits
b4548f2
feat: Add JSON I/O functionality for surface points
flohorovicic Mar 19, 2025
e27ca15
feat: Add orientation data loading functionality
flohorovicic Mar 19, 2025
cb34e62
feat: Add horizontal stratigraphic model tutorial
flohorovicic Mar 19, 2025
f91fa03
fix: Update JSON loading to use surface names from series data - Add …
flohorovicic Mar 19, 2025
9b78ac2
fix: Update horizontal stratigraphic tutorial with correct data and m…
flohorovicic Mar 19, 2025
49b4f25
fix: correct IDs and positions for fault and rock1 in multiple series…
flohorovicic Mar 19, 2025
1e7b405
Added .json input file
flohorovicic Mar 19, 2025
c926296
Updated .json input file
flohorovicic Mar 19, 2025
f82abf2
Adjustments in stack-mapping for more flexible handling of faults
flohorovicic Mar 19, 2025
0f1734b
Added modules __init__ and minor changes in json module
flohorovicic Mar 19, 2025
6cf9a44
fix: Fix metadata handling in JSON I/O for proper preservation when l…
flohorovicic Mar 22, 2025
4c1d177
Updated .gitignore (only to ignore files generated by new tutorial)
flohorovicic Mar 22, 2025
61b7dec
Extended functionality to save .json and adjusted tests. Simple model…
flohorovicic Mar 23, 2025
6a40125
Added structural relations to .json and fixed error in second example
flohorovicic Mar 23, 2025
2a7d8f8
Fixed problem with loading of surface layer stack
flohorovicic Mar 23, 2025
e6fade1
Fixed stratigraphic pile handling in JSON I/O by reverting to working…
flohorovicic Mar 23, 2025
660ae65
Included name-id mapping in .json
flohorovicic Mar 24, 2025
1637ddb
Fix JSON serialization for NumPy types and update example data
flohorovicic Mar 24, 2025
e31bda4
Adjusted date format
flohorovicic Mar 24, 2025
c92878b
Simplified required json input further and added "minimal working exa…
flohorovicic Mar 25, 2025
6d1e029
Simplified minimal input even further: now only points and orientatio…
flohorovicic Mar 25, 2025
84d3332
Updated minimal json examples and comparison to minimal GemPy model
flohorovicic Mar 25, 2025
0386163
Additional fixes to get defaults right
flohorovicic Mar 25, 2025
360a103
Added default nugget value to minimize input even further
flohorovicic Mar 25, 2025
36fec0d
Updated tests and fixed code to pass tests.
flohorovicic Mar 28, 2025
949f7e0
fix: Update fault model example with correct series mapping and visua…
flohorovicic Apr 5, 2025
601e523
Improve scalar field visualization in fault model example - Add prope…
flohorovicic Apr 6, 2025
94f6fd3
Example model for a combination of series and faults from json
flohorovicic Apr 6, 2025
593dbbd
Add combination model JSON files to gitignore
flohorovicic Apr 6, 2025
a4f4264
fix: preserve colors when loading models from JSON - Added color pres…
flohorovicic Apr 6, 2025
cb5693c
test: update JSON I/O tests to verify color preservation - Added colo…
flohorovicic Apr 6, 2025
0e95fb7
Added TODOs for PR.
javoha Apr 11, 2025
369ef46
Added TODOs for PR.
javoha Apr 11, 2025
20ad605
fix: ensure NotRequired import works for both Python 3.11+ and earlie…
flohorovicic Apr 27, 2025
9d9f304
[BUG] Ensure compatibility with older Python versions
Leguark May 1, 2025
040d84a
Merge branch 'main' into fork/flohorovicic/feature/json_io
Leguark May 1, 2025
Simplified required json input further and added "minimal working example"
flohorovicic committed Mar 25, 2025
commit c92878ba83eb88a545c24517b690769e43596c10
80 changes: 80 additions & 0 deletions examples/tutorials/z_other_tutorials/json_io/05_minimal_json.py
@@ -0,0 +1,80 @@
"""
Tutorial: Minimal JSON I/O with default metadata, interpolation options, and grid settings
This tutorial demonstrates how to use the JSON I/O functionality with minimal input,
relying on default metadata values, interpolation options, and grid settings.
"""

# %%
import numpy as np
from datetime import datetime

import gempy as gp
import gempy_viewer as gpv
import json
import pyvista as pv

from gempy.modules.json_io.json_operations import JsonIO # Updated import path


# %%
# Define the model data with minimal required fields
model_data = {
    "surface_points": [
        {"x": 100.0, "y": 200.0, "z": 600.0, "id": 1, "nugget": 0.00002},
        {"x": 500.0, "y": 200.0, "z": 600.0, "id": 1, "nugget": 0.00002},
        {"x": 900.0, "y": 800.0, "z": 400.0, "id": 0, "nugget": 0.00002},
    ],
    "orientations": [
        {"x": 500.0, "y": 500.0, "z": 600.0, "G_x": 0.0, "G_y": 0.0, "G_z": 1.0, "id": 1, "nugget": 0.01, "polarity": 1},
        {"x": 500.0, "y": 500.0, "z": 400.0, "G_x": 0.0, "G_y": 0.0, "G_z": 1.0, "id": 0, "nugget": 0.01, "polarity": 1},
    ],
    "series": [
        {
            "name": "Strat_Series",
            "surfaces": ["surface_1", "surface_2"]
            # structural_relation is optional and defaults to "ERODE" when omitted
            # colors are optional
        }
    ]
}

# %%
# Save the minimal model to JSON
with open("minimal_model.json", "w") as f:
    json.dump(model_data, f, indent=4)

# Load the model from JSON
geo_model = JsonIO.load_model_from_json("minimal_model.json")

# Compute the geological model
gp.compute_model(geo_model)

p2d = gpv.plot_2d(geo_model)
# %%

# Print the model metadata (should use default values)
print("\nModel Metadata:")
print(f"Name: {geo_model.meta.name}")
print(f"Creation Date: {geo_model.meta.creation_date}")
print(f"Last Modification Date: {geo_model.meta.last_modification_date}")
print(f"Owner: {geo_model.meta.owner}")

# Print the interpolation options (should use default values)
print("\nInterpolation Options:")
print(f"Range: {geo_model.interpolation_options.kernel_options.range}")
print(f"Mesh Extraction: {geo_model.interpolation_options.mesh_extraction}")

# Print the grid settings (should use default values)
print("\nGrid Settings:")
print(f"Resolution: {geo_model.grid._dense_grid.resolution}")
print(f"Extent: {geo_model.grid._dense_grid.extent}")

# Print the structural groups
print("\nStructural Groups:")
for group in geo_model.structural_frame.structural_groups:
    print(group)

# Save the loaded model to verify the metadata, interpolation options, and grid settings are preserved
JsonIO.save_model_to_json(geo_model, "minimal_model_loaded.json")

# %%
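For reference, a quick sanity check on what the tutorial above actually writes to disk (a minimal sketch, assuming the tutorial has been run in the current working directory):

# Inspect the file written by the tutorial above
import json

with open("minimal_model.json") as f:
    minimal = json.load(f)

# Only the three top-level keys required by the JSON schema are present;
# metadata, grid_settings, and interpolation_options fall back to defaults on load.
print(sorted(minimal.keys()))  # ['orientations', 'series', 'surface_points']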
207 changes: 158 additions & 49 deletions gempy/modules/json_io/json_operations.py
@@ -31,6 +31,57 @@ def _create_id_to_name(name_to_id: Dict[str, int]) -> Dict[int, str]:
"""Create an id_to_name mapping from a name_to_id mapping."""
return {id: name for name, id in name_to_id.items()}

@staticmethod
def _calculate_default_range(grid_extent: List[float]) -> float:
"""Calculate the default range based on the model extent (room diagonal)."""
# Extract min and max coordinates
x_min, x_max = grid_extent[0], grid_extent[1]
y_min, y_max = grid_extent[2], grid_extent[3]
z_min, z_max = grid_extent[4], grid_extent[5]

# Calculate the room diagonal (Euclidean distance)
return np.sqrt((x_max - x_min)**2 + (y_max - y_min)**2 + (z_max - z_min)**2)

@staticmethod
def _calculate_default_grid_settings(surface_points: List[SurfacePoint], orientations: List[Orientation]) -> Dict[str, Any]:
"""Calculate default grid settings based on data points.

Args:
surface_points: List of surface points
orientations: List of orientations

Returns:
Dict containing grid settings with default values
"""
# Collect all x, y, z coordinates
all_x = [sp['x'] for sp in surface_points] + [ori['x'] for ori in orientations]
all_y = [sp['y'] for sp in surface_points] + [ori['y'] for ori in orientations]
all_z = [sp['z'] for sp in surface_points] + [ori['z'] for ori in orientations]

# Calculate extents
x_min, x_max = min(all_x), max(all_x)
y_min, y_max = min(all_y), max(all_y)
z_min, z_max = min(all_z), max(all_z)

# Calculate ranges
x_range = x_max - x_min
y_range = y_max - y_min
z_range = z_max - z_min

# Add 10% padding to each dimension
x_padding = x_range * 0.1
y_padding = y_range * 0.1
z_padding = z_range * 0.1

return {
"regular_grid_resolution": [10, 10, 10],
"regular_grid_extent": [
x_min - x_padding, x_max + x_padding,
y_min - y_padding, y_max + y_padding,
z_min - z_padding, z_max + z_padding
]
}

@staticmethod
def load_model_from_json(file_path: str):
"""
@@ -60,71 +111,73 @@ def load_model_from_json(file_path: str):
surface_names = []
for series in data['series']:
surface_names.extend(series['surfaces'])

# Use the id_name_mapping if available, otherwise create one from series data
if 'id_name_mapping' in data and data['id_name_mapping']:
name_to_id = data['id_name_mapping']['name_to_id']
id_to_name = JsonIO._create_id_to_name(name_to_id)

# Create ID to name mapping if not provided
if 'id_name_mapping' in data:
id_to_name = JsonIO._create_id_to_name(data['id_name_mapping']['name_to_id'])
else:
# Create a mapping from surface points to their names
surface_point_names = {}
# First, create a mapping from surface names to their IDs
surface_id_map = {}
for series in data['series']:
for i, name in enumerate(series['surfaces']):
# Find the first surface point with this name
for sp in data['surface_points']:
if sp['id'] not in surface_id_map:
surface_id_map[sp['id']] = name
break
# Create mapping from series data
id_to_name = {i: name for i, name in enumerate(surface_names)}

# Now create the mapping from IDs to names
for sp in data['surface_points']:
surface_point_names[sp['id']] = surface_id_map.get(sp['id'], f"surface_{sp['id']}")
id_to_name = surface_point_names
name_to_id = {name: id for id, name in id_to_name.items()}
# Create surface points table
surface_points_table = JsonIO._load_surface_points(data['surface_points'], id_to_name)

# Load surface points and orientations
surface_points = JsonIO._load_surface_points(data['surface_points'], id_to_name)
orientations = JsonIO._load_orientations(data['orientations'], id_to_name)
# Create orientations table
orientations_table = JsonIO._load_orientations(data['orientations'], id_to_name)

# Create structural frame
structural_frame = StructuralFrame.from_data_tables(surface_points, orientations)
structural_frame = StructuralFrame.from_data_tables(surface_points_table, orientations_table)

# Get grid settings with defaults if not provided
grid_settings = data.get('grid_settings', JsonIO._calculate_default_grid_settings(data['surface_points'], data['orientations']))

# Create grid
grid = Grid(
extent=data['grid_settings']['regular_grid_extent'],
resolution=data['grid_settings']['regular_grid_resolution']
resolution=grid_settings['regular_grid_resolution'],
extent=grid_settings['regular_grid_extent']
)

# Create interpolation options with kernel options
# Calculate default range based on model extent
default_range = JsonIO._calculate_default_range(grid_settings['regular_grid_extent'])

# Create interpolation options with defaults if not provided
interpolation_options = InterpolationOptions(
range=data['interpolation_options'].get('kernel_options', {}).get('range', 1.7),
c_o=data['interpolation_options'].get('kernel_options', {}).get('c_o', 10),
mesh_extraction=data['interpolation_options'].get('mesh_extraction', True),
number_octree_levels=data['interpolation_options'].get('number_octree_levels', 1)
range=data.get('interpolation_options', {}).get('kernel_options', {}).get('range', default_range),
c_o=data.get('interpolation_options', {}).get('kernel_options', {}).get('c_o', 10),
mesh_extraction=data.get('interpolation_options', {}).get('mesh_extraction', True),
number_octree_levels=data.get('interpolation_options', {}).get('number_octree_levels', 1)
)

# Create GeoModel
# Create GeoModel with default metadata if not provided
current_date = datetime.now().strftime("%Y-%m-%d")
model_name = data.get('metadata', {}).get('name', "GemPy Model")

model = GeoModel(
name=data['metadata']['name'],
name=model_name,
structural_frame=structural_frame,
grid=grid,
interpolation_options=interpolation_options
)

# Set the metadata with proper dates
model_meta = GeoModelMeta(
name=data['metadata']['name'],
creation_date=data['metadata'].get('creation_date', datetime.now().isoformat()),
last_modification_date=data['metadata'].get('last_modification_date', datetime.now().isoformat()),
owner=data['metadata'].get('owner', None)
name=model_name,
creation_date=data.get('metadata', {}).get('creation_date', current_date),
last_modification_date=data.get('metadata', {}).get('last_modification_date', current_date),
owner=data.get('metadata', {}).get('owner', "GemPy Modeller")
)
model.meta = model_meta

# Map series to surfaces with structural relations
mapping_object = {series['name']: series['surfaces'] for series in data['series']}
map_stack_to_surfaces(model, mapping_object, series_data=data['series'])
# Ensure each series has structural_relation set to ERODE by default
series_data = []
for series in data['series']:
series_copy = series.copy()
if 'structural_relation' not in series_copy:
series_copy['structural_relation'] = 'ERODE'
series_data.append(series_copy)
map_stack_to_surfaces(model, mapping_object, series_data=series_data)

# Set fault relations after structural groups are set up
if 'fault_relations' in data and data['fault_relations'] is not None:
@@ -356,9 +409,8 @@ def _validate_json_schema(data: Dict[str, Any]) -> bool:
Returns:
bool: True if valid, False otherwise
"""
# Check required top-level keys
required_keys = {'metadata', 'surface_points', 'orientations', 'series',
'grid_settings', 'interpolation_options'}
# Check required top-level keys (metadata, grid_settings, and interpolation_options are optional)
required_keys = {'surface_points', 'orientations', 'series'}
if not all(key in data for key in required_keys):
return False

@@ -387,17 +439,74 @@ def _validate_json_schema(data: Dict[str, Any]) -> bool:
return False
if not isinstance(ori['id'], int):
return False
if not isinstance(ori['polarity'], int) or ori['polarity'] not in {-1, 1}:
if not isinstance(ori['polarity'], int):
return False

# Validate id_name_mapping if present
if 'id_name_mapping' in data:
mapping = data['id_name_mapping']
if not isinstance(mapping, dict):
# Validate series
if not isinstance(data['series'], list):
return False

for series in data['series']:
# Only name and surfaces are required
required_series_keys = {'name', 'surfaces'}
if not all(key in series for key in required_series_keys):
return False
if not isinstance(series['name'], str):
return False
if not isinstance(series['surfaces'], list):
return False
# Validate optional fields if present
if 'structural_relation' in series and not isinstance(series['structural_relation'], str):
return False
if 'colors' in series:
if not isinstance(series['colors'], list):
return False
if not all(isinstance(color, str) for color in series['colors']):
return False

# Validate grid settings if present
if 'grid_settings' in data:
if not isinstance(data['grid_settings'], dict):
return False

required_grid_keys = {'regular_grid_resolution', 'regular_grid_extent'}
if not all(key in data['grid_settings'] for key in required_grid_keys):
return False

if not isinstance(data['grid_settings']['regular_grid_resolution'], list):
return False
if not isinstance(data['grid_settings']['regular_grid_extent'], list):
return False

# Validate interpolation options if present
if 'interpolation_options' in data:
if not isinstance(data['interpolation_options'], dict):
return False
if 'kernel_options' in data['interpolation_options']:
kernel_options = data['interpolation_options']['kernel_options']
if not isinstance(kernel_options, dict):
return False
if 'range' in kernel_options and not isinstance(kernel_options['range'], (int, float)):
return False
if 'c_o' in kernel_options and not isinstance(kernel_options['c_o'], (int, float)):
return False
if 'mesh_extraction' in data['interpolation_options'] and not isinstance(data['interpolation_options']['mesh_extraction'], bool):
return False
if 'number_octree_levels' in data['interpolation_options'] and not isinstance(data['interpolation_options']['number_octree_levels'], int):
return False

# Validate metadata if present
if 'metadata' in data:
metadata = data['metadata']
if not isinstance(metadata, dict):
return False
if 'name' in metadata and not isinstance(metadata['name'], str):
return False
if 'creation_date' in metadata and not isinstance(metadata['creation_date'], str):
return False
if 'name_to_id' not in mapping:
if 'last_modification_date' in metadata and not isinstance(metadata['last_modification_date'], str):
return False
if not isinstance(mapping['name_to_id'], dict):
if 'owner' in metadata and not isinstance(metadata['owner'], str):
return False

return True
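To make the new defaulting logic concrete, the following standalone sketch re-implements the behaviour of _calculate_default_grid_settings (10% padding per axis) and _calculate_default_range (room diagonal of the extent) for the five data points used in the minimal tutorial; it is for illustration only and does not import the helpers from gempy.

import numpy as np

# x, y, z of the three surface points and two orientations from the minimal tutorial
points = [
    (100.0, 200.0, 600.0), (500.0, 200.0, 600.0), (900.0, 800.0, 400.0),  # surface points
    (500.0, 500.0, 600.0), (500.0, 500.0, 400.0),                         # orientations
]
xs, ys, zs = zip(*points)

def padded(lo, hi, pad=0.1):
    """Extend [lo, hi] by pad times its span on each side."""
    span = hi - lo
    return lo - pad * span, hi + pad * span

extent = [*padded(min(xs), max(xs)), *padded(min(ys), max(ys)), *padded(min(zs), max(zs))]
print(extent)  # [20.0, 980.0, 140.0, 860.0, 380.0, 620.0]

# Default kriging range: Euclidean diagonal of the (padded) extent
default_range = np.sqrt((extent[1] - extent[0]) ** 2
                        + (extent[3] - extent[2]) ** 2
                        + (extent[5] - extent[4]) ** 2)
print(round(default_range, 1))  # ~1223.8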
18 changes: 9 additions & 9 deletions gempy/modules/json_io/schema.py
@@ -3,7 +3,7 @@
This module defines the expected structure of JSON files for loading and saving GemPy models.
"""

from typing import TypedDict, List, Dict, Any, Optional, Union, Sequence
from typing import TypedDict, List, Dict, Any, Optional, Union, Sequence, NotRequired

class SurfacePoint(TypedDict):
x: float
@@ -35,16 +35,16 @@ class Fault(TypedDict):
is_active: bool
surface: Surface

class Series(TypedDict):
name: str
class Series(TypedDict, total=False):
name: str # Required
id: int
is_active: bool
is_fault: bool
order_series: int
surfaces: List[Surface]
surfaces: List[str] # Required, list of surface names
faults: List[Fault]
structural_relation: str
colors: Optional[List[str]]
structural_relation: NotRequired[str] # Optional, defaults to "ERODE" when omitted
colors: NotRequired[Optional[List[str]]] # Optional list of hex color codes

class GridSettings(TypedDict):
regular_grid_resolution: List[int]
@@ -60,11 +60,11 @@ class ModelMetadata(TypedDict):
class IdNameMapping(TypedDict):
name_to_id: Dict[str, int]

class GemPyModelJson(TypedDict):
class GemPyModelJson(TypedDict, total=False):
metadata: ModelMetadata
surface_points: List[SurfacePoint]
orientations: List[Orientation]
series: List[Series]
grid_settings: GridSettings
interpolation_options: Dict[str, Any]
grid_settings: Optional[GridSettings]
interpolation_options: Optional[Dict[str, Any]]
id_name_mapping: IdNameMapping
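Finally, a short sketch of how the relaxed schema can be used for static type checking (illustration only; the import path mirrors this PR's module layout, and on Python < 3.11 NotRequired would need to come from typing_extensions, as addressed in a later commit):

from gempy.modules.json_io.schema import GemPyModelJson, Series

# Only 'name' and 'surfaces' need to be given per series; structural_relation and colors may be omitted.
strat_series: Series = {"name": "Strat_Series", "surfaces": ["surface_1", "surface_2"]}

# With total=False on GemPyModelJson, metadata, grid_settings and interpolation_options can be left out,
# although the runtime validator still requires surface_points, orientations, and series.
payload: GemPyModelJson = {
    "surface_points": [{"x": 100.0, "y": 200.0, "z": 600.0, "id": 1, "nugget": 0.00002}],
    "orientations": [{"x": 500.0, "y": 500.0, "z": 400.0, "G_x": 0.0, "G_y": 0.0, "G_z": 1.0,
                      "id": 0, "nugget": 0.01, "polarity": 1}],
    "series": [strat_series],
}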