Refactor read_trajectories allowing missing platform and/or kickoff
verveerpj committed Jul 16, 2024
1 parent c0336e6 commit 4dd8c8c
Showing 8 changed files with 272 additions and 188 deletions.
9 changes: 2 additions & 7 deletions docs/reference/well_trajectory/config.yml
@@ -153,13 +153,8 @@ platforms:
# Datatype: number
# Examples: .1, 1., 1, 1.0, 1.34E-5, 1.34e-5
# Required: False
# Default: 0.0
z: 0.0

# Datatype: number
# Examples: .1, 1., 1, 1.0, 1.34E-5, 1.34e-5
# Required: True
k: '...' # ← REPLACE
# Default: null
k: null

# Datatype: [WellConfig map]
# Required: True
3 changes: 1 addition & 2 deletions src/everest_models/jobs/fm_well_trajectory/models/config.py
@@ -82,8 +82,7 @@ class PlatformConfig(ModelConfig):
name: str
x: Annotated[float, Field(description="")]
y: Annotated[float, Field(description="")]
z: Annotated[float, Field(default=0.0, description="")]
k: Annotated[float, Field(description="")]
k: Annotated[float, Field(default=None, description="")]


class WellConfig(ModelConfig):
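Taken together with the config.yml change above, a platform entry no longer needs a kickoff depth: `z` is gone from the model and `k` now defaults to `None`. Below is a minimal sketch of the new behaviour, assuming `ModelConfig` is a pydantic `BaseModel`; the platform names and coordinates are made up:

    from everest_models.jobs.fm_well_trajectory.models.config import PlatformConfig

    # Kickoff omitted: 'k' falls back to its new default of None.
    platform = PlatformConfig(name="PLAT_A", x=1000.0, y=2000.0)
    assert platform.k is None

    # Kickoff given explicitly.
    platform_ko = PlatformConfig(name="PLAT_B", x=1500.0, y=2500.0, k=300.0)
    assert platform_ko.k == 300.0

Downstream code is then expected to check whether the kickoff is `None`, as the refactored read_trajectories.py does below.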
243 changes: 123 additions & 120 deletions src/everest_models/jobs/fm_well_trajectory/read_trajectories.py
@@ -1,8 +1,7 @@
import itertools
import logging
from enum import Enum, EnumMeta
from pathlib import Path
from typing import Any, Dict, Iterable, Iterator, Optional, Tuple
from typing import Any, Dict, Final, Iterable, NamedTuple, Optional, Tuple

import numpy

@@ -12,160 +11,164 @@

logger = logging.getLogger("well_trajectory")

P1 = ("p1_x", "p1_y", "p1_z")
P2 = ("p2_a", "p2_b", "p2_c")
P3 = ("p3_x", "p3_y", "p3_z")


class ConstantEnumMeta(EnumMeta):
def __getattribute__(self, __name: str) -> Any:
# Return the value directly for enum members
return (
attribute.value
if isinstance(attribute := super().__getattribute__(__name), Enum) # type: ignore
else attribute
)
P1: Final = tuple(f"p1_{tag}" for tag in ("x", "y", "z"))
P2: Final = tuple(f"p2_{tag}" for tag in ("a", "b", "c"))
P3: Final = tuple(f"p3_{tag}" for tag in ("x", "y", "z"))
PLATFORMS: Final = tuple(f"platform_{tag}" for tag in ("x", "y", "k"))

ROUND = 3

class PLATFORM_FILES(Enum, metaclass=ConstantEnumMeta):
X = "platform_x"
Y = "platform_y"
Z = "platform_z"
K = "platform_k"

@classmethod
def iter(cls):
return (x.value for x in cls)
class _Point(NamedTuple):
x: float
y: float
z: float


ROUND = 3
def _rescale(point: float, scale: float, reference: float):
return scale * point + reference


def _rescale_point(scale: float, point: float, reference: float):
return scale * point + reference
def _read_files(*args: Tuple[str, ...]) -> Dict[str, Any]:
return {
filename: (load_json(Path(filename).with_suffix(".json")))
for filename in itertools.chain(*args)
if Path(filename).with_suffix(".json").exists()
}


def _read_platforms_and_kickoffs(
trajectory: Dict[str, Any],
def _read_platform_and_kickoff(
input_files: Dict[str, Any],
scales: ScalesConfig,
references: ReferencesConfig,
platform: PlatformConfig,
) -> Tuple[float, float, float, float]:
def _file_value(filename: str) -> Optional[float]:
if filename in trajectory and platform.name in trajectory[filename]:
platform_config: PlatformConfig,
) -> Tuple[_Point, Optional[float]]:
def _get_from_platform_file(platform_file: str, attr: str) -> Optional[float]:
value = input_files.get(platform_file, {}).get(platform_config.name)
if value is not None:
logger.warning(
f"File: {filename}.json found, '{filename.split('_')[1]}' for '{platform.name}' in configuration ignored."
f"File: {platform_file}.json found, overriding '{attr}' "
f"for '{platform_config.name}' in configuration."
)
if filename == PLATFORM_FILES.K and (
references.k is None or scales.k is None
):
scale = getattr(scales, attr)
ref = getattr(references, attr)
if scale is None or ref is None:
raise ValueError(
"Either 'references.k' or 'scales.k' missing in configuration"
f"Either 'references.{attr}' or 'scales.{attr}' missing in configuration"
)
value = _rescale(value, scale, ref)
else:
# If necessary, get the value from the platform configuration:
value = getattr(platform_config, attr)

return trajectory[filename][platform.name]
if value is not None:
value = round(value, ROUND)

px, py = (
(
_rescale_point(scales.x, p_x, references.x),
_rescale_point(scales.y, p_y, references.y),
)
if (p_x := _file_value(PLATFORM_FILES.X))
and (p_y := _file_value(PLATFORM_FILES.Y))
else (
platform.x,
platform.y,
)
)
pz = (
_rescale_point(scales.z, p_z, references.z)
if (p_z := _file_value(PLATFORM_FILES.Z))
else platform.z
)
kz = (
_rescale_point(scales.k, k_z, references.k)
if (k_z := _file_value(PLATFORM_FILES.K))
else platform.k
)
return value

return round(px, ROUND), round(py, ROUND), round(pz, ROUND), round(kz, ROUND)
px = _get_from_platform_file("platform_x", "x")
py = _get_from_platform_file("platform_y", "y")
pk = _get_from_platform_file("platform_k", "k")

# px and py are mandatory, pk may be `None`:
assert px is not None
assert py is not None

def _read_files_from_everest() -> Dict[str, Any]:
return dict(
itertools.chain(
(
(filename, load_json(Path(filename).with_suffix(".json")))
for filename in itertools.chain(P1, P2, P3)
),
(
(filename, load_json(Path(filename).with_suffix(".json")))
for filename in PLATFORM_FILES.iter()
if Path(filename).with_suffix(".json").exists()
),
)
return _Point(x=px, y=py, z=0.0), pk


def _get_rescaled_point(
point_files: Iterable[str],
input_files: Dict[str, Any],
well_name: str,
scales: ScalesConfig,
references: ReferencesConfig,
) -> _Point:
px, py, pz = (input_files[item][well_name] for item in point_files)
return _Point(
x=round(_rescale(px, scales.x, references.x), ROUND),
y=round(_rescale(py, scales.y, references.y), ROUND),
z=round(_rescale(pz, scales.z, references.z), ROUND),
)


def _construct_midpoint(
well: str,
inputs: Dict[str, Any],
x0: float,
x2: float,
y0: float,
y2: float,
z0: float,
z2: float,
well: str, input_files: Dict[str, Any], p1: _Point, p3: _Point
) -> Tuple[float, float, float]:
a1, b1, c1 = [round(inputs[key][well], ROUND) for key in P2]
return tuple(
numpy.around(
a, b, c = [round(input_files[key][well], ROUND) for key in P2]
return _Point._make(
numpy.round(
[
b1 * (y2 - y0) + a1 * (x2 - x0) + x0,
b1 * (x0 - x2) + a1 * (y2 - y0) + y0,
z2 + c1 * (z0 - z2),
b * (p3.y - p1.y) + a * (p3.x - p1.x) + p1.x,
b * (p1.x - p3.x) + a * (p3.y - p1.y) + p1.y,
p3.z + c * (p1.z - p3.z),
],
ROUND,
)
)


def _read_trajectory(
scales: ScalesConfig,
references: ReferencesConfig,
well: WellConfig,
platform_config: Optional[PlatformConfig],
point_files: Dict[str, Any],
platform_files: Dict[str, Any],
) -> Trajectory:
p1 = _get_rescaled_point(P1, point_files, well.name, scales, references)
p3 = _get_rescaled_point(P3, point_files, well.name, scales, references)
p2 = _construct_midpoint(well.name, point_files, p1, p3)

if platform_config is None:
# Add a platform right above the first guide point:
x, y, z = [p1.x], [p1.y], [p1.z]
else:
platform_point, platform_k = _read_platform_and_kickoff(
platform_files, scales, references, platform_config
)
# The platform must be at z=0:
x, y, z = [platform_point.x], [platform_point.y], [0.0]
if platform_k is not None:
# Add the kickoff right below the platform:
x.append(platform_point.x)
y.append(platform_point.y)
z.append(platform_k)

return Trajectory(
x=numpy.array(x + [p1.x, p2.x, p3.x]),
y=numpy.array(y + [p1.y, p2.y, p3.y]),
z=numpy.array(z + [p1.z, p2.z, p3.z]),
)


def read_trajectories(
scales: ScalesConfig,
references: ReferencesConfig,
wells: WellConfig,
wells: Iterable[WellConfig],
platforms: Iterable[PlatformConfig],
) -> Dict[str, Trajectory]:
def _construct_trajectory(inputs: Dict[str, Any], well: WellConfig) -> Trajectory:
def generate_rescaled_points(values: Iterable[str]) -> Iterator[float]:
return (
_rescale_point(scale, inputs[value][well.name], reference)
for value, scale, reference in zip(
values,
scales.model_dump(exclude={"k"}).values(),
references.model_dump(exclude={"k"}).values(),
)
)

whx, why, whz, koz = _read_platforms_and_kickoffs(
inputs,
scales,
references,
platform=next(
platform for platform in platforms if platform.name == well.platform
point_files = _read_files(P1, P2, P3)
missing_files = [
point_file
for point_file in itertools.chain(P1, P2, P3)
if point_file not in point_files
]
if missing_files:
raise ValueError(f"Missing point files: {missing_files}")

platform_files = _read_files(PLATFORMS)

return {
well.name: _read_trajectory(
scales=scales,
references=references,
well=well,
platform_config=next(
(item for item in platforms if item.name == well.platform), None
),
point_files=point_files,
platform_files=platform_files,
)
x0, y0, z0 = [round(value, ROUND) for value in generate_rescaled_points(P1)]
x2, y2, z2 = [round(value, ROUND) for value in generate_rescaled_points(P3)]

x1, y1, z1 = _construct_midpoint(well.name, inputs, x0, x2, y0, y2, z0, z2)

return Trajectory(
x=numpy.array([whx, whx, x0, x1, x2]),
y=numpy.array([why, why, y0, y1, y2]),
z=numpy.array([whz, koz, z0, z1, z2]),
)

inputs = _read_files_from_everest()

return {well.name: _construct_trajectory(inputs, well) for well in wells}
for well in wells
}
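The net effect of the refactor is that `_read_trajectory` now assembles the guide-point arrays for three cases: no platform, a platform without a kickoff, and a platform with a kickoff. The sketch below only mimics that assembly with made-up coordinates; it is an illustration, not the module itself:

    import numpy

    def build_guide_points(p1, p2, p3, platform=None, kickoff=None):
        """Illustrative stand-in for the array assembly in _read_trajectory."""
        if platform is None:
            # No platform configured: start at the first guide point.
            x, y, z = [p1[0]], [p1[1]], [p1[2]]
        else:
            # The platform is always placed at z=0.
            x, y, z = [platform[0]], [platform[1]], [0.0]
            if kickoff is not None:
                # The kickoff sits directly below the platform.
                x.append(platform[0])
                y.append(platform[1])
                z.append(kickoff)
        return (
            numpy.array(x + [p1[0], p2[0], p3[0]]),
            numpy.array(y + [p1[1], p2[1], p3[1]]),
            numpy.array(z + [p1[2], p2[2], p3[2]]),
        )

    p1, p2, p3 = (100.0, 100.0, 500.0), (150.0, 150.0, 1000.0), (200.0, 200.0, 1500.0)

    # Platform with kickoff: five guide points (platform, kickoff, p1, p2, p3).
    _, _, z = build_guide_points(p1, p2, p3, platform=(0.0, 0.0), kickoff=250.0)
    assert list(z) == [0.0, 250.0, 500.0, 1000.0, 1500.0]

    # No platform: four guide points, the first duplicating p1.
    _, _, z = build_guide_points(p1, p2, p3)
    assert list(z) == [500.0, 500.0, 1000.0, 1500.0]

The actual function additionally rescales and rounds the guide points via `_get_rescaled_point` and `_construct_midpoint` before this assembly step.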
