Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
148 changes: 100 additions & 48 deletions doc/src/hexdoc_hexcasting/metadata.py
Original file line number Diff line number Diff line change
@@ -1,12 +1,15 @@
import logging
import re
from abc import ABC, abstractmethod
from pathlib import Path
from typing import Any, Iterable, Literal

from hexdoc.core import IsVersion, ModResourceLoader, Properties, ResourceLocation
from hexdoc.minecraft import Tag
from hexdoc.model import HexdocModel, StripHiddenModel, ValidationContextModel
from hexdoc.utils import TRACE, RelativePath
from pydantic import Field
from pydantic import Field, TypeAdapter
from typing_extensions import override

from .utils.pattern import Direction, PatternInfo

Expand All @@ -23,16 +26,101 @@ def path(cls, modid: str) -> Path:
return Path(f"{modid}.patterns.hexdoc.json")


class BasePatternStubProps(StripHiddenModel, ABC):
    """A file that pattern info can be loaded from.

    Subclasses implement `_iter_patterns` to define how the file is parsed;
    `load_patterns` is the shared driver that collects, tags, and validates
    the results.
    """

    # Discriminator for the stub kind; subclasses narrow this to a Literal.
    type: Any
    # Location of the stub file (relative path, resolved by the loader).
    path: RelativePath
    required: bool = True
    """If `True` (the default), raise an error if no patterns were loaded from here."""

    def load_patterns(
        self,
        props: Properties,
        per_world_tag: Tag | None,
    ) -> list[PatternInfo]:
        """Load all patterns from this stub.

        If `per_world_tag` is given (0.11+), membership in that tag overrides
        each pattern's `is_per_world` flag; otherwise the value produced by
        `_iter_patterns` is kept as-is.

        Raises:
            ValueError: if `required` is True and no patterns were found.
        """
        logger.debug(f"Load {self.type} pattern stub from {self.path}")

        patterns = list[PatternInfo]()

        try:
            for pattern in self._iter_patterns(props):
                if per_world_tag is not None:
                    pattern.is_per_world = pattern.id in per_world_tag.values
                patterns.append(pattern)
        except Exception:
            # hack: notes don't seem to be working on pydantic exceptions :/
            logger.error(f"Failed to load {self.type} pattern stub from {self.path}.")
            # bare raise preserves the original traceback
            raise

        # path relative to the working directory, for readable log/error output
        pretty_path = self.path.resolve().relative_to(Path.cwd())

        if self.required and not patterns:
            raise ValueError(self._no_patterns_error.format(path=pretty_path))

        logger.info(f"Loaded {len(patterns)} patterns from {pretty_path}")
        return patterns

    @abstractmethod
    def _iter_patterns(self, props: Properties) -> Iterable[PatternInfo]:
        """Loads and iterates over the patterns from this stub.

        Note: the `is_per_world` value returned by this function should be **ignored**
        in 0.11+, since that information can be found in the per world tag.
        """

    @property
    def _no_patterns_error(self) -> str:
        # `{path}` placeholder is filled in by `load_patterns`.
        return "No patterns found in {path}, but required is True"


class RegexPatternStubProps(BasePatternStubProps):
    """Fetches pattern info by scraping source code with regex."""

    type: Literal["regex"] = "regex"
    regex: re.Pattern[str]
    per_world_value: str | None = "true"

    @override
    def _iter_patterns(self, props: Properties) -> Iterable[PatternInfo]:
        # Scan the whole source file; each regex match yields one pattern.
        source = self.path.read_text("utf-8")

        for match in self.regex.finditer(source):
            captured = match.groupdict()
            raw_name = captured["name"]

            # A name containing ":" is already a fully-qualified resource
            # location; otherwise qualify it with this mod's namespace.
            if ":" in raw_name:
                pattern_id = ResourceLocation.from_str(raw_name)
            else:
                pattern_id = props.mod_loc(raw_name)

            yield PatternInfo(
                id=pattern_id,
                startdir=Direction[captured["startdir"]],
                signature=captured["signature"],
                is_per_world=captured.get("is_per_world") == self.per_world_value,
            )

    @property
    @override
    def _no_patterns_error(self):
        # An empty result here usually means the regex didn't match anything.
        return super()._no_patterns_error + " (check the pattern regex)"


class JsonPatternStubProps(BasePatternStubProps):
    """Fetches pattern info from a JSON file."""

    type: Literal["json"]

    @override
    def _iter_patterns(self, props: Properties) -> Iterable[PatternInfo]:
        # Validate the raw file contents directly into a list of PatternInfo.
        raw = self.path.read_bytes()
        return TypeAdapter(list[PatternInfo]).validate_json(raw)


# Union of the supported pattern stub kinds; each variant declares a distinct
# `type` Literal field, so validation can tell them apart.
PatternStubProps = RegexPatternStubProps | JsonPatternStubProps


class HexProperties(StripHiddenModel):
    """Hex Casting-specific hexdoc properties."""

    # Sources to load pattern info from; optional, defaults to no stubs.
    # (The stale duplicate declaration without a default was diff residue.)
    pattern_stubs: list[PatternStubProps] = Field(default_factory=list)
    # If True, duplicate patterns are skipped with a warning instead of raising.
    allow_duplicates: bool = False


# conthext, perhaps
Expand Down Expand Up @@ -84,7 +172,7 @@ def _add_patterns_0_11(

# for each stub, load all the patterns in the file
for stub in self.hex_props.pattern_stubs:
for pattern in self._load_stub_patterns(loader.props, stub, per_world):
for pattern in stub.load_patterns(loader.props, per_world):
self._add_pattern(pattern, signatures)

def _add_patterns_0_10(
Expand All @@ -93,7 +181,7 @@ def _add_patterns_0_10(
props: Properties,
):
for stub in self.hex_props.pattern_stubs:
for pattern in self._load_stub_patterns(props, stub, None):
for pattern in stub.load_patterns(props, None):
self._add_pattern(pattern, signatures)

def _add_pattern(self, pattern: PatternInfo, signatures: dict[str, PatternInfo]):
    """Register a pattern in the lookup tables, checking for duplicates.

    A pattern is a duplicate if its id or its signature was already added.
    Duplicates raise ValueError unless `hex_props.allow_duplicates` is set,
    in which case they are skipped with a warning.
    """
    if duplicate := (
        self.patterns.get(pattern.id) or signatures.get(pattern.signature)
    ):
        message = f"pattern {pattern.id}\n{pattern}\n{duplicate}"
        if self.hex_props.allow_duplicates:
            logger.warning("Ignoring duplicate " + message)
            return
        # trailing space is required: `message` starts with "pattern ..."
        raise ValueError("Duplicate " + message)

    self.patterns[pattern.id] = pattern
    signatures[pattern.signature] = pattern

def _load_stub_patterns(
    self,
    props: Properties,
    stub: PatternStubProps,
    per_world_tag: Tag | None,
) -> list[PatternInfo]:
    """Scrape pattern definitions out of a source file using the stub's regex.

    If `per_world_tag` is provided (0.11+), membership in that tag decides
    whether each pattern is per-world; otherwise the regex capture group
    `is_per_world` is compared against `stub.per_world_value`.

    Raises:
        ValueError: if `stub.required` is True and no patterns matched.
    """
    # TODO: add Gradle task to generate json with this data. this is dumb and fragile.
    logger.debug(f"Load pattern stub from {stub.path}")
    stub_text = stub.path.read_text("utf-8")

    patterns = list[PatternInfo]()

    for match in stub.regex.finditer(stub_text):
        groups = match.groupdict()
        id = props.mod_loc(groups["name"])

        if per_world_tag is not None:
            # 0.11+: the per-world tag is the source of truth.
            is_per_world = id in per_world_tag.values
        else:
            # pre-0.11: fall back to the value captured by the regex.
            is_per_world = groups.get("is_per_world") == stub.per_world_value

        patterns.append(
            PatternInfo(
                id=id,
                startdir=Direction[groups["startdir"]],
                signature=groups["signature"],
                is_per_world=is_per_world,
            )
        )

    # Path relative to the working directory, for readable log/error messages.
    pretty_path = stub.path.resolve().relative_to(Path.cwd())

    if stub.required and not patterns:
        raise ValueError(
            f"No patterns found in {pretty_path} (check the pattern regex)"
        )

    logger.info(f"Loaded {len(patterns)} patterns from {pretty_path}")
    return patterns
5 changes: 2 additions & 3 deletions doc/src/hexdoc_hexcasting/utils/pattern.py
Original file line number Diff line number Diff line change
@@ -1,10 +1,9 @@
from enum import Enum
from typing import Annotated, Any

from pydantic import BeforeValidator, PlainSerializer

from hexdoc.core import ResourceLocation
from hexdoc.model import HexdocModel
from pydantic import BeforeValidator, PlainSerializer


class Direction(Enum):
Expand Down Expand Up @@ -49,7 +48,7 @@ class RawPatternInfo(BasePatternInfo):
r: int | None = None


class PatternInfo(BasePatternInfo):
class PatternInfo(BasePatternInfo, extra="allow"):
"""Pattern info used and exported by hexdoc for lookups."""

id: ResourceLocation
Expand Down
2 changes: 1 addition & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -98,7 +98,7 @@ enableExperimentalFeatures = true
# mostly we use strict mode
# but pyright doesn't allow decreasing error severity in strict mode
# so we need to manually specify all of the strict mode overrides so we can do that :/
typeCheckingMode = "basic"
typeCheckingMode = "standard"

strictDictionaryInference = true
strictListInference = true
Expand Down
Loading