Merge pull request #181 from duncathan/typing

typing fixes and schema validation
duncathan salt 2024-12-18 10:32:06 -06:00 committed by GitHub
commit 140f892222
10 changed files with 519 additions and 34 deletions

Python Package workflow (.github/workflows, modified)

@ -1,6 +1,8 @@
name: Python Package
on:
workflow_dispatch:
pull_request:
push:
branches:
- '*'
@ -8,6 +10,74 @@ on:
- '*'
jobs:
mypy-required:
runs-on: 'ubuntu-latest'
steps:
- name: Checkout
uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Workaround for worktree config
run: git config --unset-all extensions.worktreeConfig || true
- name: Set up Python
uses: actions/setup-python@v5
with:
python-version: "3.10"
cache: "pip"
- name: Install Python packages
run: |
python -m pip install -e .
python -m pip install mypy
- name: Mypy on required files
uses: tsuyoshicho/action-mypy@v5
with:
github_token: ${{ secrets.github_token }}
# Change the reviewdog reporter if needed: github-pr-check, github-check, or github-pr-review.
reporter: github-check
setup_method: nothing
target: --config-file=pyproject.toml
fail_on_error: true
install_types: false
mypy-modified:
runs-on: 'ubuntu-latest'
steps:
- name: Checkout
uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Workaround for worktree config
run: git config --unset-all extensions.worktreeConfig || true
- name: Set up Python
uses: actions/setup-python@v5
with:
python-version: "3.10"
cache: "pip"
- name: Install Python packages
run: |
python -m pip install -e .
python -m pip install mypy
- name: Mypy on modified files
uses: tsuyoshicho/action-mypy@v5
with:
github_token: ${{ secrets.github_token }}
# Change the reviewdog reporter if needed: github-pr-check, github-check, or github-pr-review.
reporter: github-check
setup_method: nothing
level: warning
fail_on_error: false
install_types: false
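
Both new jobs install the package plus mypy and then run mypy through reviewdog against the configuration in pyproject.toml; mypy-required fails the build on any error, while mypy-modified only annotates changed files at warning level. A rough local equivalent of the required check, using mypy's documented Python API (a sketch, assuming `pip install -e . mypy` as in the install step above):

from mypy import api

# Same configuration the required job passes via `target:`.
stdout, stderr, exit_status = api.run(["--config-file=pyproject.toml"])
print(stdout, end="")
if exit_status != 0:
    raise SystemExit(exit_status)
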
build:
runs-on: ubuntu-latest
name: Wheel
@ -22,7 +92,7 @@ jobs:
- name: Set up Python
uses: actions/setup-python@v2
with:
python-version: "3.9"
python-version: "3.10"
- name: Install Python packages
run: python -m pip install --upgrade build pip
@ -42,7 +112,9 @@ jobs:
pypi:
runs-on: 'ubuntu-latest'
needs:
- mypy-required
- build
if: ${{ github.event_name != 'pull_request' }}
steps:
- name: Download all the dists

.github/workflows/workflow-validate.yml (new file, 33 lines)

@ -0,0 +1,33 @@
name: Check Workflow Files
on:
push:
branches:
- main
merge_group:
pull_request:
defaults:
run:
shell: bash
jobs:
actions:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v4
with:
submodules: true
- name: Install action-validator with asdf
uses: asdf-vm/actions/install@v3
with:
tool_versions: |
action-validator 0.5.1
- name: Lint Actions
run: |
find .github/workflows -type f \( -iname \*.yaml -o -iname \*.yml \) \
| xargs -I {} action-validator --verbose {}
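
To run the same check outside CI, here is a rough Python equivalent of the find | xargs pipeline above (a sketch, assuming the action-validator binary installed via asdf is on PATH):

import subprocess
from pathlib import Path

# Lint every workflow file, mirroring the shell pipeline in the job.
for workflow in sorted(Path(".github/workflows").glob("*.y*ml")):
    subprocess.run(["action-validator", "--verbose", str(workflow)], check=True)
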

.pre-commit-config.yaml (new file, 18 lines)

@ -0,0 +1,18 @@
# See https://pre-commit.com for more information
# See https://pre-commit.com/hooks.html for more hooks
repos:
- repo: https://github.com/astral-sh/ruff-pre-commit
# Ruff version.
rev: v0.8.3
hooks:
- id: ruff
args: [ --fix, --exit-non-zero-on-fix ]
- id: ruff-format
- repo: https://github.com/henriquegemignani/jsonschema-to-typeddict
rev: v1.1.1
hooks:
- id: jsonschema-to-typeddict
files: caver/schema/schema.json
args: [ --output-path, caver/schema/types.py, --root-name, CaverData ]
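
The second hook regenerates caver/schema/types.py whenever caver/schema/schema.json changes, so the generated TypedDicts below always mirror the schema. A small consistency check one could run by hand (a sketch, assuming the repository root as the working directory and that the caver package plus typing_extensions are importable):

import json
from pathlib import Path

from caver.schema import CaverdataMapsHints

schema = json.loads(Path("caver/schema/schema.json").read_text())
hint_schema = schema["properties"]["maps"]["additionalProperties"]["properties"]["hints"]["additionalProperties"]
# The generated TypedDict should require exactly the keys the schema requires.
assert set(hint_schema["required"]) == set(CaverdataMapsHints.__required_keys__)
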

caver/patcher.py (modified)

@ -1,26 +1,46 @@
from __future__ import annotations
import json
import logging
import platform as pl
import shutil
import sys
import textwrap
import typing
from enum import Enum
from pathlib import Path
from typing import Callable, Optional
from randovania_lupa import LuaRuntime # type: ignore
import logging
import shutil
import textwrap
import sys
import platform as pl
from uuid import UUID
import pre_edited_cs
from randovania_lupa import LuaRuntime # type: ignore
from caver.schema.validator_with_default import DefaultValidatingDraft7Validator
LuaFile = typing.Any
if typing.TYPE_CHECKING:
from collections.abc import Callable
from caver.schema import (
CaverData,
CaverdataMaps,
CaverdataOtherTsc,
EventNumber,
MapName,
)
CSVERSION = 5
class CaverException(Exception):
pass
class CSPlatform(Enum):
FREEWARE = "freeware"
TWEAKED = "tweaked"
def get_path() -> Path:
if getattr(sys, "frozen", False):
file_dir = Path(getattr(sys, "_MEIPASS"))
@ -28,34 +48,46 @@ def get_path() -> Path:
file_dir = Path(__file__).parent.parent
return file_dir.joinpath("caver")
def patch_files(patch_data: dict, output_dir: Path, platform: CSPlatform, progress_update: Callable[[str, float], None]):
def validate(patch_data: dict) -> None:
with Path(__file__).parent.joinpath("schema/schema.json").open() as f:
schema = json.load(f)
DefaultValidatingDraft7Validator(schema).validate(patch_data)
def patch_files(
patch_data: CaverData, output_dir: Path, platform: CSPlatform, progress_update: Callable[[str, float], None]
) -> None:
progress_update("Validating schema...", -1)
validate(typing.cast(dict, patch_data))
progress_update("Copying base files...", -1)
ensure_base_files_exist(platform, output_dir)
total = len(patch_data["maps"].keys()) + len(patch_data["other_tsc"].keys()) + 3
lua_file = get_path().joinpath("tsc_file.lua").read_text()
TscFile = LuaRuntime().execute(lua_file)
TscFile = typing.cast(LuaFile, LuaRuntime().execute(lua_file))
for i, (mapname, mapdata) in enumerate(patch_data["maps"].items()):
progress_update(f"Patching {mapname}...", i/total)
progress_update(f"Patching {mapname}...", i / total)
patch_map(mapname, mapdata, TscFile, output_dir)
for filename, scripts in patch_data["other_tsc"].items():
i += 1
progress_update(f"Patching {filename}.tsc...", i/total)
progress_update(f"Patching {filename}.tsc...", i / total)
patch_other(filename, scripts, TscFile, output_dir)
i += 1
progress_update("Copying MyChar...", i/total)
progress_update("Copying MyChar...", i / total)
patch_mychar(patch_data["mychar"], output_dir, platform is CSPlatform.TWEAKED)
i += 1
progress_update("Copying hash...", i/total)
progress_update("Copying hash...", i / total)
patch_hash(patch_data["hash"], output_dir)
i += 1
progress_update("Copying UUID...", i/total)
progress_update("Copying UUID...", i / total)
patch_uuid(patch_data["uuid"], output_dir)
if platform == CSPlatform.TWEAKED:
@ -64,27 +96,35 @@ def patch_files(patch_data: dict, output_dir: Path, platform: CSPlatform, progre
else:
output_dir.joinpath("CSTweaked").unlink()
def ensure_base_files_exist(platform: CSPlatform, output_dir: Path):
def ensure_base_files_exist(platform: CSPlatform, output_dir: Path) -> None:
internal_copy = pre_edited_cs.get_path()
version = output_dir.joinpath("data", "Stage", "_version.txt")
keep_existing_files = version.exists() and int(version.read_text()) >= CSVERSION
def should_ignore(path: str, names: list[str]):
def should_ignore(path: str, names: list[str]) -> list[str]:
base = ["__init__.py", "__pycache__", "ScriptSource", "__pyinstaller"]
if keep_existing_files:
p = Path(path)
base.extend([str(p.joinpath(name)) for name in names if p.joinpath(name).exists() and p.joinpath(name).is_file()])
base.extend(
[str(p.joinpath(name)) for name in names if p.joinpath(name).exists() and p.joinpath(name).is_file()]
)
return base
try:
shutil.copytree(internal_copy.joinpath(platform.value), output_dir, ignore=should_ignore, dirs_exist_ok=True)
shutil.copytree(internal_copy.joinpath("data"), output_dir.joinpath("data"), ignore=should_ignore, dirs_exist_ok=True)
shutil.copytree(
internal_copy.joinpath("data"), output_dir.joinpath("data"), ignore=should_ignore, dirs_exist_ok=True
)
except shutil.Error:
raise CaverException("Error copying base files. Ensure the directory is not read-only, and that Doukutsu.exe is closed")
raise CaverException(
"Error copying base files. Ensure the directory is not read-only, and that Doukutsu.exe is closed"
)
output_dir.joinpath("data", "Plaintext").mkdir(exist_ok=True)
def patch_map(mapname: str, mapdata: dict[str, dict], TscFile, output_dir: Path):
def patch_map(mapname: MapName, mapdata: CaverdataMaps, TscFile: LuaFile, output_dir: Path) -> None:
mappath = output_dir.joinpath("data", "Stage", f"{mapname}.tsc")
tsc_file = TscFile.new(TscFile, mappath.read_bytes(), logging.getLogger("caver"))
@ -95,7 +135,7 @@ def patch_map(mapname: str, mapdata: dict[str, dict], TscFile, output_dir: Path)
TscFile.placeSongAtCue(tsc_file, song["song_id"], event, song["original_id"], mapname)
for event, script in mapdata["entrances"].items():
needle = "<EVE...." # TODO: create a proper pattern
needle = "<EVE...." # TODO: create a proper pattern
TscFile.placeScriptAtEvent(tsc_file, script, event, mapname, needle)
for event, hint in mapdata["hints"].items():
@ -106,7 +146,10 @@ def patch_map(mapname: str, mapdata: dict[str, dict], TscFile, output_dir: Path)
mappath.write_bytes(bytes(chars))
output_dir.joinpath("data", "Plaintext", f"{mapname}.txt").write_text(TscFile.getPlaintext(tsc_file))
def patch_other(filename: str, scripts: dict[str, dict[str, str]], TscFile, output_dir: Path):
def patch_other(
filename: MapName, scripts: dict[EventNumber, CaverdataOtherTsc], TscFile: LuaFile, output_dir: Path
) -> None:
filepath = output_dir.joinpath("data", f"{filename}.tsc")
tsc_file = TscFile.new(TscFile, filepath.read_bytes(), logging.getLogger("caver"))
@ -117,7 +160,8 @@ def patch_other(filename: str, scripts: dict[str, dict[str, str]], TscFile, outp
filepath.write_bytes(bytes(chars))
output_dir.joinpath("data", "Plaintext", f"{filename}.txt").write_text(TscFile.getPlaintext(tsc_file))
def patch_mychar(mychar: Optional[str], output_dir: Path, add_upscale: bool):
def patch_mychar(mychar: str | None, output_dir: Path, add_upscale: bool) -> None:
if mychar is None:
return
mychar_img = Path(mychar).read_bytes()
@ -129,15 +173,18 @@ def patch_mychar(mychar: Optional[str], output_dir: Path, add_upscale: bool):
output_dir.joinpath("data", "sprites_up", "MyChar.bmp").write_bytes(mychar_up_img)
def patch_hash(hash: list[int], output_dir: Path):
def patch_hash(hash: list[int], output_dir: Path) -> None:
hash_strings = [f"{num:04d}" for num in hash]
hash_string = ",".join(hash_strings)
output_dir.joinpath("data", "hash.txt").write_text(hash_string)
def patch_uuid(uuid: str, output_dir: Path):
def patch_uuid(uuid: str, output_dir: Path) -> None:
uuid = f"{{{UUID(uuid)}}}"
output_dir.joinpath("data", "uuid.txt").write_text(uuid)
def wrap_msg_text(text: str, facepic: bool, *, ending: str = "<NOD", max_text_boxes: Optional[int] = 1) -> str:
def wrap_msg_text(text: str, facepic: bool, *, ending: str = "<NOD", max_text_boxes: int | None = 1) -> str:
hard_limit = 35
msgbox_limit = 26 if facepic else hard_limit
@ -145,19 +192,21 @@ def wrap_msg_text(text: str, facepic: bool, *, ending: str = "<NOD", max_text_bo
lines = textwrap.wrap(text, width=msgbox_limit, max_lines=max_lines)
text = ""
for i, l in enumerate(lines):
text += l
if i < len(lines)-1:
for i, line in enumerate(lines):
text += line
if i < len(lines) - 1:
if i % 3 == 2:
text += "<NOD"
if len(l) != hard_limit:
if len(line) != hard_limit:
text += "\r\n"
text += ending
return text
def create_hint_script(text: str, facepic: bool, ending: str) -> str:
"""
A desperate attempt to generate valid <MSG text. Fills one text box (up to three lines). Attempts to wrap words elegantly.
A desperate attempt to generate valid <MSG text.
Fills one text box (up to three lines). Attempts to wrap words elegantly.
"""
return f"<PRI<MSG<TUR{wrap_msg_text(text, facepic, ending=ending)}"

caver/schema/__init__.py (new file, 23 lines)

@ -0,0 +1,23 @@
from caver.schema.types import (
CaverData,
CaverdataMaps,
CaverdataMapsHints,
CaverdataMapsMusic,
CaverdataOtherTsc,
EventNumber,
MapName,
TscScript,
TscValue,
)
__all__ = [
"CaverData",
"TscValue",
"EventNumber",
"MapName",
"TscScript",
"CaverdataMapsHints",
"CaverdataMapsMusic",
"CaverdataMaps",
"CaverdataOtherTsc",
]

caver/schema/schema.json (new file, 195 lines)

@ -0,0 +1,195 @@
{
"$schema": "http://json-schema.org/draft-07/schema#",
"type": "object",
"properties": {
"$schema": {
"type": "string",
"format": "uri"
},
"maps": {
"type": "object",
"propertyNames": {
"$ref": "#/$defs/map_name"
},
"additionalProperties": {
"type": "object",
"properties": {
"pickups": {
"type": "object",
"propertyNames": {
"$ref": "#/$defs/event_number"
},
"additionalProperties": {
"$ref": "#/$defs/tsc_script"
},
"default": {}
},
"hints": {
"type": "object",
"propertyNames": {
"$ref": "#/$defs/event_number"
},
"additionalProperties": {
"type": "object",
"properties": {
"text": {
"type": "string"
},
"facepic": {
"$ref": "#/$defs/tsc_value",
"default": "0000"
},
"ending": {
"$ref": "#/$defs/tsc_script",
"default": "<END"
}
},
"required": [
"text",
"facepic",
"ending"
],
"additionalProperties": false
},
"default": {}
},
"music": {
"type": "object",
"propertyNames": {
"$ref": "#/$defs/event_number"
},
"additionalProperties": {
"type": "object",
"properties": {
"original_id": {
"$ref": "#/$defs/tsc_value"
},
"song_id": {
"$ref": "#/$defs/tsc_value"
}
},
"required": [
"original_id",
"song_id"
],
"additionalProperties": false
},
"default": {}
},
"entrances": {
"type": "object",
"propertyNames": {
"$ref": "#/$defs/event_number"
},
"additionalProperties": {
"$ref": "#/$defs/tsc_script"
},
"default": {}
}
},
"additionalProperties": false,
"required": [
"pickups",
"hints",
"music",
"entrances"
]
},
"default": {}
},
"other_tsc": {
"type": "object",
"propertyNames": {
"$ref": "#/$defs/map_name"
},
"additionalProperties": {
"type": "object",
"propertyNames": {
"$ref": "#/$defs/event_number"
},
"additionalProperties": {
"type": "object",
"properties": {
"needle": {
"type": "string",
"description": "Lua pattern to search for and replace within the event"
},
"script": {
"$ref": "#/$defs/tsc_script"
}
},
"required": [
"needle",
"script"
],
"additionalProperties": false
}
},
"default": {}
},
"mychar": {
"description": "A path to the mychar.bmp file to use (Optional)",
"anyOf": [
{
"type": "null"
},
{
"type": "string",
"pattern": "^.*\\.bmp$"
}
],
"default": null
},
"hash": {
"description": "An array of five item IDs to display on the title screen, within [1, 39]",
"type": "array",
"items": {
"type": "integer",
"minimum": 1,
"maximum": 39
},
"minItems": 5,
"maxItems": 5,
"default": [1,1,1,1,1]
},
"uuid": {
"description": "The UUID for the world in a multiworld session, or the empty UUID if single player.",
"type": "string",
"pattern": "^\\{[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}\\}$",
"default": "{00000000-0000-0000-0000-000000000000}"
},
"platform": {
"description": "Which supported platform to export to.",
"$comment": "Not actually used by the patcher.",
"type": "string",
"enum": [
"freeware",
"tweaked"
]
}
},
"required": [
"maps",
"other_tsc",
"mychar",
"hash",
"uuid"
],
"additionalProperties": false,
"$defs": {
"tsc_value": {
"type": "string",
"pattern": "^[ -~]{4}$"
},
"event_number": {
"$ref": "#/$defs/tsc_value",
"$comment": "Semantic alias for tsc_value"
},
"map_name": {
"type": "string"
},
"tsc_script": {
"type": "string"
}
}
}
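
Every top-level key except platform (and $schema) is required, so the smallest document the schema accepts describes an empty world: no maps, no extra TSC edits, no custom MyChar. A sketch validating such a document with a stock Draft 7 validator, assuming the repository root as the working directory; the default-injecting validator from caver/schema/validator_with_default.py below is only needed when the "default" values should be filled in:

import json
from pathlib import Path

from jsonschema import Draft7Validator

schema = json.loads(Path("caver/schema/schema.json").read_text())
minimal = {
    "maps": {},
    "other_tsc": {},
    "mychar": None,
    "hash": [1, 1, 1, 1, 1],
    "uuid": "{00000000-0000-0000-0000-000000000000}",
}
Draft7Validator(schema).validate(minimal)  # raises ValidationError on bad input
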

caver/schema/types.py (new file, 54 lines)

@ -0,0 +1,54 @@
# This file is generated. Manual changes will be lost
# fmt: off
# ruff: noqa
from __future__ import annotations
import typing_extensions as typ
# Definitions
TscValue: typ.TypeAlias = str
EventNumber: typ.TypeAlias = TscValue
MapName: typ.TypeAlias = str
TscScript: typ.TypeAlias = str
# Schema entries
@typ.final
class CaverdataMapsHints(typ.TypedDict):
text: str
facepic: TscValue
ending: TscScript
@typ.final
class CaverdataMapsMusic(typ.TypedDict):
original_id: TscValue
song_id: TscValue
@typ.final
class CaverdataMaps(typ.TypedDict):
pickups: dict[EventNumber, TscScript]
hints: dict[EventNumber, CaverdataMapsHints]
music: dict[EventNumber, CaverdataMapsMusic]
entrances: dict[EventNumber, TscScript]
@typ.final
class CaverdataOtherTsc(typ.TypedDict):
needle: str
script: TscScript
@typ.final
class Caverdata(typ.TypedDict):
maps: dict[MapName, CaverdataMaps]
other_tsc: dict[MapName, dict[EventNumber, CaverdataOtherTsc]]
mychar: None | str
hash: list[int]
uuid: str
platform: typ.NotRequired[str]
CaverData: typ.TypeAlias = Caverdata
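
These generated TypedDicts let mypy check patch_files() callers end to end: a dict literal annotated as CaverData type-checks without casts. A sketch with hypothetical map name, event numbers, and TSC snippets (typing_extensions, which the generated module imports, must be installed; it is also pulled in by mypy):

from caver.schema import CaverData

patch_data: CaverData = {
    "maps": {
        "Weed": {  # hypothetical map name
            "pickups": {"0100": "<FL+0001<ITM+0005"},
            "hints": {"0200": {"text": "Try looking up.", "facepic": "0000", "ending": "<END"}},
            "music": {"0090": {"original_id": "0008", "song_id": "0003"}},
            "entrances": {},
        },
    },
    "other_tsc": {},
    "mychar": None,
    "hash": [2, 7, 14, 21, 33],
    "uuid": "{00000000-0000-0000-0000-000000000000}",
}
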

caver/schema/validator_with_default.py (new file, 25 lines)

@ -0,0 +1,25 @@
from jsonschema import Draft7Validator, validators
def extend_with_default(validator_class):
validate_properties = validator_class.VALIDATORS["properties"]
def set_defaults(validator, properties, instance, schema):
for property, subschema in properties.items():
if "default" in subschema:
instance.setdefault(property, subschema["default"])
yield from validate_properties(
validator,
properties,
instance,
schema,
)
return validators.extend(
validator_class,
{"properties": set_defaults},
)
DefaultValidatingDraft7Validator = extend_with_default(Draft7Validator)
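
This is essentially the default-filling recipe from the jsonschema FAQ: the extended "properties" validator calls setdefault() on the instance before delegating to the stock property validation, so calling validate() mutates the instance in place. A tiny self-contained demonstration with a made-up schema:

from caver.schema.validator_with_default import DefaultValidatingDraft7Validator

demo_schema = {
    "type": "object",
    "properties": {
        "facepic": {"type": "string", "default": "0000"},
        "ending": {"type": "string", "default": "<END"},
    },
}
hint = {"facepic": "0005"}
DefaultValidatingDraft7Validator(demo_schema).validate(hint)
print(hint)  # prints {'facepic': '0005', 'ending': '<END'}: the missing key gained its default
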

pyproject.toml (modified)

@ -5,3 +5,18 @@ build-backend = "setuptools.build_meta"
[tool.setuptools_scm]
local_scheme = "no-local-version"
write_to = "caver/version.py"
[tool.ruff]
line-length = 120
lint.select = ["E", "F", "W", "C90", "I", "UP"]
src = ["src"]
# Version to target for generated code.
target-version = "py310"
[tool.mypy]
files = [
"caver/patcher.py",
]
follow_imports = "silent"
disallow_untyped_defs = true

setup.cfg (modified)

@ -10,17 +10,18 @@ classifiers =
License :: OSI Approved :: zlib/libpng License
Development Status :: 5 - Production/Stable
Intended Audience :: Developers
Programming Language :: Python :: 3.9
Programming Language :: Python :: 3.10
Programming Language :: Other Scripting Engines
[options]
packages = find:
install_requires =
randovania-lupa>=1.10
jsonschema>=4.0.0
include_package_data = True
zip_safe = False
python_requires = >=3.9
python_requires = >=3.10
#
# Entry Points for PyInstaller