Compare commits
No commits in common. "master" and "1.0.0" have entirely different histories.
.github/workflows/daily.yml
@@ -1,32 +0,0 @@
name: "MALCO's Daily Challenge"

on:
  schedule:
    - cron: '53 15 * * *' # 9:53 AM, UTC-6
  workflow_dispatch:

jobs:
  Build:
    runs-on: '${{ matrix.os }}'
    strategy:
      matrix:
        os:
          - ubuntu-latest
    steps:
      - uses: actions/checkout@v2
        with:
          ref: 'stable'
      - name: Install dependencies (Ubuntu)
        if: runner.os == 'Linux'
        run: |
          sudo add-apt-repository -y ppa:bartbes/love-stable
          sudo apt-get -q update
          sudo apt-get install -y xvfb love
      - name: Generate Daily
        env:
          WEBHOOK: ${{ secrets.WEBHOOK }}
        run: |
          cd $GITHUB_WORKSPACE
          sudo xvfb-run -a --server-args="-screen 0 1024x768x24" love src --daily
          cat daily.txt
          curl -H "Content-Type: application/json" -X POST -d @daily.txt "$WEBHOOK"
.github/workflows/python.yml
@@ -1,8 +1,6 @@
name: Python Package

on:
  workflow_dispatch:
  pull_request:
  push:
    branches:
      - '*'
@@ -10,74 +8,6 @@ on:
      - '*'

jobs:
  mypy-required:
    runs-on: 'ubuntu-latest'

    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Workaround for worktree config
        run: git config --unset-all extensions.worktreeConfig || true

      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: "3.10"
          cache: "pip"

      - name: Install Python packages
        run: |
          python -m pip install -e .
          python -m pip install mypy

      - name: Mypy on required files
        uses: tsuyoshicho/action-mypy@v5
        with:
          github_token: ${{ secrets.github_token }}
          # Change reviewdog reporter if you need [github-pr-check,github-check,github-pr-review].
          reporter: github-check
          setup_method: nothing
          target: --config-file=pyproject.toml
          fail_on_error: true
          install_types: false

  mypy-modified:
    runs-on: 'ubuntu-latest'

    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Workaround for worktree config
        run: git config --unset-all extensions.worktreeConfig || true

      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: "3.10"
          cache: "pip"

      - name: Install Python packages
        run: |
          python -m pip install -e .
          python -m pip install mypy

      - name: Mypy on modified files
        uses: tsuyoshicho/action-mypy@v5
        with:
          github_token: ${{ secrets.github_token }}
          # Change reviewdog reporter if you need [github-pr-check,github-check,github-pr-review].
          reporter: github-check
          setup_method: nothing
          level: warning
          fail_on_error: false
          install_types: false

  build:
    runs-on: ubuntu-latest
    name: Wheel
@@ -92,7 +22,7 @@ jobs:
      - name: Set up Python
        uses: actions/setup-python@v2
        with:
          python-version: "3.10"
          python-version: "3.9"

      - name: Install Python packages
        run: python -m pip install --upgrade build pip
@@ -104,7 +34,7 @@ jobs:
        run: python -m build --sdist

      - name: Store the packages
        uses: actions/upload-artifact@v4
        uses: actions/upload-artifact@v2
        with:
          name: python-package-distributions
          path: dist
@@ -112,13 +42,11 @@ jobs:
  pypi:
    runs-on: 'ubuntu-latest'
    needs:
      - mypy-required
      - build
    if: ${{ github.event_name != 'pull_request' }}

    steps:
      - name: Download all the dists
        uses: actions/download-artifact@v4
        uses: actions/download-artifact@v2
        with:
          name: python-package-distributions
          path: dist/
.github/workflows/workflow-validate.yml
@@ -1,33 +0,0 @@
name: Check Workflow Files

on:
  push:
    branches:
      - main

  merge_group:
  pull_request:

defaults:
  run:
    shell: bash

jobs:
  actions:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          submodules: true

      - name: Install action-validator with asdf
        uses: asdf-vm/actions/install@v3
        with:
          tool_versions: |
            action-validator 0.5.1

      - name: Lint Actions
        run: |
          find .github/workflows -type f \( -iname \*.yaml -o -iname \*.yml \) \
            | xargs -I {} action-validator --verbose {}
.gitignore
@@ -1,6 +1,12 @@
data/*
notes/*

pre_edited_cs/Doukutsu\.exe\.blbkp

pre_edited_cs/Profile*

pre_edited_cs/window\.rect

*Copy/

venv/
@@ -11,12 +17,5 @@ dist/

cave_story_randomizer.egg-info/

**/__pycache__

pre_edited_cs/**/Doukutsu\.exe\.blbkp
pre_edited_cs/**/Profile*
pre_edited_cs/**/window\.rect

pre_edited_cs/data/version.txt

pre_edited_cs/freeware/Doukutsu_backup.exe
*/__pycache__
caver/version.py
@@ -1,18 +0,0 @@
# See https://pre-commit.com for more information
# See https://pre-commit.com/hooks.html for more hooks

repos:
  - repo: https://github.com/astral-sh/ruff-pre-commit
    # Ruff version.
    rev: v0.8.3
    hooks:
      - id: ruff
        args: [ --fix, --exit-non-zero-on-fix ]
      - id: ruff-format

  - repo: https://github.com/henriquegemignani/jsonschema-to-typeddict
    rev: v1.1.1
    hooks:
      - id: jsonschema-to-typeddict
        files: caver/schema/schema.json
        args: [ --output-path, caver/schema/types.py, --root-name, CaverData ]
README.md
@@ -1,3 +1,52 @@
# Cave Story Randomizer [Open Mode]

A patcher for randomizing Cave Story. If you want to play, check out [Randovania](https://github.com/randovania/randovania)!
The Cave Story Randomizer shuffles the location of every item in Cave Story, creating a new experience each time you play! The randomizer has logic in place to ensure that you can always reach every item and finish the game. Get started by heading to the [releases page](https://github.com/cave-story-randomizer/cave-story-randomizer/releases) and downloading the most recent version! If you find yourself stuck, wanting to talk about the hilarious location you found the panties in, or just plain enjoying the game, please consider joining our [official Discord server](https://discord.gg/7zUdPEn) and hanging out!

## Main differences
Note that there are a few key differences from the vanilla game in order to improve the playing experience:

- All 5 teleporter locations in Arthur's House are active from the beginning of the game
- All other teleporters from the vanilla game are active and linked to one another at all times
- A teleporter between Sand Zone (near the Storehouse) and Labyrinth I has been placed and can be activated in one of two ways:
  1. Defeating Toroko+
  2. Using the teleporter from the Labyrinth I side
- Most cutscenes have been abridged or skipped entirely
- Jellyfish Juice can be used an infinite number of times
- You can carry as many as 5 puppies at once: Jenka will only accept them once you've collected all 5
  - By the way, all 5 puppies will be located somewhere in the Sand Zone
- Certain items that are received from NPCs have been placed in chests:
  - Labyrinth B (Fallen Booster)
  - Labyrinth Shop
    - One requiring the Machine Gun to open
    - One requiring the Fireball to open
    - One requiring the Spur to open
  - Jail no. 1
  - Storage? (Ma Pignon)
    - This chest requires saving Curly in the Waterway to open
- If you don't have Curly's Air Tank after defeating the Core, the water will not rise and you may leave without dying
- Curly cannot be left behind permanently in the Core; the shutter will never close once the boss has been defeated
- The jump in the Waterway to save Curly has been made much easier
- Ironhead will always give you his item on defeat (but there's still a special surprise if you defeat him without taking damage!)
- Kazuma will only open the door between Egg no. 0 and the Outer Wall if you save him in Grasstown
  - Kazuma's door can be blown down from both the outside and the inside
- Entering the Throne Room to complete the game requires doing three things:
  1. Saving Sue in the Egg Corridor
  2. Obtaining the Booster 2.0
  3. Obtaining the Iron Bond

## Help me!
If you find yourself stuck, here are a few common pitfalls:
- Remember that the Jellyfish Juice can quench more than one fireplace
- The Graveyard can only be accessed if you obtain the Silver Locket and see Toroko get kidnapped
- The Hermit Gunsmith will wake up and give you an item if you defeat the Core and show him his gun
- The western side of the Labyrinth can be accessed without flight if you defeat Toroko+
- The Plantation can be accessed without the Teleporter Room Key if you save Kazuma and teleport in, or climb the Outer Wall
- The Waterway can be accessed without the Cure-All by using the teleporter in the Labyrinth Shop
- There may be a required item in the Last Cave (Hidden) as a reward for defeating the Red Demon

If you're still stuck, join our [official Discord server](https://discord.gg/7zUdPEn) and ask for help in there!

## Credits
- Original Cave Story Randomizer by shru: https://shru.itch.io/cave-story-randomizer
- Font: https://datagoblin.itch.io/monogram
- Icon: Bubbler (@Ethan#6397)
caver/patcher.py
@@ -1,44 +1,19 @@
from __future__ import annotations

import json
import logging
import platform as pl
import shutil
import sys
import textwrap
import typing
from enum import Enum
from pathlib import Path
from uuid import UUID
from typing import Callable, Optional
from lupa import LuaRuntime
import logging
import shutil
import textwrap
import sys

import pre_edited_cs
from randovania_lupa import LuaRuntime  # type: ignore

from caver.schema.validator_with_default import DefaultValidatingDraft7Validator

LuaFile = typing.Any

if typing.TYPE_CHECKING:
    from collections.abc import Callable

    from caver.schema import (
        CaverData,
        CaverdataMaps,
        CaverdataOtherTsc,
        EventNumber,
        MapName,
    )

CSVERSION = 4

class CaverException(Exception):
    pass


class CSPlatform(Enum):
    FREEWARE = "freeware"
    TWEAKED = "tweaked"


def get_path() -> Path:
    if getattr(sys, "frozen", False):
        file_dir = Path(getattr(sys, "_MEIPASS"))
@@ -46,104 +21,65 @@ def get_path() -> Path:
        file_dir = Path(__file__).parent.parent
    return file_dir.joinpath("caver")


def validate(patch_data: dict) -> None:
    with Path(__file__).parent.joinpath("schema/schema.json").open() as f:
        schema = json.load(f)
    DefaultValidatingDraft7Validator(schema).validate(patch_data)


def patch_files(
    patch_data: CaverData, output_dir: Path, platform: CSPlatform, progress_update: Callable[[str, float], None]
) -> None:
    progress_update("Validating schema...", -1)
    validate(typing.cast(dict, patch_data))

def patch_files(patch_data: dict, output_dir: Path, progress_update: Callable[[str, float], None]):
    progress_update("Copying base files...", -1)
    ensure_base_files_exist(platform, output_dir)
    ensure_base_files_exist(output_dir)

    total = len(patch_data["maps"].keys()) + len(patch_data["other_tsc"].keys()) + 3
    total = len(patch_data["maps"].keys()) + len(patch_data["other_tsc"].keys()) + 2

    lua_file = get_path().joinpath("tsc_file.lua").read_text()
    TscFile = typing.cast(LuaFile, LuaRuntime().execute(lua_file))
    TscFile = LuaRuntime().execute(lua_file)

    for i, (mapname, mapdata) in enumerate(patch_data["maps"].items()):
        progress_update(f"Patching {mapname}...", i / total)
        progress_update(f"Patching {mapname}...", i/total)
        patch_map(mapname, mapdata, TscFile, output_dir)


    for filename, scripts in patch_data["other_tsc"].items():
        i += 1
        progress_update(f"Patching {filename}.tsc...", i / total)
        progress_update(f"Patching {filename}.tsc...", i/total)
        patch_other(filename, scripts, TscFile, output_dir)

    i += 1
    progress_update("Copying MyChar...", i / total)
    patch_mychar(patch_data["mychar"], output_dir, platform is CSPlatform.TWEAKED)
    progress_update("Copying MyChar...", i/total)
    patch_mychar(patch_data["mychar"], output_dir)

    i += 1
    progress_update("Copying hash...", i / total)
    progress_update("Copying hash...", i/total)
    patch_hash(patch_data["hash"], output_dir)

    i += 1
    progress_update("Copying UUID...", i / total)
    patch_uuid(patch_data["uuid"], output_dir)

    if platform == CSPlatform.TWEAKED:
        if pl.system() == "Linux":
            output_dir.joinpath("CSTweaked.exe").unlink()
        else:
            output_dir.joinpath("CSTweaked").unlink()


def ensure_base_files_exist(platform: CSPlatform, output_dir: Path) -> None:
def ensure_base_files_exist(output_dir: Path):
    internal_copy = pre_edited_cs.get_path()

    with internal_copy.joinpath("data", "version.txt").open() as version_file:
        latest_version = version_file.readline()
    version = output_dir.joinpath("data", "Stage", "_version.txt")
    keep_existing_files = version.exists() and int(version.read_text()) >= CSVERSION

    version = output_dir.joinpath("data", "version.txt")
    current_version = "v0.0.0.0"
    if version.exists():
        with version.open() as version_file:
            current_version = version_file.readline()

    keep_existing_files = current_version >= latest_version

    def should_ignore(path: str, names: list[str]) -> list[str]:
    def should_ignore(path: str, names: list[str]):
        base = ["__init__.py", "__pycache__", "ScriptSource", "__pyinstaller"]
        if keep_existing_files:
            p = Path(path)
            base.extend(
                [str(p.joinpath(name)) for name in names if p.joinpath(name).exists() and p.joinpath(name).is_file()]
            )
            base.extend([p.joinpath(name) for name in names if p.joinpath(name).exists() and p.joinpath(name).is_file()])
        return base


    try:
        shutil.copytree(internal_copy.joinpath(platform.value), output_dir, ignore=should_ignore, dirs_exist_ok=True)
        shutil.copytree(
            internal_copy.joinpath("data"), output_dir.joinpath("data"), ignore=should_ignore, dirs_exist_ok=True
        )
        shutil.copytree(internal_copy, output_dir, ignore=should_ignore, dirs_exist_ok=True)
    except shutil.Error:
        raise CaverException(
            "Error copying base files. Ensure the directory is not read-only, and that Doukutsu.exe is closed"
        )
        raise CaverException("Error copying base files. Ensure the directory is not read-only, and that Doukutsu.exe is closed")
    output_dir.joinpath("data", "Plaintext").mkdir(exist_ok=True)


def patch_map(mapname: MapName, mapdata: CaverdataMaps, TscFile: LuaFile, output_dir: Path) -> None:
def patch_map(mapname: str, mapdata: dict[str, dict], TscFile, output_dir: Path):
    mappath = output_dir.joinpath("data", "Stage", f"{mapname}.tsc")
    tsc_file = TscFile.new(TscFile, mappath.read_bytes(), logging.getLogger("caver"))

    for event, script in mapdata["pickups"].items():
        TscFile.placeScriptAtEvent(tsc_file, script, event, mapname)


    for event, song in mapdata["music"].items():
        TscFile.placeSongAtCue(tsc_file, song["song_id"], event, song["original_id"], mapname)


    for event, script in mapdata["entrances"].items():
        needle = "<EVE...."  # TODO: create a proper pattern
        needle = "<EVE...." # TODO: create a proper pattern
        TscFile.placeScriptAtEvent(tsc_file, script, event, mapname, needle)


    for event, hint in mapdata["hints"].items():
        script = create_hint_script(hint["text"], hint.get("facepic", "0000") != "0000", hint.get("ending", "<END"))
        TscFile.placeScriptAtEvent(tsc_file, script, event, mapname)
@@ -152,67 +88,39 @@ def patch_map(mapname: MapName, mapdata: CaverdataMaps, TscFile: LuaFile, output
    mappath.write_bytes(bytes(chars))
    output_dir.joinpath("data", "Plaintext", f"{mapname}.txt").write_text(TscFile.getPlaintext(tsc_file))


def patch_other(
    filename: MapName, scripts: dict[EventNumber, CaverdataOtherTsc], TscFile: LuaFile, output_dir: Path
) -> None:
def patch_other(filename: str, scripts: dict[str, dict[str, str]], TscFile, output_dir: Path):
    filepath = output_dir.joinpath("data", f"{filename}.tsc")
    tsc_file = TscFile.new(TscFile, filepath.read_bytes(), logging.getLogger("caver"))

    for event, script in scripts.items():
        TscFile.placeScriptAtEvent(tsc_file, script["script"], event, filename, script.get("needle", "<EVE...."))


    chars = TscFile.getText(tsc_file).values()
    filepath.write_bytes(bytes(chars))
    output_dir.joinpath("data", "Plaintext", f"{filename}.txt").write_text(TscFile.getPlaintext(tsc_file))


def patch_mychar(mychar: str | None, output_dir: Path, add_upscale: bool) -> None:
def patch_mychar(mychar: Optional[str], output_dir: Path):
    if mychar is None:
        return
    mychar_img = Path(mychar).read_bytes()
    output_dir.joinpath("data", "MyChar.bmp").write_bytes(mychar_img)

    if add_upscale:
        mychar_name = Path(mychar).name
        mychar_up_img = Path(mychar).parent.joinpath("2x", mychar_name).read_bytes()
        output_dir.joinpath("data", "sprites_up", "MyChar.bmp").write_bytes(mychar_up_img)


def patch_hash(hash: list[int], output_dir: Path) -> None:
def patch_hash(hash: list[int], output_dir: Path):
    hash_strings = [f"{num:04d}" for num in hash]
    hash_string = ",".join(hash_strings)
    output_dir.joinpath("data", "hash.txt").write_text(hash_string)


def patch_uuid(uuid: str, output_dir: Path) -> None:
    uuid = f"{{{UUID(uuid)}}}"
    output_dir.joinpath("data", "uuid.txt").write_text(uuid)


def wrap_msg_text(text: str, facepic: bool, *, ending: str = "<NOD", max_text_boxes: int | None = 1) -> str:
    hard_limit = 35
    msgbox_limit = 26 if facepic else hard_limit

    max_lines = max_text_boxes * 3 if max_text_boxes is not None else None
    lines = textwrap.wrap(text, width=msgbox_limit, max_lines=max_lines)

    text = ""
    for i, line in enumerate(lines):
        text += line
        if i < len(lines) - 1:
            if i % 3 == 2:
                text += "<NOD"
            if len(line) != hard_limit:
                text += "\r\n"
    text += ending

    return text


def create_hint_script(text: str, facepic: bool, ending: str) -> str:
    """
    A desperate attempt to generate valid <MSG text.
    Fills one text box (up to three lines). Attempts to wrap words elegantly.
    A desperate attempt to generate valid <MSG text. Fills one text box (up to three lines). Attempts to wrap words elegantly.
    """
    return f"<PRI<MSG<TUR{wrap_msg_text(text, facepic, ending=ending)}"
    hard_limit = 35
    msgbox_limit = 26 if facepic else hard_limit

    lines = textwrap.wrap(text, width=msgbox_limit, max_lines=3)
    text = ""
    for i, l in enumerate(lines):
        text += l
        if len(l) != hard_limit and i < len(lines)-1:
            text += "\r\n"
    return f"<PRI<MSG<TUR{text}<NOD{ending}"
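As a side note on the hint plumbing above: on the master side of this diff, patch_map builds each hint with create_hint_script, which delegates the line wrapping to wrap_msg_text. Below is a minimal usage sketch, assuming the master-branch caver/patcher.py is importable; the hint string itself is a made-up placeholder.

```python
# Sketch only: assumes the master-branch caver/patcher.py is on the import path.
from caver.patcher import create_hint_script, wrap_msg_text

# Hypothetical hint text; with a face picture the wrap width drops from 35 to 26.
hint = "Jellyfish Juice can quench more than one fireplace, so hold on to it."

print(wrap_msg_text(hint, facepic=True, ending="<NOD"))
# Lines are joined with "\r\n", a "<NOD" is inserted after every third line
# (except the last), and the chosen ending command is appended at the end.

print(create_hint_script(hint, facepic=True, ending="<END"))
# -> "<PRI<MSG<TUR" followed by the wrapped hint and the "<END" terminator.
```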
@@ -1,23 +0,0 @@
from caver.schema.types import (
    CaverData,
    CaverdataMaps,
    CaverdataMapsHints,
    CaverdataMapsMusic,
    CaverdataOtherTsc,
    EventNumber,
    MapName,
    TscScript,
    TscValue,
)

__all__ = [
    "CaverData",
    "TscValue",
    "EventNumber",
    "MapName",
    "TscScript",
    "CaverdataMapsHints",
    "CaverdataMapsMusic",
    "CaverdataMaps",
    "CaverdataOtherTsc",
]
@@ -1,195 +0,0 @@
{
  "$schema": "http://json-schema.org/draft-07/schema#",
  "type": "object",
  "properties": {
    "$schema": {
      "type": "string",
      "format": "uri"
    },
    "maps": {
      "type": "object",
      "propertyNames": {
        "$ref": "#/$defs/map_name"
      },
      "additionalProperties": {
        "type": "object",
        "properties": {
          "pickups": {
            "type": "object",
            "propertyNames": {
              "$ref": "#/$defs/event_number"
            },
            "additionalProperties": {
              "$ref": "#/$defs/tsc_script"
            },
            "default": {}
          },
          "hints": {
            "type": "object",
            "propertyNames": {
              "$ref": "#/$defs/event_number"
            },
            "additionalProperties": {
              "type": "object",
              "properties": {
                "text": {
                  "type": "string"
                },
                "facepic": {
                  "$ref": "#/$defs/tsc_value",
                  "default": "0000"
                },
                "ending": {
                  "$ref": "#/$defs/tsc_script",
                  "default": "<END"
                }
              },
              "required": [
                "text",
                "facepic",
                "ending"
              ],
              "additionalProperties": false
            },
            "default": {}
          },
          "music": {
            "type": "object",
            "propertyNames": {
              "$ref": "#/$defs/event_number"
            },
            "additionalProperties": {
              "type": "object",
              "properties": {
                "original_id": {
                  "$ref": "#/$defs/tsc_value"
                },
                "song_id": {
                  "$ref": "#/$defs/tsc_value"
                }
              },
              "required": [
                "original_id",
                "song_id"
              ],
              "additionalProperties": false
            },
            "default": {}
          },
          "entrances": {
            "type": "object",
            "propertyNames": {
              "$ref": "#/$defs/event_number"
            },
            "additionalProperties": {
              "$ref": "#/$defs/tsc_script"
            },
            "default": {}
          }
        },
        "additionalProperties": false,
        "required": [
          "pickups",
          "hints",
          "music",
          "entrances"
        ]
      },
      "default": {}
    },
    "other_tsc": {
      "type": "object",
      "propertyNames": {
        "$ref": "#/$defs/map_name"
      },
      "additionalProperties": {
        "type": "object",
        "propertyNames": {
          "$ref": "#/$defs/event_number"
        },
        "additionalProperties": {
          "type": "object",
          "properties": {
            "needle": {
              "type": "string",
              "description": "Lua pattern to search for and replace within the event"
            },
            "script": {
              "$ref": "#/$defs/tsc_script"
            }
          },
          "required": [
            "needle",
            "script"
          ],
          "additionalProperties": false
        }
      },
      "default": {}
    },
    "mychar": {
      "description": "A path to the mychar.bmp file to use (Optional)",
      "anyOf": [
        {
          "type": "null"
        },
        {
          "type": "string",
          "pattern": "^.*\\.bmp$"
        }
      ],
      "default": null
    },
    "hash": {
      "description": "An array of five item IDs to display on the title screen, within [1, 39]",
      "type": "array",
      "items": {
        "type": "integer",
        "minimum": 1,
        "maximum": 39
      },
      "minItems": 5,
      "maxItems": 5,
      "default": [1,1,1,1,1]
    },
    "uuid": {
      "description": "The UUID for the world in a multiworld session, or the empty UUID if single player.",
      "type": "string",
      "pattern": "^\\{[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}\\}$",
      "default": "{00000000-0000-0000-0000-000000000000}"
    },
    "platform": {
      "description": "Which supported platform to export to.",
      "$comment": "Not actually used by the patcher.",
      "type": "string",
      "enum": [
        "freeware",
        "tweaked"
      ]
    }
  },
  "required": [
    "maps",
    "other_tsc",
    "mychar",
    "hash",
    "uuid"
  ],
  "additionalProperties": false,
  "$defs": {
    "tsc_value": {
      "type": "string",
      "pattern": "^[ -~]{4}$"
    },
    "event_number": {
      "$ref": "#/$defs/tsc_value",
      "$comment": "Semantic alias for tsc_value"
    },
    "map_name": {
      "type": "string"
    },
    "tsc_script": {
      "type": "string"
    }
  }
}
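For orientation, here is a rough example of a patch_data document that the removed schema above would accept. It is a sketch only: the map name "Start" and the event/script values are placeholders invented for illustration.

```python
# Hypothetical minimal document for the schema above (placeholder values).
patch_data = {
    "maps": {
        "Start": {                            # map name (placeholder)
            "pickups": {"0201": "<EVE0002"},  # event number -> TSC script
            "hints": {},
            "music": {},
            "entrances": {},
        }
    },
    "other_tsc": {},
    "mychar": None,                # or a path ending in ".bmp"
    "hash": [1, 2, 3, 4, 5],       # five item IDs in [1, 39] for the title screen
    "uuid": "{00000000-0000-0000-0000-000000000000}",
    "platform": "freeware",
}
```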
@@ -1,54 +0,0 @@
# This file is generated. Manual changes will be lost
# fmt: off
# ruff: noqa
from __future__ import annotations

import typing_extensions as typ


# Definitions
TscValue: typ.TypeAlias = str
EventNumber: typ.TypeAlias = TscValue
MapName: typ.TypeAlias = str
TscScript: typ.TypeAlias = str


# Schema entries
@typ.final
class CaverdataMapsHints(typ.TypedDict):
    text: str
    facepic: TscValue
    ending: TscScript


@typ.final
class CaverdataMapsMusic(typ.TypedDict):
    original_id: TscValue
    song_id: TscValue


@typ.final
class CaverdataMaps(typ.TypedDict):
    pickups: dict[EventNumber, TscScript]
    hints: dict[EventNumber, CaverdataMapsHints]
    music: dict[EventNumber, CaverdataMapsMusic]
    entrances: dict[EventNumber, TscScript]


@typ.final
class CaverdataOtherTsc(typ.TypedDict):
    needle: str
    script: TscScript



@typ.final
class Caverdata(typ.TypedDict):
    maps: dict[MapName, CaverdataMaps]
    other_tsc: dict[MapName, dict[EventNumber, CaverdataOtherTsc]]
    mychar: None | str
    hash: list[int]
    uuid: str
    platform: typ.NotRequired[str]

CaverData: typ.TypeAlias = Caverdata
@@ -1,25 +0,0 @@
from jsonschema import Draft7Validator, validators


def extend_with_default(validator_class):
    validate_properties = validator_class.VALIDATORS["properties"]

    def set_defaults(validator, properties, instance, schema):
        for property, subschema in properties.items():
            if "default" in subschema:
                instance.setdefault(property, subschema["default"])

        yield from validate_properties(
            validator,
            properties,
            instance,
            schema,
        )

    return validators.extend(
        validator_class,
        {"properties": set_defaults},
    )


DefaultValidatingDraft7Validator = extend_with_default(Draft7Validator)
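The removed helper above is the standard jsonschema recipe for writing each property's "default" into the instance while validating. A quick self-contained sketch of that behaviour, using a toy schema rather than the project's schema.json (the toy schema and instance are assumptions):

```python
from jsonschema import Draft7Validator, validators

# Same extension shown in the removed file: wrap the "properties" validator so
# each subschema's "default" is written into the instance before validation.
def extend_with_default(validator_class):
    validate_properties = validator_class.VALIDATORS["properties"]

    def set_defaults(validator, properties, instance, schema):
        for prop, subschema in properties.items():
            if "default" in subschema:
                instance.setdefault(prop, subschema["default"])
        yield from validate_properties(validator, properties, instance, schema)

    return validators.extend(validator_class, {"properties": set_defaults})

DefaultValidatingDraft7Validator = extend_with_default(Draft7Validator)

# Toy schema: the missing "mychar" key gains its default while validation runs.
toy_schema = {
    "type": "object",
    "properties": {"mychar": {"type": ["null", "string"], "default": None}},
}
doc = {}
DefaultValidatingDraft7Validator(toy_schema).validate(doc)
print(doc)  # {'mychar': None}
```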
@@ -112,7 +112,7 @@ function TscFile:_getLabelPositionRange(label)
  end

  if labelStart == nil then
    error(("Could not find label: %s"):format(label))
    error(("%s: Could not find label: %s"):format(self.mapName, label))
    labelStart = 1
  end

@@ -41,8 +41,6 @@ xor esi, esi
:$LL4@RestoreStr
; Line 153
mov eax, [Strip_addr]
test eax, eax
je :return
test [edi+eax], 128 ; 00000080H
je :$LN2@RestoreStr
; Line 155
@@ -79,8 +77,7 @@ add edi, 50 ; 00000050H
cmp esi, MAX_STRIPx10 ; 000007f0H
jl :$LL4@RestoreStr
; Line 164
:return
pop edi
pop esi
leave
retn
retn
@@ -1,4 +0,0 @@
0x40CD91
03
0x40CDC8
03
@@ -1,4 +0,0 @@
0x40149E
8B 4D F8 49 79 01 41 B8 68 9C 49 00 31 D2 39 10
7D 02 89 08 39 08 7E 14 89 10 90 90 90 90 90 90
90 90 90 90 90 90 90 90 90 90 90 90
@@ -1,2 +0,0 @@
0x41DB17
95 EA
@@ -1,9 +0,0 @@
This file isn't a real hex patch; it's a copy and paste of peri's Discord message on 5 Dec 2024:

---

There is an undocumented hex edit to increase the size of the Casts.bmp surface (0x411569: F0 00 -> E0 01; I presume we just used BL's hacks/intro/surface_bitmap_sizes.xml here);

There is an undocumented hex edit to increase the TSC buffer size (0x421545: 00 50 00 -> 20 A1 07; again, this was probably from the BL hack and we never wrote it down);

Randomly, the byte at 0x414B24 was changed from 0x80 to 0x5F, which is the number of bytes to memset when initializing the map flag array. Not sure why this was changed, but the multiworld DLL overwrites this function anyway, so it doesn't really make a difference (anymore).
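For context, an edit like "0x411569: F0 00 -> E0 01" from the note above is just an in-place byte overwrite. Below is a minimal sketch of applying such a patch with an expected-bytes check; the function name is made up, and translating the quoted addresses (which are addresses in the running executable) into file offsets is deliberately glossed over.

```python
from pathlib import Path

def apply_hex_patch(exe: Path, file_offset: int, expected: bytes, replacement: bytes) -> None:
    """Overwrite bytes at a fixed offset, refusing to patch if the original bytes don't match."""
    data = bytearray(exe.read_bytes())
    end = file_offset + len(expected)
    if bytes(data[file_offset:end]) != expected:
        raise ValueError("unexpected bytes at offset; refusing to patch")
    data[file_offset:end] = replacement
    exe.write_bytes(bytes(data))

# Shaped after the Casts.bmp edit quoted above (F0 00 -> E0 01).
# NOTE: 0x411569 is an address in the running executable; converting it to a
# file offset depends on the PE section layout and is not shown here.
# apply_hex_patch(Path("Doukutsu.exe"), some_file_offset, b"\xF0\x00", b"\xE0\x01")
```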
@@ -1,103 +0,0 @@
; Reads version number from data\version.txt
; The file must start with the version string in the format shown at the bottom of this file
; Made by periwinkle

#define
gDataPath=49E220
gVersionString=48C2BC
sprintf=481010
fopen=480FFD
rb=48C28C
fread=480F55
fclose=480E1B
sscanf=4817E8
call VirtualProtect=data ff 15 84 c0 48 00
filePath=[ebp-114]
bufchk=[ebp-10]
tmp=[ebp-c]
oldProtect=[ebp-8]
fp=[ebp-4]
v1=[ebp+8]
v2=[ebp+c]
v3=[ebp+10]
v4=[ebp+14]
#enddefine

offset 410990 ; GetCompileVersion
push ebp
mov ebp, esp
sub esp, 114
mov eax, [498B20]
mov bufchk, eax

; Get path to data\version.txt file
push gDataPath
push :VersionPath ; "%s\version.txt"
lea edx, filePath
push edx
call sprintf
add esp, c

; Open file
push rb ; "rb"
lea ecx, filePath
push ecx
call fopen
add esp, 8
test eax, eax
jz :ReadVersion
mov fp, eax

; Mark gVersionString as read/write (it's normally in a read-only segment)
lea eax, oldProtect
push eax
push 4 ; PAGE_READWRITE
push 40 ; Max size of gVersionString (including null character)
push gVersionString
call VirtualProtect
test eax, eax
jz :CloseFile

; Read contents of file into gVersionString
push fp

push 3F ; count (excluding null character)
push 1 ; size
push gVersionString
call fread
add esp, 10
mov byte [eax+gVersionString], 0 ; Write null terminator

; Restore previous protection status
lea eax, tmp
push eax
push oldProtect
push 40
push gVersionString
call VirtualProtect

:CloseFile
push fp
call fclose
pop ecx

:ReadVersion
; Parse the version string
push v4
push v3
push v2
push v1
push :VersionString
push gVersionString
call sscanf
add esp, 18

mov ecx, bufchk
call 480DC1
leave
retn

:VersionPath
data 25 73 5C 76 65 72 73 69 6F 6E 2E 74 78 74 00 00 ; "%s\version.txt"
:VersionString
data 76 25 64 2E 25 64 2E 25 64 2E 25 64 00 ; "v%d.%d.%d.%d"
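In effect, the hook above reads up to 0x3F bytes of data\version.txt into gVersionString and parses it with sscanf("v%d.%d.%d.%d", ...). A rough Python equivalent of that parse, as a sketch only (the fallback value is an assumption; the real hook falls back to whatever string is already in gVersionString):

```python
import re
from pathlib import Path

def read_compile_version(data_dir: Path) -> tuple[int, int, int, int]:
    """Parse 'v%d.%d.%d.%d' from data/version.txt, mirroring the hook above."""
    try:
        text = data_dir.joinpath("version.txt").read_text(errors="ignore")
    except OSError:
        text = ""
    match = re.match(r"v(\d+)\.(\d+)\.(\d+)\.(\d+)", text)
    if match is None:
        return (0, 0, 0, 0)  # assumed fallback for this sketch
    major, minor, patch, build = (int(part) for part in match.groups())
    return (major, minor, patch, build)
```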
[Image file diffs omitted: for every changed image the before and after file sizes are identical.]