
Merge branch 'main' into instruction_patch_kdl3
Silvris authored May 20, 2024
2 parents f26a53b + fe7bc87 commit d5a0ef4
Showing 55 changed files with 7,001 additions and 874 deletions.
8 changes: 8 additions & 0 deletions AHITClient.py
@@ -0,0 +1,8 @@
from worlds.ahit.Client import launch
import Utils
import ModuleUpdate
ModuleUpdate.update()

if __name__ == "__main__":
Utils.init_logging("AHITClient", exception_logger="Client")
launch()
7 changes: 4 additions & 3 deletions MultiServer.py
@@ -508,7 +508,7 @@ def init_save(self, enabled: bool = True):
self.logger.exception(e)
self._start_async_saving()

def _start_async_saving(self):
def _start_async_saving(self, atexit_save: bool = True):
if not self.auto_saver_thread:
def save_regularly():
# time.time() is platform dependent, so using the expensive datetime method instead
@@ -532,8 +532,9 @@ def get_datetime_second():
self.auto_saver_thread = threading.Thread(target=save_regularly, daemon=True)
self.auto_saver_thread.start()

import atexit
atexit.register(self._save, True) # make sure we save on exit too
if atexit_save:
import atexit
atexit.register(self._save, True) # make sure we save on exit too

def get_save(self) -> dict:
self.recheck_hints()
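
In context, the new atexit_save flag lets an embedding server keep the periodic auto-saver thread without registering the process-wide atexit hook; the WebHost rooms pass atexit_save=False (see customserver.py below) and persist their saves through their own shutdown path instead. A minimal sketch of the pattern, not the actual MultiServer implementation:

    import atexit
    import threading
    import time

    class SaverSketch:
        """Simplified stand-in for the MultiServer context; only the save plumbing."""
        auto_saver_thread = None

        def _save(self, exit_save: bool = False) -> bool:
            print("saving", "(exit save)" if exit_save else "(periodic save)")
            return True

        def _start_async_saving(self, atexit_save: bool = True) -> None:
            if not self.auto_saver_thread:
                def save_regularly():
                    while True:
                        time.sleep(60)  # stand-in for the real datetime-based interval
                        self._save()

                self.auto_saver_thread = threading.Thread(target=save_regularly, daemon=True)
                self.auto_saver_thread.start()

                if atexit_save:  # embedded servers (e.g. WebHost rooms) pass False
                    atexit.register(self._save, True)  # make sure we save on exit too
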
1 change: 1 addition & 0 deletions Options.py
@@ -746,6 +746,7 @@ def from_text(cls, text: str) -> Range:

class FreezeValidKeys(AssembleOptions):
def __new__(mcs, name, bases, attrs):
assert not "_valid_keys" in attrs, "'_valid_keys' gets set by FreezeValidKeys, define 'valid_keys' instead."
if "valid_keys" in attrs:
attrs["_valid_keys"] = frozenset(attrs["valid_keys"])
return super(FreezeValidKeys, mcs).__new__(mcs, name, bases, attrs)
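
The added assertion keeps option classes from defining the derived _valid_keys attribute by hand; they should declare valid_keys and let the metaclass freeze it. A minimal sketch of that behaviour (simplified; the real AssembleOptions metaclass does considerably more):

    class FreezeValidKeysSketch(type):
        def __new__(mcs, name, bases, attrs):
            assert "_valid_keys" not in attrs, \
                "'_valid_keys' gets set by FreezeValidKeys, define 'valid_keys' instead."
            if "valid_keys" in attrs:
                attrs["_valid_keys"] = frozenset(attrs["valid_keys"])
            return super().__new__(mcs, name, bases, attrs)

    class GoodOption(metaclass=FreezeValidKeysSketch):
        valid_keys = ["easy", "normal", "hard"]  # frozen into _valid_keys automatically

    # class BadOption(metaclass=FreezeValidKeysSketch):
    #     _valid_keys = frozenset(["easy"])  # AssertionError: define 'valid_keys' instead
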
2 changes: 2 additions & 0 deletions README.md
@@ -67,7 +67,9 @@ Currently, the following games are supported:
* Yoshi's Island
* Mario & Luigi: Superstar Saga
* Bomb Rush Cyberfunk
* Aquaria
* Yu-Gi-Oh! Ultimate Masters: World Championship Tournament 2006
* A Hat in Time

For setup and instructions check out our [tutorials page](https://archipelago.gg/tutorial/).
Downloads can be found at [Releases](https://github.com/ArchipelagoMW/Archipelago/releases), including compiled
10 changes: 9 additions & 1 deletion WebHost.py
@@ -117,7 +117,7 @@ def create_ordered_tutorials_file() -> typing.List[typing.Dict[str, typing.Any]]
logging.basicConfig(format='[%(asctime)s] %(message)s', level=logging.INFO)

from WebHostLib.lttpsprites import update_sprites_lttp
from WebHostLib.autolauncher import autohost, autogen
from WebHostLib.autolauncher import autohost, autogen, stop
from WebHostLib.options import create as create_options_files

try:
@@ -138,3 +138,11 @@ def create_ordered_tutorials_file() -> typing.List[typing.Dict[str, typing.Any]]
else:
from waitress import serve
serve(app, port=app.config["PORT"], threads=app.config["WAITRESS_THREADS"])
else:
from time import sleep
try:
while True:
sleep(1) # wait for process to be killed
except (SystemExit, KeyboardInterrupt):
pass
stop() # stop worker threads
30 changes: 19 additions & 11 deletions WebHostLib/autolauncher.py
@@ -3,16 +3,26 @@
import json
import logging
import multiprocessing
import time
import typing
from uuid import UUID
from datetime import timedelta, datetime
from threading import Event, Thread
from uuid import UUID

from pony.orm import db_session, select, commit

from Utils import restricted_loads
from .locker import Locker, AlreadyRunningException

_stop_event = Event()


def stop():
"""Stops previously launched threads"""
global _stop_event
stop_event = _stop_event
_stop_event = Event() # new event for new threads
stop_event.set()


def handle_generation_success(seed_id):
logging.info(f"Generation finished for seed {seed_id}")
@@ -63,6 +73,7 @@ def cleanup():

def autohost(config: dict):
def keep_running():
stop_event = _stop_event
try:
with Locker("autohost"):
cleanup()
@@ -72,26 +83,25 @@ def keep_running():
hosters.append(hoster)
hoster.start()

while 1:
time.sleep(0.1)
while not stop_event.wait(0.1):
with db_session:
rooms = select(
room for room in Room if
room.last_activity >= datetime.utcnow() - timedelta(days=3))
for room in rooms:
# we have to filter twice, as the per-room timeout can't currently be PonyORM transpiled.
if room.last_activity >= datetime.utcnow() - timedelta(seconds=room.timeout):
if room.last_activity >= datetime.utcnow() - timedelta(seconds=room.timeout + 5):
hosters[room.id.int % len(hosters)].start_room(room.id)

except AlreadyRunningException:
logging.info("Autohost reports as already running, not starting another.")

import threading
threading.Thread(target=keep_running, name="AP_Autohost").start()
Thread(target=keep_running, name="AP_Autohost").start()


def autogen(config: dict):
def keep_running():
stop_event = _stop_event
try:
with Locker("autogen"):

@@ -112,8 +122,7 @@ def keep_running():
commit()
select(generation for generation in Generation if generation.state == STATE_ERROR).delete()

while 1:
time.sleep(0.1)
while not stop_event.wait(0.1):
with db_session:
# for update locks the database row(s) during transaction, preventing writes from elsewhere
to_start = select(
@@ -124,8 +133,7 @@
except AlreadyRunningException:
logging.info("Autogen reports as already running, not starting another.")

import threading
threading.Thread(target=keep_running, name="AP_Autogen").start()
Thread(target=keep_running, name="AP_Autogen").start()


multiworlds: typing.Dict[type(Room.id), MultiworldInstance] = {}
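
stop() swaps in a fresh Event before setting the old one, so workers that captured the module-level event at start-up are told to exit while any threads launched afterwards start with an unset event; the loops now use stop_event.wait(0.1), so the wait doubles as the polling sleep. A minimal, self-contained sketch of the pattern (independent of the WebHost database work):

    import threading
    import time

    _stop_event = threading.Event()

    def stop() -> None:
        """Signal currently running workers to stop; later workers get a fresh event."""
        global _stop_event
        old_event = _stop_event
        _stop_event = threading.Event()
        old_event.set()

    def worker(name: str) -> None:
        stop_event = _stop_event  # capture once at start-up, like keep_running() does
        while not stop_event.wait(0.1):  # returns True once stop() sets the event
            pass  # periodic work (autohost/autogen poll the database here)
        print(f"{name} stopped")

    threading.Thread(target=worker, args=("AP_Autohost_sketch",)).start()
    time.sleep(0.5)
    stop()  # what WebHost.py now calls after SystemExit/KeyboardInterrupt
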
129 changes: 76 additions & 53 deletions WebHostLib/customserver.py
@@ -74,6 +74,7 @@ def __init__(self, static_server_data: dict, logger: logging.Logger):

def _load_game_data(self):
for key, value in self.static_server_data.items():
# NOTE: attributes are mutable and shared, so they will have to be copied before being modified
setattr(self, key, value)
self.non_hintable_names = collections.defaultdict(frozenset, self.non_hintable_names)

@@ -101,18 +102,37 @@ def load(self, room_id: int):

multidata = self.decompress(room.seed.multidata)
game_data_packages = {}

static_gamespackage = self.gamespackage # this is shared across all rooms
static_item_name_groups = self.item_name_groups
static_location_name_groups = self.location_name_groups
self.gamespackage = {"Archipelago": static_gamespackage["Archipelago"]} # this may be modified by _load
self.item_name_groups = {}
self.location_name_groups = {}

for game in list(multidata.get("datapackage", {})):
game_data = multidata["datapackage"][game]
if "checksum" in game_data:
if self.gamespackage.get(game, {}).get("checksum") == game_data["checksum"]:
# non-custom. remove from multidata
if static_gamespackage.get(game, {}).get("checksum") == game_data["checksum"]:
# non-custom. remove from multidata and use static data
# games package could be dropped from static data once all rooms embed data package
del multidata["datapackage"][game]
else:
row = GameDataPackage.get(checksum=game_data["checksum"])
if row: # None if rolled on >= 0.3.9 but uploaded to <= 0.3.8. multidata should be complete
game_data_packages[game] = Utils.restricted_loads(row.data)

continue
else:
self.logger.warning(f"Did not find game_data_package for {game}: {game_data['checksum']}")
self.gamespackage[game] = static_gamespackage.get(game, {})
self.item_name_groups[game] = static_item_name_groups.get(game, {})
self.location_name_groups[game] = static_location_name_groups.get(game, {})

if not game_data_packages:
# all static -> use the static dicts directly
self.gamespackage = static_gamespackage
self.item_name_groups = static_item_name_groups
self.location_name_groups = static_location_name_groups
return self._load(multidata, game_data_packages, True)

@db_session
@@ -122,7 +142,7 @@ def init_save(self, enabled: bool = True):
savegame_data = Room.get(id=self.room_id).multisave
if savegame_data:
self.set_save(restricted_loads(Room.get(id=self.room_id).multisave))
self._start_async_saving()
self._start_async_saving(atexit_save=False)
threading.Thread(target=self.listen_to_db_commands, daemon=True).start()

@db_session
@@ -212,59 +232,62 @@ def run_server_process(name: str, ponyconfig: dict, static_server_data: dict,
loop = asyncio.get_event_loop()

async def start_room(room_id):
try:
logger = set_up_logging(room_id)
ctx = WebHostContext(static_server_data, logger)
ctx.load(room_id)
ctx.init_save()
with Locker(f"RoomLocker {room_id}"):
try:
ctx.server = websockets.serve(functools.partial(server, ctx=ctx), ctx.host, ctx.port, ssl=ssl_context)

await ctx.server
except OSError: # likely port in use
ctx.server = websockets.serve(functools.partial(server, ctx=ctx), ctx.host, 0, ssl=ssl_context)

await ctx.server
port = 0
for wssocket in ctx.server.ws_server.sockets:
socketname = wssocket.getsockname()
if wssocket.family == socket.AF_INET6:
# Prefer IPv4, as most users seem to not have working ipv6 support
if not port:
logger = set_up_logging(room_id)
ctx = WebHostContext(static_server_data, logger)
ctx.load(room_id)
ctx.init_save()
try:
ctx.server = websockets.serve(
functools.partial(server, ctx=ctx), ctx.host, ctx.port, ssl=ssl_context)

await ctx.server
except OSError: # likely port in use
ctx.server = websockets.serve(
functools.partial(server, ctx=ctx), ctx.host, 0, ssl=ssl_context)

await ctx.server
port = 0
for wssocket in ctx.server.ws_server.sockets:
socketname = wssocket.getsockname()
if wssocket.family == socket.AF_INET6:
# Prefer IPv4, as most users seem to not have working ipv6 support
if not port:
port = socketname[1]
elif wssocket.family == socket.AF_INET:
port = socketname[1]
elif wssocket.family == socket.AF_INET:
port = socketname[1]
if port:
ctx.logger.info(f'Hosting game at {host}:{port}')
if port:
ctx.logger.info(f'Hosting game at {host}:{port}')
with db_session:
room = Room.get(id=ctx.room_id)
room.last_port = port
else:
ctx.logger.exception("Could not determine port. Likely hosting failure.")
with db_session:
room = Room.get(id=ctx.room_id)
room.last_port = port
else:
ctx.logger.exception("Could not determine port. Likely hosting failure.")
with db_session:
ctx.auto_shutdown = Room.get(id=room_id).timeout
ctx.shutdown_task = asyncio.create_task(auto_shutdown(ctx, []))
await ctx.shutdown_task
ctx.auto_shutdown = Room.get(id=room_id).timeout
ctx.shutdown_task = asyncio.create_task(auto_shutdown(ctx, []))
await ctx.shutdown_task

# ensure auto launch is on the same page in regard to room activity.
with db_session:
room: Room = Room.get(id=ctx.room_id)
room.last_activity = datetime.datetime.utcnow() - datetime.timedelta(seconds=room.timeout + 60)

except (KeyboardInterrupt, SystemExit):
with db_session:
room = Room.get(id=room_id)
# ensure the Room does not spin up again on its own, minute of safety buffer
room.last_activity = datetime.datetime.utcnow() - datetime.timedelta(minutes=1, seconds=room.timeout)
except Exception:
with db_session:
room = Room.get(id=room_id)
room.last_port = -1
# ensure the Room does not spin up again on its own, minute of safety buffer
room.last_activity = datetime.datetime.utcnow() - datetime.timedelta(minutes=1, seconds=room.timeout)
raise
finally:
rooms_shutting_down.put(room_id)
except (KeyboardInterrupt, SystemExit):
pass
except Exception:
with db_session:
room = Room.get(id=room_id)
room.last_port = -1
raise
finally:
try:
ctx._save()
with (db_session):
# ensure the Room does not spin up again on its own, minute of safety buffer
room = Room.get(id=room_id)
room.last_activity = datetime.datetime.utcnow() - \
datetime.timedelta(minutes=1, seconds=room.timeout)
logging.info(f"Shutting down room {room_id} on {name}.")
finally:
await asyncio.sleep(5)
rooms_shutting_down.put(room_id)

class Starter(threading.Thread):
def run(self):
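
load() now keeps the process-wide static data package shared across rooms and only copies in per-game entries: games whose checksum matches the static package reuse the shared data, custom packages are deserialized from their GameDataPackage row, and when nothing is custom the static dictionaries are used directly. A minimal sketch of that selection logic, with the database lookup abstracted behind a hypothetical load_custom callable:

    from typing import Any, Callable, Dict, Optional

    def select_game_packages(
            multidata_datapackage: Dict[str, Dict[str, Any]],
            static_gamespackage: Dict[str, Dict[str, Any]],
            load_custom: Callable[[str], Optional[Dict[str, Any]]],
    ) -> Dict[str, Dict[str, Any]]:
        """Return only custom packages; checksum matches fall back to the shared static data."""
        custom: Dict[str, Dict[str, Any]] = {}
        for game in list(multidata_datapackage):
            checksum = multidata_datapackage[game].get("checksum")
            if checksum is None:
                continue  # old multidata without checksums keeps its embedded data
            if static_gamespackage.get(game, {}).get("checksum") == checksum:
                del multidata_datapackage[game]  # non-custom: use static data
            else:
                package = load_custom(checksum)  # e.g. GameDataPackage.get(checksum=...)
                if package is not None:
                    custom[game] = package
        return custom
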
56 changes: 30 additions & 26 deletions WebHostLib/generate.py
@@ -70,35 +70,39 @@ def generate(race=False):
flash(options)
else:
meta = get_meta(request.form, race)
results, gen_options = roll_options(options, set(meta["plando_options"]))

if any(type(result) == str for result in results.values()):
return render_template("checkResult.html", results=results)
elif len(gen_options) > app.config["MAX_ROLL"]:
flash(f"Sorry, generating of multiworlds is limited to {app.config['MAX_ROLL']} players. "
f"If you have a larger group, please generate it yourself and upload it.")
elif len(gen_options) >= app.config["JOB_THRESHOLD"]:
gen = Generation(
options=pickle.dumps({name: vars(options) for name, options in gen_options.items()}),
# convert to json compatible
meta=json.dumps(meta),
state=STATE_QUEUED,
owner=session["_id"])
commit()
return start_generation(options, meta)

return redirect(url_for("wait_seed", seed=gen.id))
else:
try:
seed_id = gen_game({name: vars(options) for name, options in gen_options.items()},
meta=meta, owner=session["_id"].int)
except BaseException as e:
from .autolauncher import handle_generation_failure
handle_generation_failure(e)
return render_template("seedError.html", seed_error=(e.__class__.__name__ + ": " + str(e)))
return render_template("generate.html", race=race, version=__version__)

return redirect(url_for("view_seed", seed=seed_id))

return render_template("generate.html", race=race, version=__version__)
def start_generation(options: Dict[str, Union[dict, str]], meta: Dict[str, Any]):
results, gen_options = roll_options(options, set(meta["plando_options"]))

if any(type(result) == str for result in results.values()):
return render_template("checkResult.html", results=results)
elif len(gen_options) > app.config["MAX_ROLL"]:
flash(f"Sorry, generating of multiworlds is limited to {app.config['MAX_ROLL']} players. "
f"If you have a larger group, please generate it yourself and upload it.")
elif len(gen_options) >= app.config["JOB_THRESHOLD"]:
gen = Generation(
options=pickle.dumps({name: vars(options) for name, options in gen_options.items()}),
# convert to json compatible
meta=json.dumps(meta),
state=STATE_QUEUED,
owner=session["_id"])
commit()

return redirect(url_for("wait_seed", seed=gen.id))
else:
try:
seed_id = gen_game({name: vars(options) for name, options in gen_options.items()},
meta=meta, owner=session["_id"].int)
except BaseException as e:
from .autolauncher import handle_generation_failure
handle_generation_failure(e)
return render_template("seedError.html", seed_error=(e.__class__.__name__ + ": " + str(e)))

return redirect(url_for("view_seed", seed=seed_id))


def gen_game(gen_options: dict, meta: Optional[Dict[str, Any]] = None, owner=None, sid=None):
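
The refactor pulls the option rolling and dispatch out of generate() into start_generation(), which branches on party size: more players than MAX_ROLL is rejected with a flash message, JOB_THRESHOLD or more queues a Generation row for the worker pool (wait_seed), and smaller parties are generated inline (view_seed). A minimal sketch of that branching, with hypothetical names standing in for the Flask plumbing:

    from typing import Dict

    def dispatch_generation(gen_options: Dict[str, dict], max_roll: int, job_threshold: int) -> str:
        """Return which path a submission takes in start_generation()."""
        if len(gen_options) > max_roll:
            return "rejected"  # flash() the size limit and re-render generate.html
        if len(gen_options) >= job_threshold:
            return "queued"    # persist a Generation row, redirect to wait_seed
        return "inline"        # call gen_game() immediately, redirect to view_seed

    print(dispatch_generation({f"Player{i}": {} for i in range(3)}, max_roll=30, job_threshold=2))
    # -> "queued"
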
(Diffs for the remaining 47 changed files are not shown here.)
