Merge pull request #340 from kentjhall/bopenai-race-fix
Fix batched openai request worker race
lbeurerkellner authored Apr 28, 2024
2 parents 969af9b + 3afecf8 · commit eaf03be
Showing 1 changed file with 8 additions and 0 deletions.
src/lmql/runtime/bopenai/batched_openai.py (8 additions, 0 deletions)
@@ -579,6 +579,8 @@ def __init__(self):
         self.complete_request_workers = [asyncio.create_task(self.complete_request_worker(self.complete_request_queue)) for i in range(5)]
 
         self.worker_loops = set()
+        self.worker_loops_started = 0
+        self.worker_loop_added = asyncio.Event()
 
         self.stats_logger = None

@@ -666,6 +668,8 @@ def is_definitive_error(self, e):
 
     async def complete_request_worker(self, queue: asyncio.Queue):
         self.worker_loops.add(asyncio.get_running_loop())
+        self.worker_loops_started += 1
+        self.worker_loop_added.set()
 
         while True:
             try:
@@ -725,6 +729,10 @@ async def complete(self, request_id=None, **kwargs):
         if len(self.complete_request_workers) == 0:
             raise APIShutDownException(f"bopenai requires at least one worker to be running to issue new complete requests.")
 
+        while self.worker_loops_started < len(self.complete_request_workers):
+            await self.worker_loop_added.wait()
+            self.worker_loop_added.clear()
+
         loop = asyncio.get_running_loop()
 
         result_fut = loop.create_future()
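The race being fixed: complete() could run before all of the complete_request_worker tasks created in __init__ had actually started, i.e. before they had registered their event loops in self.worker_loops. The commit adds a counter of started workers plus an asyncio.Event, and complete() now waits until the counter matches the number of workers before issuing a new request. Below is a minimal, self-contained sketch of the same handshake pattern; WorkerPool, worker, and submit are illustrative names, not part of the lmql codebase.

import asyncio

NUM_WORKERS = 5  # mirrors the pool size used in batched_openai.py

class WorkerPool:
    def __init__(self):
        self.queue = asyncio.Queue()
        # Worker tasks are created here, but their coroutines only start
        # running once the event loop gets a chance to schedule them.
        self.workers = [asyncio.create_task(self.worker(self.queue))
                        for _ in range(NUM_WORKERS)]
        self.workers_started = 0
        self.worker_started = asyncio.Event()

    async def worker(self, queue):
        # Check in before entering the consume loop: increment first,
        # then set, so a waiter that wakes up never sees a stale count.
        self.workers_started += 1
        self.worker_started.set()
        while True:
            item, fut = await queue.get()
            fut.set_result("handled " + item)

    async def submit(self, item):
        # The fix: don't enqueue work until every worker has checked in;
        # without this, a request could be submitted while the pool is
        # only partially up.
        while self.workers_started < len(self.workers):
            await self.worker_started.wait()
            self.worker_started.clear()
        fut = asyncio.get_running_loop().create_future()
        await self.queue.put((item, fut))
        return await fut

async def main():
    pool = WorkerPool()
    print(await pool.submit("request-1"))  # prints "handled request-1"

asyncio.run(main())

Clearing the event after each wakeup means submit() keeps re-checking the count until every worker has started; a set() that lands between wait() and clear() is harmless, because the corresponding increment has already happened by the time the event is set.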
