Commit

updated substitution exception logging within the assistant's internal monologue
fractalego committed Nov 24, 2024
1 parent 9ca70f6 commit 1de7c42
Showing 5 changed files with 25 additions and 11 deletions.
4 changes: 4 additions & 0 deletions todo.txt
@@ -1,3 +1,7 @@
* use prior text and response from failed substitutions between [] instead of just the iteration number (line 89, dialogue_answerer.py)
* remove dead code
* re-fine-tune phi to get better performance

* delete rules and memory from discourse_answerer

* This is wrong - from wafl_ll
11 changes: 9 additions & 2 deletions wafl/answerer/dialogue_answerer.py
@@ -71,7 +71,9 @@ async def answer(self, query_text: str) -> Answer:
rules_text=rules_text,
dialogue=conversation,
)
await self._interface.add_fact(f"The bot predicts: {original_answer_text}")
await self._interface.add_fact(
f"The bot predicts: {original_answer_text}"
)
answer_text, memories = await self._apply_substitutions(
original_answer_text
)
@@ -86,7 +88,12 @@ async def answer(self, query_text: str) -> Answer:
except RuntimeError as e:
if self._logger:
self._logger.write(f"Error in generating answer: {e}")
conversation.add_utterance(Utterance(speaker="bot", text=f"[Trying again for the {num_attempts + 2} time.]\n"))
conversation.add_utterance(
Utterance(
speaker="bot",
text=f"[when using the answer {original_answer_text} the system says {e}]\n",
)
)

if not is_finished:
final_answer_text += "I was unable to generate a full answer. Please see the logs for more information."
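
Context for the hunk above: when applying substitutions to a candidate answer fails, the bot no longer records only the attempt number in its internal monologue; it appends the failing answer together with the exception text as a bot utterance, so the next generation attempt can see why the previous candidate was rejected. Below is a minimal, self-contained sketch of that retry-with-feedback pattern, under stated assumptions: generate_answer, apply_substitutions, and the simple Conversation/Utterance classes are stand-ins invented for illustration, not the actual WAFL classes.

# Minimal sketch of the retry-with-feedback loop (assumed helper names, not the real WAFL classes).
import asyncio
from dataclasses import dataclass, field
from typing import List


@dataclass
class Utterance:
    speaker: str
    text: str


@dataclass
class Conversation:
    utterances: List[Utterance] = field(default_factory=list)

    def add_utterance(self, utterance: Utterance) -> None:
        self.utterances.append(utterance)


async def generate_answer(conversation: Conversation) -> str:
    # Stand-in for the LLM call that produces a candidate answer.
    return "<candidate answer>"


async def apply_substitutions(answer_text: str) -> str:
    # Stand-in for the substitution step; raises RuntimeError on failure.
    if "<memory:" in answer_text:
        raise RuntimeError("unknown memory key")
    return answer_text


async def answer_with_feedback(conversation: Conversation, max_attempts: int = 3) -> str:
    # Retry generation, feeding each failure back into the dialogue so the
    # next attempt can see why the previous candidate was rejected.
    for _ in range(max_attempts):
        candidate = await generate_answer(conversation)
        try:
            return await apply_substitutions(candidate)
        except RuntimeError as error:
            conversation.add_utterance(
                Utterance(
                    speaker="bot",
                    text=f"[when using the answer {candidate} the system says {error}]\n",
                )
            )
    return "I was unable to generate a full answer."


if __name__ == "__main__":
    print(asyncio.run(answer_with_feedback(Conversation())))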
8 changes: 6 additions & 2 deletions wafl/config.py
@@ -2,7 +2,9 @@
import os
import shutil

from wafl.connectors.remote.remote_configuration_connector import RemoteConfigurationConnector
from wafl.connectors.remote.remote_configuration_connector import (
RemoteConfigurationConnector,
)

_path = os.path.dirname(__file__)

@@ -23,7 +25,9 @@ def __init__(self, filename):
with open(filename) as file:
self._data = json.load(file)

self._remote_config = RemoteConfigurationConnector(self._data["backend"]["host"], self._data["backend"]["port"])
self._remote_config = RemoteConfigurationConnector(
self._data["backend"]["host"], self._data["backend"]["port"]
)

def get_value(self, key):
if key in self._data:
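
The Config class above reads its values from a local JSON file and builds a RemoteConfigurationConnector from the "backend" host and port entries. A minimal sketch of the local-JSON side of that pattern follows; the example file layout is an assumption for illustration, not the full WAFL configuration schema.

# Sketch of a JSON-backed configuration object (assumed file layout, local values only).
import json


class LocalConfig:
    def __init__(self, filename: str):
        with open(filename) as file:
            self._data = json.load(file)

    def get_value(self, key: str):
        # Return the locally stored value, or raise for unknown keys.
        if key in self._data:
            return self._data[key]
        raise KeyError(f"Unknown configuration key: {key}")


if __name__ == "__main__":
    # Example config.json: {"backend": {"host": "localhost", "port": 8080}}
    backend = LocalConfig("config.json").get_value("backend")
    print(backend["host"], backend["port"])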
8 changes: 5 additions & 3 deletions wafl/connectors/remote/remote_configuration_connector.py
@@ -5,6 +5,7 @@
from typing import Dict
from wafl.variables import get_variables


class RemoteConfigurationConnector:
_max_tries = 3

@@ -19,7 +20,9 @@ def __init__(self, host: str, port: int):
if (not loop or (loop and not loop.is_running())) and not asyncio.run(
self.check_connection()
):
raise RuntimeError("Cannot connect a running Configuration handler. Is WAFL-LLM running?")
raise RuntimeError(
"Cannot connect a running Configuration handler. Is WAFL-LLM running?"
)

async def predict(self) -> Dict[str, str]:
payload = {"version": get_variables()["version"]}
@@ -45,7 +48,6 @@ async def predict(self) -> Dict[str, str]:

return {}


async def check_connection(self) -> bool:
try:
async with aiohttp.ClientSession(
@@ -56,4 +58,4 @@ async def check_connection(self) -> bool:
return response.status == 200

except Exception:
return False
return False
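
The check_connection coroutine above probes the WAFL-LLM backend and returns False on any failure, which is what makes the constructor's RuntimeError ("Is WAFL-LLM running?") possible. Below is a standalone sketch of the same aiohttp pattern; the URL scheme, endpoint path, and timeout value are assumptions for illustration rather than the connector's actual settings.

# Standalone sketch of the connection check; scheme, path, and timeout are assumed values.
import asyncio

import aiohttp


async def check_connection(host: str, port: int, timeout_seconds: float = 5.0) -> bool:
    # Return True only if the backend answers with HTTP 200.
    url = f"http://{host}:{port}/"
    try:
        async with aiohttp.ClientSession(
            timeout=aiohttp.ClientTimeout(total=timeout_seconds)
        ) as session:
            async with session.get(url) as response:
                return response.status == 200
    except Exception:
        # Any connection or timeout error counts as "not reachable".
        return False


if __name__ == "__main__":
    print(asyncio.run(check_connection("localhost", 8080)))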
5 changes: 1 addition & 4 deletions wafl/events/conversation_events.py
@@ -80,10 +80,7 @@ async def _process_query(self, text: str):
):
await self._interface.output("I don't know what to reply")

if (
not text_is_question
and not self._interface.get_utterances_list()
):
if not text_is_question and not self._interface.get_utterances_list():
await self._interface.output("I don't know what to reply")

if (
