
Commit 77f3490
fix styles etc
jameswnl committed Oct 17, 2024
1 parent 7d0a210 commit 77f3490
Showing 9 changed files with 30 additions and 16 deletions.
2 changes: 1 addition & 1 deletion ols/app/endpoints/ols.py
@@ -27,7 +27,7 @@
     SummarizerResponse,
     UnauthorizedResponse,
 )
-from ols.customize import prompts, keywords
+from ols.customize import keywords, prompts
 from ols.src.llms.llm_loader import LLMConfigurationError, resolve_provider_config
 from ols.src.query_helpers.attachment_appender import append_attachments_to_query
 from ols.src.query_helpers.docs_summarizer import DocsSummarizer
10 changes: 6 additions & 4 deletions ols/app/models/config.py
@@ -902,9 +902,11 @@ def __init__(
         if data is None:
             return
 
-        self.conversation_cache = ConversationCacheConfig(
-            data.get("conversation_cache")
-        ) if data.get("conversation_cache") else None
+        self.conversation_cache = (
+            ConversationCacheConfig(data.get("conversation_cache"))
+            if data.get("conversation_cache")
+            else None
+        )
         self.logging_config = LoggingConfig(**data.get("logging_config", {}))
         if data.get("reference_content") is not None:
             self.reference_content = ReferenceContent(data.get("reference_content"))
@@ -933,7 +935,7 @@ def __init__(
         self.certificate_directory = data.get(
             "certificate_directory", constants.DEFAULT_CERTIFICATE_DIRECTORY
        )
-        self.customize = data.get('customize')
+        self.customize = data.get("customize")
 
     def __eq__(self, other: object) -> bool:
         """Compare two objects for equality."""
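
To make the reformatted conditional easier to follow, here is a minimal, runnable sketch of the optional-section pattern it expresses: a nested config object is constructed only when its key is present in the parsed data, and the attribute is left as None otherwise. SimpleCacheConfig and the data dict are hypothetical stand-ins for illustration, not the real OLS classes.

# Hypothetical stand-in for ConversationCacheConfig; illustration only.
class SimpleCacheConfig:
    def __init__(self, data: dict) -> None:
        self.cache_type = data.get("type", "memory")


data = {"customize": "aap"}  # parsed config without a "conversation_cache" section

# Same conditional-expression shape as in the __init__ shown above.
conversation_cache = (
    SimpleCacheConfig(data.get("conversation_cache"))
    if data.get("conversation_cache")
    else None
)
customize = data.get("customize")

print(conversation_cache)  # None: the section is absent, so nothing is constructed
print(customize)           # "aap"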
10 changes: 6 additions & 4 deletions ols/customize/__init__.py
@@ -1,6 +1,8 @@
-import os
+"""Contains customization packages for individual projects (for prompts/keywords)."""
+
 import importlib
+import os
 
-project = os.getenv('PROJECT', 'ols')
-prompts = importlib.import_module(f'ols.customize.{project}.prompts')
-keywords = importlib.import_module(f'ols.customize.{project}.keywords')
+project = os.getenv("PROJECT", "ols")
+prompts = importlib.import_module(f"ols.customize.{project}.prompts")
+keywords = importlib.import_module(f"ols.customize.{project}.keywords")
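
The module above picks the customization package at import time: the PROJECT environment variable (default "ols") decides which ols.customize.<project> subpackage supplies the prompts and keywords modules. A rough usage sketch, assuming the aap and ols subpackages introduced in this commit each provide prompts and keywords modules:

import os

# Select the Ansible Automation Platform customization before ols.customize is
# imported for the first time; without PROJECT set, the default "ols" package is used.
os.environ["PROJECT"] = "aap"

from ols.customize import keywords, prompts  # resolved via importlib at import time

# prompts and keywords now point at ols.customize.aap.prompts / .keywords, so callers
# such as docs_summarizer.py and question_validator.py stay project-agnostic.
print(prompts.__name__)   # ols.customize.aap.prompts
print(keywords.__name__)  # ols.customize.aap.keywords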
1 change: 1 addition & 0 deletions ols/customize/aap/__init__.py
@@ -0,0 +1 @@
"""Customized prompts/keyvords for Ansible Automation Platform (aap)."""
1 change: 1 addition & 0 deletions ols/customize/ols/__init__.py
@@ -0,0 +1 @@
"""Customized prompts/keyvords for OpenShift Lightspeed Service (ols)."""
17 changes: 13 additions & 4 deletions ols/src/prompts/prompt_generator.py
@@ -12,6 +12,7 @@
 from ols.constants import ModelFamily
 from ols.customize import prompts
 
+
 def restructure_rag_context_pre(text: str, model: str) -> str:
     """Restructure rag text - pre truncation."""
     if ModelFamily.GRANITE in model:
@@ -62,7 +63,9 @@ def _generate_prompt_gpt(self) -> tuple[ChatPromptTemplate, dict]:
 
         if len(self._rag_context) > 0:
             llm_input_values["context"] = "".join(self._rag_context)
-            sys_intruction = sys_intruction + "\n" + prompts.USE_CONTEXT_INSTRUCTION.strip()
+            sys_intruction = (
+                sys_intruction + "\n" + prompts.USE_CONTEXT_INSTRUCTION.strip()
+            )
 
         if len(self._history) > 0:
             chat_history = []
@@ -73,7 +76,9 @@ def _generate_prompt_gpt(self) -> tuple[ChatPromptTemplate, dict]:
                 chat_history.append(AIMessage(content=h.removeprefix("ai: ")))
             llm_input_values["chat_history"] = chat_history
 
-            sys_intruction = sys_intruction + "\n" + prompts.USE_HISTORY_INSTRUCTION.strip()
+            sys_intruction = (
+                sys_intruction + "\n" + prompts.USE_HISTORY_INSTRUCTION.strip()
+            )
 
         if "context" in llm_input_values:
             sys_intruction = sys_intruction + "\n{context}"
@@ -93,10 +98,14 @@ def _generate_prompt_granite(self) -> tuple[PromptTemplate, dict]:
 
         if len(self._rag_context) > 0:
             llm_input_values["context"] = "".join(self._rag_context)
-            prompt_message = prompt_message + "\n" + prompts.USE_CONTEXT_INSTRUCTION.strip()
+            prompt_message = (
+                prompt_message + "\n" + prompts.USE_CONTEXT_INSTRUCTION.strip()
+            )
 
         if len(self._history) > 0:
-            prompt_message = prompt_message + "\n" + prompts.USE_HISTORY_INSTRUCTION.strip()
+            prompt_message = (
+                prompt_message + "\n" + prompts.USE_HISTORY_INSTRUCTION.strip()
+            )
             llm_input_values["chat_history"] = "".join(self._history)
 
         if "context" in llm_input_values:
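
All of the newly wrapped lines follow one assembly pattern: the base system instruction grows by an extra instruction block when RAG context or chat history is available, and a {context} placeholder is appended when context will be injected into the template. A dependency-free sketch of that pattern follows; the three instruction strings are assumed stand-ins for the constants provided by the customized prompts module, not the real values.

# Stand-ins for the constants in ols.customize.<project>.prompts; values are invented.
BASE_SYSTEM_INSTRUCTION = "You are a helpful platform assistant."
USE_CONTEXT_INSTRUCTION = "Use the retrieved document chunks to answer the question."
USE_HISTORY_INSTRUCTION = "Take the previous conversation turns into account."


def build_system_instruction(rag_context: list[str], history: list[str]) -> str:
    """Mirror the conditional assembly used by the GPT prompt path above."""
    sys_instruction = BASE_SYSTEM_INSTRUCTION.strip()
    if len(rag_context) > 0:
        sys_instruction = (
            sys_instruction + "\n" + USE_CONTEXT_INSTRUCTION.strip()
        )
    if len(history) > 0:
        sys_instruction = (
            sys_instruction + "\n" + USE_HISTORY_INSTRUCTION.strip()
        )
    if len(rag_context) > 0:
        # The literal placeholder is filled in later by the prompt template.
        sys_instruction = sys_instruction + "\n{context}"
    return sys_instruction


print(build_system_instruction(["retrieved chunk"], ["human: hi", "ai: hello"]))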
2 changes: 1 addition & 1 deletion ols/src/query_helpers/docs_summarizer.py
@@ -11,10 +11,10 @@
 from ols.app.models.config import ProviderConfig
 from ols.app.models.models import SummarizerResponse
 from ols.constants import RAG_CONTENT_LIMIT, GenericLLMParameters
+from ols.customize import prompts
 from ols.src.prompts.prompt_generator import GeneratePrompt
 from ols.src.query_helpers.query_helper import QueryHelper
 from ols.utils.token_handler import TokenHandler
-from ols.customize import prompts
 
 logger = logging.getLogger(__name__)
 
2 changes: 1 addition & 1 deletion ols/src/query_helpers/question_validator.py
@@ -9,9 +9,9 @@
 from ols import config
 from ols.app.metrics import TokenMetricUpdater
 from ols.constants import SUBJECT_REJECTED, GenericLLMParameters
+from ols.customize import prompts
 from ols.src.query_helpers.query_helper import QueryHelper
 from ols.utils.token_handler import TokenHandler
-from ols.customize import prompts
 
 logger = logging.getLogger(__name__)
 
1 change: 0 additions & 1 deletion runner.py
@@ -13,7 +13,6 @@
 
 import ols.app.models.config as config_model
 from ols import constants
-from ols.utils.auth_dependency import K8sClientSingleton
 from ols.utils.logging import configure_logging
 
 
