chore: cleanup
phil65 committed Nov 29, 2024
1 parent ff206df commit dc6a680
Showing 6 changed files with 30 additions and 72 deletions.
7 changes: 2 additions & 5 deletions src/llmling/config/loading.py
@@ -66,11 +66,8 @@ def load_config(path: str | os.PathLike[str]) -> Config:
         msg = f"Failed to validate configuration from {path}"
         raise exceptions.ConfigError(msg) from exc
     else:
-        logger.debug(
-            "Loaded raw configuration: version=%s, resources=%d",
-            config.version,
-            len(config.resources),
-        )
+        msg = "Loaded raw configuration: version=%s, resources=%d"
+        logger.debug(msg, config.version, len(config.resources))
         return config
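The cleanup pattern in this hunk recurs throughout the commit: bind the format string to a local `msg` so the logging call fits on one line, while still passing the arguments separately so %-interpolation stays lazy. A minimal, self-contained sketch of the idiom (function and values here are illustrative, not from the repository):

```python
import logging

logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger(__name__)

def report(version: str, resource_count: int) -> None:
    # Bind the format string first, then let the logger interpolate lazily.
    msg = "Loaded raw configuration: version=%s, resources=%d"
    logger.debug(msg, version, resource_count)

report("1.0", 3)  # emitted only when DEBUG logging is enabled
```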


11 changes: 4 additions & 7 deletions src/llmling/config/manager.py
@@ -120,9 +120,8 @@ def _validate_prompts(self) -> list[str]:
             else:
                 # Try to import the module
                 try:
-                    importlib.import_module(
-                        prompt_config.import_path.split(".")[0]
-                    )
+                    name = prompt_config.import_path.split(".")[0]
+                    importlib.import_module(name)
                 except ImportError:
                     warnings.append(
                         f"Cannot import module for prompt {name}: "
@@ -161,10 +160,8 @@ def _validate_resources(self) -> list[str]:
         for resource in self.config.resources.values():
             if hasattr(resource, "path"):
                 path = UPath(resource.path)
-                if not path.exists() and not path.as_uri().startswith((
-                    "http://",
-                    "https://",
-                )):
+                prefixes = ("http://", "https://")
+                if not path.exists() and not path.as_uri().startswith(prefixes):
                     warnings.append(f"Resource path not found: {path}")

         return warnings
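Two details let the rewritten resource check fit on one line: `str.startswith` accepts a tuple of prefixes, and that tuple is hoisted into a local. A rough sketch of the same validation shape, using `pathlib.Path` as a stand-in for `UPath` and checking the raw string instead of `as_uri()` (illustrative only, not the repository's code):

```python
from pathlib import Path

def check_resource(raw: str) -> str | None:
    """Return a warning string for missing local paths, None otherwise."""
    prefixes = ("http://", "https://")
    if raw.startswith(prefixes):  # remote resources are not checked on disk
        return None
    path = Path(raw)
    if not path.exists():
        return f"Resource path not found: {path}"
    return None

print(check_resource("https://example.com/data.txt"))  # None
print(check_resource("/definitely/missing/file.txt"))  # warning string
```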
11 changes: 4 additions & 7 deletions src/llmling/config/runtime.py
@@ -161,10 +161,8 @@ def from_config(cls, config: Config) -> Self:
             if name not in tool_registry:
                 tool_registry[name] = tool
             else:
-                logger.warning(
-                    "Tool %s from toolset overlaps with configured tool",
-                    name,
-                )
+                msg = "Tool %s from toolset overlaps with configured tool"
+                logger.warning(msg, name)

         for name, prompt_config in config.prompts.items():
             match prompt_config:
@@ -179,9 +177,8 @@ def from_config(cls, config: Config) -> Self:
                             func = importing.import_callable(path)
                             completion_funcs[arg_name] = func
                         except Exception:
-                            logger.exception(
-                                "Failed to import completion function: %s", path
-                            )
+                            msg = "Failed to import completion function: %s"
+                            logger.exception(msg, path)

                 prompt = create_prompt_from_callable(
                     prompt_config.import_path,
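`importing.import_callable` is llmling's own helper; the hunk only changes how its failure is logged. For orientation, a rough stand-in built on `importlib` shows what resolving a dotted path to a callable typically looks like (an assumption about the helper's behaviour, not its actual implementation):

```python
from collections.abc import Callable
from importlib import import_module
from typing import Any

def import_callable(path: str) -> Callable[..., Any]:
    """Resolve 'package.module.attr' to a callable object."""
    module_path, _, attr = path.rpartition(".")
    obj = getattr(import_module(module_path), attr)
    if not callable(obj):
        msg = f"{path} is not callable"
        raise TypeError(msg)
    return obj

dumps = import_callable("json.dumps")
print(dumps({"ok": True}))  # {"ok": true}
```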
10 changes: 3 additions & 7 deletions src/llmling/prompts/registry.py
@@ -166,14 +166,10 @@ def _get_type_completions(
         if len(args) == 2 and type(None) in args:  # noqa: PLR2004
             other_type = next(arg for arg in args if arg is not type(None))
             # Process the non-None type directly instead of using replace
-            return self._get_type_completions(
-                ExtendedPromptArgument(
-                    name=arg.name,
-                    type_hint=other_type,
-                    description=arg.description,
-                ),
-                current_value,
+            arg = ExtendedPromptArgument(
+                name=arg.name, type_hint=other_type, description=arg.description
             )
+            return self._get_type_completions(arg, current_value)

         # Handle bool
         if type_hint is bool:
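The surrounding check (`len(args) == 2 and type(None) in args`) is the usual way to detect `Optional[T]` after `typing.get_args`; the hunk only restructures the recursive call. A tiny sketch of the unwrap step in isolation:

```python
from typing import Optional, get_args

def unwrap_optional(type_hint: object) -> object:
    """Return T for Optional[T]; leave other hints untouched."""
    args = get_args(type_hint)
    if len(args) == 2 and type(None) in args:
        # Optional[T] is Union[T, None], so keep the non-None member.
        return next(arg for arg in args if arg is not type(None))
    return type_hint

print(unwrap_optional(Optional[int]))  # <class 'int'>
print(unwrap_optional(bool))           # <class 'bool'>
```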
18 changes: 4 additions & 14 deletions src/llmling/server/mcp_inproc_session.py
@@ -63,9 +63,8 @@ async def read_stderr():
             assert self.process.stderr
             while True:
                 try:
-                    line = await asyncio.get_event_loop().run_in_executor(
-                        None, self.process.stderr.readline
-                    )
+                    fn = self.process.stderr.readline
+                    line = await asyncio.get_event_loop().run_in_executor(None, fn)
                     if not line:
                         break
                     print(
@@ -109,12 +108,7 @@ async def send_request(
             msg = "Server not started"
             raise RuntimeError(msg)

-        request = {
-            "jsonrpc": "2.0",
-            "method": method,
-            "params": params or {},
-            "id": 1,
-        }
+        request = {"jsonrpc": "2.0", "method": method, "params": params or {}, "id": 1}

         request_str = json.dumps(request) + "\n"
         logger.debug("Sending request: %s", request_str.strip())
@@ -142,11 +136,7 @@ async def send_notification(
             msg = "Server not started"
             raise RuntimeError(msg)

-        notification = {
-            "jsonrpc": "2.0",
-            "method": method,
-            "params": params or {},
-        }
+        notification = {"jsonrpc": "2.0", "method": method, "params": params or {}}

         notification_str = json.dumps(notification) + "\n"
         logger.debug("Sending notification: %s", notification_str.strip())
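Both rewritten dict literals are plain JSON-RPC 2.0 payloads framed as newline-delimited JSON before being written to the server subprocess: requests carry an `id`, notifications do not. A minimal sketch of just the framing step (method names are illustrative; the transport is omitted):

```python
import json
from typing import Any

def frame_request(
    method: str, params: dict[str, Any] | None = None, *, req_id: int = 1
) -> str:
    request = {"jsonrpc": "2.0", "method": method, "params": params or {}, "id": req_id}
    return json.dumps(request) + "\n"  # one JSON message per line

def frame_notification(method: str, params: dict[str, Any] | None = None) -> str:
    notification = {"jsonrpc": "2.0", "method": method, "params": params or {}}
    return json.dumps(notification) + "\n"

print(frame_request("resources/list").strip())
print(frame_notification("notifications/initialized").strip())
```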
45 changes: 13 additions & 32 deletions src/llmling/server/server.py
@@ -116,11 +116,8 @@ async def handle_set_level(level: mcp.LoggingLevel) -> None:
             try:
                 python_level = level_map[level]
                 logger.setLevel(python_level)
-                await self.current_session.send_log_message(
-                    level="info",
-                    data=f"Log level set to {level}",
-                    logger=self.name,
-                )
+                data = f"Log level set to {level}"
+                await self.current_session.send_log_message(data=data, logger=self.name)
             except Exception as exc:
                 error = mcp.McpError("Error setting log level")
                 error.error = mcp.ErrorData(code=INTERNAL_ERROR, message=str(exc))
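`handle_set_level` translates the MCP level name into a stdlib logging level via `level_map` before applying it; the hunk only condenses the confirmation message sent back to the session. A sketch of that translation step with an assumed subset of the mapping (the real `level_map` is defined elsewhere in server.py):

```python
import logging

# Assumed shape of level_map; the actual table lives elsewhere in server.py.
level_map = {
    "debug": logging.DEBUG,
    "info": logging.INFO,
    "warning": logging.WARNING,
    "error": logging.ERROR,
}

logger = logging.getLogger("llmling.server")
logger.setLevel(level_map["warning"])
print(logging.getLevelName(logger.level))  # WARNING
```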
@@ -153,9 +150,7 @@ async def handle_call_tool(
         @self.server.list_prompts()
         async def handle_list_prompts() -> list[mcp.types.Prompt]:
             """Handle prompts/list request."""
-            return [
-                conversions.to_mcp_prompt(prompt) for prompt in self.runtime.get_prompts()
-            ]
+            return [conversions.to_mcp_prompt(p) for p in self.runtime.get_prompts()]

         @self.server.get_prompt()
         async def handle_get_prompt(
@@ -166,18 +161,13 @@ async def handle_get_prompt(
             try:
                 prompt = self.runtime.get_prompt(name)
                 messages = await self.runtime.render_prompt(name, arguments)
-
-                return GetPromptResult(
-                    description=prompt.description,
-                    messages=[conversions.to_mcp_message(msg) for msg in messages],
-                )
+                mcp_msgs = [conversions.to_mcp_message(msg) for msg in messages]
+                return GetPromptResult(description=prompt.description, messages=mcp_msgs)
             except exceptions.LLMLingError as exc:
                 msg = str(exc)
                 error = mcp.McpError(msg)
-                error.error = mcp.ErrorData(
-                    code=INVALID_PARAMS if "not found" in msg else INTERNAL_ERROR,
-                    message=msg,
-                )
+                code = INVALID_PARAMS if "not found" in msg else INTERNAL_ERROR
+                error.error = mcp.ErrorData(code=code, message=msg)
                 raise error from exc

         @self.server.list_resources()
@@ -188,10 +178,8 @@ async def handle_list_resources() -> list[mcp.types.Resource]:
                 try:
                     # First get URI and basic info without loading
                     uri = self.runtime.get_resource_uri(name)
-                    resource_config = self.runtime._config.resources[
-                        name
-                    ]  # Get raw config
-
+                    # Get raw config
+                    resource_config = self.runtime._config.resources[name]
                     mcp_resource = mcp.types.Resource(
                         uri=conversions.to_mcp_uri(uri),
                         name=name,
@@ -201,11 +189,8 @@ async def handle_list_resources() -> list[mcp.types.Resource]:
                     resources.append(mcp_resource)

                 except Exception:
-                    logger.exception(
-                        "Failed to create resource listing for %r. Config: %r",
-                        name,
-                        self.runtime._config.resources.get(name),
-                    )
+                    msg = "Failed to create resource listing for %r. Config: %r"
+                    logger.exception(msg, name, self.runtime._config.resources.get(name))
                     continue

             return resources
@@ -266,12 +251,8 @@ async def handle_progress(
             total: float | None,
         ) -> None:
             """Handle progress notifications from client."""
-            logger.debug(
-                "Progress notification: %s %.1f/%.1f",
-                token,
-                progress,
-                total or 0.0,
-            )
+            msg = "Progress notification: %s %.1f/%.1f"
+            logger.debug(msg, token, progress, total or 0.0)

     def _setup_observers(self) -> None:
         """Set up registry observers for MCP notifications."""
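The error paths earlier in this file all follow the same shape: turn the caught exception into an MCP error, picking `INVALID_PARAMS` when the message indicates a missing item and `INTERNAL_ERROR` otherwise. A stripped-down sketch of that selection with the standard JSON-RPC codes inlined (the real code wraps them in `mcp.ErrorData`):

```python
INVALID_PARAMS = -32602  # JSON-RPC 2.0: invalid method parameters
INTERNAL_ERROR = -32603  # JSON-RPC 2.0: internal error

def classify_error(exc: Exception) -> tuple[int, str]:
    msg = str(exc)
    code = INVALID_PARAMS if "not found" in msg else INTERNAL_ERROR
    return code, msg

print(classify_error(LookupError("prompt 'greet' not found")))  # (-32602, ...)
print(classify_error(RuntimeError("renderer crashed")))         # (-32603, ...)
```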
