Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

updated with llama examples including ollama wrapper #48

Open
wants to merge 1 commit into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 4 additions & 1 deletion .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -122,4 +122,7 @@ venv.bak/
# mypy
.mypy_cache/
.dmypy.json
dmypy.json
dmypy.json

# venv
swarmenv/
41 changes: 41 additions & 0 deletions examples/basic_llama/README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,41 @@
# Swarm basic - Ollama examples

This folder contains basic examples demonstrating core Swarm capabilities. The original basic examples are modified to work with Ollama. These examples show the simplest implementations of Swarm, with one input message, and a corresponding output. The `simple_loop_no_helpers` has a while loop to demonstrate how to create an interactive Swarm session.

### Examples

1. **agent_handoff.py**

- Demonstrates how to transfer a conversation from one agent to another.
- **Usage**: Transfers Spanish-speaking users from an English agent to a Spanish agent.

2. **bare_minimum.py**

- A bare minimum example showing the basic setup of an agent.
- **Usage**: Sets up an agent that responds to a simple user message.

3. **context_variables.py**

- Shows how to use context variables within an agent.
- **Usage**: Uses context variables to greet a user by name and print account details.

4. **function_calling.py**

- Demonstrates how to define and call functions from an agent.
- **Usage**: Sets up an agent that can respond with weather information for a given location.

5. **simple_loop_no_helpers.py**
- An example of a simple interaction loop without using helper functions.
- **Usage**: Sets up a loop where the user can continuously interact with the agent, printing the conversation.

6. **all_in_one.py**
- A more complex example that demonstrates how to use all the features above in one go.
- **Usage**: Sets up an agent that can transfer conversations, provide weather information, and print account details.

## Running the Examples

To run any of the examples, use the following command:

```shell
python3 <example_name>.py
```
101 changes: 101 additions & 0 deletions examples/basic_llama/agent_handoff.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,101 @@
from swarm.core import Swarm, Agent
import ollama
from typing import List, Dict, Any
import json

class OllamaWrapper:
    """Adapt an ``ollama.Client`` to the subset of the OpenAI client API
    that Swarm uses: ``client.chat.completions.create(...)``.

    Only ``model``, ``messages`` and ``stream`` are forwarded to Ollama;
    any other keyword arguments Swarm passes are dropped here.
    """

    class _WrappedMessage:
        """Mimic an OpenAI chat message (content / role / tool_calls)."""

        def __init__(self, content, role):
            self.content = content
            self.role = role
            # Tool calls are emulated via prompt text in these examples,
            # not via a native tool-calling API.
            self.tool_calls = None

        def model_dump_json(self):
            """Serialize like pydantic's ``model_dump_json``."""
            return json.dumps({"content": self.content, "role": self.role})

    class _WrappedChoice:
        """Mimic an OpenAI ``Choice`` holding a single message."""

        def __init__(self, message):
            self.message = message

    class _WrappedResponse:
        """Mimic an OpenAI chat-completion response (exposes ``.choices``)."""

        def __init__(self, ollama_response):
            msg = ollama_response['message']
            self.choices = [
                OllamaWrapper._WrappedChoice(
                    OllamaWrapper._WrappedMessage(msg['content'], msg['role'])
                )
            ]

    class ChatCompletions:
        """Expose ``.completions.create`` the way the OpenAI client does."""

        def __init__(self, client):
            self.client = client
            # ``chat`` and ``chat.completions`` are the same object, matching
            # the attribute path Swarm uses: client.chat.completions.create.
            self.completions = self

        def create(self, **kwargs):
            """Translate an OpenAI-style call into ``ollama.Client.chat``."""
            # Map Swarm parameters to Ollama parameters.
            ollama_kwargs = {
                "model": kwargs.get("model"),
                "messages": kwargs.get("messages"),
                "stream": kwargs.get("stream", False),
            }

            response = self.client.chat(**ollama_kwargs)
            # Previously the wrapper classes were rebuilt with type() on every
            # call and model_dump_json was a zero-arg lambda class attribute
            # (it would break on instances); the hoisted classes above fix that.
            return OllamaWrapper._WrappedResponse(response)

    def __init__(self, client):
        self.client = client
        self.chat = self.ChatCompletions(client)

    def __getattr__(self, name):
        # Fall through to the underlying Ollama client for anything else.
        return getattr(self.client, name)

# Initialize Ollama client.
# Assumes a local Ollama server on the default port — TODO confirm host/port.
ollama_client = ollama.Client(host="http://localhost:11434")

# Wrap Ollama client so it presents the OpenAI-style interface Swarm expects.
wrapped_client = OllamaWrapper(ollama_client)

# Initialize Swarm with wrapped client.
client = Swarm(client=wrapped_client)

# Two single-language agents: the English agent is the entry point and may
# hand the conversation off to the Spanish agent (see the check below).
english_agent = Agent(
    name="English Agent",
    model="llama3.2:3b",
    instructions="You only speak English.",
)

spanish_agent = Agent(
    name="Spanish Agent",
    model="llama3.2:3b",
    instructions="You only speak Spanish.",
)

def transfer_to_spanish_agent():
    """Transfer spanish speaking users immediately."""
    # Returns the module-level Spanish agent. In this example the caller
    # invokes this manually after spotting the text marker in the reply;
    # NOTE(review): the docstring presumably also serves as the tool
    # description when registered via english_agent.functions — verify.
    return spanish_agent

# Tool schema advertised to the model through the prompt text (this Ollama
# path has no native tool-calling, so "tools" are described as JSON).
# Fix: the original string was malformed JSON — the object opened by "[{"
# was never closed before "]"; it now ends with "}]".
llama_tool_call = """[{
    "name": "transfer_to_spanish_agent",
    "description": "Transfer the conversation to a Spanish-speaking agent",
    "parameters": {
        "type": "object",
        "properties": {}
    }
}]"""

english_agent.functions.append(transfer_to_spanish_agent)
english_agent.instructions = f"""You only speak English. You can use the following functions:

{llama_tool_call}

If you need to use a function, format your response as:
[function_name()]

Only use the functions when necessary, and provide a natural language response after using them.
If the user speaks Spanish or requests Spanish assistance, use the transfer_to_spanish_agent function."""

# Single-turn demo: greet in Spanish so the English agent should emit the
# transfer marker, then re-run the conversation with the Spanish agent.
messages = [{"role": "user", "content": "Hola. ¿Como estás?"}]
response = client.run(agent=english_agent, messages=messages)


# The "tool call" is plain text; look for the marker format the prompt asked
# for. NOTE(review): nothing is printed when no transfer occurs — the
# fallback print below is commented out.
if "[transfer_to_spanish_agent(" in response.messages[-1]["content"]:
    transfer_agent = transfer_to_spanish_agent()
    result = client.run(agent=transfer_agent, messages=response.messages)
    print(result.messages[-1]["content"])
#print(response.messages[-1]["content"])
206 changes: 206 additions & 0 deletions examples/basic_llama/all_in_one.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,206 @@
from swarm.core import Swarm, Agent
import ollama
from typing import List, Dict, Any
import json
from colorama import Fore, Style, init

class OllamaWrapper:
    """Adapt an ``ollama.Client`` to the subset of the OpenAI client API
    that Swarm uses: ``client.chat.completions.create(...)``.

    Only ``model``, ``messages`` and ``stream`` are forwarded to Ollama;
    any other keyword arguments Swarm passes are dropped here.
    """

    class _WrappedMessage:
        """Mimic an OpenAI chat message (content / role / tool_calls)."""

        def __init__(self, content, role):
            self.content = content
            self.role = role
            # Tool calls are emulated via prompt text in these examples,
            # not via a native tool-calling API.
            self.tool_calls = None

        def model_dump_json(self):
            """Serialize like pydantic's ``model_dump_json``."""
            return json.dumps({"content": self.content, "role": self.role})

    class _WrappedChoice:
        """Mimic an OpenAI ``Choice`` holding a single message."""

        def __init__(self, message):
            self.message = message

    class _WrappedResponse:
        """Mimic an OpenAI chat-completion response (exposes ``.choices``)."""

        def __init__(self, ollama_response):
            msg = ollama_response['message']
            self.choices = [
                OllamaWrapper._WrappedChoice(
                    OllamaWrapper._WrappedMessage(msg['content'], msg['role'])
                )
            ]

    class ChatCompletions:
        """Expose ``.completions.create`` the way the OpenAI client does."""

        def __init__(self, client):
            self.client = client
            # ``chat`` and ``chat.completions`` are the same object, matching
            # the attribute path Swarm uses: client.chat.completions.create.
            self.completions = self

        def create(self, **kwargs):
            """Translate an OpenAI-style call into ``ollama.Client.chat``."""
            # Map Swarm parameters to Ollama parameters.
            ollama_kwargs = {
                "model": kwargs.get("model"),
                "messages": kwargs.get("messages"),
                "stream": kwargs.get("stream", False),
            }

            response = self.client.chat(**ollama_kwargs)
            # Previously the wrapper classes were rebuilt with type() on every
            # call and model_dump_json was a zero-arg lambda class attribute
            # (it would break on instances); the hoisted classes above fix that.
            return OllamaWrapper._WrappedResponse(response)

    def __init__(self, client):
        self.client = client
        self.chat = self.ChatCompletions(client)

    def __getattr__(self, name):
        # Fall through to the underlying Ollama client for anything else.
        return getattr(self.client, name)

# Initialize Ollama client.
# Assumes a local Ollama server on the default port — TODO confirm host/port.
ollama_client = ollama.Client(host="http://localhost:11434")

# Wrap Ollama client so it presents the OpenAI-style interface Swarm expects.
wrapped_client = OllamaWrapper(ollama_client)

# Initialize Swarm with wrapped client.
client = Swarm(client=wrapped_client)


# FUNCTIONS
def transfer_to_spanish_agent():
    """Transfer spanish speaking users immediately."""
    # Returns the module-level Spanish agent defined below; in this example
    # check_for_tool_call() invokes it manually when the model's reply
    # contains the transfer marker text.
    return spanish_agent

def get_weather(location) -> str:
    """Return a canned weather report for *location*.

    This is stub data for the example: the location is accepted but never
    used for a real lookup, and the report is always the same.
    """
    report = "{'temp':67, 'unit':'F'}"
    return report

def print_account_details(context_variables: dict):
    """Format the user's name and id pulled from *context_variables*.

    Missing keys fall back to ``None`` and are rendered literally as
    "None" in the returned string.
    """
    name = context_variables.get("name")
    user_id = context_variables.get("user_id")
    return f"Account Details: {name} {user_id}"

llama_tool_call = """[
{
"name": "transfer_to_spanish_agent",
"description": "Transfer the conversation to a Spanish-speaking agent",
"parameters": {
"type": "object",
"properties": {}
}
},
{
"name": "get_weather",
"description": "Get the weather for a specific location",
"parameters": {
"type": "object",
"properties": {
"location": {
"type": "string",
"description": "The location to get weather for"
}
},
"required": ["location"]
}
},
{
"name": "print_account_details",
"description": "Print account details for a user",
"parameters": {
"type": "object",
"properties": {
"context_variables": {
"type": "object",
"properties": {
"user_id": {
"type": "string",
"description": "The user's ID"
},
"name": {
"type": "string",
"description": "The user's name"
}
}
}
},
"required": ["context_variables"]
}
}
]"""

# AGENTS
# Entry-point agent; its static instructions string is replaced with the
# dynamic instructions() callable just before the main loop.
english_agent = Agent(
    name="English Agent",
    model="llama3.2:3b",
    instructions="You only speak English.",
    functions=[transfer_to_spanish_agent, get_weather, print_account_details],
)

# Hand-off target for Spanish-speaking users.
spanish_agent = Agent(
    name="Spanish Agent",
    model="llama3.2:3b",
    instructions="You only speak Spanish.",

)
def instructions(context_variables):
    """Build the English agent's system prompt from *context_variables*.

    The tool-schema text is pulled out of a copy of the context (so it is
    not echoed back in the "context variables" section of the prompt) and
    the user's name is injected into the greeting line.
    """
    ctx = dict(context_variables)
    tool_call = ctx.pop('llama_tool_call', '')
    user_name = ctx.get("name", "User")
    return f"""You only speak English. Your user's name is {user_name}. You can use the following functions when necessary:

{tool_call}
These are the context variables, could be important to use in certain functions: {ctx}
To use a function, format your response as follows:
[function_name({{"param1": "value1", "param2": "value2"}})]

Guidelines:
1. Only use functions when they are directly relevant to the user's request.
2. Provide a natural language response after using a function.
3. If the user speaks Spanish or requests Spanish assistance, use the transfer_to_spanish_agent function.
4. For weather requests, use the get_weather function with the specified location.
5. To display account details, use the print_account_details function with the appropriate context variables.

Remember to ALWAYS respond in English, unless using the transfer_to_spanish_agent function."""

# Context passed to the agent on every run. The tool schema rides along so
# instructions() can splice it into the prompt; it is stripped back out
# before the remaining keys are shown as "context variables".
context_variables = {
    "name": "edward hicksford",
    "user_id": "@citizenhicks",
    "llama_tool_call": llama_tool_call
}

def check_for_tool_call(messages, context_variables):
    """Scan the latest message for an emulated tool call and execute it.

    Returns a ``(reply_text, agent_name)`` tuple. Because this Ollama path
    has no native tool-calling, the model is instructed to emit calls as
    plain text like ``[get_weather({"location": "Paris"})]``; this function
    pattern-matches that text.
    """
    last_message = messages[-1]["content"]

    # Transfer to the Spanish agent. The substring match is deliberately
    # loose: any mention of "to_spanish_agent" triggers a re-run.
    if "to_spanish_agent" in last_message:
        transfer_agent = transfer_to_spanish_agent()
        result = client.run(agent=transfer_agent, messages=messages[:-1])
        return result.messages[-1]["content"], "Spanish Agent"

    # Weather lookup: extract the argument text between "[get_weather(" and ")]".
    elif "[get_weather(" in last_message:
        start = last_message.index("[get_weather(") + len("[get_weather(")
        end = last_message.index(")]", start)
        call_args = last_message[start:end].strip()
        # Fix: the prompt mandates JSON args ({"location": "..."}), but the
        # old split('=') parsing raised IndexError on exactly that format.
        # Accept JSON, keyword (location="..."), or a bare quoted string.
        location = None
        try:
            parsed = json.loads(call_args)
            if isinstance(parsed, dict):
                location = parsed.get("location")
            elif isinstance(parsed, str):
                location = parsed
        except ValueError:
            pass
        if location is None:
            if "=" in call_args:
                location = call_args.split("=", 1)[1].strip().strip('"')
            else:
                location = call_args.strip('"')
        weather = get_weather(location)
        return f"Weather in {location}: {weather}", "English Agent"

    # Account details: call the helper with the context minus the tool schema.
    elif "[print_account_details(" in last_message:
        context_variables_ = context_variables.copy()
        context_variables_.pop('llama_tool_call', None)
        result = print_account_details(context_variables_)
        return f"{result}", "English Agent"

    # No function call detected: return the model's message unchanged.
    else:
        return last_message, "English Agent"


# Interactive loop: read a user line, run the agent, then post-process the
# reply for emulated tool calls. Exit with Ctrl-C / Ctrl-D (no built-in quit).
messages = []
english_agent.instructions = instructions  # swap in the dynamic prompt builder
agent = english_agent
while True:
    user_input = input(f"{Fore.BLUE}> {Style.RESET_ALL}")
    messages.append({"role": "user", "content": user_input})

    response = client.run(agent=agent,
                          messages=messages,
                          context_variables=context_variables)
    messages.append({"role": "assistant", "content": response.messages[-1]["content"]})

    # Detect and execute any text-encoded tool call in the latest reply.
    result, role = check_for_tool_call(messages, context_variables)
    print(f"{role}: {result}\n")

Loading