Skip to content

Commit

Permalink
enable streaming
Browse files Browse the repository at this point in the history
  • Loading branch information
ericcccsliu committed Mar 29, 2024
1 parent 998d448 commit 551de1a
Show file tree
Hide file tree
Showing 5 changed files with 54 additions and 31 deletions.
Binary file modified api/utils/llm_providers/__pycache__/anthropic.cpython-311.pyc
Binary file not shown.
Binary file modified api/utils/llm_providers/__pycache__/openai.cpython-311.pyc
Binary file not shown.
38 changes: 19 additions & 19 deletions api/utils/llm_providers/anthropic.py
Original file line number Diff line number Diff line change
@@ -1,40 +1,40 @@
from anthropic import AsyncAnthropic
from starlette.config import Config

# Load secrets from the local .env file rather than the process environment.
config = Config('.env')
# Single shared async client; the SDK manages its own connection pooling.
client = AsyncAnthropic(api_key=config("ANTHROPIC_API_KEY"))

async def anthropic_generate_response(conversation):
    """Stream an assistant reply for *conversation* from the Anthropic API.

    Yields text fragments (str) as they arrive so the caller can forward
    them to the client incrementally.

    Args:
        conversation: project object with a ``model.name`` and a list of
            ``messages`` each exposing ``role`` and ``content``.
            (assumed from usage — confirm against caller)
    """
    # Convert project message objects into the wire format the SDK expects.
    messages = [
        {"role": message.role, "content": message.content}
        for message in conversation.messages
    ]

    # stream=True makes create() return an async iterator of raw events.
    stream = await client.messages.create(
        model=conversation.model.name,
        messages=messages,
        max_tokens=1024,
        stream=True,
    )

    async for event in stream:
        # Only content_block_delta events carry generated text; other event
        # types (message_start, message_stop, ...) are skipped.
        if event.type == "content_block_delta":
            yield event.delta.text

async def generate_conversation_name(conversation):
    """Ask a small, fast model to produce a short title for *conversation*.

    Returns:
        str: the generated conversation name (first content block's text).
    """
    messages = [
        {"role": message.role, "content": message.content}
        for message in conversation.messages
        if message.content.strip()  # Filter out messages with empty content
    ]
    # Final user turn carries the actual naming instruction.
    messages.append({"role": "user", "content": "Please give a short, concise name for the above conversation."})

    # Haiku is cheap/fast enough for this auxiliary task; max_tokens=10
    # keeps the name short by construction.
    response = await client.messages.create(
        model="claude-3-haiku-20240307",
        system="You are a conversation namer. Give a short, concise name for the given conversation.",
        messages=messages,
        max_tokens=10,
    )

    return response.content[0].text
23 changes: 12 additions & 11 deletions api/utils/llm_providers/openai.py
Original file line number Diff line number Diff line change
@@ -1,22 +1,23 @@
from openai import AsyncOpenAI
from starlette.config import Config

# Load secrets from the local .env file rather than the process environment.
config = Config('.env')

# Single shared async client; the SDK manages its own connection pooling.
client = AsyncOpenAI(api_key=config("OPENAI_API_KEY"))

async def openai_generate_response(conversation):
    """Stream an assistant reply for *conversation* from the OpenAI API.

    Yields text fragments (str) as they arrive so the caller can forward
    them to the client incrementally. Empty strings are yielded for chunks
    that carry no content (e.g. the role-only first chunk and the final
    chunk), keeping the output type uniform for consumers.

    Args:
        conversation: project object with a ``model.name`` and a list of
            ``messages`` each exposing ``role`` and ``content``.
            (assumed from usage — confirm against caller)
    """
    # Convert project message objects into the wire format the SDK expects.
    messages = [
        {"role": message.role, "content": message.content}
        for message in conversation.messages
    ]

    # stream=True makes create() return an async iterator of chunk objects.
    stream = await client.chat.completions.create(
        model=conversation.model.name,
        messages=messages,
        stream=True,
    )

    async for chunk in stream:
        content = chunk.choices[0].delta.content
        # delta.content is None on role-only / terminal chunks.
        if content is None:
            content = ""
        yield content
24 changes: 23 additions & 1 deletion app/components/ConversationMessages.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -35,13 +35,35 @@ const ConversationMessages: React.FC<ConversationMessagesProps> = ({
userName = "User",
}) => {
const messagesEndRef = useRef<HTMLDivElement>(null);
// const messagesContainerRef = useRef<HTMLDivElement>(null);

useEffect(() => {
messagesEndRef.current?.scrollIntoView({ behavior: "smooth" });
}, [messages.length]);

// useEffect(() => {
// const messagesContainer = messagesContainerRef.current;
// if (messagesContainer) {
// const scrollHeight = messagesContainer.scrollHeight;
// const scrollTop = messagesContainer.scrollTop;
// const clientHeight = messagesContainer.clientHeight;
// const scrollPosition = scrollTop + clientHeight;
// const scrollThreshold = scrollHeight * 0.9; // Bottom 10% of the page

// if (scrollPosition >= scrollThreshold) {
// messagesEndRef.current?.scrollIntoView({ behavior: "smooth" });
// }
// }
// }, [messages]);

return (
<VStack spacing={4} align="stretch" width="100%" mb={10}>
<VStack
spacing={4}
align="stretch"
width="100%"
mb={10}
// ref={messagesContainerRef}
>
{messages.map((message, index) => (
<Box
key={index}
Expand Down

0 comments on commit 551de1a

Please sign in to comment.