Skip to content

Commit

Permalink
Merge branch 'master' into bagatur/3_12_ci
Browse files (browse the repository at this point in the history)
  • Branch information: baskaryan committed Jan 27, 2024
2 parents b522d11 + 27665e3 commit 554b424
Show file tree
Hide file tree
Showing 10 changed files with 6 additions and 122 deletions.
13 changes: 0 additions & 13 deletions .github/workflows/langchain_cli_release.yml

This file was deleted.

13 changes: 0 additions & 13 deletions .github/workflows/langchain_community_release.yml

This file was deleted.

13 changes: 0 additions & 13 deletions .github/workflows/langchain_core_release.yml

This file was deleted.

13 changes: 0 additions & 13 deletions .github/workflows/langchain_experimental_release.yml

This file was deleted.

13 changes: 0 additions & 13 deletions .github/workflows/langchain_experimental_test_release.yml

This file was deleted.

13 changes: 0 additions & 13 deletions .github/workflows/langchain_openai_release.yml

This file was deleted.

27 changes: 0 additions & 27 deletions .github/workflows/langchain_release.yml

This file was deleted.

13 changes: 0 additions & 13 deletions .github/workflows/langchain_test_release.yml

This file was deleted.

File renamed without changes.
10 changes: 6 additions & 4 deletions libs/community/langchain_community/chat_models/anthropic.py
Original file line number Diff line number Diff line change
Expand Up @@ -142,9 +142,10 @@ def _stream(
stream_resp = self.client.completions.create(**params, stream=True)
for data in stream_resp:
delta = data.completion
yield ChatGenerationChunk(message=AIMessageChunk(content=delta))
chunk = ChatGenerationChunk(message=AIMessageChunk(content=delta))
yield chunk
if run_manager:
run_manager.on_llm_new_token(delta)
run_manager.on_llm_new_token(delta, chunk=chunk)

async def _astream(
self,
Expand All @@ -161,9 +162,10 @@ async def _astream(
stream_resp = await self.async_client.completions.create(**params, stream=True)
async for data in stream_resp:
delta = data.completion
yield ChatGenerationChunk(message=AIMessageChunk(content=delta))
chunk = ChatGenerationChunk(message=AIMessageChunk(content=delta))
yield chunk
if run_manager:
await run_manager.on_llm_new_token(delta)
await run_manager.on_llm_new_token(delta, chunk=chunk)

def _generate(
self,
Expand Down

0 comments on commit 554b424

Please sign in to comment.