
perf: [LS-2561] Stream compress multipart runs #1316

Merged: 57 commits, Dec 21, 2024
Changes from 3 commits
Commits (57)
a5efbb8
start of compression
angus-langchain Dec 10, 2024
c10a8ad
manually encode
angus-langchain Dec 10, 2024
d456776
set boundary
angus-langchain Dec 10, 2024
0359235
add zstandard
angus-langchain Dec 10, 2024
08bce98
set limits from config
angus-langchain Dec 10, 2024
800472a
add slots
angus-langchain Dec 10, 2024
d4e45e1
lint
angus-langchain Dec 10, 2024
46c8740
fix mypy
angus-langchain Dec 10, 2024
a5cba8d
implement correct timeouts
angus-langchain Dec 10, 2024
c32cb0c
stream instead of read data from buffer
angus-langchain Dec 10, 2024
2aafc20
fix client type
angus-langchain Dec 10, 2024
03e4e02
separate compressed buffer from tracing queue
angus-langchain Dec 10, 2024
c952536
clean up update multipart
angus-langchain Dec 10, 2024
c3fdd36
address comments
angus-langchain Dec 10, 2024
d6b186d
just write directly to compressor instead of streaming
angus-langchain Dec 10, 2024
f061c0b
set trcing queue to none
angus-langchain Dec 10, 2024
8a0a60e
send multipart req
angus-langchain Dec 10, 2024
b2113ba
remove flush
angus-langchain Dec 11, 2024
982429a
remove print
angus-langchain Dec 11, 2024
5b58940
set compression level
angus-langchain Dec 11, 2024
f2e5ed9
remove prints
angus-langchain Dec 11, 2024
e7dc6bc
pass buffer directly to request
angus-langchain Dec 11, 2024
3ab6b39
resolve conflict
angus-langchain Dec 11, 2024
fb8fa96
lint
angus-langchain Dec 11, 2024
9d0a4ec
my fixes
angus-langchain Dec 11, 2024
f3e7971
black reformatting
angus-langchain Dec 11, 2024
066b9b6
multithreaded compression
angus-langchain Dec 11, 2024
ec15660
add parallel works for sending multipart req
angus-langchain Dec 12, 2024
35e9843
reformatting
angus-langchain Dec 12, 2024
59b5f27
black reformat
angus-langchain Dec 12, 2024
ce44b25
mypy
angus-langchain Dec 12, 2024
181f839
increase payload size to 20mb
angus-langchain Dec 12, 2024
0120dce
use multipart
angus-langchain Dec 12, 2024
e2ada65
use threadpoolexecutor
angus-langchain Dec 12, 2024
25f4f19
fix thread garbage collection
angus-langchain Dec 12, 2024
5b0cca4
add flush method
angus-langchain Dec 12, 2024
f59f7be
return early
angus-langchain Dec 12, 2024
b76e662
lint
angus-langchain Dec 13, 2024
efa4bd6
wait
angus-langchain Dec 13, 2024
0e06bde
signal bg threads data is available instead of sleeping
angus-langchain Dec 13, 2024
f997895
improve buffer checks
angus-langchain Dec 13, 2024
fbc217f
mypy
angus-langchain Dec 13, 2024
7b6c201
Use more threads for backend requests
angus-langchain Dec 19, 2024
35d46ed
fix futures waiting
angus-langchain Dec 19, 2024
dbef2ec
remove unused slot
angus-langchain Dec 19, 2024
6fad596
Flush background threads
angus-langchain Dec 19, 2024
5cc947a
make boundary constant
angus-langchain Dec 19, 2024
d4b2aa4
Remove slot for bool val
angus-langchain Dec 19, 2024
63e55f7
Use a single join() rather than copying the header strings
angus-langchain Dec 19, 2024
874c748
Add zstandard license
angus-langchain Dec 19, 2024
0b3d6b8
lint
angus-langchain Dec 20, 2024
7739939
Create compressed runs object
angus-langchain Dec 20, 2024
3ec9b6e
Make zstd optional
angus-langchain Dec 20, 2024
f9aac67
Make zstandard level configurable
angus-langchain Dec 20, 2024
2ac7a35
mypy ignore optional imports
angus-langchain Dec 20, 2024
3b291c3
lint
angus-langchain Dec 20, 2024
c64fb92
poetry lock
angus-langchain Dec 20, 2024
45 changes: 45 additions & 0 deletions python/langsmith/_internal/_background_thread.py
@@ -1,14 +1,18 @@
from __future__ import annotations

import functools
import io
import logging
import sys
import threading
import time
import weakref
import zstandard as zstd
from queue import Empty, Queue
from typing import (
    TYPE_CHECKING,
    List,
    Optional,
    Union,
    cast,
)
@@ -88,6 +92,34 @@ def _tracing_thread_drain_queue(
    return next_batch


def _tracing_thread_drain_compressed_buffer(
    client: Client,
    runs_limit: int = 100,
    max_buffer_size: int = 50 * 1024 * 1024,
) -> Optional[bytes]:
    with client._buffer_lock:
        current_size = client.tracing_queue.tell()

        # Check if we should send now
        if not (client._run_count >= runs_limit or current_size >= max_buffer_size):
            return None

        # Write final boundary and close compression stream
        client.compressor_writer.write(f'--{client.boundary}--\r\n'.encode())
        client.compressor_writer.flush()
        client.compressor_writer.close()

        client.tracing_queue.seek(0)
        data = client.tracing_queue.getvalue()

        # Reinitialize for next batch
        client.tracing_queue = io.BytesIO()
        client.compressor = zstd.ZstdCompressor()
        client.compressor_writer = client.compressor.stream_writer(
            client.tracing_queue, closefd=False)
        client._run_count = 0
        return data

def _tracing_thread_handle_batch(
    client: Client,
    tracing_queue: Queue,
@@ -199,6 +231,19 @@ def keep_thread_active() -> bool:
        ):
            _tracing_thread_handle_batch(client, tracing_queue, next_batch, use_multipart)

def tracing_control_thread_func_compress(client_ref: weakref.ref[Client]) -> None:
    client = client_ref()
    if client is None:
        return

    while True:
        result = _tracing_thread_drain_compressed_buffer(client)
        if result is not None:
            time.sleep(0.150)  # Simulate call to backend
        else:
            time.sleep(0.1)  # Avoid busy-waiting if no data ready



def _tracing_sub_thread_func(
    client_ref: weakref.ref[Client],
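
At this commit the control thread above only simulates the backend call. A rough, hypothetical sketch of what sending a drained buffer could look like is below; the /runs/multipart path, the header names, and the zstd Content-Encoding are illustrative assumptions, not taken from this diff.

import requests


def _send_compressed_multipart(
    session: requests.Session,
    api_url: str,
    api_key: str,
    boundary: str,
    payload: bytes,
) -> None:
    # `payload` is a complete zstd-compressed multipart body as returned by
    # _tracing_thread_drain_compressed_buffer; endpoint and headers are assumed here.
    session.post(
        f"{api_url}/runs/multipart",
        data=payload,
        headers={
            "X-API-Key": api_key,
            "Content-Type": f"multipart/form-data; boundary={boundary}",
            "Content-Encoding": "zstd",
        },
        timeout=10,
    )
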
35 changes: 35 additions & 0 deletions python/langsmith/_internal/_operations.py
@@ -3,6 +3,8 @@
import itertools
import logging
import uuid
import io
import zstandard
from typing import Literal, Optional, Union, cast

from langsmith import schemas as ls_schemas
@@ -271,3 +273,36 @@ def serialized_run_operation_to_multipart_parts_and_context(
        acc_parts,
        f"trace={op.trace_id},id={op.id}",
    )


def compress_multipart_parts_and_context(
    parts_and_context: MultipartPartsAndContext,
    compressor_writer: zstandard.ZstdCompressorWriter,
    boundary: str,
) -> None:
    for part_name, (filename, data, content_type, headers) in parts_and_context.parts:
        part_header = f'--{boundary}\r\n'
        part_header += f'Content-Disposition: form-data; name="{part_name}"'

        if filename:
            part_header += f'; filename="{filename}"'

        part_header += f'\r\nContent-Type: {content_type}\r\n'

        for header_name, header_value in headers.items():
            part_header += f'{header_name}: {header_value}\r\n'

        part_header += '\r\n'
        compressor_writer.write(part_header.encode())

        if isinstance(data, (bytes, bytearray)):
            with memoryview(data) as view:
                chunk_size = 1024 * 1024  # 1MB chunks
                for i in range(0, len(view), chunk_size):
                    chunk = view[i:i + chunk_size]
                    compressor_writer.write(chunk)
        else:
            compressor_writer.write(str(data).encode())

        # Write part terminator
        compressor_writer.write(b'\r\n')
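
A minimal, self-contained sketch of the buffer life cycle these helpers implement: parts are framed with the multipart boundary, streamed through a zstandard writer into an in-memory buffer, then drained as one compressed frame (mirroring _tracing_thread_drain_compressed_buffer above). The boundary string and part contents here are invented for illustration.

import io

import zstandard

BOUNDARY = "example-boundary"  # illustrative; the client uses its own BOUNDARY constant

buffer = io.BytesIO()
compressor = zstandard.ZstdCompressor()
writer = compressor.stream_writer(buffer, closefd=False)

# Write one form-data part using the same framing as
# compress_multipart_parts_and_context.
part_header = (
    f"--{BOUNDARY}\r\n"
    'Content-Disposition: form-data; name="post.example"\r\n'
    "Content-Type: application/json\r\n"
    "\r\n"
)
writer.write(part_header.encode())
writer.write(b'{"name": "example-run"}\r\n')

# Drain: write the final boundary, close the zstd frame, and grab the bytes.
writer.write(f"--{BOUNDARY}--\r\n".encode())
writer.flush()
writer.close()
payload = buffer.getvalue()

# The compressed payload round-trips back to the multipart body.
with zstandard.ZstdDecompressor().stream_reader(io.BytesIO(payload)) as reader:
    assert reader.read().startswith(f"--{BOUNDARY}".encode())
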
49 changes: 42 additions & 7 deletions python/langsmith/client.py
@@ -33,6 +33,7 @@
import uuid
import warnings
import weakref
import zstandard
from inspect import signature
from queue import PriorityQueue
from typing import (
@@ -75,6 +76,7 @@
)
from langsmith._internal._background_thread import (
    tracing_control_thread_func as _tracing_control_thread_func,
    tracing_control_thread_func_compress as _tracing_control_thread_func_compress
)
from langsmith._internal._beta_decorator import warn_beta
from langsmith._internal._constants import (
@@ -94,6 +96,7 @@
    serialize_run_dict,
    serialized_feedback_operation_to_multipart_parts_and_context,
    serialized_run_operation_to_multipart_parts_and_context,
    compress_multipart_parts_and_context,
)
from langsmith._internal._serde import dumps_json as _dumps_json

@@ -489,6 +492,16 @@ def __init__(
        # Create a session and register a finalizer to close it
        session_ = session if session else requests.Session()
        self.session = session_
        self.compress_traces = os.getenv("LANGSMITH_COMPRESS_TRACES") == "true"
        if self.compress_traces:
            self.boundary = BOUNDARY
            # The in-memory buffer must exist before the compressor writer wraps it.
            self.tracing_queue = io.BytesIO()
            self.compressor = zstandard.ZstdCompressor()
            self.compressor_writer = self.compressor.stream_writer(
                self.tracing_queue, closefd=False)
            self._buffer_lock = threading.Lock()
            self._run_count = 0

        self._info = (
            info
            if info is None or isinstance(info, ls_schemas.LangSmithInfo)
@@ -497,7 +510,14 @@
        weakref.finalize(self, close_session, self.session)
        atexit.register(close_session, session_)
        # Initialize auto batching
        if auto_batch_tracing and self.compress_traces:
            threading.Thread(
                target=_tracing_control_thread_func_compress,
                # arg must be a weakref to self to avoid the Thread object
                # preventing garbage collection of the Client object
                args=(weakref.ref(self),),
            ).start()
        elif auto_batch_tracing:
            self.tracing_queue: Optional[PriorityQueue] = PriorityQueue()

            threading.Thread(
@@ -1291,9 +1311,17 @@ def create_run(
            self._pyo3_client.create_run(run_create)
        elif self.tracing_queue is not None:
            serialized_op = serialize_run_dict("post", run_create)
            if self.compress_traces:
                multipart_form = serialized_run_operation_to_multipart_parts_and_context(
                    serialized_op)
                with self._buffer_lock:
                    compress_multipart_parts_and_context(
                        multipart_form, self.compressor_writer, self.boundary)
                self._run_count += 1
            else:
                self.tracing_queue.put(
                    TracingQueueItem(run_create["dotted_order"], serialized_op)
                )
        else:
            # Neither Rust nor Python batch ingestion is configured,
            # fall back to the non-batch approach.
@@ -1755,9 +1783,16 @@ def update_run(
        if use_multipart and self.tracing_queue is not None:
            # not collecting attachments currently, use empty dict
            serialized_op = serialize_run_dict(operation="patch", payload=data)
            if self.compress_traces:
                multipart_form = serialized_run_operation_to_multipart_parts_and_context(
                    serialized_op)
                with self._buffer_lock:
                    compress_multipart_parts_and_context(
                        multipart_form, self.compressor_writer, self.boundary)
                self._run_count += 1
            else:
                self.tracing_queue.put(
                    TracingQueueItem(data["dotted_order"], serialized_op)
                )
        else:
            self._update_run(data)

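For context, a hypothetical end-to-end usage sketch follows: LANGSMITH_COMPRESS_TRACES is the flag read in Client.__init__ above, while the traceable decorator and the other environment variables are ordinary LangSmith tracing setup and are only illustrative here.

import os

os.environ["LANGSMITH_TRACING"] = "true"
os.environ["LANGSMITH_API_KEY"] = "<your-api-key>"
os.environ["LANGSMITH_COMPRESS_TRACES"] = "true"  # route runs through the zstd buffer

from langsmith import traceable


@traceable(run_type="chain")
def answer(question: str) -> str:
    return f"echo: {question}"


# Each traced call produces create_run/update_run operations; with compression
# enabled they are serialized to multipart parts and written into the shared
# compressed buffer, which the background thread drains once the run-count or
# buffer-size threshold in _tracing_thread_drain_compressed_buffer is reached.
answer("hello")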