From 33b2bcb5ab763ea51e90dd2a789c12d729b736eb Mon Sep 17 00:00:00 2001 From: DarkSky Date: Wed, 9 Oct 2024 17:12:06 +0800 Subject: [PATCH] feat: unified event name --- .../server/src/fundamentals/metrics/utils.ts | 13 +++++-- .../server/src/plugins/copilot/controller.ts | 34 +++++++++++-------- .../src/plugins/copilot/providers/fal.ts | 16 ++++----- .../src/plugins/copilot/providers/openai.ts | 20 +++++------ 4 files changed, 47 insertions(+), 36 deletions(-) diff --git a/packages/backend/server/src/fundamentals/metrics/utils.ts b/packages/backend/server/src/fundamentals/metrics/utils.ts index 3e3febb6fae7f..2883e227d6f41 100644 --- a/packages/backend/server/src/fundamentals/metrics/utils.ts +++ b/packages/backend/server/src/fundamentals/metrics/utils.ts @@ -12,6 +12,7 @@ import { type KnownMetricScopes, metrics } from './metrics'; export const CallMetric = ( scope: KnownMetricScopes, name: string, + record?: { timer?: boolean; count?: boolean; error?: boolean }, attrs?: Attributes ): MethodDecorator => { // @ts-expect-error allow @@ -43,13 +44,19 @@ export const CallMetric = ( }; try { - count.add(1, attrs); + if (!record || !!record.count) { + count.add(1, attrs); + } return await originalMethod.apply(this, args); } catch (err) { - errorCount.add(1, attrs); + if (!record || !!record.error) { + errorCount.add(1, attrs); + } throw err; } finally { - end(); + if (!record || !!record.timer) { + end(); + } } }; diff --git a/packages/backend/server/src/plugins/copilot/controller.ts b/packages/backend/server/src/plugins/copilot/controller.ts index 25ed980d18661..827c4d68573a9 100644 --- a/packages/backend/server/src/plugins/copilot/controller.ts +++ b/packages/backend/server/src/plugins/copilot/controller.ts @@ -179,8 +179,8 @@ export class CopilotController { return merge(source$.pipe(finalize(() => subject$.next(null))), ping$); } - @CallMetric('ai', 'chat') @Get('/chat/:sessionId') + @CallMetric('ai', 'chat', { timer: true }) async chat( @CurrentUser() user: CurrentUser, @Req() req: Request, @@ -197,7 +197,7 @@ export class CopilotController { const session = await this.appendSessionMessage(sessionId, messageId); try { - metrics.ai.counter('chat').add(1, { model: session.model }); + metrics.ai.counter('chat_calls').add(1, { model: session.model }); const content = await provider.generateText( session.finish(params), session.model, @@ -217,13 +217,13 @@ export class CopilotController { return content; } catch (e: any) { - metrics.ai.counter('chat_error').add(1, { model: session.model }); + metrics.ai.counter('chat_errors').add(1, { model: session.model }); throw new CopilotFailedToGenerateText(e.message); } } @Sse('/chat/:sessionId/stream') - @CallMetric('ai', 'chat_stream') + @CallMetric('ai', 'chat_stream', { timer: true }) async chatStream( @CurrentUser() user: CurrentUser, @Req() req: Request, @@ -240,7 +240,7 @@ export class CopilotController { const session = await this.appendSessionMessage(sessionId, messageId); try { - metrics.ai.counter('chat_stream').add(1, { model: session.model }); + metrics.ai.counter('chat_stream_calls').add(1, { model: session.model }); const source$ = from( provider.generateTextStream(session.finish(params), session.model, { ...session.config.promptConfig, @@ -271,7 +271,7 @@ export class CopilotController { ), catchError(e => { metrics.ai - .counter('chat_stream_error') + .counter('chat_stream_errors') .add(1, { model: session.model }); return mapSseError(e); }) @@ -279,13 +279,13 @@ export class CopilotController { return this.mergePingStream(messageId, 
source$); } catch (err) { - metrics.ai.counter('chat_stream_error').add(1, { model: session.model }); + metrics.ai.counter('chat_stream_errors').add(1, { model: session.model }); return mapSseError(err); } } @Sse('/chat/:sessionId/workflow') - @CallMetric('ai', 'chat_workflow') + @CallMetric('ai', 'chat_workflow', { timer: true }) async chatWorkflow( @CurrentUser() user: CurrentUser, @Req() req: Request, @@ -296,7 +296,7 @@ export class CopilotController { const session = await this.appendSessionMessage(sessionId, messageId); try { - metrics.ai.counter('workflow').add(1, { model: session.model }); + metrics.ai.counter('workflow_calls').add(1, { model: session.model }); const latestMessage = session.stashMessages.findLast( m => m.role === 'user' ); @@ -364,20 +364,22 @@ export class CopilotController { ) ), catchError(e => { - metrics.ai.counter('workflow_error').add(1, { model: session.model }); + metrics.ai + .counter('workflow_errors') + .add(1, { model: session.model }); return mapSseError(e); }) ); return this.mergePingStream(messageId, source$); } catch (err) { - metrics.ai.counter('workflow_error').add(1, { model: session.model }); + metrics.ai.counter('workflow_errors').add(1, { model: session.model }); return mapSseError(err); } } @Sse('/chat/:sessionId/images') - @CallMetric('ai', 'chat_images') + @CallMetric('ai', 'chat_images', { timer: true }) async chatImagesStream( @CurrentUser() user: CurrentUser, @Req() req: Request, @@ -403,7 +405,9 @@ export class CopilotController { const session = await this.appendSessionMessage(sessionId, messageId); try { - metrics.ai.counter('images_stream').add(1, { model: session.model }); + metrics.ai + .counter('images_stream_calls') + .add(1, { model: session.model }); const handleRemoteLink = this.storage.handleRemoteLink.bind( this.storage, user.id, @@ -447,7 +451,7 @@ export class CopilotController { ), catchError(e => { metrics.ai - .counter('images_stream_error') + .counter('images_stream_errors') .add(1, { model: session.model }); return mapSseError(e); }) @@ -456,7 +460,7 @@ export class CopilotController { return this.mergePingStream(messageId, source$); } catch (err) { metrics.ai - .counter('images_stream_error') + .counter('images_stream_errors') .add(1, { model: session.model }); return mapSseError(err); } diff --git a/packages/backend/server/src/plugins/copilot/providers/fal.ts b/packages/backend/server/src/plugins/copilot/providers/fal.ts index d6bfed57be085..5fd02ba2a1c5a 100644 --- a/packages/backend/server/src/plugins/copilot/providers/fal.ts +++ b/packages/backend/server/src/plugins/copilot/providers/fal.ts @@ -218,7 +218,7 @@ export class FalProvider // by default, image prompt assumes there is only one message const prompt = this.extractPrompt(messages.pop()); try { - metrics.ai.counter('chat_text').add(1, { model }); + metrics.ai.counter('chat_text_calls').add(1, { model }); const response = await fetch(`https://fal.run/fal-ai/${model}`, { method: 'POST', headers: { @@ -239,7 +239,7 @@ export class FalProvider } return data.output; } catch (e: any) { - metrics.ai.counter('chat_text_failed').add(1, { model }); + metrics.ai.counter('chat_text_errors').add(1, { model }); throw this.handleError(e); } } @@ -250,7 +250,7 @@ export class FalProvider options: CopilotChatOptions = {} ): AsyncIterable { try { - metrics.ai.counter('chat_text_stream').add(1, { model }); + metrics.ai.counter('chat_text_stream_calls').add(1, { model }); const result = await this.generateText(messages, model, options); for await (const content of result) { 
@@ -262,7 +262,7 @@ export class FalProvider } } } catch (e) { - metrics.ai.counter('chat_text_stream_failed').add(1, { model }); + metrics.ai.counter('chat_text_stream_errors').add(1, { model }); throw e; } } @@ -308,7 +308,7 @@ export class FalProvider } try { - metrics.ai.counter('generate_images').add(1, { model }); + metrics.ai.counter('generate_images_calls').add(1, { model }); const data = await this.buildResponse(messages, model, options); @@ -326,7 +326,7 @@ export class FalProvider .map(image => image.url) || [] ); } catch (e: any) { - metrics.ai.counter('generate_images_failed').add(1, { model }); + metrics.ai.counter('generate_images_errors').add(1, { model }); throw this.handleError(e); } } @@ -337,13 +337,13 @@ export class FalProvider options: CopilotImageOptions = {} ): AsyncIterable { try { - metrics.ai.counter('generate_images_stream').add(1, { model }); + metrics.ai.counter('generate_images_stream_calls').add(1, { model }); const ret = await this.generateImages(messages, model, options); for (const url of ret) { yield url; } } catch (e) { - metrics.ai.counter('generate_images_stream_failed').add(1, { model }); + metrics.ai.counter('generate_images_stream_errors').add(1, { model }); throw e; } } diff --git a/packages/backend/server/src/plugins/copilot/providers/openai.ts b/packages/backend/server/src/plugins/copilot/providers/openai.ts index e255ca29e5726..40ded39c0c8d8 100644 --- a/packages/backend/server/src/plugins/copilot/providers/openai.ts +++ b/packages/backend/server/src/plugins/copilot/providers/openai.ts @@ -207,7 +207,7 @@ export class OpenAIProvider this.checkParams({ messages, model, options }); try { - metrics.ai.counter('chat_text').add(1, { model }); + metrics.ai.counter('chat_text_calls').add(1, { model }); const result = await this.instance.chat.completions.create( { messages: this.chatToGPTMessage(messages), @@ -225,7 +225,7 @@ export class OpenAIProvider if (!content) throw new Error('Failed to generate text'); return content.trim(); } catch (e: any) { - metrics.ai.counter('chat_text_failed').add(1, { model }); + metrics.ai.counter('chat_text_errors').add(1, { model }); throw this.handleError(e); } } @@ -238,7 +238,7 @@ export class OpenAIProvider this.checkParams({ messages, model, options }); try { - metrics.ai.counter('chat_text_stream').add(1, { model }); + metrics.ai.counter('chat_text_stream_calls').add(1, { model }); const result = await this.instance.chat.completions.create( { stream: true, @@ -272,7 +272,7 @@ export class OpenAIProvider } } } catch (e: any) { - metrics.ai.counter('chat_text_stream_failed').add(1, { model }); + metrics.ai.counter('chat_text_stream_errors').add(1, { model }); throw this.handleError(e); } } @@ -288,7 +288,7 @@ export class OpenAIProvider this.checkParams({ embeddings: messages, model, options }); try { - metrics.ai.counter('generate_embedding').add(1, { model }); + metrics.ai.counter('generate_embedding_calls').add(1, { model }); const result = await this.instance.embeddings.create({ model: model, input: messages, @@ -299,7 +299,7 @@ export class OpenAIProvider .map(e => e?.embedding) .filter(v => v && Array.isArray(v)); } catch (e: any) { - metrics.ai.counter('generate_embedding_failed').add(1, { model }); + metrics.ai.counter('generate_embedding_errors').add(1, { model }); throw this.handleError(e); } } @@ -314,7 +314,7 @@ export class OpenAIProvider if (!prompt) throw new CopilotPromptInvalid('Prompt is required'); try { - metrics.ai.counter('generate_images').add(1, { model }); + 
metrics.ai.counter('generate_images_calls').add(1, { model }); const result = await this.instance.images.generate( { prompt, @@ -329,7 +329,7 @@ export class OpenAIProvider .map(image => image.url) .filter((v): v is string => !!v); } catch (e: any) { - metrics.ai.counter('generate_images_failed').add(1, { model }); + metrics.ai.counter('generate_images_errors').add(1, { model }); throw this.handleError(e); } } @@ -340,13 +340,13 @@ export class OpenAIProvider options: CopilotImageOptions = {} ): AsyncIterable { try { - metrics.ai.counter('generate_images_stream').add(1, { model }); + metrics.ai.counter('generate_images_stream_calls').add(1, { model }); const ret = await this.generateImages(messages, model, options); for (const url of ret) { yield url; } } catch (e) { - metrics.ai.counter('generate_images_stream_failed').add(1, { model }); + metrics.ai.counter('generate_images_stream_errors').add(1, { model }); throw e; } }
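
For reference, a minimal usage sketch of the updated CallMetric signature introduced in this patch, assuming the same import layout as packages/backend/server/src; ExampleController, handle, doWork, the 'example' metric name, and the relative import paths are illustrative assumptions, not part of this change:

// Illustrative sketch only. The pattern mirrors the controller changes above:
// the decorator records just the duration ({ timer: true }), while the method
// emits the unified `<name>_calls` / `<name>_errors` counters itself, tagged
// with a per-model attribute.
import { CallMetric } from '../../fundamentals/metrics/utils';
import { metrics } from '../../fundamentals/metrics';

// Stand-in for the real provider call; not part of this patch.
async function doWork(model: string): Promise<string> {
  return `generated with ${model}`;
}

class ExampleController {
  @CallMetric('ai', 'example', { timer: true })
  async handle(model: string): Promise<string> {
    metrics.ai.counter('example_calls').add(1, { model });
    try {
      return await doWork(model);
    } catch (e) {
      metrics.ai.counter('example_errors').add(1, { model });
      throw e;
    }
  }
}

Passing { timer: true } keeps the automatic duration histogram from the decorator but leaves the call and error counters to the handler, which is what lets every endpoint and provider report the same `<name>_calls` / `<name>_errors` pair with a model attribute instead of the mixed `*`, `*_error`, and `*_failed` names used before.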