From 68c60de49ac38a4d757c3606ff1ee85177db3a58 Mon Sep 17 00:00:00 2001
From: Francesco Coppola
Date: Mon, 6 Mar 2023 15:06:32 +0100
Subject: [PATCH] feat: models for chat api

---
 example/lib/main.dart                       | 13 +++
 lib/src/chat.dart                           | 85 ++++++++++++++++++++
 lib/src/client.dart                         |  4 +
 lib/src/model/openai_chat/chat.dart         | 89 +++++++++++++++++++++
 lib/src/model/openai_chat/chat_choice.dart  | 71 ++++++++++++++++
 lib/src/model/openai_chat/chat_message.dart | 61 ++++++++++++++
 lib/src/model/openai_chat/openai_chat.dart  |  3 +
 lib/src/network/endpoints.dart              |  3 +
 8 files changed, 329 insertions(+)
 create mode 100644 lib/src/chat.dart
 create mode 100644 lib/src/model/openai_chat/chat.dart
 create mode 100644 lib/src/model/openai_chat/chat_choice.dart
 create mode 100644 lib/src/model/openai_chat/chat_message.dart
 create mode 100644 lib/src/model/openai_chat/openai_chat.dart

diff --git a/example/lib/main.dart b/example/lib/main.dart
index d894b9b..d20c816 100644
--- a/example/lib/main.dart
+++ b/example/lib/main.dart
@@ -3,6 +3,7 @@ import 'dart:developer';
 import 'dart:io';
 
 import 'package:openai_client/openai_client.dart';
+import 'package:openai_client/src/model/openai_chat/chat_message.dart';
 
 Future<void> main() async {
   // Load app credentials from environment variables or file.
@@ -34,6 +35,18 @@ Future<void> main() async {
   // Print the completion.
   log(completion.toString());
 
+  // Create a chat.
+  final chat = await client.chat
+      .create(
+        model: 'gpt-3.5-turbo',
+        message: const ChatMessage(
+          role: 'user',
+          content: 'How do you think Batman is dealing with Robin recently?',
+        ))
+      .data;
+  // Print the chat.
+  log(chat.toString());
+
   // Create an edit.
   final edit = await client.edits
       .create(
diff --git a/lib/src/chat.dart b/lib/src/chat.dart
new file mode 100644
index 0000000..ba86099
--- /dev/null
+++ b/lib/src/chat.dart
@@ -0,0 +1,85 @@
+import 'package:http/http.dart' as http;
+import 'package:openai_client/src/client.dart';
+import 'package:openai_client/src/logger/logger.dart';
+import 'package:openai_client/src/model/openai_chat/openai_chat.dart';
+import 'package:openai_client/src/network/network.dart';
+
+/// Given a chat conversation, the model will return a chat completion response.
+///
+/// For more detail see the [OpenAI API documentation](https://beta.openai.com/docs/api-reference/chat).
+class OpenAIChat {
+  /// Creates a new instance which belongs to [client].
+  OpenAIChat(this.client) : baseUrl = client.baseUrl.resolve(apiChat);
+
+  /// The parent [OpenAIClient].
+  final OpenAIClient client;
+
+  /// The base url for all endpoints for [Chat].
+  ///
+  /// See more at [Chat](https://beta.openai.com/docs/api-reference/chat).
+  final Uri baseUrl;
+
+  /// Creates a completion for the chat message.
+  ///
+  /// A deeper explanation of the parameters can be
+  /// found in the [OpenAI API documentation](https://beta.openai.com/docs/api-reference/chat/create).
+  Request<Chat> create({
+    required String model,
+    required ChatMessage message,
+    double? temperature = 1.0,
+    double? topP = 1.0,
+    int? n = 1,
+    bool stream = false,
+    String? stop,
+    int? maxTokens,
+    int? presencePenalty = 0,
+    int? frequencyPenalty = 0,
+    Map<String, dynamic>? logitBias,
+    String? user,
+  }) {
+    Logger(
+      title: 'Chat',
+      description: 'Fetching is started...',
+      level: Level.debug,
+      isActive: client.enableLogging,
+    );
+
+    if (temperature != null) {
+      assert(
+        temperature >= 0 && temperature <= 2,
+        'Temperature must be between 0 and 2',
+      );
+    }
+
+    final jsonBody = <String, dynamic>{
+      'model': model,
+      'messages': [message.toMap()],
+      'temperature': temperature,
+      'top_p': topP,
+      'n': n,
+      'stream': stream,
+      if (stop != null) 'stop': stop,
+      if (maxTokens != null) 'max_tokens': maxTokens,
+      'presence_penalty': presencePenalty,
+      'frequency_penalty': frequencyPenalty,
+      if (logitBias != null) 'logit_bias': logitBias,
+      if (user != null) 'user': user,
+    };
+
+    final req = Request<Chat>(
+      client: client,
+      httpRequest: http.Request('POST', baseUrl),
+      bodyDeserializer: (body) => Chat.fromMap(body as Map<String, dynamic>),
+      jsonBody: jsonBody,
+    );
+
+    Logger(
+      title: 'Chat',
+      description: 'Returning the request...',
+      level: Level.info,
+      isActive: client.enableLogging,
+    );
+
+    return req;
+  }
+}
diff --git a/lib/src/client.dart b/lib/src/client.dart
index b395da9..bbbd496 100644
--- a/lib/src/client.dart
+++ b/lib/src/client.dart
@@ -1,4 +1,5 @@
 import 'package:http/http.dart' as http;
+import 'package:openai_client/src/chat.dart';
 import 'package:openai_client/src/completions.dart';
 import 'package:openai_client/src/configuration.dart';
 import 'package:openai_client/src/edits.dart';
@@ -54,6 +55,9 @@ class OpenAIClient {
   /// Provides access to resources related to [Completions].
   OpenAICompletions get completions => OpenAICompletions(this);
 
+  /// Provides access to resources related to [Chat].
+  OpenAIChat get chat => OpenAIChat(this);
+
   /// Provides access to resources related to [Edits].
   OpenAIEdits get edits => OpenAIEdits(this);
 
diff --git a/lib/src/model/openai_chat/chat.dart b/lib/src/model/openai_chat/chat.dart
new file mode 100644
index 0000000..18a2ce3
--- /dev/null
+++ b/lib/src/model/openai_chat/chat.dart
@@ -0,0 +1,89 @@
+import 'dart:convert';
+
+import 'package:equatable/equatable.dart';
+import 'package:openai_client/src/model/openai_chat/chat_choice.dart';
+import 'package:openai_client/src/model/shared_models/shared_models.dart';
+
+/// The chat class.
+class Chat extends Equatable {
+  /// Instance of [Chat].
+  const Chat({
+    required this.id,
+    required this.object,
+    required this.created,
+    required this.choices,
+    required this.usage,
+  });
+
+  /// `dart:convert`
+  ///
+  /// Parses the string and returns the resulting Json object as [Chat].
+  factory Chat.fromJson(String data) {
+    return Chat.fromMap(json.decode(data) as Map<String, dynamic>);
+  }
+
+  /// `dart:convert`
+  ///
+  /// Parses the [Map] and returns the resulting [Chat].
+  factory Chat.fromMap(Map<String, dynamic> data) => Chat(
+        id: data['id'] as String,
+        object: data['object'] as String,
+        created: data['created'] as int,
+        choices: (data['choices'] as List<dynamic>)
+            .map((e) => ChatChoice.fromMap(e as Map<String, dynamic>))
+            .toList(),
+        usage: Usage.fromMap(data['usage'] as Map<String, dynamic>),
+      );
+
+  /// The chat id.
+  final String id;
+
+  /// The chat object type.
+  final String object;
+
+  /// The time the chat was created (Unix timestamp).
+  final int created;
+
+  /// The chat choices.
+  final List<ChatChoice> choices;
+
+  /// The chat usage.
+  final Usage usage;
+
+  /// Mapping from [Chat] to [Map].
+  Map<String, dynamic> toMap() => {
+        'id': id,
+        'object': object,
+        'created': created,
+        'choices': choices.map((e) => e.toMap()).toList(),
+        'usage': usage.toMap(),
+      };
+
+  /// `dart:convert`
+  ///
+  /// Converts [Chat] to a JSON string.
+  String toJson() => json.encode(toMap());
+
+  /// Copy with extension.
+  Chat copyWith({
+    String? id,
+    String? object,
+    int? created,
+    List<ChatChoice>? choices,
+    Usage? usage,
+  }) {
+    return Chat(
+      id: id ?? this.id,
+      object: object ?? this.object,
+      created: created ?? this.created,
+      choices: choices ?? this.choices,
+      usage: usage ?? this.usage,
+    );
+  }
+
+  @override
+  bool get stringify => true;
+
+  @override
+  List<Object?> get props => [id, object, created, choices, usage];
+}
diff --git a/lib/src/model/openai_chat/chat_choice.dart b/lib/src/model/openai_chat/chat_choice.dart
new file mode 100644
index 0000000..982128a
--- /dev/null
+++ b/lib/src/model/openai_chat/chat_choice.dart
@@ -0,0 +1,71 @@
+import 'dart:convert';
+
+import 'package:equatable/equatable.dart';
+
+import 'package:openai_client/src/model/openai_chat/chat_message.dart';
+
+/// The chat choice class.
+class ChatChoice extends Equatable {
+  /// Instance of [ChatChoice].
+  const ChatChoice({
+    required this.index,
+    required this.message,
+    required this.finishReason,
+  });
+
+  /// `dart:convert`
+  ///
+  /// Parses the string and returns the resulting Json object as [ChatChoice].
+  factory ChatChoice.fromJson(String data) {
+    return ChatChoice.fromMap(json.decode(data) as Map<String, dynamic>);
+  }
+
+  /// `dart:convert`
+  ///
+  /// Parses the [Map] and returns the resulting [ChatChoice].
+  factory ChatChoice.fromMap(Map<String, dynamic> data) => ChatChoice(
+        index: data['index'] as int,
+        message: ChatMessage.fromMap(data['message'] as Map<String, dynamic>),
+        finishReason: data['finish_reason'] as String,
+      );
+
+  /// The index of the choice.
+  final int index;
+
+  /// The message.
+  final ChatMessage message;
+
+  /// The finish reason.
+  final String finishReason;
+
+  /// Mapping from [ChatChoice] to [Map].
+  Map<String, dynamic> toMap() => {
+        'index': index,
+        'message': message.toMap(),
+        'finish_reason': finishReason,
+      };
+
+  /// `dart:convert`
+  ///
+  /// Converts [ChatChoice] to a JSON string.
+  String toJson() => json.encode(toMap());
+
+  /// Copy with extension.
+  ChatChoice copyWith({
+    int? index,
+    ChatMessage? message,
+    String? finishReason,
+  }) {
+    return ChatChoice(
+      index: index ?? this.index,
+      message: message ?? this.message,
+      finishReason: finishReason ?? this.finishReason,
+    );
+  }
+
+  @override
+  bool get stringify => true;
+
+  @override
+  List<Object?> get props => [index, message, finishReason];
+}
diff --git a/lib/src/model/openai_chat/chat_message.dart b/lib/src/model/openai_chat/chat_message.dart
new file mode 100644
index 0000000..103856c
--- /dev/null
+++ b/lib/src/model/openai_chat/chat_message.dart
@@ -0,0 +1,61 @@
+import 'dart:convert';
+
+import 'package:equatable/equatable.dart';
+
+/// The message class.
+class ChatMessage extends Equatable {
+  /// Instance of [ChatMessage].
+  const ChatMessage({
+    required this.role,
+    required this.content,
+  });
+
+  /// `dart:convert`
+  ///
+  /// Parses the string and returns the resulting Json object as [ChatMessage].
+  factory ChatMessage.fromJson(String data) {
+    return ChatMessage.fromMap(json.decode(data) as Map<String, dynamic>);
+  }
+
+  /// `dart:convert`
+  ///
+  /// Parses the [Map] and returns the resulting [ChatMessage].
+  factory ChatMessage.fromMap(Map<String, dynamic> data) => ChatMessage(
+        role: data['role'] as String,
+        content: data['content'] as String,
+      );
+
+  /// The message role.
+  final String role;
+
+  /// The message content.
+  final String content;
+
+  /// Mapping from [ChatMessage] to [Map].
+  Map<String, dynamic> toMap() => {
+        'role': role,
+        'content': content,
+      };
+
+  /// `dart:convert`
+  ///
+  /// Converts [ChatMessage] to a JSON string.
+  String toJson() => json.encode(toMap());
+
+  /// Copy with extension.
+  ChatMessage copyWith({
+    String? role,
+    String? content,
+  }) {
+    return ChatMessage(
+      role: role ?? this.role,
+      content: content ?? this.content,
+    );
+  }
+
+  @override
+  bool get stringify => true;
+
+  @override
+  List<Object?> get props => [role, content];
+}
diff --git a/lib/src/model/openai_chat/openai_chat.dart b/lib/src/model/openai_chat/openai_chat.dart
new file mode 100644
index 0000000..355c349
--- /dev/null
+++ b/lib/src/model/openai_chat/openai_chat.dart
@@ -0,0 +1,3 @@
+export 'chat.dart';
+export 'chat_choice.dart';
+export 'chat_message.dart';
diff --git a/lib/src/network/endpoints.dart b/lib/src/network/endpoints.dart
index 82f74b2..0d738c5 100644
--- a/lib/src/network/endpoints.dart
+++ b/lib/src/network/endpoints.dart
@@ -10,6 +10,9 @@ const apiModels = 'models';
 /// The base url for all endpoints for [Completions].
 const apiCompletions = 'completions';
 
+/// The base url for all endpoints for [Chat].
+const apiChat = 'chat/completions';
+
 /// The base url for all endpoints for [Edits].
 const apiEdits = 'edits';