Skip to content

Commit

Permalink
Merge pull request #26 from zambrinf/vertexai-gemini-spring-boot-starter
Browse files Browse the repository at this point in the history
[FEATURE] Spring Boot starter for Google Vertex AI Gemini
  • Loading branch information
langchain4j authored Jun 27, 2024
2 parents a3d5a36 + bd9548a commit 31f2dc4
Show file tree
Hide file tree
Showing 7 changed files with 248 additions and 0 deletions.
67 changes: 67 additions & 0 deletions langchain4j-vertex-ai-gemini-spring-boot-starter/pom.xml
Original file line number Diff line number Diff line change
@@ -0,0 +1,67 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>

<!-- Inherit dependency/plugin management from the langchain4j-spring aggregator pom. -->
<parent>
<groupId>dev.langchain4j</groupId>
<artifactId>langchain4j-spring</artifactId>
<version>0.32.0-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

<artifactId>langchain4j-vertex-ai-gemini-spring-boot-starter</artifactId>
<name>LangChain4j Spring Boot starter for Vertex AI Gemini</name>

<dependencies>

<!-- The underlying Vertex AI Gemini model integration this starter auto-configures;
     versioned in lockstep with this project. -->
<dependency>
<groupId>dev.langchain4j</groupId>
<artifactId>langchain4j-vertex-ai-gemini</artifactId>
<version>${project.version}</version>
</dependency>

<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter</artifactId>
</dependency>

<!-- Generates auto-configuration metadata at build time; not needed at runtime. -->
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-autoconfigure-processor</artifactId>
<optional>true</optional>
</dependency>

<!-- should be listed before spring-boot-configuration-processor -->
<dependency>
<groupId>org.projectlombok</groupId>
<artifactId>lombok</artifactId>
<scope>provided</scope>
</dependency>

<!-- needed to generate automatic metadata about available config properties -->
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-configuration-processor</artifactId>
<optional>true</optional>
</dependency>

<!-- Test-only: ApplicationContextRunner, AssertJ, JUnit for the integration tests. -->
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-test</artifactId>
<scope>test</scope>
</dependency>

</dependencies>

<licenses>
<license>
<name>Apache-2.0</name>
<url>https://www.apache.org/licenses/LICENSE-2.0.txt</url>
<distribution>repo</distribution>
<comments>A business-friendly OSS license</comments>
</license>
</licenses>

</project>
Original file line number Diff line number Diff line change
@@ -0,0 +1,47 @@
package dev.langchain4j.vertexai.spring;

import dev.langchain4j.model.vertexai.VertexAiGeminiChatModel;
import dev.langchain4j.model.vertexai.VertexAiGeminiStreamingChatModel;
import org.springframework.boot.autoconfigure.AutoConfiguration;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.context.annotation.Bean;

import static dev.langchain4j.vertexai.spring.Properties.PREFIX;

/**
 * Spring Boot auto-configuration for Google Vertex AI Gemini chat models.
 *
 * <p>Each bean is registered only when its {@code langchain4j.vertex-ai-gemini.*.enabled}
 * property is explicitly set to {@code true}, so applications opt in per model type.
 */
@AutoConfiguration
@EnableConfigurationProperties(Properties.class)
public class AutoConfig {

    /**
     * Creates the blocking Gemini chat model from the
     * {@code langchain4j.vertex-ai-gemini.chat-model.*} properties.
     */
    @Bean
    @ConditionalOnProperty(name = PREFIX + ".chat-model.enabled", havingValue = "true")
    VertexAiGeminiChatModel vertexAiGeminiChatModel(Properties properties) {
        ChatModelProperties config = properties.getChatModel();
        return VertexAiGeminiChatModel.builder()
                .project(config.getProject())
                .location(config.getLocation())
                .modelName(config.getModelName())
                .maxOutputTokens(config.getMaxOutputTokens())
                .maxRetries(config.getMaxRetries())
                .temperature(config.getTemperature())
                .topK(config.getTopK())
                .topP(config.getTopP())
                .build();
    }

    /**
     * Creates the streaming Gemini chat model from the
     * {@code langchain4j.vertex-ai-gemini.streaming-chat-model.*} properties.
     *
     * <p>NOTE(review): unlike the blocking bean, no {@code maxRetries} is applied here —
     * confirm whether the streaming builder supports it.
     */
    @Bean
    @ConditionalOnProperty(name = PREFIX + ".streaming-chat-model.enabled", havingValue = "true")
    VertexAiGeminiStreamingChatModel vertexAiGeminiStreamingChatModel(Properties properties) {
        ChatModelProperties config = properties.getStreamingChatModel();
        return VertexAiGeminiStreamingChatModel.builder()
                .project(config.getProject())
                .location(config.getLocation())
                .modelName(config.getModelName())
                .maxOutputTokens(config.getMaxOutputTokens())
                .temperature(config.getTemperature())
                .topK(config.getTopK())
                .topP(config.getTopP())
                .build();
    }

}
Original file line number Diff line number Diff line change
@@ -0,0 +1,19 @@
package dev.langchain4j.vertexai.spring;

/**
 * Configuration values for a single Vertex AI Gemini chat model (used for both
 * the blocking and the streaming variant). Explicit accessors are provided so
 * Spring's relaxed configuration-property binding can populate each field.
 */
public class ChatModelProperties {

    private String project;          // GCP project id
    private String location;         // GCP region, e.g. "us-central1"
    private String modelName;        // Gemini model id, e.g. "gemini-1.5-flash"
    private Float temperature;
    private Integer maxOutputTokens;
    private Integer topK;
    private Float topP;
    private Integer maxRetries;      // applied only to the non-streaming model

    public String getProject() {
        return project;
    }

    public void setProject(String project) {
        this.project = project;
    }

    public String getLocation() {
        return location;
    }

    public void setLocation(String location) {
        this.location = location;
    }

    public String getModelName() {
        return modelName;
    }

    public void setModelName(String modelName) {
        this.modelName = modelName;
    }

    public Float getTemperature() {
        return temperature;
    }

    public void setTemperature(Float temperature) {
        this.temperature = temperature;
    }

    public Integer getMaxOutputTokens() {
        return maxOutputTokens;
    }

    public void setMaxOutputTokens(Integer maxOutputTokens) {
        this.maxOutputTokens = maxOutputTokens;
    }

    public Integer getTopK() {
        return topK;
    }

    public void setTopK(Integer topK) {
        this.topK = topK;
    }

    public Float getTopP() {
        return topP;
    }

    public void setTopP(Float topP) {
        this.topP = topP;
    }

    public Integer getMaxRetries() {
        return maxRetries;
    }

    public void setMaxRetries(Integer maxRetries) {
        this.maxRetries = maxRetries;
    }

}
Original file line number Diff line number Diff line change
@@ -0,0 +1,21 @@
package dev.langchain4j.vertexai.spring;

import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.boot.context.properties.NestedConfigurationProperty;

/**
 * Root of the {@code langchain4j.vertex-ai-gemini} configuration tree, binding
 * one nested group each for the blocking chat model and the streaming chat model.
 */
@ConfigurationProperties(prefix = Properties.PREFIX)
public class Properties {

    static final String PREFIX = "langchain4j.vertex-ai-gemini";

    @NestedConfigurationProperty
    ChatModelProperties chatModel;

    @NestedConfigurationProperty
    ChatModelProperties streamingChatModel;

    public ChatModelProperties getChatModel() {
        return chatModel;
    }

    public void setChatModel(ChatModelProperties chatModel) {
        this.chatModel = chatModel;
    }

    public ChatModelProperties getStreamingChatModel() {
        return streamingChatModel;
    }

    public void setStreamingChatModel(ChatModelProperties streamingChatModel) {
        this.streamingChatModel = streamingChatModel;
    }

}
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
dev.langchain4j.vertexai.spring.AutoConfig
Original file line number Diff line number Diff line change
@@ -0,0 +1,92 @@
package dev.langchain4j.vertexai.spring;

import dev.langchain4j.data.message.AiMessage;
import dev.langchain4j.model.StreamingResponseHandler;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.chat.StreamingChatLanguageModel;
import dev.langchain4j.model.output.Response;
import dev.langchain4j.model.vertexai.VertexAiGeminiChatModel;
import dev.langchain4j.model.vertexai.VertexAiGeminiStreamingChatModel;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.condition.EnabledIfEnvironmentVariable;
import org.springframework.boot.autoconfigure.AutoConfigurations;
import org.springframework.boot.test.context.runner.ApplicationContextRunner;

import java.util.concurrent.CompletableFuture;

import static java.util.concurrent.TimeUnit.SECONDS;
import static org.assertj.core.api.Assertions.assertThat;

/**
 * Integration tests for {@link AutoConfig}. They call the real Vertex AI API and
 * therefore run only when both {@code GCP_PROJECT_ID} and {@code GCP_LOCATION}
 * environment variables are set.
 */
@EnabledIfEnvironmentVariable(named = "GCP_PROJECT_ID", matches = ".+")
@EnabledIfEnvironmentVariable(named = "GCP_LOCATION", matches = ".+")
class AutoConfigIT {

    private static final String PROJECT_ID = System.getenv("GCP_PROJECT_ID");
    private static final String LOCATION = System.getenv("GCP_LOCATION");
    private static final String MODEL = "gemini-1.5-flash";

    ApplicationContextRunner contextRunner = new ApplicationContextRunner()
            .withConfiguration(AutoConfigurations.of(AutoConfig.class));

    @Test
    void should_provide_chat_model() {
        // given
        contextRunner
                .withPropertyValues(
                        "langchain4j.vertex-ai-gemini.chat-model.enabled=true",
                        "langchain4j.vertex-ai-gemini.chat-model.project=" + PROJECT_ID,
                        // kebab-case for consistency with the sibling keys (relaxed binding accepts both)
                        "langchain4j.vertex-ai-gemini.chat-model.model-name=" + MODEL,
                        "langchain4j.vertex-ai-gemini.chat-model.location=" + LOCATION
                )
                .run(context -> {
                    ChatLanguageModel chatLanguageModel = context.getBean(ChatLanguageModel.class);
                    assertThat(chatLanguageModel).isInstanceOf(VertexAiGeminiChatModel.class);

                    // when
                    String message = chatLanguageModel.generate("What is the capital of Germany?");

                    // then
                    assertThat(message).contains("Berlin");
                    assertThat(context.getBean(VertexAiGeminiChatModel.class)).isSameAs(chatLanguageModel);
                });
    }

    @Test
    void should_provide_streaming_chat_model() {
        // given
        contextRunner
                .withPropertyValues(
                        "langchain4j.vertex-ai-gemini.streaming-chat-model.enabled=true",
                        "langchain4j.vertex-ai-gemini.streaming-chat-model.project=" + PROJECT_ID,
                        "langchain4j.vertex-ai-gemini.streaming-chat-model.model-name=" + MODEL,
                        "langchain4j.vertex-ai-gemini.streaming-chat-model.location=" + LOCATION
                )
                .run(context -> {

                    StreamingChatLanguageModel streamingChatLanguageModel = context.getBean(StreamingChatLanguageModel.class);
                    assertThat(streamingChatLanguageModel).isInstanceOf(VertexAiGeminiStreamingChatModel.class);
                    CompletableFuture<Response<AiMessage>> future = new CompletableFuture<>();
                    // when
                    streamingChatLanguageModel.generate("What is the capital of Germany?", new StreamingResponseHandler<>() {

                        @Override
                        public void onNext(String token) {
                            // tokens are not asserted individually; only the final response matters
                        }

                        @Override
                        public void onComplete(Response<AiMessage> response) {
                            future.complete(response);
                        }

                        @Override
                        public void onError(Throwable error) {
                            // Propagate the failure immediately. Previously this was swallowed,
                            // so any API error made the test hang for the full 60s timeout and
                            // fail with an uninformative TimeoutException instead of the cause.
                            future.completeExceptionally(error);
                        }
                    });
                    Response<AiMessage> response = future.get(60, SECONDS);

                    // then
                    assertThat(response.content().text()).contains("Berlin");
                    assertThat(context.getBean(VertexAiGeminiStreamingChatModel.class)).isSameAs(streamingChatLanguageModel);
                });
    }
}
1 change: 1 addition & 0 deletions pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@
<module>langchain4j-open-ai-spring-boot-starter</module>
<module>langchain4j-azure-ai-search-spring-boot-starter</module>
<module>langchain4j-azure-open-ai-spring-boot-starter</module>
<module>langchain4j-vertex-ai-gemini-spring-boot-starter</module>
</modules>

<properties>
Expand Down

0 comments on commit 31f2dc4

Please sign in to comment.