3 changes: 3 additions & 0 deletions README.md
@@ -28,6 +28,7 @@ Illustrates:

- An injected demo showing how any Spring component can have an Embabel `Ai` instance injected, enabling it to perform LLM operations.
- A configuration bean demonstrating how to integrate OpenAI-compatible models (Google's Gemini), loaded conditionally on the `GEMINI_API_KEY` environment variable.
- A simple agent
- Unit tests for an agent verifying prompts and hyperparameters

@@ -55,6 +56,8 @@ Try the `InjectedDemo` command to see simple, non-agent use:
animal
```

The `GeminiOpenAIModels` configuration demonstrates how to integrate Google's Gemini models through Gemini's OpenAI-compatible API. It registers two models (`gemini-2.0-flash` and `gemini-2.5-flash`) as `Llm` beans that can be used throughout your Embabel agents. The configuration is loaded only when the `GEMINI_API_KEY` environment variable is present, showing how to add an optional model provider that stays out of the way when credentials aren't available.

## A2A Support

Embabel integrates with the [A2A](https://github.com/google-a2a/A2A) protocol, allowing you to connect to other
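To illustrate the README paragraph above: because the Gemini models are registered as ordinary Spring beans (see the new `GeminiOpenAIModels` class below), any component can obtain one by qualifying on the `@Bean` method name. The following is a minimal sketch, not part of this PR; the class name is hypothetical, and going through Embabel's own model-selection mechanisms may be the more idiomatic route.

```java
package com.embabel.template.models;

import com.embabel.common.ai.model.Llm;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.stereotype.Component;

// Hypothetical consumer, not part of this PR: shows plain Spring qualifier
// injection of one of the Llm beans registered by GeminiOpenAIModels.
@Component
public class GeminiFlashConsumer {

    private final Llm geminiFlash;

    // "gemini_2_0_flash" matches the @Bean method name in GeminiOpenAIModels.
    public GeminiFlashConsumer(@Qualifier("gemini_2_0_flash") Llm geminiFlash) {
        this.geminiFlash = geminiFlash;
    }

    public Llm llm() {
        return geminiFlash;
    }
}
```

Note that this consumer only starts when the qualified bean exists, so in a real application you would guard it with the same `@ConditionalOnProperty("GEMINI_API_KEY")` or make the dependency optional for environments where the key is absent.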
87 changes: 87 additions & 0 deletions src/main/java/com/embabel/template/models/GeminiOpenAIModels.java
@@ -0,0 +1,87 @@
package com.embabel.template.models;

import com.embabel.agent.config.models.OpenAiChatOptionsConverter;
import com.embabel.common.ai.model.Llm;
import com.embabel.common.ai.prompt.CurrentDate;
import io.micrometer.observation.ObservationRegistry;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.ai.openai.OpenAiChatModel;
import org.springframework.ai.openai.OpenAiChatOptions;
import org.springframework.ai.openai.api.OpenAiApi;
import org.springframework.ai.retry.RetryUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

import java.time.LocalDate;
import java.util.List;

/**
 * Registers Google's Gemini chat models as Embabel {@link Llm} beans via
 * Gemini's OpenAI-compatible endpoint. Only active when GEMINI_API_KEY is
 * present in the Spring environment.
 */
@Configuration
@ConditionalOnProperty("GEMINI_API_KEY")
public class GeminiOpenAIModels {

    static final Logger logger = LoggerFactory.getLogger(GeminiOpenAIModels.class);

    private static final String GEMINI_2_5_FLASH = "gemini-2.5-flash";
    private static final String GEMINI_2_0_FLASH = "gemini-2.0-flash";
    private static final String GEMINI_PROVIDER = "Google";

    @Value("${GEMINI_API_KEY}")
    private String geminiApiKey;

    @Value("${GEMINI_BASE_URL:https://generativelanguage.googleapis.com/v1beta/openai}")
    private String baseUrl;

    @Value("${GEMINI_CHAT_COMPLETIONS:/chat/completions}")
    private String completionsPath;

    @Value("${GEMINI_EMBEDDINGS:/embeddings}")
    private String embeddingsPath;

    @Autowired
    private ObservationRegistry observationRegistry;

    // Spring AI OpenAI client pointed at Gemini's OpenAI-compatible base URL.
    private OpenAiApi openAiApi() {
        return OpenAiApi.builder()
                .baseUrl(baseUrl)
                .apiKey(geminiApiKey)
                .completionsPath(completionsPath)
                .embeddingsPath(embeddingsPath)
                .build();
    }

    private OpenAiChatModel chatModelOf(String model) {
        return OpenAiChatModel.builder()
                .openAiApi(openAiApi())
                .retryTemplate(RetryUtils.DEFAULT_RETRY_TEMPLATE)
                .defaultOptions(OpenAiChatOptions.builder()
                        .model(model)
                        .build())
                .observationRegistry(observationRegistry)
                .build();
    }

    @Bean
    public Llm gemini_2_0_flash() {
        logger.info("Registering Gemini OpenAI-compatible model: {}", GEMINI_2_0_FLASH);
        return new Llm(GEMINI_2_0_FLASH,
                GEMINI_PROVIDER,
                chatModelOf(GEMINI_2_0_FLASH),
                OpenAiChatOptionsConverter.INSTANCE,
                LocalDate.now(),
                List.of(new CurrentDate()),
                null);
    }

    @Bean
    public Llm gemini_2_5_flash() {
        logger.info("Registering Gemini OpenAI-compatible model: {}", GEMINI_2_5_FLASH);
        return new Llm(GEMINI_2_5_FLASH,
                GEMINI_PROVIDER,
                chatModelOf(GEMINI_2_5_FLASH),
                OpenAiChatOptionsConverter.INSTANCE,
                LocalDate.now(),
                List.of(new CurrentDate()),
                null);
    }
}
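
The conditional loading is straightforward to exercise in a test. Below is a minimal sketch, not part of this PR, assuming JUnit 5, AssertJ, and spring-boot-test are on the test classpath; the test class name is hypothetical, and an `ObservationRegistry` is stubbed in because the configuration autowires one.

```java
package com.embabel.template.models;

import io.micrometer.observation.ObservationRegistry;
import org.junit.jupiter.api.Test;
import org.springframework.boot.test.context.runner.ApplicationContextRunner;

import static org.assertj.core.api.Assertions.assertThat;

// Hypothetical test, not part of this PR: verifies that the Gemini Llm beans
// are only registered when GEMINI_API_KEY is present.
class GeminiOpenAIModelsConditionTest {

    private final ApplicationContextRunner runner = new ApplicationContextRunner()
            // The configuration autowires an ObservationRegistry, so supply one.
            .withBean(ObservationRegistry.class, ObservationRegistry::create)
            .withUserConfiguration(GeminiOpenAIModels.class);

    @Test
    void beansRegisteredWhenApiKeyPresent() {
        runner.withPropertyValues("GEMINI_API_KEY=dummy-key")
                .run(context -> {
                    assertThat(context).hasBean("gemini_2_0_flash");
                    assertThat(context).hasBean("gemini_2_5_flash");
                });
    }

    @Test
    void beansAbsentWhenApiKeyMissing() {
        runner.run(context -> {
            assertThat(context).doesNotHaveBean("gemini_2_0_flash");
            assertThat(context).doesNotHaveBean("gemini_2_5_flash");
        });
    }
}
```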