public class OpenAiStreamingChatModel extends Object implements dev.langchain4j.model.chat.StreamingChatLanguageModel, dev.langchain4j.model.chat.TokenCountEstimator
StreamingResponseHandler
| Constructor and Description |
|---|
OpenAiStreamingChatModel(String baseUrl,
String apiKey,
String modelName,
Double temperature,
Double topP,
List<String> stop,
Integer maxTokens,
Double presencePenalty,
Double frequencyPenalty,
Duration timeout,
Proxy proxy,
Boolean logRequests,
Boolean logResponses) |
| Modifier and Type | Method and Description |
|---|---|
int |
estimateTokenCount(List<dev.langchain4j.data.message.ChatMessage> messages) |
void |
generate(List<dev.langchain4j.data.message.ChatMessage> messages,
List<dev.langchain4j.agent.tool.ToolSpecification> toolSpecifications,
dev.langchain4j.model.StreamingResponseHandler<dev.langchain4j.data.message.AiMessage> handler) |
void |
generate(List<dev.langchain4j.data.message.ChatMessage> messages,
dev.langchain4j.model.StreamingResponseHandler<dev.langchain4j.data.message.AiMessage> handler) |
void |
generate(List<dev.langchain4j.data.message.ChatMessage> messages,
dev.langchain4j.agent.tool.ToolSpecification toolSpecification,
dev.langchain4j.model.StreamingResponseHandler<dev.langchain4j.data.message.AiMessage> handler) |
static OpenAiStreamingChatModel |
withApiKey(String apiKey) |
public void generate(List<dev.langchain4j.data.message.ChatMessage> messages, dev.langchain4j.model.StreamingResponseHandler<dev.langchain4j.data.message.AiMessage> handler)
Specified by: `generate` in interface `dev.langchain4j.model.chat.StreamingChatLanguageModel`
public void generate(List<dev.langchain4j.data.message.ChatMessage> messages, List<dev.langchain4j.agent.tool.ToolSpecification> toolSpecifications, dev.langchain4j.model.StreamingResponseHandler<dev.langchain4j.data.message.AiMessage> handler)
Specified by: `generate` in interface `dev.langchain4j.model.chat.StreamingChatLanguageModel`
public void generate(List<dev.langchain4j.data.message.ChatMessage> messages, dev.langchain4j.agent.tool.ToolSpecification toolSpecification, dev.langchain4j.model.StreamingResponseHandler<dev.langchain4j.data.message.AiMessage> handler)
Specified by: `generate` in interface `dev.langchain4j.model.chat.StreamingChatLanguageModel`
public int estimateTokenCount(List<dev.langchain4j.data.message.ChatMessage> messages)
Specified by: `estimateTokenCount` in interface `dev.langchain4j.model.chat.TokenCountEstimator`
public static OpenAiStreamingChatModel withApiKey(String apiKey)
Copyright © 2023. All rights reserved.