public class OpenAiStreamingChatModel extends Object implements StreamingChatLanguageModel, TokenCountEstimator
The model's response is streamed token by token and should be handled with a StreamingResponseHandler. You can find a description of the parameters here.

Constructor and Description
---
OpenAiStreamingChatModel(String baseUrl, String apiKey, String organizationId, String modelName, Double temperature, Double topP, List<String> stop, Integer maxTokens, Double presencePenalty, Double frequencyPenalty, Map<String,Integer> logitBias, String responseFormat, Integer seed, String user, Duration timeout, Proxy proxy, Boolean logRequests, Boolean logResponses, Tokenizer tokenizer)
Modifier and Type | Method and Description
---|---
int | estimateTokenCount(List<ChatMessage> messages)
void | generate(List<ChatMessage> messages, List<ToolSpecification> toolSpecifications, StreamingResponseHandler<AiMessage> handler)
void | generate(List<ChatMessage> messages, StreamingResponseHandler<AiMessage> handler)
void | generate(List<ChatMessage> messages, ToolSpecification toolSpecification, StreamingResponseHandler<AiMessage> handler)
static OpenAiStreamingChatModel | withApiKey(String apiKey)
Methods inherited from class java.lang.Object:
clone, equals, finalize, getClass, hashCode, notify, notifyAll, toString, wait, wait, wait

Methods inherited from interface StreamingChatLanguageModel:
generate

Methods inherited from interface TokenCountEstimator:
estimateTokenCount, estimateTokenCount, estimateTokenCount, estimateTokenCount
public OpenAiStreamingChatModel(String baseUrl, String apiKey, String organizationId, String modelName, Double temperature, Double topP, List<String> stop, Integer maxTokens, Double presencePenalty, Double frequencyPenalty, Map<String,Integer> logitBias, String responseFormat, Integer seed, String user, Duration timeout, Proxy proxy, Boolean logRequests, Boolean logResponses, Tokenizer tokenizer)
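Below is a minimal construction sketch for this constructor, passing null for the optional parameters. The import path (dev.langchain4j.model.openai), the baseUrl value, and the example model name are assumptions for illustration; only the parameter list above is given on this page.

```java
import dev.langchain4j.model.openai.OpenAiStreamingChatModel; // assumed package, not stated on this page

import java.time.Duration;

public class ConstructorSketch {

    public static void main(String[] args) {
        OpenAiStreamingChatModel model = new OpenAiStreamingChatModel(
                "https://api.openai.com/v1",        // baseUrl (assumed default OpenAI endpoint)
                System.getenv("OPENAI_API_KEY"),    // apiKey
                null,                               // organizationId
                "gpt-3.5-turbo",                    // modelName (example value)
                0.7,                                // temperature
                null,                               // topP
                null,                               // stop
                null,                               // maxTokens
                null,                               // presencePenalty
                null,                               // frequencyPenalty
                null,                               // logitBias
                null,                               // responseFormat
                null,                               // seed
                null,                               // user
                Duration.ofSeconds(60),             // timeout
                null,                               // proxy
                false,                              // logRequests
                false,                              // logResponses
                null                                // tokenizer
        );
        System.out.println("Model created: " + model);
    }
}
```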
public void generate(List<ChatMessage> messages, StreamingResponseHandler<AiMessage> handler)
Specified by: generate in interface StreamingChatLanguageModel
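A streaming sketch for this overload. The onNext/onComplete/onError callback shape of StreamingResponseHandler, the UserMessage.from helper, and response.content().text() are assumptions about the surrounding langchain4j API rather than details given on this page.

```java
import dev.langchain4j.data.message.AiMessage;
import dev.langchain4j.data.message.ChatMessage;
import dev.langchain4j.data.message.UserMessage;
import dev.langchain4j.model.StreamingResponseHandler;          // assumed packages
import dev.langchain4j.model.openai.OpenAiStreamingChatModel;
import dev.langchain4j.model.output.Response;

import java.util.List;

public class StreamingSketch {

    public static void main(String[] args) {
        OpenAiStreamingChatModel model =
                OpenAiStreamingChatModel.withApiKey(System.getenv("OPENAI_API_KEY"));

        List<ChatMessage> messages = List.of(UserMessage.from("Tell me a joke about streams."));

        model.generate(messages, new StreamingResponseHandler<AiMessage>() {

            @Override
            public void onNext(String token) {
                System.out.print(token);                        // each partial token as it arrives
            }

            @Override
            public void onComplete(Response<AiMessage> response) {
                System.out.println("\nComplete: " + response.content().text());
            }

            @Override
            public void onError(Throwable error) {
                error.printStackTrace();
            }
        });
    }
}
```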
public void generate(List<ChatMessage> messages, List<ToolSpecification> toolSpecifications, StreamingResponseHandler<AiMessage> handler)
Specified by: generate in interface StreamingChatLanguageModel
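A sketch of the tool-calling overload. The ToolSpecification builder shown here is an assumption about the companion agent-tool API; only the method signature above is given on this page.

```java
import dev.langchain4j.agent.tool.ToolSpecification;            // assumed package
import dev.langchain4j.data.message.AiMessage;
import dev.langchain4j.data.message.ChatMessage;
import dev.langchain4j.data.message.UserMessage;
import dev.langchain4j.model.StreamingResponseHandler;
import dev.langchain4j.model.openai.OpenAiStreamingChatModel;
import dev.langchain4j.model.output.Response;

import java.util.List;

public class ToolStreamingSketch {

    public static void main(String[] args) {
        OpenAiStreamingChatModel model =
                OpenAiStreamingChatModel.withApiKey(System.getenv("OPENAI_API_KEY"));

        // A parameter-less tool; the builder shape is an assumption, not part of this page.
        ToolSpecification clock = ToolSpecification.builder()
                .name("current_time")
                .description("Returns the current time")
                .build();

        List<ChatMessage> messages = List.of(UserMessage.from("What time is it?"));

        model.generate(messages, List.of(clock), new StreamingResponseHandler<AiMessage>() {

            @Override
            public void onNext(String token) {
                System.out.print(token);
            }

            @Override
            public void onComplete(Response<AiMessage> response) {
                // The AiMessage may carry a tool execution request instead of plain text.
                System.out.println("\nComplete: " + response.content());
            }

            @Override
            public void onError(Throwable error) {
                error.printStackTrace();
            }
        });
    }
}
```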
public void generate(List<ChatMessage> messages, ToolSpecification toolSpecification, StreamingResponseHandler<AiMessage> handler)
Specified by: generate in interface StreamingChatLanguageModel
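This overload accepts a single ToolSpecification, presumably the one tool the model is expected to use; that semantic is an assumption, since the page only lists the signature. A compact sketch under the same assumptions as above:

```java
import dev.langchain4j.agent.tool.ToolSpecification;            // assumed packages
import dev.langchain4j.data.message.AiMessage;
import dev.langchain4j.data.message.ChatMessage;
import dev.langchain4j.data.message.UserMessage;
import dev.langchain4j.model.StreamingResponseHandler;
import dev.langchain4j.model.openai.OpenAiStreamingChatModel;
import dev.langchain4j.model.output.Response;

import java.util.List;

public class SingleToolStreamingSketch {

    public static void main(String[] args) {
        OpenAiStreamingChatModel model =
                OpenAiStreamingChatModel.withApiKey(System.getenv("OPENAI_API_KEY"));

        ToolSpecification clock = ToolSpecification.builder()    // builder shape is an assumption
                .name("current_time")
                .description("Returns the current time")
                .build();

        List<ChatMessage> messages = List.of(UserMessage.from("What time is it?"));

        // Same call as the previous sketch, but a single ToolSpecification is passed directly.
        model.generate(messages, clock, new StreamingResponseHandler<AiMessage>() {
            @Override public void onNext(String token) { System.out.print(token); }
            @Override public void onComplete(Response<AiMessage> response) {
                System.out.println("\nComplete: " + response.content());
            }
            @Override public void onError(Throwable error) { error.printStackTrace(); }
        });
    }
}
```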
public int estimateTokenCount(List<ChatMessage> messages)
Specified by: estimateTokenCount in interface TokenCountEstimator
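A token-estimation sketch. It assumes that a default Tokenizer is configured when the model is created via withApiKey and that counting happens locally; neither detail is stated on this page.

```java
import dev.langchain4j.data.message.ChatMessage;
import dev.langchain4j.data.message.UserMessage;                 // assumed packages
import dev.langchain4j.model.openai.OpenAiStreamingChatModel;

import java.util.List;

public class TokenCountSketch {

    public static void main(String[] args) {
        OpenAiStreamingChatModel model =
                OpenAiStreamingChatModel.withApiKey(System.getenv("OPENAI_API_KEY"));

        List<ChatMessage> messages = List.of(UserMessage.from("How many tokens is this prompt?"));

        // Estimate the prompt size before sending it (uses the configured Tokenizer; assumption).
        int tokens = model.estimateTokenCount(messages);
        System.out.println("Estimated prompt tokens: " + tokens);
    }
}
```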
public static OpenAiStreamingChatModel withApiKey(String apiKey)