public class OpenAiTokenizer extends Object implements Tokenizer
| Constructor and Description |
|---|
| `OpenAiTokenizer(String modelName)` |
| Modifier and Type | Method and Description |
|---|---|
| `String` | `decode(List<Integer> tokens)` |
| `List<Integer>` | `encode(String text)` |
| `List<Integer>` | `encode(String text, int maxTokensToEncode)` |
| `int` | `estimateTokenCountInForcefulToolExecutionRequest(ToolExecutionRequest toolExecutionRequest)` |
| `int` | `estimateTokenCountInForcefulToolSpecification(ToolSpecification toolSpecification)` |
| `int` | `estimateTokenCountInMessage(ChatMessage message)` |
| `int` | `estimateTokenCountInMessages(Iterable<ChatMessage> messages)` |
| `int` | `estimateTokenCountInText(String text)` |
| `int` | `estimateTokenCountInToolExecutionRequests(Iterable<ToolExecutionRequest> toolExecutionRequests)` |
| `int` | `estimateTokenCountInToolSpecifications(Iterable<ToolSpecification> toolSpecifications)` |
Methods inherited from class java.lang.Object: clone, equals, finalize, getClass, hashCode, notify, notifyAll, toString, wait, wait, wait

Methods inherited from interface Tokenizer: estimateTokenCountInTools, estimateTokenCountInTools
Constructor Detail

public OpenAiTokenizer(String modelName)
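A minimal usage sketch showing construction together with the encode/decode round trip listed in the summary above. The imports and the model name "gpt-3.5-turbo" are assumptions (langchain4j's `dev.langchain4j.model.openai` package); adjust to your setup.

```java
import dev.langchain4j.model.openai.OpenAiTokenizer;

import java.util.List;

public class TokenizerRoundTrip {

    public static void main(String[] args) {
        // Assumption: "gpt-3.5-turbo" is a model name the tokenizer recognizes.
        OpenAiTokenizer tokenizer = new OpenAiTokenizer("gpt-3.5-turbo");

        // Encode text into token ids, then decode them back to text.
        List<Integer> tokens = tokenizer.encode("Hello, world!");
        String roundTripped = tokenizer.decode(tokens);

        System.out.println(tokens);        // token ids
        System.out.println(roundTripped);  // "Hello, world!"
    }
}
```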
Method Detail

public int estimateTokenCountInText(String text)
Specified by: estimateTokenCountInText in interface Tokenizer
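A short sketch of estimating the token count of plain text, for example to check a prompt against a context-window budget before sending it (imports and model name are assumptions, as above):

```java
import dev.langchain4j.model.openai.OpenAiTokenizer;

public class TextTokenCount {

    public static void main(String[] args) {
        OpenAiTokenizer tokenizer = new OpenAiTokenizer("gpt-3.5-turbo");

        // Estimate how many tokens this text will consume.
        int tokenCount = tokenizer.estimateTokenCountInText("How many tokens is this sentence?");
        System.out.println(tokenCount);
    }
}
```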
public int estimateTokenCountInMessage(ChatMessage message)
Specified by: estimateTokenCountInMessage in interface Tokenizer
public int estimateTokenCountInMessages(Iterable<ChatMessage> messages)
Specified by: estimateTokenCountInMessages in interface Tokenizer
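A sketch covering both of the message-counting methods above, for a single ChatMessage and for a whole conversation. The `UserMessage.from` and `SystemMessage.from` factories from `dev.langchain4j.data.message` are assumptions about the surrounding library; verify against your langchain4j version.

```java
import dev.langchain4j.data.message.ChatMessage;
import dev.langchain4j.data.message.SystemMessage;
import dev.langchain4j.data.message.UserMessage;
import dev.langchain4j.model.openai.OpenAiTokenizer;

import java.util.List;

public class MessageTokenCount {

    public static void main(String[] args) {
        OpenAiTokenizer tokenizer = new OpenAiTokenizer("gpt-3.5-turbo");

        ChatMessage system = SystemMessage.from("You are a helpful assistant.");
        ChatMessage user = UserMessage.from("Summarize the plot of Hamlet in one sentence.");

        // Estimate for a single message (may include per-message formatting overhead,
        // not just the raw text).
        int single = tokenizer.estimateTokenCountInMessage(user);

        // Estimate for the conversation as a whole.
        int total = tokenizer.estimateTokenCountInMessages(List.of(system, user));

        System.out.println(single + " / " + total);
    }
}
```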
public int estimateTokenCountInToolSpecifications(Iterable<ToolSpecification> toolSpecifications)
Specified by: estimateTokenCountInToolSpecifications in interface Tokenizer
public int estimateTokenCountInForcefulToolSpecification(ToolSpecification toolSpecification)
Specified by: estimateTokenCountInForcefulToolSpecification in interface Tokenizer
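A sketch for the two tool-specification methods above. The `ToolSpecification.builder()` call and the `dev.langchain4j.agent.tool` import are assumptions about the surrounding library, and the weather tool is purely hypothetical; adapt to your version.

```java
import dev.langchain4j.agent.tool.ToolSpecification;
import dev.langchain4j.model.openai.OpenAiTokenizer;

import java.util.List;

public class ToolSpecificationTokenCount {

    public static void main(String[] args) {
        OpenAiTokenizer tokenizer = new OpenAiTokenizer("gpt-3.5-turbo");

        // Hypothetical tool definition; real specifications usually also declare parameters.
        ToolSpecification weatherTool = ToolSpecification.builder()
                .name("getWeather")
                .description("Returns the current weather for a city")
                .build();

        // Tokens consumed by advertising the tools to the model.
        int allTools = tokenizer.estimateTokenCountInToolSpecifications(List.of(weatherTool));

        // Tokens consumed when the model is forced to call this specific tool.
        int forced = tokenizer.estimateTokenCountInForcefulToolSpecification(weatherTool);

        System.out.println(allTools + " / " + forced);
    }
}
```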
public int estimateTokenCountInToolExecutionRequests(Iterable<ToolExecutionRequest> toolExecutionRequests)
Specified by: estimateTokenCountInToolExecutionRequests in interface Tokenizer
public int estimateTokenCountInForcefulToolExecutionRequest(ToolExecutionRequest toolExecutionRequest)
Specified by: estimateTokenCountInForcefulToolExecutionRequest in interface Tokenizer
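Finally, a sketch for the two tool-execution-request methods above. The `ToolExecutionRequest.builder()` call with `name`/`arguments` is an assumption about the `dev.langchain4j.agent.tool` API, and the request itself is a hypothetical example of what a model might emit when calling a tool.

```java
import dev.langchain4j.agent.tool.ToolExecutionRequest;
import dev.langchain4j.model.openai.OpenAiTokenizer;

import java.util.List;

public class ToolExecutionRequestTokenCount {

    public static void main(String[] args) {
        OpenAiTokenizer tokenizer = new OpenAiTokenizer("gpt-3.5-turbo");

        // Hypothetical tool call as the model might return it.
        ToolExecutionRequest request = ToolExecutionRequest.builder()
                .name("getWeather")
                .arguments("{\"city\": \"Berlin\"}")
                .build();

        // Tokens in a batch of tool calls returned by the model.
        int requests = tokenizer.estimateTokenCountInToolExecutionRequests(List.of(request));

        // Tokens when the model was forced to call exactly one tool.
        int forceful = tokenizer.estimateTokenCountInForcefulToolExecutionRequest(request);

        System.out.println(requests + " / " + forceful);
    }
}
```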