# text-generation-webui/extensions/openai/tokens.py

from extensions.openai.utils import float_list_to_base64
from modules.text_generation import encode, decode
import numpy as np
def token_count(prompt):
    """Count the tokens in *prompt* and wrap the result in the API response shape."""
    token_ids = encode(prompt)[0]
    count = len(token_ids)
    return {'results': [{'tokens': count}]}
def token_encode(input, encoding_format):
    """Tokenize *input* and return the token ids plus their count.

    Parameters:
        input: the text to tokenize.  (The name shadows the builtin but is
            kept unchanged for interface compatibility with callers.)
        encoding_format: accepted for OpenAI API compatibility; currently
            ignored — the ids are always returned as a plain list.

    Returns a dict in the API response shape:
        {'results': [{'tokens': <ids>, 'length': <count>}]}
    """
    tokens = encode(input)[0]
    return {
        'results': [{
            'tokens': tokens,
            'length': len(tokens),
        }]
    }
def token_decode(tokens, encoding_format):
    """Decode a sequence of token ids back into text.

    Parameters:
        tokens: the token ids to decode.
        encoding_format: accepted for OpenAI API compatibility; currently
            ignored (base64-encoded token input is not supported).

    Returns a dict in the API response shape:
        {'results': [{'text': <decoded text>}]}
    """
    # NOTE(review): decode(...)[0] assumes decode returns a sequence of
    # strings; if it returns a plain str this keeps only the first
    # character — confirm against modules.text_generation.decode.
    output = decode(tokens)[0]
    return {
        'results': [{
            'text': output
        }]
    }