Update token_count_utils.py - Replace `print` with `logger.warning` for consistency (#3168)

The code was using both `logger.warning` and `print` to emit warnings. This commit fixes the inconsistency: warnings written with `print` bypass the logging system entirely, which is a problem in production environments where output is collected and routed through logging handlers.
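For context: `print` writes straight to stdout, while `logger.warning` goes through Python's `logging` machinery, where messages can be filtered, formatted, or forwarded to a log aggregator. Below is a minimal sketch of the module-level logger this change relies on, assuming `token_count_utils.py` follows the standard `logging.getLogger(__name__)` pattern (the logger's actual definition sits outside this diff):

import logging

# Module-level logger named after the module's import path, so downstream
# applications can configure it independently of their own loggers.
logger = logging.getLogger(__name__)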
Umer Mansoor 2024-07-19 13:17:18 -07:00 committed by GitHub
parent 1fdeca4205
commit d1d4b10479
1 changed file with 3 additions and 3 deletions


@@ -95,7 +95,7 @@ def _num_token_from_messages(messages: Union[List, Dict], model="gpt-3.5-turbo-0
     try:
         encoding = tiktoken.encoding_for_model(model)
     except KeyError:
-        print("Warning: model not found. Using cl100k_base encoding.")
+        logger.warning(f"Model {model} not found. Using cl100k_base encoding.")
         encoding = tiktoken.get_encoding("cl100k_base")
     if model in {
         "gpt-3.5-turbo-0613",
@@ -166,7 +166,7 @@ def num_tokens_from_functions(functions, model="gpt-3.5-turbo-0613") -> int:
     try:
         encoding = tiktoken.encoding_for_model(model)
     except KeyError:
-        print("Warning: model not found. Using cl100k_base encoding.")
+        logger.warning(f"Model {model} not found. Using cl100k_base encoding.")
         encoding = tiktoken.get_encoding("cl100k_base")
     num_tokens = 0
@@ -193,7 +193,7 @@ def num_tokens_from_functions(functions, model="gpt-3.5-turbo-0613") -> int:
                                 function_tokens += 3
                                 function_tokens += len(encoding.encode(o))
                         else:
-                            print(f"Warning: not supported field {field}")
+                            logger.warning(f"Not supported field {field}")
                 function_tokens += 11
                 if len(parameters["properties"]) == 0:
                     function_tokens -= 2
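With the messages routed through `logging`, an embedding application can control these warnings centrally instead of scraping stdout. A hedged usage sketch; the logger name `autogen.token_count_utils` is an assumption based on the module's import path, and the handler configuration is illustrative, not part of this commit:

import logging

# Emit warnings and above through a standard handler with timestamps,
# so the fallback-encoding messages land in the application's logs.
logging.basicConfig(
    level=logging.WARNING,
    format="%(asctime)s %(name)s %(levelname)s: %(message)s",
)

# Or raise the threshold for just this module to suppress its warnings
# in production (logger name assumed, not confirmed by this diff).
logging.getLogger("autogen.token_count_utils").setLevel(logging.ERROR)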