mirror of https://github.com/microsoft/autogen.git
Update token_count_utils.py - Replace `print` with `logger.warning` for consistency (#3168)
The code was using both `logger.warning` and `print` for showing warnings. This commit fixes this inconsistency, which can be an issue in production environments / logging systems
This commit is contained in:
parent
1fdeca4205
commit
d1d4b10479
|
@ -95,7 +95,7 @@ def _num_token_from_messages(messages: Union[List, Dict], model="gpt-3.5-turbo-0
|
|||
try:
|
||||
encoding = tiktoken.encoding_for_model(model)
|
||||
except KeyError:
|
||||
print("Warning: model not found. Using cl100k_base encoding.")
|
||||
logger.warning(f"Model {model} not found. Using cl100k_base encoding.")
|
||||
encoding = tiktoken.get_encoding("cl100k_base")
|
||||
if model in {
|
||||
"gpt-3.5-turbo-0613",
|
||||
|
@ -166,7 +166,7 @@ def num_tokens_from_functions(functions, model="gpt-3.5-turbo-0613") -> int:
|
|||
try:
|
||||
encoding = tiktoken.encoding_for_model(model)
|
||||
except KeyError:
|
||||
print("Warning: model not found. Using cl100k_base encoding.")
|
||||
logger.warning(f"Model {model} not found. Using cl100k_base encoding.")
|
||||
encoding = tiktoken.get_encoding("cl100k_base")
|
||||
|
||||
num_tokens = 0
|
||||
|
@ -193,7 +193,7 @@ def num_tokens_from_functions(functions, model="gpt-3.5-turbo-0613") -> int:
|
|||
function_tokens += 3
|
||||
function_tokens += len(encoding.encode(o))
|
||||
else:
|
||||
print(f"Warning: not supported field {field}")
|
||||
logger.warning(f"Not supported field {field}")
|
||||
function_tokens += 11
|
||||
if len(parameters["properties"]) == 0:
|
||||
function_tokens -= 2
|
||||
|
|
Loading…
Reference in New Issue