You signed in with another tab or window. Reload to refresh your session.You signed out in another tab or window. Reload to refresh your session.You switched accounts on another tab or window. Reload to refresh your session.Dismiss alert
When I run this code I get:
(dspy) C:\Users\yury_\Downloads\project\dspy>C:/Users/yury_/miniconda3/envs/dspy/python.exe c:/Users/yury_/Downloads/project/dspy/dspy_direct.py
Traceback (most recent call last):
File "C:\Users\yury_\miniconda3\envs\dspy\Lib\site-packages\litellm\llms\openai\completion\handler.py", line 125, in completion
raw_response = openai_client.completions.with_raw_response.create(**data) # type: ignore
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\yury_\miniconda3\envs\dspy\Lib\site-packages\openai\_legacy_response.py", line 364, in wrapped
return cast(LegacyAPIResponse[R], func(*args, **kwargs))
^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\yury_\miniconda3\envs\dspy\Lib\site-packages\openai\_utils\_utils.py", line 279, in wrapper
return func(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\yury_\miniconda3\envs\dspy\Lib\site-packages\openai\resources\completions.py", line 539, in create
return self.post(
^^^^^^^^^^^
File "C:\Users\yury_\miniconda3\envs\dspy\Lib\site-packages\openai\_base_client.py", line 1296, in post
return cast(ResponseT, self.request(cast_to, opts, stream=stream, stream_cls=stream_cls))
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\yury_\miniconda3\envs\dspy\Lib\site-packages\openai\_base_client.py", line 973, in request
return self.request(
^^^^^^^^^^^^^^
File "C:\Users\yury_\miniconda3\envs\dspy\Lib\site-packages\openai\_base_client.py", line 1077, in _request
raise self._make_status_error_from_response(err.response) from None
openai.BadRequestError: Error code: 400 - {'error': {'message': 'invalid model ID', 'type': 'invalid_request_error', 'param': None, 'code': None}}
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "C:\Users\yury_\miniconda3\envs\dspy\Lib\site-packages\litellm\main.py", line 1528, in completion
response = openai_text_completions.completion(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\yury_\miniconda3\envs\dspy\Lib\site-packages\litellm\llms\openai\completion\handler.py", line 148, in completion
raise OpenAIError(
litellm.llms.openai.common_utils.OpenAIError: Error code: 400 - {'error': {'message': 'invalid model ID', 'type': 'invalid_request_error', 'param': None, 'code': None}}
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "c:\Users\yury_\Downloads\project\dspy\dspy_direct.py", line 19, in &lt;module&gt;
print(lm("Translate English to French: 'Hello, how are you?'"))
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\yury_\miniconda3\envs\dspy\Lib\site-packages\dspy\utils\callback.py", line 266, in wrapper
return fn(instance, *args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\yury_\miniconda3\envs\dspy\Lib\site-packages\dspy\clients\lm.py", line 115, in __call__
response = completion(
^^^^^^^^^^^
File "C:\Users\yury_\miniconda3\envs\dspy\Lib\site-packages\dspy\clients\lm.py", line 319, in wrapper
return func_cached(key, request, *args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\yury_\miniconda3\envs\dspy\Lib\site-packages\cachetools\__init__.py", line 771, in wrapper
v = func(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\yury_\miniconda3\envs\dspy\Lib\site-packages\dspy\clients\lm.py", line 309, in func_cached
return func(request, *args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\yury_\miniconda3\envs\dspy\Lib\site-packages\dspy\clients\lm.py", line 374, in cached_litellm_text_completion
return litellm_text_completion(
^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\yury_\miniconda3\envs\dspy\Lib\site-packages\dspy\clients\lm.py", line 394, in litellm_text_completion
return litellm.text_completion(
^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\yury_\miniconda3\envs\dspy\Lib\site-packages\litellm\utils.py", line 1212, in wrapper
raise e
File "C:\Users\yury_\miniconda3\envs\dspy\Lib\site-packages\litellm\utils.py", line 1090, in wrapper
result = original_function(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\yury_\miniconda3\envs\dspy\Lib\site-packages\litellm\main.py", line 4209, in text_completion
response = completion(
^^^^^^^^^^^
File "C:\Users\yury_\miniconda3\envs\dspy\Lib\site-packages\litellm\utils.py", line 1212, in wrapper
raise e
File "C:\Users\yury_\miniconda3\envs\dspy\Lib\site-packages\litellm\utils.py", line 1090, in wrapper
result = original_function(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\yury_\miniconda3\envs\dspy\Lib\site-packages\litellm\main.py", line 3093, in completion
raise exception_type(
^^^^^^^^^^^^^^^
File "C:\Users\yury_\miniconda3\envs\dspy\Lib\site-packages\litellm\litellm_core_utils\exception_mapping_utils.py", line 2210, in exception_type
raise e
File "C:\Users\yury_\miniconda3\envs\dspy\Lib\site-packages\litellm\litellm_core_utils\exception_mapping_utils.py", line 328, in exception_type
raise BadRequestError(
litellm.exceptions.BadRequestError: litellm.BadRequestError: Text-completion-openaiException - Error code: 400 - {'error': {'message': 'invalid model ID', 'type': 'invalid_request_error', 'param': None, 'code': None}}
(dspy) C:\Users\yury_\Downloads\project\dspy>
Steps to reproduce
`import dspy
import os
from dotenv import load_dotenv
load_dotenv() # Load environment variables from .env file
hf_token = os.environ.get("HF_TOKEN")
lm = dspy.LM(
model="huggingface/google-t5/t5-small", # Correct model name for Hugging Face client
max_tokens=100, # Added to fix potential issues
model_type="text",
provider="huggingface",
num_retries=1,
launch_kwargs={}, # Ensure no unused model_kwargs are passed
)
dspy.settings.configure(lm=lm)
print(lm("Translate English to French: 'Hello, how are you?'"))`
DSPy version
2.6.10
The text was updated successfully, but these errors were encountered:
What happened?
When I run this code I get:
(dspy) C:\Users\yury_\Downloads\project\dspy>C:/Users/yury_/miniconda3/envs/dspy/python.exe c:/Users/yury_/Downloads/project/dspy/dspy_direct.py
Traceback (most recent call last):
File "C:\Users\yury_\miniconda3\envs\dspy\Lib\site-packages\litellm\llms\openai\completion\handler.py", line 125, in completion
raw_response = openai_client.completions.with_raw_response.create(**data) # type: ignore
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\yury_\miniconda3\envs\dspy\Lib\site-packages\openai\_legacy_response.py", line 364, in wrapped
return cast(LegacyAPIResponse[R], func(*args, **kwargs))
^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\yury_\miniconda3\envs\dspy\Lib\site-packages\openai\_utils\_utils.py", line 279, in wrapper
return func(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\yury_\miniconda3\envs\dspy\Lib\site-packages\openai\resources\completions.py", line 539, in create
return self.post(
^^^^^^^^^^^
File "C:\Users\yury_\miniconda3\envs\dspy\Lib\site-packages\openai\_base_client.py", line 1296, in post
return cast(ResponseT, self.request(cast_to, opts, stream=stream, stream_cls=stream_cls))
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\yury_\miniconda3\envs\dspy\Lib\site-packages\openai\_base_client.py", line 973, in request
return self.request(
^^^^^^^^^^^^^^
File "C:\Users\yury_\miniconda3\envs\dspy\Lib\site-packages\openai\_base_client.py", line 1077, in _request
raise self._make_status_error_from_response(err.response) from None
openai.BadRequestError: Error code: 400 - {'error': {'message': 'invalid model ID', 'type': 'invalid_request_error', 'param': None, 'code': None}}
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "C:\Users\yury_\miniconda3\envs\dspy\Lib\site-packages\litellm\main.py", line 1528, in completion
response = openai_text_completions.completion(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\yury_\miniconda3\envs\dspy\Lib\site-packages\litellm\llms\openai\completion\handler.py", line 148, in completion
raise OpenAIError(
litellm.llms.openai.common_utils.OpenAIError: Error code: 400 - {'error': {'message': 'invalid model ID', 'type': 'invalid_request_error', 'param': None, 'code': None}}
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "c:\Users\yury_\Downloads\project\dspy\dspy_direct.py", line 19, in &lt;module&gt;
print(lm("Translate English to French: 'Hello, how are you?'"))
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\yury_\miniconda3\envs\dspy\Lib\site-packages\dspy\utils\callback.py", line 266, in wrapper
return fn(instance, *args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\yury_\miniconda3\envs\dspy\Lib\site-packages\dspy\clients\lm.py", line 115, in __call__
response = completion(
^^^^^^^^^^^
File "C:\Users\yury_\miniconda3\envs\dspy\Lib\site-packages\dspy\clients\lm.py", line 319, in wrapper
return func_cached(key, request, *args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\yury_\miniconda3\envs\dspy\Lib\site-packages\cachetools\__init__.py", line 771, in wrapper
v = func(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\yury_\miniconda3\envs\dspy\Lib\site-packages\dspy\clients\lm.py", line 309, in func_cached
return func(request, *args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\yury_\miniconda3\envs\dspy\Lib\site-packages\dspy\clients\lm.py", line 374, in cached_litellm_text_completion
return litellm_text_completion(
^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\yury_\miniconda3\envs\dspy\Lib\site-packages\dspy\clients\lm.py", line 394, in litellm_text_completion
return litellm.text_completion(
^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\yury_\miniconda3\envs\dspy\Lib\site-packages\litellm\utils.py", line 1212, in wrapper
raise e
File "C:\Users\yury_\miniconda3\envs\dspy\Lib\site-packages\litellm\utils.py", line 1090, in wrapper
result = original_function(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\yury_\miniconda3\envs\dspy\Lib\site-packages\litellm\main.py", line 4209, in text_completion
response = completion(
^^^^^^^^^^^
File "C:\Users\yury_\miniconda3\envs\dspy\Lib\site-packages\litellm\utils.py", line 1212, in wrapper
raise e
File "C:\Users\yury_\miniconda3\envs\dspy\Lib\site-packages\litellm\utils.py", line 1090, in wrapper
result = original_function(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\yury_\miniconda3\envs\dspy\Lib\site-packages\litellm\main.py", line 3093, in completion
raise exception_type(
^^^^^^^^^^^^^^^
File "C:\Users\yury_\miniconda3\envs\dspy\Lib\site-packages\litellm\litellm_core_utils\exception_mapping_utils.py", line 2210, in exception_type
raise e
File "C:\Users\yury_\miniconda3\envs\dspy\Lib\site-packages\litellm\litellm_core_utils\exception_mapping_utils.py", line 328, in exception_type
raise BadRequestError(
litellm.exceptions.BadRequestError: litellm.BadRequestError: Text-completion-openaiException - Error code: 400 - {'error': {'message': 'invalid model ID', 'type': 'invalid_request_error', 'param': None, 'code': None}}
(dspy) C:\Users\yury_\Downloads\project\dspy>
Steps to reproduce
`import dspy
import os
from dotenv import load_dotenv
load_dotenv() # Load environment variables from .env file
hf_token = os.environ.get("HF_TOKEN")
lm = dspy.LM(
model="huggingface/google-t5/t5-small", # Correct model name for Hugging Face client
max_tokens=100, # Added to fix potential issues
model_type="text",
provider="huggingface",
num_retries=1,
launch_kwargs={}, # Ensure no unused model_kwargs are passed
)
dspy.settings.configure(lm=lm)
print(lm("Translate English to French: 'Hello, how are you?'"))`
DSPy version
2.6.10
The text was updated successfully, but these errors were encountered: