diff --git a/pyproject.toml b/pyproject.toml
index a3d8a73ca..901b83de9 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -10,6 +10,7 @@ packages = [{ include = "smol_dev" }]
 [tool.poetry.dependencies]
 python = "^3.11"
 openai = "^0.27.8"
+litellm = "^0.1.351"
 openai-function-call = "^0.0.5"
 tenacity = "^8.2.2"
 agent-protocol = "^0.1.1"
diff --git a/v0/debugger.py b/v0/debugger.py
index 8e627e973..50d1d02d9 100644
--- a/v0/debugger.py
+++ b/v0/debugger.py
@@ -54,6 +54,7 @@ def main(prompt, directory=DEFAULT_DIR, model="gpt-3.5-turbo"):
 )
 def generate_response(system_prompt, user_prompt, model="gpt-3.5-turbo", *args):
     import openai
+    from litellm import completion
 
     # Set up your OpenAI API credentials
     openai.api_key = os.environ["OPENAI_API_KEY"]
@@ -76,7 +77,7 @@ def generate_response(system_prompt, user_prompt, model="gpt-3.5-turbo", *args):
     }
 
     # Send the API request
-    response = openai.ChatCompletion.create(**params)
+    response = completion(**params)
 
     # Get the reply from the API response
     reply = response.choices[0]["message"]["content"]