From f1ee2facef7ba294d5753ab659322d5413cf7d5a Mon Sep 17 00:00:00 2001
From: ishaan-jaff
Date: Sun, 6 Aug 2023 08:08:58 -0700
Subject: [PATCH 1/2] v0 litellm

---
 v0/debugger.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/v0/debugger.py b/v0/debugger.py
index 8e627e973..50d1d02d9 100644
--- a/v0/debugger.py
+++ b/v0/debugger.py
@@ -54,6 +54,7 @@ def main(prompt, directory=DEFAULT_DIR, model="gpt-3.5-turbo"):
 )
 def generate_response(system_prompt, user_prompt, model="gpt-3.5-turbo", *args):
     import openai
+    from litellm import completion
 
     # Set up your OpenAI API credentials
     openai.api_key = os.environ["OPENAI_API_KEY"]
@@ -76,7 +77,7 @@ def generate_response(system_prompt, user_prompt, model="gpt-3.5-turbo", *args):
     }
 
     # Send the API request
-    response = openai.ChatCompletion.create(**params)
+    response = completion(**params)
 
     # Get the reply from the API response
     reply = response.choices[0]["message"]["content"]

From 4374d98daffe5f58876403baf915fa66260b82c0 Mon Sep 17 00:00:00 2001
From: ishaan-jaff
Date: Sun, 6 Aug 2023 08:11:05 -0700
Subject: [PATCH 2/2] add v

---
 pyproject.toml | 1 +
 1 file changed, 1 insertion(+)

diff --git a/pyproject.toml b/pyproject.toml
index a3d8a73ca..901b83de9 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -10,6 +10,7 @@ packages = [{ include = "smol_dev" }]
 [tool.poetry.dependencies]
 python = "^3.11"
 openai = "^0.27.8"
+litellm = "^0.1.351"
 openai-function-call = "^0.0.5"
 tenacity = "^8.2.2"
 agent-protocol = "^0.1.1"
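
Note (commentary, not part of the patches above): the change relies on litellm's completion() accepting the same keyword arguments that openai.ChatCompletion.create() took and returning an OpenAI-shaped response, which is why the existing response-parsing line is left untouched. A minimal sketch of the call the PR switches to, using a hypothetical params dict in place of the one built in generate_response():

    import os
    from litellm import completion

    # Assumes OPENAI_API_KEY is already exported; litellm picks it up for OpenAI models.
    assert "OPENAI_API_KEY" in os.environ

    # Same shape of keyword arguments previously passed to openai.ChatCompletion.create
    params = {
        "model": "gpt-3.5-turbo",
        "messages": [
            {"role": "system", "content": "You are a helpful debugging assistant."},
            {"role": "user", "content": "Why does this traceback occur?"},
        ],
    }

    # Send the request through litellm instead of calling the OpenAI client directly
    response = completion(**params)

    # Response mirrors the OpenAI schema, so the repo's existing parsing keeps working
    reply = response.choices[0]["message"]["content"]
    print(reply)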