From f2bdaaee9fa0aae8b80011fde8c74e95d9030fa7 Mon Sep 17 00:00:00 2001
From: Haseeb
Date: Fri, 8 Mar 2024 23:24:04 +0400
Subject: [PATCH] Update offline model configuration and skip client
 initialization for local models

---
 README.md               | 4 ++--
 libs/interpreter_lib.py | 5 +++++
 2 files changed, 7 insertions(+), 2 deletions(-)

diff --git a/README.md b/README.md
index a62b6cc..94855f1 100644
--- a/README.md
+++ b/README.md
@@ -108,8 +108,8 @@ This Interpreter supports offline models via **LM Studio** so to download it fro
 - Download any model from **LM Studio** like _Phi-2,Code-Llama,Mistral_.
 - Then in the app go to **Local Server** option and select the model.
 - Start the server and copy the **URL**.
-- Open config file `configs/offline-model.config` and paste the **URL** in the `api_base` field.
-- Now you can use the model with the interpreter set the model name to `offline-model` and run the interpreter.
+- Open config file `configs/local-model.config` and paste the **URL** in the `api_base` field.
+- Now you can use the model with the interpreter: set the model name to `local-model` and run the interpreter.
 
 4. Run the interpreter with Python:
 ### Running with Python.

diff --git a/libs/interpreter_lib.py b/libs/interpreter_lib.py
index 34a8714..2a486b3 100644
--- a/libs/interpreter_lib.py
+++ b/libs/interpreter_lib.py
@@ -103,6 +103,11 @@ def initialize_client(self):
         self.INTERPRETER_MODEL = str(self.config_values.get('HF_MODEL', self.INTERPRETER_MODEL))
         hf_model_name = self.INTERPRETER_MODEL.strip().split("/")[-1]
 
+        # Skip client initialization for local models (Bug #10: https://github.com/haseeb-heaven/code-interpreter/issues/10)
+        if 'local' in self.INTERPRETER_MODEL:
+            self.logger.info("Skipping client initialization for local model.")
+            return
+
         self.logger.info(f"Using model {hf_model_name}")
 
         model_api_keys = {
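
Note for reviewers: with the rename above, the file the README now points at would look
roughly like the sketch below. This is a hypothetical sketch assuming a simple key-value
layout; `api_base` is the only field the README confirms, and the value shown is LM
Studio's usual default local-server URL, to be replaced with the URL copied from the
Local Server tab.

    # configs/local-model.config -- hypothetical sketch, not the repository's actual file.
    # Only the api_base field is confirmed by the README; paste the URL copied
    # from LM Studio's Local Server tab here (http://localhost:1234/v1 is the
    # common default).
    api_base = http://localhost:1234/v1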
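
The guard itself is a plain substring check on the configured model name, so any name
containing 'local' (such as `local-model`) skips the hosted-API client setup entirely.
A minimal, runnable sketch of the same pattern follows; the `Interpreter` class and the
fallthrough log line are illustrative stand-ins, not the repository's actual code.

    import logging

    logging.basicConfig(level=logging.INFO)

    class Interpreter:
        """Illustrative stand-in for the repository's interpreter class."""

        def __init__(self, model_name: str):
            self.INTERPRETER_MODEL = model_name
            self.logger = logging.getLogger(self.__class__.__name__)

        def initialize_client(self):
            # Local models (e.g. 'local-model' served by LM Studio) need no
            # hosted-API client, so return before any API-key lookup runs.
            if 'local' in self.INTERPRETER_MODEL:
                self.logger.info("Skipping client initialization for local model.")
                return
            self.logger.info("Initializing hosted-API client for %s", self.INTERPRETER_MODEL)

    Interpreter("local-model").initialize_client()    # logs the skip message
    Interpreter("gpt-3.5-turbo").initialize_client()  # falls through to client setup

One caveat worth flagging: the substring test also matches any hosted model whose name
happens to contain 'local', so an exact match against the configured local model name
would be a safer long-term check.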