diff --git a/README.md b/README.md index f8b14e5..cb4e215 100644 --- a/README.md +++ b/README.md @@ -298,6 +298,7 @@ If you're interested in contributing to **Code-Interpreter**, we'd love to have - **v2.0.1** - Added AnthropicAI Claude-2, Instant models. 🔥 **v2.1** - Added AnhtorpicAI Claude-3 models powerful _Opus,Sonnet,Haiku_ models. +- **v2.1.1** - Added **Groq-AI** model _Gemma-7B_ with **700 Tokens/Sec**. ## 📜 **License** diff --git a/configs/groq-gemma.config b/configs/groq-gemma.config new file mode 100644 index 0000000..23f1c87 --- /dev/null +++ b/configs/groq-gemma.config @@ -0,0 +1,17 @@ +# The temperature parameter controls the randomness of the model's output. Lower values make the output more deterministic. +temperature = 0.1 + +# The maximum number of new tokens that the model can generate. +max_tokens = 8192 + +# The start separator for the generated code. +start_sep = ``` + +# The end separator for the generated code. +end_sep = ``` + +# If True, the first line of the generated text will be skipped. +skip_first_line = True + +# The model used for generating the code. +HF_MODEL = groq-gemma \ No newline at end of file diff --git a/interpreter.py b/interpreter.py index 9059bd6..0de71f3 100755 --- a/interpreter.py +++ b/interpreter.py @@ -24,7 +24,7 @@ from libs.utility_manager import UtilityManager # The main version of the interpreter. 
-INTERPRETER_VERSION = "2.1" +INTERPRETER_VERSION = "2.1.1" def main(): parser = argparse.ArgumentParser(description='Code - Interpreter') diff --git a/libs/interpreter_lib.py b/libs/interpreter_lib.py index 1c63b81..49a3f36 100644 --- a/libs/interpreter_lib.py +++ b/libs/interpreter_lib.py @@ -53,7 +53,7 @@ def initialize(self): self.DISPLAY_CODE = self.args.display_code self.INTERPRETER_MODEL = self.args.model if self.args.model else None self.logger.info(f"Interpreter args model selected is '{self.args.model}") - self.logger.info(f"Interpreter model selected is '{self.INTERPRETER_MODEL}") + self.logger.info(f"Interpreter model selected is '{self.INTERPRETER_MODEL}'") self.system_message = "" self.INTERPRETER_MODE = 'code' @@ -91,7 +91,7 @@ def initialize_client(self): load_dotenv() self.logger.info("Initializing Client") - self.logger.info(f"Interpreter model selected is '{self.INTERPRETER_MODEL}") + self.logger.info(f"Interpreter model selected is '{self.INTERPRETER_MODEL}'") if self.INTERPRETER_MODEL is None or self.INTERPRETER_MODEL == "": self.logger.info("HF_MODEL is not provided, using default model.") config_file_name = f"configs/gpt-3.5-turbo.config" # Setting default model to GPT 3.5 Turbo. 
@@ -225,7 +225,8 @@ def execute_last_code(self,os_name): def generate_content(self,message, chat_history: list[tuple[str, str]], temperature=0.1, max_tokens=1024,config_values=None,image_file=None): self.logger.info(f"Generating content with args: message={message}, chat_history={chat_history}, temperature={temperature}, max_tokens={max_tokens}, config_values={config_values}, image_file={image_file}") - + self.logger.info(f"Interpreter model selected is '{self.INTERPRETER_MODEL}'") + # Use the values from the config file if they are provided if config_values: temperature = float(config_values.get('temperature', temperature)) @@ -300,6 +301,9 @@ def generate_content(self,message, chat_history: list[tuple[str, str]], temperat elif 'groq-mixtral' in self.INTERPRETER_MODEL: self.logger.info("Model is Groq/Mixtral.") self.INTERPRETER_MODEL = "groq/mixtral-8x7b-32768" + elif 'groq-gemma' in self.INTERPRETER_MODEL: + self.logger.info("Model is Groq/Gemma.") + self.INTERPRETER_MODEL = "groq/gemma-7b-it" response = litellm.completion(self.INTERPRETER_MODEL, messages=messages,temperature=temperature,max_tokens=max_tokens) self.logger.info("Response received from completion function.") @@ -697,7 +701,7 @@ def interpreter_main(self,version): # Check if prompt contains any file uploaded by user. extracted_file_name = self.utility_manager.extract_file_name(prompt) - self.logger.info(f"Input prompt extracted_name: '{extracted_file_name}'") + self.logger.info(f"Input prompt file name: '{extracted_file_name}'") if extracted_file_name is not None: full_path = self.utility_manager.get_full_file_path(extracted_file_name)