From 67b087019c4fae21a546f818c121c8984651302f Mon Sep 17 00:00:00 2001 From: Jason Li Date: Sun, 27 Apr 2025 11:05:25 +0200 Subject: [PATCH] Update Groq AI Model Config (#7335) With the current config, requests fail with the error "Fail to access model(gemma-7b-it) using this api key", since the model has been removed according to the official Groq documentation: https://console.groq.com/docs/models ### Type of change - [x] Bug Fix (non-breaking change which fixes an issue) --- conf/llm_factories.json | 6 ------ 1 file changed, 6 deletions(-) diff --git a/conf/llm_factories.json b/conf/llm_factories.json index 6a1e631b9..4ec74af92 100644 --- a/conf/llm_factories.json +++ b/conf/llm_factories.json @@ -929,12 +929,6 @@ "tags": "LLM", "status": "1", "llm": [ - { - "llm_name": "gemma-7b-it", - "tags": "LLM,CHAT,15k", - "max_tokens": 8192, - "model_type": "chat" - }, { "llm_name": "gemma2-9b-it", "tags": "LLM,CHAT,15k",