Added LLM provider selection system (Groq or OpenAI)

This commit is contained in:
Fábio Cavalcanti 2025-01-24 22:27:02 -03:00
parent af20510b2b
commit f63dcc40d1
5 changed files with 286 additions and 118 deletions

View File

@@ -39,6 +39,8 @@ class Settings:
         """Inicializa as configurações."""
         logger.debug("Carregando configurações do Redis...")
+        self.ACTIVE_LLM_PROVIDER = self.get_redis_value("ACTIVE_LLM_PROVIDER", "groq")
+        self.OPENAI_API_KEY = self.get_redis_value("OPENAI_API_KEY", "")
         self.GROQ_API_KEY = self.get_redis_value("GROQ_API_KEY", "gsk_default_key")
         self.BUSINESS_MESSAGE = self.get_redis_value("BUSINESS_MESSAGE", "*Impacte AI* Premium Services")
         self.PROCESS_GROUP_MESSAGES = self.get_redis_value("PROCESS_GROUP_MESSAGES", "false").lower() == "true"
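
For reference, get_redis_value itself is not shown in this diff; a minimal sketch of the lookup-with-default pattern these new settings rely on could look like the following (redis_client and its connection parameters are assumptions, not part of the commit):

import redis

redis_client = redis.Redis(host="localhost", port=6379, decode_responses=True)

def get_redis_value(key: str, default: str) -> str:
    """Read a configuration value from Redis, falling back to a default when unset."""
    value = redis_client.get(key)
    return value if value is not None else default

# ACTIVE_LLM_PROVIDER stays "groq" until the dashboard stores another provider
active_provider = get_redis_value("ACTIVE_LLM_PROVIDER", "groq")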

View File

@@ -252,7 +252,7 @@ def login_page():
 # Modificar a função de logout no dashboard
 def dashboard():
     # Versão do sistema
-    APP_VERSION = "2.3.2"
+    APP_VERSION = "2.3.3"
     show_logo()
     st.sidebar.markdown('<div class="sidebar-header">TranscreveZAP - Menu</div>', unsafe_allow_html=True)
@@ -728,8 +728,9 @@ def manage_settings():
     st.title("⚙️ Configurações")
     # Criar tabs para melhor organização
-    tab1, tab2, tab3, tab4 = st.tabs([
+    tab1, tab2, tab3, tab4, tab5 = st.tabs([
         "🔑 Chaves API",
+        "🤖 Provedor LLM",
         "🌐 Configurações Gerais",
         "📝 Formatação de Mensagens",
         "🗣️ Idiomas e Transcrição"
@@ -787,6 +788,46 @@ def manage_settings():
             pass
     with tab2:
+        st.subheader("Configuração do Provedor LLM")
+
+        # Select provider
+        current_provider = storage.get_llm_provider()
+        provider = st.selectbox(
+            "Provedor de Serviço",
+            options=["groq", "openai"],
+            format_func=lambda x: "Groq (Open Source)" if x == "groq" else "OpenAI (API Paga)",
+            index=0 if current_provider == "groq" else 1
+        )
+
+        if provider == "openai":
+            st.info("""
+            A OpenAI é um serviço pago que requer uma chave API válida.
+            Obtenha sua chave em https://platform.openai.com
+            """)
+
+            # OpenAI Key Management
+            openai_key = st.text_input(
+                "OpenAI API Key",
+                type="password",
+                help="Chave que começa com 'sk-'"
+            )
+
+            if st.button("Adicionar Chave OpenAI"):
+                if openai_key and openai_key.startswith("sk-"):
+                    storage.add_openai_key(openai_key)
+                    st.success("✅ Chave OpenAI adicionada com sucesso!")
+                else:
+                    st.error("Chave inválida! Deve começar com 'sk-'")
+
+        # Save provider selection
+        if st.button("💾 Salvar Configuração do Provedor"):
+            try:
+                storage.set_llm_provider(provider)
+                st.success(f"Provedor alterado para: {provider}")
+            except Exception as e:
+                st.error(f"Erro ao salvar provedor: {str(e)}")
+
+    with tab3:
         st.subheader("Configurações do Sistema")

         # Business Message
@@ -850,7 +891,7 @@ def manage_settings():
         )
         pass
-    with tab3:
+    with tab4:
         st.subheader("Formatação de Mensagens")

         # Headers personalizados
@@ -935,7 +976,7 @@ def manage_settings():
             st.error(f"Erro ao salvar configurações: {str(e)}")
-    with tab4:
+    with tab5:
         st.subheader("Idiomas e Transcrição")

         # Adicionar estatísticas no topo

openai_handler.py Normal file
View File

@@ -0,0 +1,74 @@
import aiohttp
import json
from datetime import datetime
import logging
from storage import StorageHandler

logger = logging.getLogger("OpenAIHandler")
logger.setLevel(logging.DEBUG)
handler = logging.StreamHandler()
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
handler.setFormatter(formatter)
logger.addHandler(handler)

async def test_openai_key(key: str) -> bool:
    """Test if an OpenAI key is valid and working."""
    url = "https://api.openai.com/v1/models"
    headers = {"Authorization": f"Bearer {key}"}

    try:
        async with aiohttp.ClientSession() as session:
            async with session.get(url, headers=headers) as response:
                if response.status == 200:
                    data = await response.json()
                    return len(data.get("data", [])) > 0
                return False
    except Exception as e:
        logger.error(f"Error testing OpenAI key: {e}")
        return False

async def handle_openai_request(
    url: str,
    headers: dict,
    data: any,
    storage: StorageHandler,
    is_form_data: bool = False
) -> tuple[bool, dict, str]:
    """Handle requests to OpenAI API with retries."""
    max_retries = 3
    for attempt in range(max_retries):
        try:
            async with aiohttp.ClientSession() as session:
                if is_form_data:
                    async with session.post(url, headers=headers, data=data) as response:
                        response_data = await response.json()
                        if response.status == 200:
                            if is_form_data and response_data.get("text"):
                                return True, response_data, ""
                            elif not is_form_data and response_data.get("choices"):
                                return True, response_data, ""
                else:
                    async with session.post(url, headers=headers, json=data) as response:
                        response_data = await response.json()
                        if response.status == 200 and response_data.get("choices"):
                            return True, response_data, ""

                error_msg = response_data.get("error", {}).get("message", "")
                if "invalid_api_key" in error_msg or "invalid authorization" in error_msg.lower():
                    logger.error(f"OpenAI API key invalid or expired")
                    return False, response_data, error_msg

                if attempt < max_retries - 1:
                    continue
                return False, response_data, error_msg

        except Exception as e:
            logger.error(f"Error in request: {str(e)}")
            if attempt < max_retries - 1:
                continue
            return False, {}, f"Request failed: {str(e)}"

    return False, {}, "All retries failed"

View File

@@ -47,6 +47,7 @@ async def summarize_text_if_needed(text):
     storage.add_log("DEBUG", "Iniciando processo de resumo", {
         "text_length": len(text)
     })
+    provider = storage.get_llm_provider()

     # Obter idioma configurado
     language = redis_client.get("TRANSCRIPTION_LANGUAGE") or "pt"
@@ -54,12 +55,20 @@ async def summarize_text_if_needed(text):
         "language": language,
         "redis_value": redis_client.get("TRANSCRIPTION_LANGUAGE")
     })
-    url_completions = "https://api.groq.com/openai/v1/chat/completions"
-    groq_key = await get_working_groq_key(storage)
-    if not groq_key:
-        raise Exception("Nenhuma chave GROQ disponível")
+    if provider == "openai":
+        api_key = storage.get_openai_keys()[0]
+        url = "https://api.openai.com/v1/chat/completions"
+        model = "gpt-4o-mini"
+    else:  # groq
+        url = "https://api.groq.com/openai/v1/chat/completions"
+        api_key = await get_working_groq_key(storage)
+        if not api_key:
+            raise Exception("Nenhuma chave GROQ disponível")
+        model = "llama-3.3-70b-versatile"

     headers = {
-        "Authorization": f"Bearer {groq_key}",
+        "Authorization": f"Bearer {api_key}",
         "Content-Type": "application/json",
     }
@@ -143,11 +152,11 @@ async def summarize_text_if_needed(text):
             "role": "user",
             "content": f"{base_prompt}\n\nTexto para resumir: {text}",
         }],
-        "model": "llama-3.3-70b-versatile",
+        "model": model,
     }

     try:
-        success, response_data, error = await handle_groq_request(url_completions, headers, json_data, storage, is_form_data=False)
+        success, response_data, error = await handle_groq_request(url, headers, json_data, storage, is_form_data=False)

         if not success:
             raise Exception(error)
@@ -195,12 +204,20 @@ async def transcribe_audio(audio_source, apikey=None, remote_jid=None, from_me=F
         "from_me": from_me,
         "remote_jid": remote_jid
     })
+    provider = storage.get_llm_provider()

-    url = "https://api.groq.com/openai/v1/audio/transcriptions"
-    groq_key = await get_working_groq_key(storage)
-    if not groq_key:
-        raise Exception("Nenhuma chave GROQ disponível")
-    groq_headers = {"Authorization": f"Bearer {groq_key}"}
+    if provider == "openai":
+        api_key = storage.get_openai_keys()[0]  # Get first OpenAI key
+        url = "https://api.openai.com/v1/audio/transcriptions"
+        model = "whisper-1"
+    else:  # groq
+        api_key = await get_working_groq_key(storage)
+        if not api_key:
+            raise Exception("Nenhuma chave GROQ disponível")
+        url = "https://api.groq.com/openai/v1/audio/transcriptions"
+        model = "whisper-large-v3"
+    headers = {"Authorization": f"Bearer {api_key}"}

     # Inicializar variáveis
     contact_language = None
@@ -238,9 +255,9 @@ async def transcribe_audio(audio_source, apikey=None, remote_jid=None, from_me=F
             with open(audio_source, 'rb') as audio_file:
                 data = aiohttp.FormData()
                 data.add_field('file', audio_file, filename='audio.mp3')
-                data.add_field('model', 'whisper-large-v3')
+                data.add_field('model', model)

-                success, response_data, error = await handle_groq_request(url, groq_headers, data, storage, is_form_data=True)
+                success, response_data, error = await handle_groq_request(url, headers, data, storage, is_form_data=True)
                 if success:
                     initial_text = response_data.get("text", "")
@@ -311,14 +328,14 @@ async def transcribe_audio(audio_source, apikey=None, remote_jid=None, from_me=F
         with open(audio_source, 'rb') as audio_file:
             data = aiohttp.FormData()
             data.add_field('file', audio_file, filename='audio.mp3')
-            data.add_field('model', 'whisper-large-v3')
+            data.add_field('model', model)
             data.add_field('language', transcription_language)

             if use_timestamps:
                 data.add_field('response_format', 'verbose_json')

             # Usar handle_groq_request para ter retry e validação
-            success, response_data, error = await handle_groq_request(url, groq_headers, data, storage, is_form_data=True)
+            success, response_data, error = await handle_groq_request(url, headers, data, storage, is_form_data=True)

             if not success:
                 raise Exception(f"Erro na transcrição: {error}")
@@ -428,6 +445,7 @@ async def detect_language(text: str) -> str:
     Returns:
         str: Código ISO 639-1 do idioma detectado
     """
+    provider = storage.get_llm_provider()
     storage.add_log("DEBUG", "Iniciando detecção de idioma", {
         "text_length": len(text)
     })
@@ -437,14 +455,19 @@ async def detect_language(text: str) -> str:
         "pt", "en", "es", "fr", "de", "it", "ja", "ko",
         "zh", "ro", "ru", "ar", "hi", "nl", "pl", "tr"
     }

-    url_completions = "https://api.groq.com/openai/v1/chat/completions"
-    groq_key = await get_working_groq_key(storage)
-    if not groq_key:
-        raise Exception("Nenhuma chave GROQ disponível")
+    if provider == "openai":
+        api_key = storage.get_openai_keys()[0]
+        url = "https://api.openai.com/v1/chat/completions"
+        model = "gpt-4o-mini"
+    else:  # groq
+        url = "https://api.groq.com/openai/v1/chat/completions"
+        api_key = await get_working_groq_key(storage)
+        if not api_key:
+            raise Exception("Nenhuma chave GROQ disponível")
+        model = "llama-3.3-70b-versatile"

     headers = {
-        "Authorization": f"Bearer {groq_key}",
+        "Authorization": f"Bearer {api_key}",
         "Content-Type": "application/json",
     }
@@ -473,12 +496,12 @@ async def detect_language(text: str) -> str:
             "role": "user",
             "content": f"{prompt}\n\n{text[:500]}"  # Limitando para os primeiros 500 caracteres
         }],
-        "model": "llama-3.3-70b-versatile",
+        "model": model,
         "temperature": 0.1
     }

     try:
-        success, response_data, error = await handle_groq_request(url_completions, headers, json_data, storage, is_form_data=False)
+        success, response_data, error = await handle_groq_request(url, headers, json_data, storage, is_form_data=False)

         if not success:
             raise Exception(f"Falha na detecção de idioma: {error}")
@@ -651,6 +674,7 @@ async def translate_text(text: str, source_language: str, target_language: str)
     Returns:
         str: Texto traduzido
     """
+    provider = storage.get_llm_provider()
     storage.add_log("DEBUG", "Iniciando tradução", {
         "source_language": source_language,
         "target_language": target_language,
@@ -661,13 +685,19 @@ async def translate_text(text: str, source_language: str, target_language: str)
     if source_language == target_language:
         return text

-    url_completions = "https://api.groq.com/openai/v1/chat/completions"
-    groq_key = await get_working_groq_key(storage)
-    if not groq_key:
-        raise Exception("Nenhuma chave GROQ disponível")
+    if provider == "openai":
+        api_key = storage.get_openai_keys()[0]
+        url = "https://api.openai.com/v1/chat/completions"
+        model = "gpt-4o-mini"
+    else:  # groq
+        url = "https://api.groq.com/openai/v1/chat/completions"
+        api_key = await get_working_groq_key(storage)
+        if not api_key:
+            raise Exception("Nenhuma chave GROQ disponível")
+        model = "llama-3.3-70b-versatile"

     headers = {
-        "Authorization": f"Bearer {groq_key}",
+        "Authorization": f"Bearer {api_key}",
         "Content-Type": "application/json",
     }
@@ -695,12 +725,12 @@ async def translate_text(text: str, source_language: str, target_language: str)
             "role": "user",
             "content": prompt
         }],
-        "model": "llama-3.3-70b-versatile",
+        "model": model,
         "temperature": 0.3
     }

     try:
-        success, response_data, error = await handle_groq_request(url_completions, headers, json_data, storage, is_form_data=False)
+        success, response_data, error = await handle_groq_request(url, headers, json_data, storage, is_form_data=False)

         if not success:
             raise Exception(f"Falha na tradução: {error}")

View File

@@ -694,3 +694,24 @@ class StorageHandler:
         self.redis.lpush(key, json.dumps(failed_delivery))
         # Manter apenas as últimas 100 falhas
         self.redis.ltrim(key, 0, 99)
+
+    def get_llm_provider(self) -> str:
+        """Returns active LLM provider (groq or openai)"""
+        return self.redis.get(self._get_redis_key("active_llm_provider")) or "groq"
+
+    def set_llm_provider(self, provider: str):
+        """Sets active LLM provider"""
+        if provider not in ["groq", "openai"]:
+            raise ValueError("Provider must be 'groq' or 'openai'")
+        self.redis.set(self._get_redis_key("active_llm_provider"), provider)
+
+    def get_openai_keys(self) -> List[str]:
+        """Get stored OpenAI API keys"""
+        return list(self.redis.smembers(self._get_redis_key("openai_keys")))
+
+    def add_openai_key(self, key: str):
+        """Add OpenAI API key"""
+        if key and key.startswith("sk-"):
+            self.redis.sadd(self._get_redis_key("openai_keys"), key)
+            return True
+        return False
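
A quick usage sketch for the new StorageHandler methods (not part of the commit; it assumes StorageHandler() constructs with its usual defaults and that its Redis instance is reachable):

storage = StorageHandler()

storage.add_openai_key("sk-...")      # only stored when the key starts with "sk-"
print(storage.get_openai_keys())      # list of stored OpenAI keys

storage.set_llm_provider("openai")    # raises ValueError for anything other than "groq"/"openai"
print(storage.get_llm_provider())     # "openai"; falls back to "groq" when nothing is set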