mirror of
https://github.com/JamesTheGiblet/BuddAI.git
synced 2026-01-08 21:58:40 +00:00
- Implemented tests for confidence-scoring logic in `test_buddai_confidence.py` and `test_confidence.py`, covering high- and low-confidence scenarios, escalation thresholds, and validation scoring penalties.
- Created tests for fallback-logging functionality in `test_fallback_logging.py`, ensuring fallback prompts are logged correctly and that the `/logs` command retrieves log content.
- Developed tests for fallback prompts in `test_fallback_prompts.py`, verifying that specific prompts are used for different models based on confidence levels.
- Generated detailed test reports for multiple test runs, confirming that all tests passed successfully.
66 lines
No EOL
2.4 KiB
Python
66 lines
No EOL
2.4 KiB
Python
import os
|
|
import logging
|
|
|
|
# Optional dependency: Google Generative AI. Assume it is available and
# flip the flag off only when the import actually fails.
HAS_GEMINI = True
try:
    import google.generativeai as genai
except ImportError:
    # Package missing — fallback escalation will be disabled downstream.
    HAS_GEMINI = False
|
|
|
|
class FallbackClient:
    """
    Handles escalation to external AI models (Gemini) when local confidence is low.

    The client is only active when both the ``google-generativeai`` package is
    installed (``HAS_GEMINI``) and a ``GEMINI_API_KEY`` environment variable is
    set; otherwise ``self.client`` stays ``None`` and escalation returns a
    warning string instead of calling the API.
    """

    def __init__(self):
        # A missing key silently disables fallback: self.client stays None and
        # escalate_to_gemini() reports the client as unconfigured.
        self.api_key = os.getenv("GEMINI_API_KEY")
        self.client = None

        if self.api_key and HAS_GEMINI:
            try:
                genai.configure(api_key=self.api_key)
                # Using gemini-1.5-flash for speed and efficiency (comparable to Sonnet tier)
                self.client = genai.GenerativeModel('gemini-1.5-flash')
            except Exception as e:
                # Initialization failures must not crash startup; the client
                # simply stays disabled. Route diagnostics through the logging
                # module the file already imports instead of bare print().
                logging.getLogger(__name__).warning(
                    "⚠️ Failed to initialize Gemini client: %s", e
                )
        elif not HAS_GEMINI:
            logging.getLogger(__name__).warning(
                "⚠️ Warning: google-generativeai package not installed. Fallback disabled."
            )
        # NOTE: a present package but missing API key is intentionally silent.

    def escalate_to_gemini(self, original_prompt: str, buddai_attempt: str, confidence: int) -> str:
        """
        Calls Gemini API to improve upon a low-confidence local attempt.

        Args:
            original_prompt (str): The user's original query.
            buddai_attempt (str): The code generated by the local model.
            confidence (int): The confidence score (0-100) of the local attempt.

        Returns:
            str: The improved solution from Gemini, or a warning message when
            the client is not configured, or an error message if the API
            call fails.
        """
        if not self.client:
            return f"⚠️ Fallback unavailable: Gemini client not configured (Confidence: {confidence}%)."

        try:
            prompt = f"""
You are an expert coding assistant acting as a fallback for a local AI model.
The local model attempted to answer a request but had low confidence ({confidence}%).

[USER REQUEST]
{original_prompt}

[LOCAL ATTEMPT (Low Confidence)]
{buddai_attempt}

[TASK]
Analyze the request and the local attempt. Provide a corrected, high-quality solution.
"""
            response = self.client.generate_content(prompt)
            return f"✨ **Gemini Fallback (Confidence: {confidence}%)**\n\n{response.text}"
        except Exception as e:
            # Surface API failures to the caller as displayable text rather
            # than raising — the caller treats the return value as chat output.
            return f"❌ Error calling Gemini API: {str(e)}"