import requests
import os
API_KEY = os.getenv("GEMINI_API_KEY")
print(f"API KEY Loaded? {'β
Yes' if API_KEY else 'β No'}")
MODEL_NAME = "gemini-2.0-flash"
BASE_URL = f"https://generativelanguage.googleapis.com/v1beta/models/{MODEL_NAME}:generateContent"
print("π Gemini API Key Loaded:", bool(API_KEY))


def ask_llm(question: str) -> str:
"""
Sends the given question to the Gemini LLM and returns its response.
Args:
question (str): The question to ask the LLM.
Returns:
str: The response from the LLM or an error message.
"""
headers = {"Content-Type": "application/json"}
params = {"key": API_KEY}
payload = {
"contents": [
{"parts": [{"text": f"Answer this financial question clearly:\n\n{question}"}]}
]
}
    try:
        # A timeout keeps the app from hanging if the Gemini endpoint is unreachable.
        resp = requests.post(BASE_URL, headers=headers, params=params, json=payload, timeout=30)
        resp.raise_for_status()
        data = resp.json()
        # The generated text is nested under candidates -> content -> parts in the response.
        return data["candidates"][0]["content"]["parts"][0]["text"]
    except Exception as e:
        return f"⚠️ Error calling LLM: {e}"