import os

import requests

# Load the Gemini API key from the environment and report whether it was found.
API_KEY = os.getenv("GEMINI_API_KEY")
print(f"API KEY Loaded? {'✅ Yes' if API_KEY else '❌ No'}")

# Model name and REST endpoint for the Gemini generateContent API (v1beta).
MODEL_NAME = "gemini-2.0-flash"
BASE_URL = f"https://generativelanguage.googleapis.com/v1beta/models/{MODEL_NAME}:generateContent"

print("🔑 Gemini API Key Loaded:", bool(API_KEY))


def ask_llm(question: str) -> str:
    """
    Sends the given question to the Gemini LLM and returns its response.

    Args:
        question (str): The question to ask the LLM.

    Returns:
        str: The response from the LLM or an error message.
    """
    headers = {"Content-Type": "application/json"}
    params = {"key": API_KEY}
    payload = {
        "contents": [
            {"parts": [{"text": f"Answer this financial question clearly:\n\n{question}"}]}
        ]
    }
    try:
        # A timeout keeps a hung request from blocking the app indefinitely.
        resp = requests.post(BASE_URL, headers=headers, params=params, json=payload, timeout=30)
        resp.raise_for_status()
        data = resp.json()
        # The generated text is nested under candidates -> content -> parts in the response JSON.
        return data["candidates"][0]["content"]["parts"][0]["text"]
    except Exception as e:
        return f"⚠️ Error calling LLM: {str(e)}"