diff --git a/utils/call_llm.py b/utils/call_llm.py
index b2cbaf9..0d794b4 100644
--- a/utils/call_llm.py
+++ b/utils/call_llm.py
@@ -48,6 +48,10 @@ def call_llm(prompt: str, use_cache: bool = True) -> str:
         project=os.getenv("GEMINI_PROJECT_ID", "your-project-id"),
         location=os.getenv("GEMINI_LOCATION", "us-central1")
     )
+    # You can comment out the client initialization above and use the AI Studio key instead:
+    # client = genai.Client(
+    #     api_key=os.getenv("GEMINI_API_KEY", "your-api-key"),
+    # )
     model = os.getenv("GEMINI_MODEL", "gemini-2.5-pro-exp-03-25")
     response = client.models.generate_content(
         model=model,
@@ -118,4 +122,4 @@ if __name__ == "__main__":
     print("Making call...")
     response1 = call_llm(test_prompt, use_cache=False)
     print(f"Response: {response1}")
-    
\ No newline at end of file
+
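
As context for the commented-out alternative above: a minimal sketch of how the two authentication paths could be chosen at runtime, assuming the google-genai SDK and the same environment variables used in utils/call_llm.py. The make_client helper and the GEMINI_API_KEY-based switch are illustrative only and not part of this patch; the patched code keeps the Vertex AI client as the default and leaves the AI Studio variant commented out.

    import os
    from google import genai

    def make_client() -> genai.Client:
        # Hypothetical helper (not in the patch): pick a client based on the environment.
        api_key = os.getenv("GEMINI_API_KEY")
        if api_key:
            # AI Studio path: authenticate with an API key only.
            return genai.Client(api_key=api_key)
        # Vertex AI path: authenticate with a project and location, mirroring the patched code.
        # Note: vertexai=True is how the google-genai SDK selects Vertex AI; the visible hunk
        # does not show this flag, so treat it as an assumption about the surrounding code.
        return genai.Client(
            vertexai=True,
            project=os.getenv("GEMINI_PROJECT_ID", "your-project-id"),
            location=os.getenv("GEMINI_LOCATION", "us-central1"),
        )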