slide-translate/llm.py
nite 1a867844ce feat: Introduce OpenAI LLM provider and update API key handling
This commit integrates OpenAI as a new Large Language Model (LLM) provider,
expanding the available options for content refinement.

Key changes include:
- Added `set_openai_api_key` to handle OpenAI API key retrieval from
  `config.ini` or environment variables.
- Modified `set_api_key` to read the LLM provider from `config.ini` and dispatch
  to the matching provider setup (an example configuration is sketched below).
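
For context, every function in this file reads an `[llm]` section from `config.ini`. The sketch below builds such a section with the key names the module actually looks up; the values are placeholders, not a recommended configuration.

import configparser

# Hypothetical config.ini content for slide-translate/llm.py.
# Key names are taken from the module; all values are placeholders.
config = configparser.ConfigParser()
config["llm"] = {
    "PROVIDER": "openai",  # one of "gemini", "ollama", "openai"
    "OPENAI_API_KEY": "your-openai-api-key",  # or export OPENAI_API_KEY instead
    "OPENAI_MODEL_NAME": "gpt-5-mini",
    "GOOGLE_API_KEY": "your-google-api-key",  # used when PROVIDER is "gemini"
    "GEMINI_MODEL_NAME": "gemini-2.5-flash",
    "OLLAMA_BASE_URL": "http://localhost:11434",
    "OLLAMA_MODEL_NAME": "gemma3:latest",
    "TEMPERATURE": "0.7",
}
with open("config.ini", "w") as configfile:
    config.write(configfile)

Note that configparser normalizes option names to lowercase both when writing and when looking them up with `config.get`, so the upper-case casing above is cosmetic.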
2025-11-12 02:51:18 +11:00

75 lines
2.3 KiB
Python
Executable File

import configparser
import os


def set_api_key() -> None:
    """Read the configured LLM provider from config.ini and set up its credentials."""
    config = configparser.ConfigParser()
    config.read("config.ini")
    provider = config.get("llm", "PROVIDER", fallback="gemini")
    if provider == "gemini":
        set_gemini_api_key()
    elif provider == "ollama":
        set_ollama_config()
    elif provider == "openai":
        set_openai_api_key()


def set_openai_api_key() -> None:
    """Export OPENAI_API_KEY from config.ini unless it is already set in the environment."""
    config = configparser.ConfigParser()
    config.read("config.ini")
    openai_api_key = config.get("llm", "OPENAI_API_KEY", fallback=None)
    if not os.environ.get("OPENAI_API_KEY"):
        if openai_api_key:
            os.environ["OPENAI_API_KEY"] = openai_api_key
        else:
            raise ValueError(
                "Error: OPENAI_API_KEY not found in config.ini or environment variables"
            )
    return


def set_gemini_api_key() -> None:
    """Export GOOGLE_API_KEY from config.ini unless it is already set in the environment."""
    config = configparser.ConfigParser()
    config.read("config.ini")
    google_api_key = config.get("llm", "GOOGLE_API_KEY", fallback=None)
    if not os.environ.get("GOOGLE_API_KEY"):
        if google_api_key:
            os.environ["GOOGLE_API_KEY"] = google_api_key
        else:
            raise ValueError(
                "Error: GOOGLE_API_KEY not found in config.ini or environment variables"
            )
    return


def set_ollama_config() -> None:
    """Export OLLAMA_BASE_URL, defaulting to the local Ollama endpoint."""
    config = configparser.ConfigParser()
    config.read("config.ini")
    ollama_base_url = config.get(
        "llm", "OLLAMA_BASE_URL", fallback="http://localhost:11434"
    )
    if not os.environ.get("OLLAMA_BASE_URL"):
        os.environ["OLLAMA_BASE_URL"] = ollama_base_url
    return


def get_model_name() -> str:
    """Return the model name for the configured provider, with per-provider defaults."""
    config = configparser.ConfigParser()
    config.read("config.ini")
    provider = config.get("llm", "PROVIDER", fallback="gemini")
    if provider == "gemini":
        return config.get("llm", "GEMINI_MODEL_NAME", fallback="gemini-2.5-flash")
    elif provider == "ollama":
        return config.get("llm", "OLLAMA_MODEL_NAME", fallback="gemma3:latest")
    elif provider == "openai":
        return config.get("llm", "OPENAI_MODEL_NAME", fallback="gpt-5-mini")
    return "gemini-2.5-flash"  # Default fallback


def get_temperature() -> float:
    """Return the sampling temperature from config.ini, defaulting to 0.7."""
    config = configparser.ConfigParser()
    config.read("config.ini")
    return float(config.get("llm", "TEMPERATURE", fallback=0.7))
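
A minimal usage sketch, assuming the module above is importable as `llm` and that `config.ini` lives in the working directory; the calling code is hypothetical, and only the function names come from the file itself.

import os

import llm  # assumption: slide-translate/llm.py is on the import path as `llm`

# Resolve the provider from config.ini and export its credentials; raises
# ValueError if neither config.ini nor the environment provides a key.
llm.set_api_key()

model_name = llm.get_model_name()    # e.g. "gpt-5-mini" when PROVIDER is "openai"
temperature = llm.get_temperature()  # defaults to 0.7 when TEMPERATURE is unset

print(f"Using model {model_name} at temperature {temperature}")
print("OPENAI_API_KEY present:", bool(os.environ.get("OPENAI_API_KEY")))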