feat: Introduce OpenAI LLM provider and update API key handling
This commit integrates OpenAI as a new Large Language Model (LLM) provider, expanding the available options for content refinement. Key changes include:

- Added `set_openai_api_key` to handle OpenAI API key retrieval from `config.ini` or environment variables.
- Modified `set_api_key` to dynamically read the LLM provider from `config.ini` and dispatch to the provider-specific setup function.
- Updated `get_model_name` to use provider-specific keys (`GEMINI_MODEL_NAME`, `OLLAMA_MODEL_NAME`, `OPENAI_MODEL_NAME`).
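For context, a sketch of the `config.ini` this code expects, using only the section and key names that appear in the diff below; every value shown is a placeholder and is not part of the commit:

    [llm]
    PROVIDER = openai
    OPENAI_API_KEY = <your-openai-key>
    OPENAI_MODEL_NAME = gpt-5-mini
    GEMINI_MODEL_NAME = gemini-2.5-flash
    OLLAMA_MODEL_NAME = gemma3:latest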
llm.py  (27 lines changed)
@@ -2,11 +2,30 @@ import configparser
 import os
 
 
-def set_api_key(provider: str) -> None:
+def set_api_key() -> None:
+    config = configparser.ConfigParser()
+    config.read("config.ini")
+    provider = config.get("llm", "PROVIDER", fallback="gemini")
     if provider == "gemini":
         set_gemini_api_key()
     elif provider == "ollama":
         set_ollama_config()
+    elif provider == "openai":
+        set_openai_api_key()
+
+
+def set_openai_api_key() -> None:
+    config = configparser.ConfigParser()
+    config.read("config.ini")
+    openai_api_key = config.get("llm", "OPENAI_API_KEY", fallback=None)
+    if not os.environ.get("OPENAI_API_KEY"):
+        if openai_api_key:
+            os.environ["OPENAI_API_KEY"] = openai_api_key
+        else:
+            raise ValueError(
+                "Error: OPENAI_API_KEY not found in config.ini or environment variables"
+            )
+    return
 
 
 def set_gemini_api_key() -> None:
@@ -41,9 +60,11 @@ def get_model_name() -> str:
     config.read("config.ini")
     provider = config.get("llm", "PROVIDER", fallback="gemini")
     if provider == "gemini":
-        return config.get("llm", "MODEL_NAME", fallback="gemini-2.5-flash")
+        return config.get("llm", "GEMINI_MODEL_NAME", fallback="gemini-2.5-flash")
     elif provider == "ollama":
-        return config.get("llm", "MODEL_NAME", fallback="gemma3:latest")
+        return config.get("llm", "OLLAMA_MODEL_NAME", fallback="gemma3:latest")
+    elif provider == "openai":
+        return config.get("llm", "OPENAI_MODEL_NAME", fallback="gpt-5-mini")
     return "gemini-2.5-flash"  # Default fallback
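For reference, a rough usage sketch of the updated entry points. It assumes llm.py is importable as a module named `llm` and that a `config.ini` like the sketch above sits in the working directory; neither assumption is part of this commit:

    from llm import get_model_name, set_api_key

    # Reads PROVIDER from config.ini ("gemini" if missing) and, for "openai",
    # exports OPENAI_API_KEY from config.ini unless it is already set in the environment.
    set_api_key()

    # Resolves the provider-specific model name, e.g. OPENAI_MODEL_NAME
    # (falling back to "gpt-5-mini") when PROVIDER = openai.
    model = get_model_name()
    print(model)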