
Commit eb7bdb7

remove LLM client configs
1 parent 4143c19 commit eb7bdb7

File tree: 4 files changed, +13 −329 lines

src/codemodder/codemodder.py

Lines changed: 13 additions & 20 deletions
@@ -14,7 +14,7 @@
 from codemodder.codetf import CodeTF
 from codemodder.context import CodemodExecutionContext
 from codemodder.dependency import Dependency
-from codemodder.llm import MisconfiguredAIClient, TokenUsage, log_token_usage
+from codemodder.llm import TokenUsage, log_token_usage
 from codemodder.logging import configure_logger, log_list, log_section, logger
 from codemodder.project_analysis.file_parsers.package_store import PackageStore
 from codemodder.project_analysis.python_repo_manager import PythonRepoManager
@@ -134,7 +134,6 @@ def run(
     original_cli_args: list[str] | None = None,
     codemod_registry: registry.CodemodRegistry | None = None,
     sast_only: bool = False,
-    ai_client: bool = True,
     log_matched_files: bool = False,
     remediation: bool = False,
 ) -> tuple[CodeTF | None, int, TokenUsage]:
@@ -162,24 +161,18 @@ def run(

     repo_manager = PythonRepoManager(Path(directory))

-    try:
-        context = CodemodExecutionContext(
-            Path(directory),
-            dry_run,
-            verbose,
-            codemod_registry,
-            provider_registry,
-            repo_manager,
-            path_include,
-            path_exclude,
-            tool_result_files_map,
-            max_workers,
-            ai_client,
-        )
-    except MisconfiguredAIClient as e:
-        logger.error(e)
-        # Codemodder instructions conflicted (according to spec)
-        return None, 3, token_usage
+    context = CodemodExecutionContext(
+        Path(directory),
+        dry_run,
+        verbose,
+        codemod_registry,
+        provider_registry,
+        repo_manager,
+        path_include,
+        path_exclude,
+        tool_result_files_map,
+        max_workers,
+    )

     context.repo_manager.parse_project()

src/codemodder/context.py

Lines changed: 0 additions & 11 deletions
@@ -17,7 +17,6 @@
     build_failed_dependency_notification,
 )
 from codemodder.file_context import FileContext
-from codemodder.llm import setup_azure_llama_llm_client, setup_openai_llm_client
 from codemodder.logging import log_list, logger
 from codemodder.project_analysis.file_parsers.package_store import PackageStore
 from codemodder.project_analysis.python_repo_manager import PythonRepoManager
@@ -28,9 +27,6 @@
 from codemodder.utils.update_finding_metadata import update_finding_metadata

 if TYPE_CHECKING:
-    from azure.ai.inference import ChatCompletionsClient
-    from openai import OpenAI
-
     from codemodder.codemods.base_codemod import BaseCodemod


@@ -51,8 +47,6 @@ class CodemodExecutionContext:
     max_workers: int = 1
     tool_result_files_map: dict[str, list[Path]]
     semgrep_prefilter_results: ResultSet | None = None
-    openai_llm_client: OpenAI | None = None
-    azure_llama_llm_client: ChatCompletionsClient | None = None

     def __init__(
         self,
@@ -66,7 +60,6 @@ def __init__(
         path_exclude: list[str] | None = None,
         tool_result_files_map: dict[str, list[Path]] | None = None,
         max_workers: int = 1,
-        ai_client: bool = True,
     ):
         self.directory = directory
         self.dry_run = dry_run
@@ -85,10 +78,6 @@ def __init__(
         self.max_workers = max_workers
         self.tool_result_files_map = tool_result_files_map or {}
         self.semgrep_prefilter_results = None
-        self.openai_llm_client = setup_openai_llm_client() if ai_client else None
-        self.azure_llama_llm_client = (
-            setup_azure_llama_llm_client() if ai_client else None
-        )

     def add_changesets(self, codemod_name: str, change_sets: List[ChangeSet]):
         self._changesets_by_codemod.setdefault(codemod_name, []).extend(change_sets)
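
Note: downstream code that read a preconfigured client off the execution context now has to build one itself. The sketch below is hypothetical and not part of this commit; the removed attribute name and the CODEMODDER_OPENAI_API_KEY variable come from the deleted code, while the helper function, the model name, and the chat call are illustrative assumptions.

import os

from openai import OpenAI  # optional dependency; assumed installed for this sketch

from codemodder.context import CodemodExecutionContext


def summarize_changes(context: CodemodExecutionContext) -> str | None:
    """Hypothetical codemod helper that previously read context.openai_llm_client."""
    # Before this commit: client = context.openai_llm_client (attribute removed above).
    # Now the context carries no LLM clients, so configure one locally from the same
    # environment variable the deleted setup helper used.
    if not (api_key := os.getenv("CODEMODDER_OPENAI_API_KEY")):
        return None
    client = OpenAI(api_key=api_key)
    response = client.chat.completions.create(
        model="gpt-4o-mini",  # hypothetical model choice, not from this repository
        messages=[{"role": "user", "content": "Summarize the applied changes."}],
    )
    return response.choices[0].message.content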

src/codemodder/llm.py

Lines changed: 0 additions & 85 deletions
@@ -2,35 +2,13 @@

 import os
 from dataclasses import dataclass
-from typing import TYPE_CHECKING

 from typing_extensions import Self

-try:
-    from openai import AzureOpenAI, OpenAI
-except ImportError:
-    OpenAI = None
-    AzureOpenAI = None
-
-try:
-    from azure.ai.inference import ChatCompletionsClient
-    from azure.core.credentials import AzureKeyCredential
-except ImportError:
-    ChatCompletionsClient = None
-    AzureKeyCredential = None
-
-if TYPE_CHECKING:
-    from openai import OpenAI
-    from azure.ai.inference import ChatCompletionsClient
-    from azure.core.credentials import AzureKeyCredential
-
 from codemodder.logging import logger

 __all__ = [
     "MODELS",
-    "setup_openai_llm_client",
-    "setup_azure_llama_llm_client",
-    "MisconfiguredAIClient",
     "TokenUsage",
     "log_token_usage",
 ]
@@ -42,7 +20,6 @@
     "o1-mini",
     "o1",
 ]
-DEFAULT_AZURE_OPENAI_API_VERSION = "2024-02-01"


 class ModelRegistry(dict):
@@ -66,68 +43,6 @@ def __getattr__(self, name):
 MODELS = ModelRegistry(models)


-def setup_openai_llm_client() -> OpenAI | None:
-    """Configure either the Azure OpenAI LLM client or the OpenAI client, in that order."""
-    if not AzureOpenAI:
-        logger.info("Azure OpenAI API client not available")
-        return None
-
-    azure_openapi_key = os.getenv("CODEMODDER_AZURE_OPENAI_API_KEY")
-    azure_openapi_endpoint = os.getenv("CODEMODDER_AZURE_OPENAI_ENDPOINT")
-    if bool(azure_openapi_key) ^ bool(azure_openapi_endpoint):
-        raise MisconfiguredAIClient(
-            "Azure OpenAI API key and endpoint must both be set or unset"
-        )
-
-    if azure_openapi_key and azure_openapi_endpoint:
-        logger.info("Using Azure OpenAI API client")
-        return AzureOpenAI(
-            api_key=azure_openapi_key,
-            api_version=os.getenv(
-                "CODEMODDER_AZURE_OPENAI_API_VERSION",
-                DEFAULT_AZURE_OPENAI_API_VERSION,
-            ),
-            azure_endpoint=azure_openapi_endpoint,
-        )
-
-    if not OpenAI:
-        logger.info("OpenAI API client not available")
-        return None
-
-    if not (api_key := os.getenv("CODEMODDER_OPENAI_API_KEY")):
-        logger.info("OpenAI API key not found")
-        return None
-
-    logger.info("Using OpenAI API client")
-    return OpenAI(api_key=api_key)
-
-
-def setup_azure_llama_llm_client() -> ChatCompletionsClient | None:
-    """Configure the Azure Llama LLM client."""
-    if not ChatCompletionsClient:
-        logger.info("Azure Llama client not available")
-        return None
-
-    azure_llama_key = os.getenv("CODEMODDER_AZURE_LLAMA_API_KEY")
-    azure_llama_endpoint = os.getenv("CODEMODDER_AZURE_LLAMA_ENDPOINT")
-    if bool(azure_llama_key) ^ bool(azure_llama_endpoint):
-        raise MisconfiguredAIClient(
-            "Azure Llama API key and endpoint must both be set or unset"
-        )
-
-    if azure_llama_key and azure_llama_endpoint:
-        logger.info("Using Azure Llama API client")
-        return ChatCompletionsClient(
-            credential=AzureKeyCredential(azure_llama_key),
-            endpoint=azure_llama_endpoint,
-        )
-    return None
-
-
-class MisconfiguredAIClient(ValueError):
-    pass
-
-
 @dataclass
 class TokenUsage:
     completion_tokens: int = 0
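
Note: after this commit src/codemodder/llm.py exposes only MODELS, TokenUsage, and log_token_usage; client construction and MisconfiguredAIClient are gone. A rough standalone equivalent of the removed setup_openai_llm_client is sketched below for reference: the environment variable names, the Azure-over-OpenAI precedence, and the default API version come from the deleted code, while the function name and the plain ValueError are assumptions of this sketch.

import os

from openai import AzureOpenAI, OpenAI  # optional dependency; assumed installed for this sketch


def make_openai_client() -> OpenAI | None:
    """Standalone approximation of the removed setup_openai_llm_client helper."""
    key = os.getenv("CODEMODDER_AZURE_OPENAI_API_KEY")
    endpoint = os.getenv("CODEMODDER_AZURE_OPENAI_ENDPOINT")
    if bool(key) ^ bool(endpoint):
        # The removed helper raised MisconfiguredAIClient here; that class no longer exists.
        raise ValueError("Azure OpenAI API key and endpoint must both be set or unset")
    if key and endpoint:
        # Azure OpenAI takes precedence when both key and endpoint are configured.
        return AzureOpenAI(
            api_key=key,
            api_version=os.getenv("CODEMODDER_AZURE_OPENAI_API_VERSION", "2024-02-01"),
            azure_endpoint=endpoint,
        )
    if api_key := os.getenv("CODEMODDER_OPENAI_API_KEY"):
        return OpenAI(api_key=api_key)
    return None

An Azure Llama client can be rebuilt the same way from CODEMODDER_AZURE_LLAMA_API_KEY and CODEMODDER_AZURE_LLAMA_ENDPOINT using azure.ai.inference.ChatCompletionsClient and AzureKeyCredential, mirroring the other removed helper.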
