Skip to content

Commit e129828

Browse files
refactoring
1 parent 6b5eb0e commit e129828

File tree

3 files changed

+211
-0
lines changed

3 files changed

+211
-0
lines changed

.env.example

Lines changed: 20 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,20 @@
1+
# code2docs LLM Configuration
2+
# Copy to .env and fill in your values.
3+
# If no LLM is configured, code2docs falls back to algorithm-based generation.
4+
5+
# LLM model identifier (litellm format)
6+
# Examples: openai/gpt-4o-mini, anthropic/claude-3-haiku, ollama/llama3, groq/llama-3.1-8b
7+
CODE2DOCS_LLM_MODEL=openai/gpt-4o-mini
8+
9+
# API key for the LLM provider
10+
# Not needed for local models (ollama, etc.)
11+
CODE2DOCS_LLM_API_KEY=
12+
13+
# Optional: API base URL (for self-hosted or proxy endpoints)
14+
# CODE2DOCS_LLM_API_BASE=http://localhost:11434
15+
16+
# Optional: max tokens per LLM call (default: 1024)
17+
# CODE2DOCS_LLM_MAX_TOKENS=1024
18+
19+
# Optional: temperature (default: 0.3 — low for factual docs)
20+
# CODE2DOCS_LLM_TEMPERATURE=0.3

code2docs/generators/getting_started_gen.py

Lines changed: 30 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -6,6 +6,7 @@
66

77
from ..config import Code2DocsConfig
88
from ..analyzers.dependency_scanner import DependencyScanner
9+
from ..llm_helper import LLMHelper
910

1011

1112
class GettingStartedGenerator:
@@ -14,12 +15,20 @@ class GettingStartedGenerator:
1415
    def __init__(self, config: Code2DocsConfig, result: AnalysisResult):
        """Store the run configuration and analysis result this generator renders.

        Args:
            config: Run configuration; ``config.llm`` carries the LLM settings.
            result: Completed static-analysis result to turn into Markdown.
        """
        self.config = config
        self.result = result
        # Optional LLM wrapper; when no LLM is configured/installed it reports
        # unavailable and the generator falls back to template-only output.
        self.llm = LLMHelper(config.llm)
1719

1820
def generate(self) -> str:
1921
"""Generate getting-started.md content."""
2022
project = self.config.project_name or "Project"
2123
lines = [
2224
f"# Getting Started with {project}\n",
25+
]
26+
# LLM-generated intro if available
27+
intro = self._generate_intro(project)
28+
if intro:
29+
lines.append(intro)
30+
lines.append("")
31+
lines += [
2332
self._render_prerequisites(),
2433
"",
2534
self._render_installation(),
@@ -115,6 +124,27 @@ def _render_first_usage(self) -> str:
115124
lines.append("```")
116125
return "\n".join(lines)
117126

127+
def _generate_intro(self, project: str) -> str:
128+
"""Generate LLM-enhanced intro paragraph. Returns '' if unavailable."""
129+
if not self.llm.available:
130+
return ""
131+
# Gather CLI commands
132+
cli_funcs = [
133+
f for f in self.result.functions.values()
134+
if not f.is_private and not f.is_method
135+
and f.module and "cli" in f.module
136+
]
137+
cli_str = ", ".join(f.name for f in cli_funcs[:8]) or "N/A"
138+
# Gather public API
139+
public_funcs = [
140+
f for f in self.result.functions.values()
141+
if not f.is_private and not f.is_method
142+
and not f.name.startswith("_")
143+
]
144+
api_str = ", ".join(f"{f.name}()" for f in public_funcs[:8]) or "N/A"
145+
result = self.llm.generate_getting_started_summary(project, cli_str, api_str)
146+
return result or ""
147+
118148
def _render_next_steps(self) -> str:
119149
"""Render next steps with links to other docs."""
120150
lines = [

code2docs/llm_helper.py

Lines changed: 161 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,161 @@
1+
"""LLM helper — optional LLM-assisted documentation generation via litellm.
2+
3+
If litellm is not installed or LLM is not configured, all methods return None
4+
and generators fall back to algorithm-based template output.
5+
"""
6+
7+
import logging
8+
from typing import Optional
9+
10+
from .config import LLMConfig
11+
12+
logger = logging.getLogger(__name__)
13+
14+
# Lazy import: litellm is an optional dependency
15+
_litellm = None
16+
17+
18+
def _get_litellm():
19+
"""Import litellm lazily."""
20+
global _litellm
21+
if _litellm is None:
22+
try:
23+
import litellm
24+
litellm.suppress_debug_info = True
25+
_litellm = litellm
26+
except ImportError:
27+
_litellm = False # sentinel: tried and failed
28+
return _litellm if _litellm is not False else None
29+
30+
31+
class LLMHelper:
    """Thin wrapper around litellm for documentation generation.

    If LLM is unavailable or disabled, every method returns None so callers
    can fall through to template-based generation.
    """

    def __init__(self, config: LLMConfig):
        self.config = config
        # Tri-state availability cache: None = not yet checked.
        self._available: Optional[bool] = None

    @property
    def available(self) -> bool:
        """Check if LLM is configured and litellm is installed.

        The result is computed and logged only once, on first access —
        this property is consulted before every LLM call, and logging on
        each access would spam the logs.
        """
        if self._available is None:
            self._available = (
                self.config.enabled
                and bool(self.config.model)
                and _get_litellm() is not None
            )
            if self._available:
                logger.info("LLM enabled: model=%s", self.config.model)
            else:
                logger.debug("LLM disabled (enabled=%s, model=%s, litellm=%s)",
                             self.config.enabled, bool(self.config.model),
                             _get_litellm() is not None)
        return self._available

    def complete(self, prompt: str, system: str = "") -> Optional[str]:
        """Send a completion request. Returns None on any failure.

        Args:
            prompt: User-role message content.
            system: Optional system-role instruction; omitted when empty.
        """
        if not self.available:
            return None
        litellm = _get_litellm()
        if litellm is None:  # defensive; `available` already implies non-None
            return None

        messages = []
        if system:
            messages.append({"role": "system", "content": system})
        messages.append({"role": "user", "content": prompt})

        kwargs = {
            "model": self.config.model,
            "messages": messages,
            "max_tokens": self.config.max_tokens,
            "temperature": self.config.temperature,
        }
        # Credentials/endpoint are optional (e.g. local ollama needs neither).
        if self.config.api_key:
            kwargs["api_key"] = self.config.api_key
        if self.config.api_base:
            kwargs["api_base"] = self.config.api_base

        try:
            response = litellm.completion(**kwargs)
            content = response.choices[0].message.content
            # Providers may return None content (e.g. empty or tool-only
            # responses); treat that as a failed generation instead of
            # letting `.strip()` raise AttributeError.
            return content.strip() if content else None
        except Exception as exc:  # any provider/network error => fall back
            logger.warning("LLM call failed: %s", exc)
            return None

    # ── High-level doc helpers (return None if LLM unavailable) ────────

    def generate_project_description(self, project_name: str,
                                     modules_summary: str,
                                     entry_points: str) -> Optional[str]:
        """Generate a concise project description from analysis data."""
        system = (
            "You are a technical writer generating concise project documentation. "
            "Write clear, factual descriptions. No marketing language. "
            "Output plain Markdown, 2-4 sentences."
        )
        prompt = (
            f"Project: {project_name}\n\n"
            f"Modules:\n{modules_summary}\n\n"
            f"Entry points:\n{entry_points}\n\n"
            "Write a concise description of what this project does and how to use it."
        )
        return self.complete(prompt, system)

    def generate_architecture_summary(self, project_name: str,
                                      layers: str,
                                      patterns: str,
                                      metrics: str) -> Optional[str]:
        """Generate a natural-language architecture overview."""
        system = (
            "You are a software architect explaining a codebase. "
            "Be precise and concise. Use technical terms correctly. "
            "Output plain Markdown, 3-6 sentences."
        )
        prompt = (
            f"Project: {project_name}\n\n"
            f"Architecture layers:\n{layers}\n\n"
            f"Detected patterns:\n{patterns}\n\n"
            f"Metrics:\n{metrics}\n\n"
            "Write a concise architecture overview explaining how the components "
            "work together and the key design decisions."
        )
        return self.complete(prompt, system)

    def generate_getting_started_summary(self, project_name: str,
                                         cli_commands: str,
                                         public_api: str) -> Optional[str]:
        """Generate a getting-started introduction."""
        system = (
            "You are writing a getting-started guide for developers. "
            "Be practical and action-oriented. "
            "Output plain Markdown, 2-4 sentences."
        )
        prompt = (
            f"Project: {project_name}\n\n"
            f"CLI commands:\n{cli_commands}\n\n"
            f"Public API:\n{public_api}\n\n"
            "Write a brief introduction explaining what the user will learn "
            "and what they can do with this tool."
        )
        return self.complete(prompt, system)

    def enhance_module_docstring(self, module_name: str,
                                 functions: str,
                                 classes: str) -> Optional[str]:
        """Generate a module-level summary from its contents."""
        system = (
            "You are a technical writer documenting a Python module. "
            "Be concise. Output 1-2 sentences of plain text (no Markdown)."
        )
        prompt = (
            f"Module: {module_name}\n\n"
            f"Functions:\n{functions}\n\n"
            f"Classes:\n{classes}\n\n"
            "Write a one-line summary of what this module does."
        )
        return self.complete(prompt, system)

0 commit comments

Comments
 (0)