-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathwordpress_detector_v2.py
More file actions
214 lines (182 loc) · 8.19 KB
/
wordpress_detector_v2.py
File metadata and controls
214 lines (182 loc) · 8.19 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
import sys
import asyncio
import aiohttp
import re
import json
import random
import logging
from urllib.parse import urljoin
from aiohttp import ClientSession
from bs4 import BeautifulSoup
# Set up logging
# Module-wide logging: timestamped INFO-level messages via the root handler.
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
logger = logging.getLogger(__name__)
# Pool of common desktop browser User-Agent strings; fetch() picks one at
# random per request so repeated probes look less like a single bot.
USER_AGENTS = [
'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/14.1.1 Safari/605.1.15',
'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36'
]
async def fetch(session, url, timeout=30):
    """Fetch *url* and return the response body as text.

    Args:
        session: An open aiohttp ClientSession.
        url: Absolute URL to retrieve.
        timeout: Total request timeout in seconds (default 30). Added so a
            single stalled server cannot hang the whole scan indefinitely;
            existing callers are unaffected by the new default parameter.

    Returns:
        str: The decoded response body.
    """
    # Rotate user agents per request (see USER_AGENTS at module top).
    headers = {'User-Agent': random.choice(USER_AGENTS)}
    logger.info(f"Fetching content from {url}")
    async with session.get(url, headers=headers,
                           timeout=aiohttp.ClientTimeout(total=timeout)) as response:
        content = await response.text()
        logger.info(f"Fetched {len(content)} bytes from {url}")
        return content
async def is_wordpress(session, url):
    """Heuristically decide whether *url* is running WordPress.

    Checks the homepage HTML for well-known WordPress markers (generator
    meta tag, REST API discovery link, xmlrpc pingback link) and, when none
    match, probes the wp-login.php page. Any network or parsing error is
    logged and treated as "not WordPress".
    """
    try:
        logger.info(f"Checking if {url} is a WordPress site")
        homepage = await fetch(session, url)
        soup = BeautifulSoup(homepage, 'html.parser')
        # Each probe returns a tag (truthy) when its marker is present.
        probes = [
            lambda doc: doc.find('meta', attrs={'name': 'generator', 'content': re.compile('WordPress', re.I)}),
            lambda doc: doc.find('link', attrs={'rel': 'https://api.w.org/'}),
            lambda doc: doc.find('link', attrs={'rel': 'pingback', 'href': re.compile('/xmlrpc.php', re.I)}),
        ]
        for number, probe in enumerate(probes, start=1):
            if probe(soup):
                logger.info(f"WordPress pattern {number} matched for {url}")
                return True
        # Fall back to the login page, which mentions WordPress explicitly.
        login_url = urljoin(url, 'wp-login.php')
        logger.info(f"Checking WordPress login page at {login_url}")
        if 'WordPress' in await fetch(session, login_url):
            logger.info(f"WordPress login page found at {login_url}")
            return True
        logger.info(f"{url} is not a WordPress site")
        return False
    except Exception as e:
        logger.error(f"Error checking WordPress for {url}: {str(e)}")
        return False
async def detect_plugins(session, url):
    """Enumerate WordPress plugins installed on *url*.

    Two complementary strategies:
      * client-side: parse the homepage for asset URLs under
        ``wp-content/plugins/<name>/``;
      * server-side: fetch the (possibly browsable) plugins directory
        listing and probe well-known files inside each plugin folder.

    Returns:
        dict: ``{'client_side': [...], 'server_side': [...]}`` of plugin
        names; server-side entries may include "folder (Name vX.Y)" detail
        when a plugin header could be parsed.
    """
    logger.info(f"Detecting plugins for {url}")
    content = await fetch(session, url)
    soup = BeautifulSoup(content, 'html.parser')
    client_side_plugins = set()
    server_side_plugins = set()
    # Client-side detection
    logger.info("Performing client-side plugin detection")
    for tag_name, url_attr in (('link', 'href'), ('script', 'src')):
        for element in soup.find_all(tag_name):
            value = element.get(url_attr, '')
            if 'wp-content/plugins/' in value:
                plugin_name = value.split('wp-content/plugins/')[1].split('/')[0]
                client_side_plugins.add(plugin_name)
                logger.info(f"Detected client-side plugin: {plugin_name}")
    # Server-side detection
    logger.info("Performing server-side plugin detection")
    plugins_dir_url = urljoin(url, 'wp-content/plugins/')
    try:
        plugins_content = await fetch(session, plugins_dir_url)
        plugin_folders = re.findall(r'<a href="([^"]+)/">', plugins_content)
        for folder in plugin_folders:
            if folder in ['.', '..']:
                continue
            # BUG FIX: the trailing slash is required — without it urljoin()
            # treats the folder as a file component and drops it, so every
            # probe hit wp-content/plugins/<file> instead of .../<folder>/<file>.
            plugin_url = urljoin(plugins_dir_url, folder + '/')
            plugin_files = ['readme.txt', f'{folder}.php', 'index.php']
            for file in plugin_files:
                file_url = urljoin(plugin_url, file)
                try:
                    logger.info(f"Checking plugin file: {file_url}")
                    # BUG FIX: use the response as a context manager so the
                    # connection is released back to the session's pool.
                    async with session.head(file_url) as response:
                        status = response.status
                    if status == 200:
                        if file.endswith('.php'):
                            # A PHP file may carry the standard plugin header
                            # with name/version details.
                            php_content = await fetch(session, file_url)
                            plugin_info = extract_plugin_info(php_content)
                            if plugin_info:
                                server_side_plugins.add(f"{folder} ({plugin_info['name']} v{plugin_info['version']})")
                                logger.info(f"Detected server-side plugin: {folder} ({plugin_info['name']} v{plugin_info['version']})")
                            else:
                                server_side_plugins.add(folder)
                                logger.info(f"Detected server-side plugin: {folder}")
                        else:
                            server_side_plugins.add(folder)
                            logger.info(f"Detected server-side plugin: {folder}")
                        break  # one confirmed file per plugin folder is enough
                except Exception as e:
                    logger.error(f"Error checking plugin file {file_url}: {str(e)}")
    except Exception as e:
        logger.error(f"Error during server-side plugin detection: {str(e)}")
    return {
        'client_side': list(client_side_plugins),
        'server_side': list(server_side_plugins)
    }
def extract_plugin_info(content):
    """Parse a WordPress plugin header block out of PHP source text.

    Args:
        content: Raw text of a plugin PHP file (expected to contain the
            standard ``Plugin Name:`` / ``Version:`` header comment).

    Returns:
        dict | None: Any of 'name', 'version', 'description', 'author'
        found in the header; None unless both name and version are present.
    """
    logging.getLogger(__name__).info("Extracting plugin info from PHP file")
    plugin_info = {}
    # BUG FIX: the original used '\s*', which also matches newlines, so an
    # empty field (e.g. "Description:" followed by a line break) captured
    # the NEXT header line as its value. '[ \t]*' keeps the match on one line.
    patterns = {
        'name': r'Plugin Name:[ \t]*(.+)',
        'version': r'Version:[ \t]*(.+)',
        'description': r'Description:[ \t]*(.+)',
        'author': r'Author:[ \t]*(.+)',
    }
    for key, pattern in patterns.items():
        match = re.search(pattern, content)
        if match:
            plugin_info[key] = match.group(1).strip()
            logging.getLogger(__name__).info(f"Extracted plugin {key}: {plugin_info[key]}")
    # Name and version are the minimum for a meaningful identification.
    return plugin_info if 'name' in plugin_info and 'version' in plugin_info else None
async def detect_theme(session, url):
    """Return the active WordPress theme name for *url*, or None.

    The theme is inferred from the first <link> href on the homepage that
    points under ``wp-content/themes/``.
    """
    logger.info(f"Detecting theme for {url}")
    page = await fetch(session, url)
    marker = 'wp-content/themes/'
    for stylesheet in BeautifulSoup(page, 'html.parser').find_all('link'):
        location = stylesheet.get('href', '')
        if marker not in location:
            continue
        theme = location.split(marker)[1].split('/')[0]
        logger.info(f"Detected theme: {theme}")
        return theme
    logger.info("No theme detected")
    return None
async def check_wp_api(session, url):
    """Query the WordPress REST API root (``wp-json/``) for site metadata.

    Returns:
        dict | None: 'name', 'description', 'url' and 'version' fields on
        HTTP 200, otherwise None. Errors are logged, never raised.
    """
    logger.info(f"Checking WordPress API for {url}")
    api_url = urljoin(url, 'wp-json/')
    try:
        # BUG FIX: use the response as a context manager — the original never
        # released the connection, leaking it from the session's pool.
        async with session.get(api_url) as response:
            if response.status == 200:
                data = await response.json()
                api_info = {
                    'name': data.get('name'),
                    'description': data.get('description'),
                    'url': data.get('url'),
                    # NOTE(review): this is the API namespace list, not a
                    # version string — key kept as 'version' so existing
                    # consumers of the output are unaffected.
                    'version': data.get('namespaces', [])
                }
                logger.info(f"WordPress API info: {json.dumps(api_info)}")
                return api_info
    except Exception as e:
        logger.error(f"Error checking WordPress API: {str(e)}")
    logger.info("No WordPress API information found")
    return None
async def main():
    """CLI entry point: analyse the URL from argv and print a JSON report."""
    if len(sys.argv) != 2:
        logger.error("Usage: python wordpress_detector_v2.py <url>")
        sys.exit(1)
    url = sys.argv[1]
    # Default to HTTPS when no scheme was supplied.
    if not url.startswith(('http://', 'https://')):
        url = 'https://' + url
    logger.info(f"Starting analysis for {url}")
    async with ClientSession() as session:
        if not await is_wordpress(session, url):
            logger.info(f"{url} is not a WordPress site")
            print(f"{url} is not a WordPress site")
            return
        logger.info(f"{url} is a WordPress site")
        # Gather plugin, theme and REST-API details in the same order as
        # before so the log output is unchanged.
        result = {
            'url': url,
            'is_wordpress': True,
            'plugins': await detect_plugins(session, url),
            'theme': await detect_theme(session, url),
            'api_info': await check_wp_api(session, url),
        }
        logger.info(f"Analysis result: {json.dumps(result, indent=2)}")
        print(json.dumps(result, indent=2))


if __name__ == "__main__":
    asyncio.run(main())