108 changes: 102 additions & 6 deletions legallint/plugins/python/main.py
@@ -1,6 +1,7 @@
"""
LegalLint python locates 3rd party libraries used and returns name and metadata
"""
import os
import re
from importlib.metadata import distributions

@@ -28,6 +29,87 @@ def run(self):
# print(f"python deps expanded {deps}")
return

class Requirements:  # scans the project root for requirements-style .txt files (requirements.txt, dev-requirements.txt, ...) and collects the packages they list
basedir = get_pwd() # Current working directory
    files = ['requirements.txt', 'dev-requirements.txt']  # Common requirements file names (kept for reference; discovery below is pattern-based)
dependencies = {}

@classmethod
def get_requirements_file(cls):
"""
        Locate requirements-style files (e.g. requirements.txt, dev-requirements.txt) in the base directory.
        Returns a list of matching file paths; the list is empty if none are found.
"""
requirements_files = []
patterns = [
r'requirements.*\.txt$', # Matches 'requirements.txt', 'requirements-dev.txt', etc.
r'dev.*\.txt$', # Matches 'dev-requirements.txt', 'dev.txt', etc.
r'packages.*\.txt$', # Matches 'packages.txt', etc.
r'dep.*\.txt$', # Matches 'dep.txt', 'dependencies.txt', etc.
r'.*\.txt$' # Matches any .txt file (last resort)
]

for filename in os.listdir(cls.basedir):
for pattern in patterns:
if re.match(pattern, filename, re.IGNORECASE) and os.path.isfile(os.path.join(cls.basedir, filename)):
requirements_files.append(os.path.join(cls.basedir, filename))
break # Stop checking other patterns once a match is found

return requirements_files
# for filename in cls.files:
# fpath = f"{cls.basedir}/{filename}"
# if os.path.isfile(fpath):
# return fpath
# return None
        # note: unlike the commented-out version above, this returns a list of files rather than a single path
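        # Hedged illustration (file names below are assumed, not taken from the PR):
        #   os.listdir(cls.basedir) -> ['requirements.txt', 'dev.txt', 'notes.txt', 'setup.py']
        #   Requirements.get_requirements_file()
        #   -> ['<basedir>/requirements.txt', '<basedir>/dev.txt', '<basedir>/notes.txt']
        # 'requirements.txt' matches the first pattern, 'dev.txt' the second,
        # 'notes.txt' only the catch-all r'.*\.txt$' fallback, and 'setup.py' is skipped.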

@classmethod
def get_dependencies(cls):
"""
        Read every matching requirements file and extract its dependencies,
        keyed by file name.
"""
requirements_files = cls.get_requirements_file()

if not requirements_files:
print("No requirements files found in the base directory.")
return cls.dependencies

        cls.dependencies = {}

for fpath in requirements_files:
# Use the filename to create a key for dependencies
# key = os.path.basename(fpath).replace('.txt', '').replace('requirements', 'reqs')
# cls.dependencies[key] = [] # Initialize a list for this key
file_key = os.path.basename(fpath) # Use the file name as a key
            cls.dependencies[file_key] = set()  # Initialize the set for this requirements file

try:
with open(fpath, 'r') as file:
for line in file:
line = line.strip()
if line and not line.startswith('#'): # Ignore empty lines and comments
# Handle version specifications (e.g., package==version or package>=version)
package = line.split(' ')[0].split('==')[0].split('>=')[0]
                        cls.dependencies[file_key].add(package)  # add the package to the corresponding file's set
except Exception as e:
print(f"Error reading requirements file: {e}")

return cls.dependencies
        # note: callers may only need the keys here; this could return a plain set instead of a dict

    # def to_set(cls, deps=None):
    #     """
    #     Convert the collected dependencies into a set for uniqueness.
    #     """
    #     # {'reqs': ['pytest', 'pandas'], 'dev-requirements': ['mkdocs']}
    #     return set(cls.dependencies.keys()) if deps is None else set(deps)
    @classmethod
    def to_set(cls):
        """
        Return the collected dependencies as a dict mapping each requirements
        file name to a set of unique package names; files with no entries are dropped.
        """
        return {k: set(v) for k, v in cls.dependencies.items() if v}
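
    # Hedged usage sketch (requirements.txt / dev-requirements.txt contents below
    # are assumed for illustration, not taken from the repository):
    #   Requirements.get_dependencies()
    #   -> {'requirements.txt': {'pytest', 'pandas'}, 'dev-requirements.txt': {'mkdocs'}}
    #   Requirements.to_set()
    #   -> the same mapping, with any empty file entries filtered out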

class Expand:
dep_map = {}
@@ -44,7 +126,7 @@ def get_dependencies(cls, pkgs_set):

for pkg_name in pkgs_set:
if pkg_name in cls.visited:
continue
continue
cls.visited.add(pkg_name)

if pkg_name not in cls.dep_map:
@@ -115,9 +197,23 @@ def to_set(cls, deps=None):


if __name__ == "__main__":
Toml.get_dependencies()
deps = Toml.to_set()
print(deps)
deps = Expand.get_dependencies(deps)
print(deps)
# Toml.get_dependencies()
# deps = Toml.to_set()
# print(deps)
# deps = Expand.get_dependencies(deps)
# print(deps)
# # Requirement testing
# fpath= Requirements.get_requirements_file()
# if fpath:
    # print(fpath)  # prints the list of matched requirements files
# deps= Requirements.get_dependencies(fpath=fpath)
# print(deps)

deps = Requirements.get_dependencies() # This will automatically find and read all matching requirements files
if deps:
print("\nAll Dependencies found:")
print(deps) # Print the structured dependencies dictionary
else:
print("No dependencies found.")