Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
13 changes: 13 additions & 0 deletions DHIS2/delete_users/.gitignore
Original file line number Diff line number Diff line change
@@ -0,0 +1,13 @@
# Local files
working_notes
docker_log.log
error*
output*
temp*
preprod*

# User exports
user*

# env files
.env
8 changes: 8 additions & 0 deletions DHIS2/delete_users/.sample-env
Original file line number Diff line number Diff line change
@@ -0,0 +1,8 @@
JSON_FILE_PATH="" # Path to JSON file with users to be deleted
API_ENDPOINT="" # API endpoint DELETE users (example: http://localhost:8080/api/38/users)
USERNAME="" # API username
PASSWD="" # API password
LOG_FILE_PATH="" # Path to dhis2 log file
USER_TO_TAKEOVER="" # User to use as new owner of the object
DHIS2_DATA_IMAGE="" # Dhis2 image name
SQL_INIT_FILE="" # Init sql file (init.sql, or init-preprod.sql only for preprod)
18 changes: 18 additions & 0 deletions DHIS2/delete_users/README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,18 @@
Remove users from DHIS2 instances

Scripts should be started on the server where dhis2 logs and dhis2 database are available.

Steps:

1. Download user list from "User extended app" using json format or convert existing csv to json
2. Create another csv file copying only the ID column; it is used only in step 3
3. Copy newly created csv file to dhis2 DB container to this path: /var/lib/postgresql/data/init.csv
4. Create .env file from .sample-env
5. Run the script:
```
python remove_users.py
```
6. Run bash script clean_temp.sh to remove all temp files created
```
chmod +x clean_temp.sh && bash clean_temp.sh
```
3 changes: 3 additions & 0 deletions DHIS2/delete_users/clean_temp.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
#!/bin/bash

rm -rf error_objects.json temp_log.txt output.sql
45 changes: 45 additions & 0 deletions DHIS2/delete_users/create_sql_query.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,45 @@
import re
import sys

def extract_and_write_queries(temp_log_file, sql_output_file, user_to_takeover):
    """Turn failed-delete log lines into SQL that reassigns blocking rows.

    Scans ``temp_log_file`` for lines reporting an HTTP 500 delete failure.
    Each such line is expected to be immediately preceded by the postgres
    foreign-key-violation line naming the constraint and referencing table.
    For every pair found, appends to ``sql_output_file`` an UPDATE that
    reassigns the referencing rows to ``user_to_takeover`` so the user can
    be deleted on the next pass.

    Duplicate statements are written only once per invocation.
    """
    status_re = re.compile(r"ID (\S+)\. Status code: 500")
    constraint_re = re.compile(
        r'ERROR: update or delete on table "userinfo" violates foreign key '
        r'constraint "(\S+)" on table "(\S+)"'
    )
    written = set()  # dedupe: the same failure pair may appear many times

    with open(temp_log_file, 'r') as infile, open(sql_output_file, 'a') as outfile:
        lines = infile.readlines()
        # Start at 1 because each match needs the preceding (constraint) line.
        for i in range(1, len(lines)):
            match = status_re.search(lines[i])
            if not match:
                continue
            failed_uid = match.group(1)
            constraint_match = constraint_re.search(lines[i - 1])
            if not constraint_match:
                continue

            # Subqueries resolving numeric PKs from UID / username.
            old_user = f"select userinfoid from userinfo where uid='{failed_uid}'"
            new_user = f"select userinfoid from userinfo where username='{user_to_takeover}'"

            constraint = constraint_match.group(1).lower()
            table = constraint_match.group(2)
            # Pick the FK column to rewrite from the constraint name.
            # ("lastupdateby" covers constraints with that historical typo.)
            if "lastupdateby" in constraint or "lastupdatedby" in constraint:
                column = "lastupdatedby"
            elif "creator" in constraint:
                column = "creator"
            elif "assigneduserid" in constraint:
                column = "assigneduserid"
            else:
                column = "userid"

            query = f"update {table} set {column}=({new_user}) where {column}=({old_user});\n"
            if query not in written:
                outfile.write(query)
                written.add(query)

if __name__ == "__main__":
    # CLI: create_sql_query.py <temp_log_file> <sql_output_file> <user_to_takeover>
    args = sys.argv
    extract_and_write_queries(args[1], args[2], args[3])
109 changes: 109 additions & 0 deletions DHIS2/delete_users/init-preprod.sql

Large diffs are not rendered by default.

105 changes: 105 additions & 0 deletions DHIS2/delete_users/init.sql

Large diffs are not rendered by default.

110 changes: 110 additions & 0 deletions DHIS2/delete_users/remove_users.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,110 @@
import json
import requests
import subprocess
import threading
import sys
import os
from dotenv import load_dotenv
import time

# Pull configuration from a local .env file into the process environment.
load_dotenv()
# Intended to tell the log-tail thread to stop; NOTE(review): the tail loop
# below does not currently check this event — confirm shutdown behavior.
stop_event = threading.Event()

def delete_objects(json_file, api_endpoint, username, password, error_json_file_path, temp_log_file):
    """Send a DELETE request for every user listed in ``json_file``.

    Users whose deletion fails with an unexpected status code are collected
    into ``error_json_file_path`` and the failure is appended to
    ``temp_log_file`` (consumed later by create_sql_query.py).

    Returns True when at least one deletion failed, False otherwise.
    """
    with open(json_file, 'r') as file:
        data = json.load(file)

    error_objects = []

    # Extract IDs and send DELETE requests
    for item in data:
        object_id = item.get('ID')
        if not object_id:
            continue
        url = f"{api_endpoint}/{object_id}"
        # Timeout so a stalled API call cannot hang the whole run.
        response = requests.delete(url, auth=(username, password), timeout=60)
        if response.status_code == 200:
            print(f"Object with ID {object_id} deleted successfully")
        elif response.status_code == 409:
            print(f"Object with ID {object_id} has document attached and can not be deleted!")
            print()
        elif response.status_code == 404:
            print(f"Object with ID {object_id} not exists, skiping.")
        else:
            # Typically a 500 caused by a FK violation; log it so the
            # SQL-generation step can pair it with the dhis2 error line.
            message = f"ERROR: Failed to delete object with ID {object_id}. Status code: {response.status_code}"
            print(message)
            with open(temp_log_file, 'a') as log:
                log.write(message + "\n")
            error_objects.append(item)
        time.sleep(1)  # Sleep for 1 second between API calls

    # Write error objects to a different JSON file if there are users not deleted
    if error_objects:
        with open(error_json_file_path, 'w') as file:
            json.dump(error_objects, file, indent=4)

    return bool(error_objects)

def log_error_if_present(log_file_path, temp_log_file):
    """Tail the dhis2 log and copy FK-violation errors into ``temp_log_file``.

    Runs until the module-level ``stop_event`` is set or the tail process
    dies, so the thread running this function can be joined cleanly.
    NOTE: ``readline()`` blocks, so the stop check happens after each line;
    callers should join with a timeout.
    """
    # Tail dhis2 log
    cmd = ["tail", "-F", log_file_path]
    process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True)
    try:
        while not stop_event.is_set():
            output = process.stdout.readline()
            # Empty read + dead process means tail exited: stop instead of
            # spinning forever on ''.
            if not output and process.poll() is not None:
                break
            if "ERROR: update or delete on table" in output:
                message = f"{output.strip()}"
                print(message)
                with open(temp_log_file, 'a') as log:
                    log.write(message + "\n")
    finally:
        process.terminate()  # do not leak the tail process

def execute_create_sql_query_script(temp_log_file, sql_output_file, user_to_takeover):
    """Run create_sql_query.py to build the dependency-fixing SQL.

    Returns True if the script exited with a non-zero status (failure),
    matching the has_errors convention used by the caller.
    """
    result = subprocess.run(["python3", "create_sql_query.py", temp_log_file, sql_output_file, user_to_takeover])
    return result.returncode != 0

def execute_init_sql_script(image, sql_init_file):
    """Load the per-instance init SQL file into the dhis2 docker database."""
    command = ["d2-docker", "run-sql", "-i", image, sql_init_file]
    subprocess.run(command)

def execute_sql_script(sql_output_file, image):
    """Apply the generated dependency-fixing SQL to the dhis2 docker database."""
    command = ["d2-docker", "run-sql", "-i", image, sql_output_file]
    subprocess.run(command)

def main():
    """Drive the delete / fix-dependencies loop until every user is deleted.

    Each pass deletes what it can; failures are logged, turned into UPDATE
    statements reassigning blocking rows, applied, and the failed subset is
    retried on the next pass.
    """
    json_file_path = os.getenv('JSON_FILE_PATH')  # Path to JSON file with users to be deleted. Example: users.json
    api_endpoint = os.getenv('API_ENDPOINT')  # API endpoint DELETE users. Example: http://localhost:8080/api/38/users
    username = os.getenv('USERNAME')  # API username
    password = os.getenv('PASSWD')  # API password
    log_file_path = os.getenv('LOG_FILE_PATH')  # Path to dhis2 log file
    error_json_file_path = 'error_objects.json'  # Path to created JSON file containing not deleted users
    user_to_takeover = os.getenv('USER_TO_TAKEOVER')  # User to use as new owner of the object
    image = os.getenv('DHIS2_DATA_IMAGE')  # Dhis2 image name
    temp_log_file = 'temp_log.txt'  # Input file to generate sql
    sql_output_file = 'output.sql'  # Output sql file
    sql_init_file = os.getenv('SQL_INIT_FILE')  # Init sql file per instance

    # Start the log-tail thread ONCE, outside the loop (spawning a new
    # thread per iteration leaks threads). Daemon so a thread still blocked
    # in readline cannot keep the process alive at exit.
    log_thread = threading.Thread(
        target=log_error_if_present, args=(log_file_path, temp_log_file), daemon=True
    )
    log_thread.start()

    while True:
        # Run init sql script
        execute_init_sql_script(image, sql_init_file)

        # True when at least one DELETE failed
        has_errors = delete_objects(json_file_path, api_endpoint, username, password, error_json_file_path, temp_log_file)

        if not has_errors:
            print("INFO: All objects deleted successfully. Exiting.")
            # Stop the log thread; bounded join because it may be blocked
            # reading the tail output.
            stop_event.set()
            log_thread.join(timeout=5)
            sys.exit(0)

        # Extract user IDs from failed deletes and create sql query to fix dependencies
        execute_create_sql_query_script(temp_log_file, sql_output_file, user_to_takeover)

        # Execute sql script to fix dependencies
        execute_sql_script(sql_output_file, image)

        # Use the error file as the input for the next iteration
        json_file_path = error_json_file_path

# Script entry point: run the delete/repair loop.
if __name__ == "__main__":
    main()