Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
16 changes: 16 additions & 0 deletions DHIS2/audit_log_tools/conversions_from_log/audittools.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,16 @@
import re

# Inclusive range of rotated audit-log suffixes to merge
# (logs/dhis-audit.log.100 .. logs/dhis-audit.log.232).
start = 100
end = 232

# The logger appends a " (AbstractAuditConsumer...)" suffix to each line;
# this pattern strips it before the line is written to the unified file.
_consumer_suffix = re.compile(r' \(AbstractAuditConsumer.*')

# Concatenate every rotated log file into one unified file, cleaning each line.
with open("unified_logs.log", "w", encoding='utf-8') as out_file:
    for index in range(start, end + 1):
        try:
            with open(f"logs/dhis-audit.log.{index}", "r", encoding='utf-8') as log_file:
                out_file.writelines(_consumer_suffix.sub('', raw) for raw in log_file)
        except FileNotFoundError:
            # Gaps in the rotation sequence are expected; report and continue.
            print(f"File dhis-audit.log.{index} not found")

print("Success.")
55 changes: 55 additions & 0 deletions DHIS2/audit_log_tools/conversions_from_log/filterunified.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,55 @@
import re
import json

# --- Gather filter criteria interactively; a blank answer disables that filter ---
fecha = input("Please enter a date in the format YYYY-MM-DD: ")

# A non-empty date must be exactly YYYY-MM-DD.
if fecha:
    if not re.match(r'^\d{4}-\d{2}-\d{2}$', fecha):
        print("Invalid date. Please enter a date in the format YYYY-MM-DD.")
        exit()

audit_scope = input("Enter the auditScope type (optional, press Enter to skip): ")
klass_prefix = input("Enter the initial value for 'klass' (optional, press Enter to skip): ")
created_by = input("Enter the name of the 'createdBy' user (optional, press Enter to skip): ")

# The output filename encodes every criterion, using 'ALL' for skipped ones.
scope_label = audit_scope if audit_scope else 'ALL'
klass_label = klass_prefix if klass_prefix else 'ALL'
creator_label = created_by if created_by else 'ALL'
output_name = f"filtered_logs_{fecha}_{scope_label}_{klass_label}_{creator_label}.json"

try:
    with open("unified_logs.log", "r", encoding="utf-8") as infile, \
            open(output_name, "w", encoding="utf-8") as outfile:
        # The matching entries are emitted as one JSON document: {"audit":[...]}.
        outfile.write('{"audit":[')
        first_line_written = False
        for line in infile:
            # Only consider log lines stamped with the requested date.
            if not line.startswith(f"* INFO {fecha}"):
                continue
            # Extract the JSON object embedded in the log line.
            json_part = re.search(r'{.*}', line)
            if not json_part:
                continue
            json_data = json.loads(json_part.group())
            # Apply the optional filters; an empty criterion always matches.
            scope_ok = not audit_scope or json_data.get('auditScope') == audit_scope.upper()
            klass_ok = not klass_prefix or json_data.get('klass', '').startswith(klass_prefix)
            date_ok = not fecha or json_data.get('createdAt', '').startswith(fecha)
            creator_ok = not created_by or json_data.get('createdBy') == created_by
            if scope_ok and klass_ok and date_ok and creator_ok:
                if first_line_written:
                    outfile.write(',')
                # Drop everything before the JSON object plus trailing whitespace.
                outfile.write(re.sub(r'^[^{]+', '', line).rstrip())
                first_line_written = True

        # Finalize the JSON array.
        outfile.write(']}')

    # Summarize which filters were applied and where the result was saved.
    summary = f"Lines with date {fecha}"
    if audit_scope:
        summary += ' and auditScope ' + audit_scope
    if klass_prefix:
        summary += ' and klass starting with ' + klass_prefix
    if created_by:
        summary += ' and createdBy ' + created_by
    print(f"{summary} have been filtered and saved to {output_name}")

except FileNotFoundError:
    print("File unified_logs.log not found.")
except Exception as e:
    print(f"An unexpected error occurred: {e}")
2 changes: 2 additions & 0 deletions DHIS2/audit_log_tools/conversions_from_log/readme.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,2 @@
audittools.py is a Python script that unifies all the log files from `start` to `end` into a single file.
filterunified.py is a Python script that filters the unified log file according to the entered criteria and writes the result to a JSON output file.
5 changes: 5 additions & 0 deletions DHIS2/audit_log_tools/convert_from_json_to_sqlite/README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
unifiedtodb.py is a Python script that converts the JSON file into the dbaudit database.
Note: if you execute it multiple times you will insert duplicate audits into the database; remove all existing data first to get a clean database.
It converts the .json file into a SQLite database so the audit data is easier to query and understand. Only User and UserGroup entries are converted at this point.
example of query:
select a.createdby, a.createdat, ug.ug_members, ug.ug_members_count, ug_sharing_ug_count, ug_createdby, ug.ug_uid, ug_lastupdated, ug_lastupdatedby from audit a join usergroup ug on a.auditid = ug.ug_auditid where ug_name like '%NHWA%' order by createdat DESC;
38 changes: 38 additions & 0 deletions DHIS2/audit_log_tools/convert_from_json_to_sqlite/audit_item.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,38 @@
import json

from user_audit import UserAudit
from usergroup_audit import UserGroupAudit


class AuditItem:
    """One audit-log entry: the common audit fields plus a type-specific payload.

    Only User and UserGroup payloads are decoded into dedicated objects; for
    any other klass the ``user`` / ``user_group`` placeholders stay empty.
    """

    def __init__(self):
        self.audit_type = ""        # e.g. the entry's "auditType" value
        self.createdat = ""         # timestamp string of the audit entry
        self.createdby = ""         # user that triggered the audit
        self.klass = ""             # fully-qualified Java class of the audited object
        self.uid = ""               # uid of the audited object (may stay empty)
        self.name = ""
        self.listofattributes = ""  # the raw "attributes" value serialized as JSON
        self.user_group = UserGroupAudit()
        self.user = UserAudit()

    def map(self, json_data):
        """Populate this item from one decoded audit JSON object.

        Expects keys: auditType, createdAt, createdBy, klass, attributes;
        'uid' and 'data' may be absent depending on the audit entry.
        """
        self.audit_type = json_data["auditType"]
        self.createdat = json_data["createdAt"]
        self.createdby = json_data["createdBy"]
        self.klass = json_data["klass"]
        if "uid" in json_data:
            self.uid = json_data["uid"]
        else:
            # Debug marker: some audit entries carry no uid.
            print("nouid")
        self.name = ""
        self.listofattributes = json.dumps(json_data["attributes"])
        # Decode the type-specific payload for the supported klass values.
        if self.klass == "org.hisp.dhis.user.UserGroup":
            self.user_group.map(json_data["data"])
        elif self.klass == "org.hisp.dhis.user.User":
            self.user.map(json_data["data"])

Binary file not shown.
Binary file not shown.
91 changes: 91 additions & 0 deletions DHIS2/audit_log_tools/convert_from_json_to_sqlite/unifiedtodb.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,91 @@
import sqlite3
import ijson

from audit_item import AuditItem

# Open (or create) the SQLite database that receives the audit rows.
conn = sqlite3.connect('dbaudit.db')
cursor = conn.cursor()

# Stream the filtered JSON export and keep only METADATA-scoped entries.
export_path = "filtered_logs__ALL_ALL_ALL.json"
with open(export_path, "r", encoding="utf-8") as infile:
    audit = []
    for item in ijson.items(infile, 'audit.item'):
        if item["auditScope"] == "METADATA":
            entry = AuditItem()
            entry.map(item)
            audit.append(entry)
            print("append")

for audit_item in audit:
    # Parent row: one record per audit entry (auditid is autogenerated).
    cursor.execute(
        '''INSERT INTO audit (
            auditid, auditype, createdat, createdby, klass, uid, name, listofattributes
        ) VALUES (?, ?, ?, ?, ?, ?, ?, ?)''',
        (None,
         audit_item.audit_type,
         audit_item.createdat,
         audit_item.createdby,
         audit_item.klass,
         audit_item.uid,
         audit_item.name,
         audit_item.listofattributes))

    # Commit the changes
    conn.commit()

    # The autogenerated audit id links the type-specific child rows.
    last_id = cursor.lastrowid
    if audit_item.klass == "org.hisp.dhis.user.UserGroup":
        group = audit_item.user_group
        cursor.execute(
            '''INSERT INTO usergroup (
                usergroupid,
                ug_uid, ug_lastupdated, ug_lastupdatedby, ug_createdby,
                ug_members,
                ug_members_count, ug_name, ug_sharing, ug_sharing_ug_count,
                ug_sharing_u_count, ug_publicaccess, ug_auditid
            ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)''',
            (None,
             group.uid,
             group.lastupdated,
             group.lastupdatedby,
             group.createdby,
             group.members,
             group.members_count,
             group.name,
             group.sharing,
             group.sharing_ug_count,
             group.sharing_u_count,
             group.publicaccess,
             last_id))

        # Commit the changes
        conn.commit()

    if audit_item.klass == "org.hisp.dhis.user.User":
        person = audit_item.user
        cursor.execute(
            '''INSERT INTO user (
                userid,
                u_lastlogin, u_lastupdated, u_openid, u_created,
                u_user_roles,
                u_user_roles_count, u_createdby, u_surname, u_firstname,
                u_disabled, u_twofa, u_email, u_username, u_auditid
            ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)''',
            (None,
             person.lastLogin,
             person.lastUpdated,
             person.open_id,
             person.created,
             person.user_roles,
             person.user_roles_count,
             person.created_by,
             person.surname,
             person.firstname,
             person.disabled,
             person.twoFA,
             person.email,
             person.username,
             last_id))

        # Commit the changes
        conn.commit()

# Close the connection
conn.close()
49 changes: 49 additions & 0 deletions DHIS2/audit_log_tools/convert_from_json_to_sqlite/user_audit.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,49 @@
import json

from usergroup_audit import UserGroupAudit


class UserAudit:
    """Flattened view of the 'data' payload of a DHIS2 User audit entry.

    All fields default to empty strings; ``map`` overwrites the ones present
    in the payload.
    """

    def __init__(self):
        self.uid = ""
        self.name = ""              # added so map()'s optional 'name' has a default
        self.lastLogin = ""
        self.lastUpdated = ""
        self.open_id = ""
        self.created = ""
        self.user_roles = ""        # sorted role list serialized as JSON
        self.user_roles_count = ""
        self.created_by = ""
        self.surname = ""
        self.firstname = ""
        self.disabled = ""
        self.twoFA = ""
        self.email = ""
        self.username = ""

    def map(self, data):
        """Populate fields from a User audit 'data' dict.

        Required keys: uid, created, surname, firstName, disabled, twoFA,
        username. The remaining keys are optional and leave the defaults
        untouched when absent.
        """
        if "name" in data:
            self.name = data["name"]
        self.uid = data["uid"]
        if "lastLogin" in data:
            self.lastLogin = data["lastLogin"]
        if "lastUpdated" in data:
            # Bug fix: this was assigned to self.lastupdatedby (a field name
            # copy-pasted from UserGroupAudit), leaving lastUpdated empty.
            self.lastUpdated = data["lastUpdated"]
        if "openId" in data:
            self.open_id = data["openId"]
        self.created = data["created"]
        if "createdBy" in data:
            self.created_by = data["createdBy"]
        self.surname = data["surname"]
        self.firstname = data["firstName"]
        self.disabled = data["disabled"]
        self.twoFA = data["twoFA"]
        if "email" in data:
            self.email = data["email"]
        self.username = data["username"]
        if "userRoles" in data:
            # Sort in place (mutates the caller's list, as before) so the
            # serialized role list is order-independent.
            data["userRoles"].sort()
            self.user_roles = json.dumps(data["userRoles"])
            self.user_roles_count = len(data["userRoles"])

Original file line number Diff line number Diff line change
@@ -0,0 +1,37 @@
import json


class UserGroupAudit:
    """Flattened view of the 'data' payload of a DHIS2 UserGroup audit entry."""

    def __init__(self):
        # Every field defaults to an empty string until map() fills it in.
        self.uid = ""
        self.lastupdated = ""
        self.lastupdatedby = ""
        self.createdby = ""
        self.created = ""
        self.members = ""          # sorted member list serialized as JSON
        self.members_count = ""
        self.name = ""
        self.sharing = ""          # raw sharing block serialized as JSON
        self.sharing_ug_count = ""
        self.sharing_u_count = ""
        self.publicaccess = ""

    def map(self, data):
        """Copy the relevant keys of a user-group audit 'data' dict onto self.

        'uid', 'lastUpdated', 'createdBy' and 'created' are read
        unconditionally; the rest are optional.
        NOTE(review): a payload missing 'createdBy' would raise KeyError —
        presumably the export always carries it; confirm against real logs.
        """
        if "name" in data:
            self.name = data["name"]
        self.uid = data["uid"]
        self.lastupdated = data["lastUpdated"]
        if "lastUpdatedBy" in data:
            self.lastupdatedby = data["lastUpdatedBy"]
        self.createdby = data["createdBy"]
        self.created = data["created"]
        if "members" in data:
            group_members = data["members"]
            # In-place sort (mutates the caller's list, matching the original)
            # so the serialized member list is order-independent.
            group_members.sort()
            self.members = json.dumps(group_members)
            self.members_count = len(group_members)
        if "sharing" in data:
            sharing_block = data["sharing"]
            self.sharing = json.dumps(sharing_block)
            self.sharing_u_count = len(sharing_block["users"])
            self.sharing_ug_count = len(sharing_block["userGroups"])
            self.publicaccess = sharing_block["public"]