Sort out switch to pymysql and add SME10 log convert and runmailstats for SME10

parent fce93e1dcd
commit 2d54c4f7f5

root/usr/bin/mailstats-convert-log-sme10-to-sme11.py (new file, 97 lines)
@@ -0,0 +1,97 @@
import os
import re
from datetime import datetime, timedelta
import glob  # Import the glob module


def tai64n_to_datetime(tai64n):
    """Convert a TAI64N timestamp (leading label digit already removed) to a datetime object."""
    if len(tai64n) < 23:
        # Need 15 hex digits of seconds plus 8 hex digits of nanoseconds
        raise ValueError(f"Invalid TAI64N timestamp length: {tai64n}")

    high_bits = int(tai64n[:15], 16)
    low_bits = int(tai64n[15:23], 16)

    seconds_since_epoch = high_bits
    nanoseconds = low_bits

    # Create datetime object
    epoch = datetime(1970, 1, 1)
    dt = epoch + timedelta(seconds=seconds_since_epoch)
    dt += timedelta(microseconds=nanoseconds // 1000)

    return dt


def convert_log(file_paths, output_path):
    host_name = "sme11"
    total_files = 0
    total_lines = 0

    # Input file validation
    for file_path in file_paths:
        if not os.path.isfile(file_path):
            print(f"Input file {file_path} does not exist.")
            return

    with open(output_path, 'w') as output_file:
        for file_path in file_paths:
            print(f"{file_path}")
            # Determine the process name based on the file being read
            if "sqpsmtpd" in file_path:
                process_name = "sqpsmtpd-forkserver"
            else:
                process_name = "qpsmtpd-forkserver"

            with open(file_path, 'r', encoding='latin1') as log_file:
                total_files += 1
                try:
                    for line in log_file:
                        total_lines += 1
                        match = re.match(r'@(\w+) (\d+) \((.*?)\) (.*)', line.strip())
                        if match:
                            tai64n_timestamp, pid, context, message = match.groups()
                            try:
                                # Drop the leading digit of the TAI64 label (the '@' is already stripped by the regex)
                                log_time = tai64n_to_datetime(tai64n_timestamp[1:])
                                formatted_time = log_time.strftime('%b %d %H:%M:%S')

                                # Replace "bjsystems.co.uk" with "thereadclan.me.uk" in the message
                                #message = message.replace("bjsystems.co.uk", "thereadclan.me.uk")

                                # Correctly format the output line
                                formatted_line = f"{formatted_time} {host_name} {process_name}[{pid}]: {pid} ({context}) {message}\n"
                                output_file.write(formatted_line)
                            except Exception as e:
                                with open("error_log.txt", 'a') as error_file:
                                    error_file.write(f"Could not convert timestamp {tai64n_timestamp}: {e}\n")
                                print(f"Error logged for timestamp {tai64n_timestamp}.")
                        else:
                            # Does not match the logterse line, but is still needed
                            match = re.match(r'@(\w+) (\d+) (.*)', line.strip())
                            if match:
                                tai64n_timestamp, pid, message = match.groups()
                                try:
                                    # Drop the leading digit of the TAI64 label (the '@' is already stripped by the regex)
                                    log_time = tai64n_to_datetime(tai64n_timestamp[1:])
                                    formatted_time = log_time.strftime('%b %d %H:%M:%S')
                                    # Replace "bjsystems.co.uk" with "thereadclan.me.uk" in the message
                                    #message = message.replace("bjsystems.co.uk", "thereadclan.me.uk")
                                    # Correctly format the output line
                                    formatted_line = f"{formatted_time} {host_name} {process_name}[{pid}]: {pid} {message}\n"
                                    output_file.write(formatted_line)
                                except Exception as e:
                                    with open("error_log.txt", 'a') as error_file:
                                        error_file.write(f"Could not convert timestamp {tai64n_timestamp}: {e}\n")
                                    print(f"Error logged for timestamp {tai64n_timestamp}.")
                except Exception as e:
                    print(f"Error reading file {file_path}: {e}")
                    continue

    print(f"Processed {total_files} files and {total_lines} lines.")


# Specify the input and output file paths
# Use glob to expand file patterns
input_log_files = (
    glob.glob("/var/log/qpsmtpd/@*.s") +
    ["/var/log/qpsmtpd/current", "/var/log/sqpsmtpd/current"] +
    glob.glob("/var/log/sqpsmtpd/@*.s")  # Adjust the asterisk * as needed
)
output_log_file = "output_log.txt"  # Specify your desired output file path

# Convert the log
convert_log(input_log_files, output_log_file)
print(f"Log conversion complete. Check the output at: {output_log_file}")
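For reference, a minimal sketch of what the conversion does to a single line; the TAI64N label, PID and message below are invented illustrative values, and the arithmetic simply inlines tai64n_to_datetime() from the script above.

from datetime import datetime, timedelta
import re

# Illustrative qpsmtpd multilog line (timestamp, PID and message are made up)
sample = "@400000005f8a1b2c1a2b3c4d 12345 (queue) queued mail for delivery"
tai64n, pid, context, message = re.match(r'@(\w+) (\d+) \((.*?)\) (.*)', sample).groups()

# Dropping the leading '4' of the TAI64 label removes its 2^62 offset, leaving
# 15 hex digits of seconds and 8 hex digits of nanoseconds.
label = tai64n[1:]
dt = datetime(1970, 1, 1) + timedelta(seconds=int(label[:15], 16),
                                      microseconds=int(label[15:23], 16) // 1000)
print(f"{dt.strftime('%b %d %H:%M:%S')} sme11 qpsmtpd-forkserver[{pid}]: {pid} ({context}) {message}")
# Prints a syslog-style line along the lines of:
# Oct 16 22:14:04 sme11 qpsmtpd-forkserver[12345]: 12345 (queue) queued mail for delivery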
@@ -375,12 +375,9 @@ def create_line_chart(data2d, xLabels, yLabels, save_path='line_chart.html'):
    graph_html = fig.to_html(full_html=False,include_plotlyjs='https://cdn.plot.ly/plotly-latest.min.js')
    return graph_html


def save_summaries_to_db(date_str, hour, parsed_data):
def save_summaries_to_db(cursor, conn, date_str, hour, parsed_data):
    # Convert parsed_data to JSON string
    global count_records_to_db
    json_data = json.dumps(parsed_data)

    # Insert the record
@@ -392,7 +389,8 @@ def save_summaries_to_db(date_str, hour, parsed_data):
    try:
        cursor.execute(insert_query, (date_str, hour, json_data))
        conn.commit()
    except mysql.connector.Error as err:
        count_records_to_db += 1
    except pymysql.Error as err:
        print(f"DB Error {date_str} {hour} : {err}")
        conn.rollback()

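Read together, the two hunks above amount to roughly the following; this is a sketch pieced from the diff: the text of insert_query is an assumption (it is defined outside the lines shown), and placing count_records_to_db += 1 after the commit is inferred from the added lines.

import json
import pymysql

count_records_to_db = 0

# Hypothetical query text: the real insert_query is defined outside the hunks shown.
# Date and logData appear in later hunks; the Hour column name is assumed.
insert_query = "INSERT INTO SummaryLogs (Date, Hour, logData) VALUES (%s, %s, %s)"

def save_summaries_to_db(cursor, conn, date_str, hour, parsed_data):
    # Convert parsed_data to JSON string and insert one record
    global count_records_to_db
    json_data = json.dumps(parsed_data)
    try:
        cursor.execute(insert_query, (date_str, hour, json_data))
        conn.commit()
        count_records_to_db += 1
    except pymysql.Error as err:  # pymysql exceptions replace mysql.connector.Error
        print(f"DB Error {date_str} {hour} : {err}")
        conn.rollback()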
@@ -839,7 +837,7 @@ def read_html_from_file(filepath):
    # Need to add in here the contents of the css file at the end of the head section.
    with open(filepath, 'r', encoding='utf-8') as file:
        html_contents = file.read()
    print("reading from html file")
    print("Reading from html file")
    # Get Filepath
    css_path = os.path.dirname(filepath)+"/../css/mailstats.css"
    # Read in CSS
@@ -1115,28 +1113,36 @@ if __name__ == "__main__":

    BadCountries = get_value(ConfigDB,"qpsmtpd","BadCountries")

    count_records_to_db = 0;

    # Db save control
    saveData = get_value(ConfigDB,"mailstats","SaveDataToMySQL","no") == 'yes' or forceDbSave

    if saveData:
        DBName = "mailstats";
        DBName = "mailstats"
        DBHost = get_value(ConfigDB, 'mailstats', 'DBHost', "localhost")
        DBPort = get_value(ConfigDB,'mailstats','DBPort',"3306")
        DBName = 'mailstats'
        DBPort = int(get_value(ConfigDB, 'mailstats', 'DBPort', "3306"))  # Ensure port is an integer
        DBPassw = 'mailstats'
        DBUser = 'mailstats'
        UnixSocket = "/var/lib/mysql/mysql.sock"
        # see if the DB exists
        # Try to Establish a database connection

        # Try to establish a database connection
        try:
            conn = mysql.connector.connect(
            conn = pymysql.connect(
                host=DBHost,
                user=DBUser,
                password=DBPassw,
                database=DBName,
                port=DBPort,
                unix_socket=UnixSocket
                unix_socket=UnixSocket,
                cursorclass=pymysql.cursors.DictCursor  # Optional: use DictCursor for dict output
            )
            cursor = conn.cursor()
            # Check if the table exists before creating it
            check_table_query = "SHOW TABLES LIKE 'SummaryLogs'"
            cursor.execute(check_table_query)
            table_exists = cursor.fetchone()
            if not table_exists:
                # Create table if it doesn't exist
                cursor.execute("""
                CREATE TABLE IF NOT EXISTS SummaryLogs (
@@ -1146,21 +1152,21 @@ if __name__ == "__main__":
                logData TEXT
                )
                """)
            # and prune the DB here if needed.
            # Delete existing records for the given date
            try:
                delete_query = """
                DELETE FROM SummaryLogs
                WHERE Date = %s
                """
                cursor.execute(delete_query, (analysis_date,)) #Don't forget the syntactic sugar of the extra comma to make it a tuple!
                cursor.execute(delete_query, (analysis_date,))  # Don't forget the extra comma for tuple
                # Get the number of records deleted
                rows_deleted = cursor.rowcount
                if rows_deleted > 0:
                    print(f"Deleted {rows_deleted} rows for {analysis_date} ")
            except mysql.connector.Error as e:
            except pymysql.Error as e:
                print(f"SQL Delete failed ({delete_query}) ({e}) ")
        except mysql.connector.Error as e:

        except pymysql.Error as e:
            print(f"Unable to connect to {DBName} on {DBHost} port {DBPort} error ({e}) ")
            saveData = False

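For completeness, a standalone sketch of the pymysql connection pattern the script now uses; the literal values mirror the hard-coded settings above (DBHost and DBPort normally come from the configuration DB), and the DictCursor choice means fetchone() returns a dict keyed by column name rather than a tuple.

import pymysql

conn = pymysql.connect(
    host="localhost",
    user="mailstats",
    password="mailstats",
    database="mailstats",
    port=3306,
    unix_socket="/var/lib/mysql/mysql.sock",
    cursorclass=pymysql.cursors.DictCursor,  # rows come back as dicts
)
try:
    with conn.cursor() as cursor:
        cursor.execute("SHOW TABLES LIKE 'SummaryLogs'")
        row = cursor.fetchone()  # e.g. {'Tables_in_mailstats (SummaryLogs)': 'SummaryLogs'} or None
        print("SummaryLogs exists" if row else "SummaryLogs missing")
finally:
    conn.close()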
@@ -1269,11 +1275,11 @@ if __name__ == "__main__":
            #else:
            #Take out the mailstats email
            if 'mailstats' in parsed_data['from-email'] and DomainName in parsed_data['from-email']:
                print(f"{parsed_data}")
                #continue
                #print(f"{parsed_data}")
                continue
            # Save the data here if necessary
            if saveData:
                save_summaries_to_db(anaysis_date_obj.strftime('%Y-%m-%d'),hour,parsed_data)
                save_summaries_to_db(cursor,conn,anaysis_date_obj.strftime('%Y-%m-%d'),hour,parsed_data)

            #Count the number of emails through each of qpsmtpd, uqpsmtpd and sqpsmtpd
            # the forkserver column in the log indicates it.
@@ -1632,16 +1638,40 @@ if __name__ == "__main__":
    rendered_html = get_heading()
    total_html = insert_string_after(total_html,rendered_html, "<!---Add in header information here -->")

    #add in the subservient tables..
    #add in the subservient tables.. (remember they appear in the reverse order of below!)

    #virus codes
    virus_headers = ["Virus",'Count','Percent']
    virus_title = 'Viruses found'
    rendered_html = render_sub_table(virus_title,virus_headers,found_viruses)
    # Add it to the total
    total_html = insert_string_after(total_html,rendered_html, "<!---Add in sub tables here -->")

    #qpsmtpd codes
    #print(f"{found_qpcodes}")
    qpsmtpd_headers = ["Reason",'Count','Percent']
    qpsmtpd_title = 'Qpsmtpd codes league table'
    rendered_html = render_sub_table(qpsmtpd_title,qpsmtpd_headers,found_qpcodes)
    # Add it to the total
    total_html = insert_string_after(total_html,rendered_html, "<!---Add in sub tables here -->")


    #Junk mails
    junk_mail_count_headers = ['Username','Count', 'Percent']
    junk_mail_counts = scan_mail_users()
    junk_mail_count_title = 'Junk mail counts'
    rendered_html = render_sub_table(junk_mail_count_title,junk_mail_count_headers,junk_mail_counts)
    # Add it to the total
    total_html = insert_string_after(total_html,rendered_html, "<!---Add in sub tables here -->")


    #Recipient counts
    #print(f"{recipients_found}")
    recipient_count_headers = ["Email",'Queued','Rejected','Spam tagged','Accepted Percent']
    recipient_count_title = 'Incoming email recipients'
    rendered_html = render_sub_table(recipient_count_title,recipient_count_headers,recipients_found)
    # Add it to the total
    total_html = insert_string_after(total_html,rendered_html, "<!---Add in sub tables here -->")

    #Geoip Country codes
    geoip_headers = ['Country','Count','Percent','Rejected?']
    geoip_title = 'Geoip results'
@@ -1649,28 +1679,6 @@ if __name__ == "__main__":
    # Add it to the total
    total_html = insert_string_after(total_html,rendered_html, "<!---Add in sub tables here -->")

    #Junk mails
    junk_mail_count_headers = ['Username','Count', 'Percent']
    junk_mail_counts = scan_mail_users()
    junk_mail_count_title = 'Junk mail counts'
    rendered_html = render_sub_table(junk_mail_count_title,junk_mail_count_headers,junk_mail_counts)
    # Add it to the total
    total_html = insert_string_after(total_html,rendered_html, "<!---Add in sub tables here -->")

    #virus codes
    virus_headers = ["Virus",'Count','Percent']
    virus_title = 'Viruses found'
    rendered_html = render_sub_table(virus_title,virus_headers,found_viruses)
    # Add it to the total
    total_html = insert_string_after(total_html,rendered_html, "<!---Add in sub tables here -->")

    #Recipient counts
    #print(f"{recipients_found}")
    recipient_count_headers = ["Email",'Queued','Rejected','Spam tagged','Accepted Percent']
    recipient_count_title = 'Incoming email recipients'
    rendered_html = render_sub_table(recipient_count_title,recipient_count_headers,recipients_found)
    # Add it to the total
    total_html = insert_string_after(total_html,rendered_html, "<!---Add in sub tables here -->")


    if saveData:
@@ -1699,6 +1707,7 @@ if __name__ == "__main__":
    else:
        text_file_path = ""

    print(f"Written {count_records_to_db} records to DB")

    html_content = None
    text_content = None

root/usr/bin/runmailstatsSME10.sh (new executable file, 17 lines)
@@ -0,0 +1,17 @@
#!/bin/bash
#exec 1> >(logger -t $(basename $0)) 2>&1
perl /usr/bin/mailstats.pl /var/log/qpsmtpd/\@* /var/log/qpsmtpd/current /var/log/sqpsmtpd/\@* /var/log/sqpsmtpd/current
# and run the new python one - start by copying and decoding the log files
yesterday_date=$(date -d "yesterday" +'%mm %d')  # only used by the commented-out tai64nlocal pipeline below
#cd /var/log/qpsmtpd
#cat \@* current >/opt/mailstats/logs/current1 2>/dev/null
#cd /var/log/sqpsmtpd
#cat \@* current >/opt/mailstats/logs/current2 2>/dev/null
cd /opt/mailstats/logs
#cat current1 current2 2>/dev/null | /usr/local/bin/tai64nlocal | grep "$yesterday_date" > current1.log
python3 /usr/bin/mailstats-convert-log-sme10-to-sme11.py
yesterday_date=$(date -d "yesterday" +'%b %d')
cat output_log.txt | grep "$yesterday_date" | sort >current.log
ls -l
python3 /usr/bin/mailstats.py
echo "Done"