13 Commits

Author SHA1 Message Date
72e7f2a5c5 * Fri Sep 12 2025 Brian Read <brianr@koozali.org> 11.1-10.sme
- Fix version and build date from spec file  [SME: 13121]
2025-09-13 08:56:14 +01:00
248bbed240 * Fri Sep 12 2025 Brian Read <brianr@koozali.org> 11.1-9.sme
- Fix version and build date from spec file  [SME: 13121]
2025-09-12 18:20:15 +01:00
a9dbafc584 * Fri Sep 12 2025 Brian Read <brianr@koozali.org> 11.1-8.sme
- Remove www from systemd-journal group and setuid bit in journal wrapper [SME: 13121]
2025-09-12 12:23:48 +01:00
55cb7a6f05 * Fri Sep 12 2025 Brian Read <brianr@koozali.org> 11.1-7.sme
- Truncate Geoip table and add other category [SME: 13121]
- Cope with blank data in action1 [SME: 13121]
2025-09-12 11:26:35 +01:00
1b757b1336 * Thu Sep 04 2025 Brian Read <brianr@koozali.org> 11.1-6.sme
- Add favicon to mailstats table, summary and detailed pages [SME: 13121]
- Bring DB config reading for mailstats itself inline with php summary and detailed logs - using /etc/mailstats/db.php [SME: 13121]
- Remove DB config fields from the SM2 config panel [SME: 13121]
- Arrange for password to be generated and mailstats user to be set with limited permissions [SME: 13121]
2025-09-08 15:24:18 +01:00
52b33e166a Sort out DB params access for mailstats, remove DB config from SM2 2025-09-07 09:18:39 +01:00
88bc38adf3 Add favicon to table, summary and details webpages 2025-09-04 19:28:36 +01:00
b070554fdd Get journal api wrapper working for detailed logs 2025-09-04 13:17:44 +01:00
2dd3d234df Add in two tables on header, sort out permission and ownership of params file 2025-09-04 10:04:25 +01:00
d94bf8e033 Get detail logs page working - WIP 2025-09-03 11:00:00 +01:00
5deb31cd92 Extra security for php part of mailstats web 2025-09-02 11:23:48 +01:00
f86021b8c9 Fix missing blacklist URLs from report 2025-09-02 10:17:26 +01:00
a77cb094df Optimise journal access speeding up processing 2025-09-02 08:48:48 +01:00
20 changed files with 1451 additions and 501 deletions

BIN
additional/journalwrap Executable file

Binary file not shown.

View File

@@ -6,8 +6,9 @@ $event = 'smeserver-mailstats-update';
#see the /etc/systemd/system-preset/49-koozali.preset should be present for systemd integration on all you yum update event #see the /etc/systemd/system-preset/49-koozali.preset should be present for systemd integration on all you yum update event
foreach my $file (qw( foreach my $file (qw(
/etc/systemd/system-preset/49-koozali.preset /etc/systemd/system-preset/49-koozali.preset
/etc/e-smith/sql/init/99smeserver-mailstats.sql /etc/mailstats/db.php
/etc/e-smith/sql/init/99mailstats
/etc/httpd/conf/httpd.conf /etc/httpd/conf/httpd.conf
)) ))
{ {
@@ -20,7 +21,7 @@ event_link('systemd-reload', $event, '50');
#event_link('action', $event, '30'); #event_link('action', $event, '30');
#services we need to restart #services we need to restart
safe_symlink('restart', "root/etc/e-smith/events/$event/services2adjust/httpd-e-smith"); safe_symlink('restart', "root/etc/e-smith/events/$event/services2adjust/httpd-e-smith");
safe_symlink("restart", "root/etc/e-smith/events/$event/services2adjust/mysql.init");;
#and Server Mmanager panel link #and Server Mmanager panel link
#panel_link('somefunction', 'manager'); #panel_link('somefunction', 'manager');
#templates2events("/etc/e-smith/sql/init/99smeserver-mailstats.sql", "post-upgrade");
templates2events("/etc/e-smith/sql/init/99smeserver-mailstats.sql", "post-upgrade");

179
journalwrap.c Normal file
View File

@@ -0,0 +1,179 @@
#include <systemd/sd-journal.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <stdint.h>
#include <errno.h>
#include <time.h>
#ifndef MAX_OUTPUT_BYTES
#define MAX_OUTPUT_BYTES (2 * 1000 * 1000) // 2 MB
#endif
/*
 * Append n bytes from src to the growable, always-NUL-terminated buffer
 * (*buf, *len, *cap).  Returns 0 on success, -1 on allocation failure.
 *
 * Fix: the original doubling loop broke out once newcap hit the hard cap
 * (MAX_OUTPUT_BYTES + 64 KiB) but then memcpy'd the full n bytes anyway,
 * overflowing the allocation whenever *len + n + 1 exceeded the cap.
 * We now truncate the request so the write can never outgrow the cap;
 * silent truncation matches the caller's intended output limit.
 */
static int append_bytes(char **buf, size_t *len, size_t *cap, const char *src, size_t n) {
    const size_t hard_cap = (size_t)(MAX_OUTPUT_BYTES + 65536);
    if (*len >= hard_cap - 1) return 0;              /* no room left (reserve NUL) */
    if (n > hard_cap - 1 - *len) n = hard_cap - 1 - *len;  /* clamp to budget */
    if (*len + n + 1 > *cap) {
        size_t newcap = (*cap == 0) ? 8192 : *cap;
        while (*len + n + 1 > newcap) {
            newcap *= 2;
            if (newcap > hard_cap) {
                newcap = hard_cap;   /* safe: n was clamped above */
                break;
            }
        }
        char *nbuf = realloc(*buf, newcap);
        if (!nbuf) return -1;        /* original buffer still valid on failure */
        *buf = nbuf; *cap = newcap;
    }
    memcpy(*buf + *len, src, n);
    *len += n;
    (*buf)[*len] = '\0';
    return 0;
}
/* Append a NUL-terminated string; convenience wrapper over append_bytes(). */
static int append_cstr(char **buf, size_t *len, size_t *cap, const char *s) {
    size_t n = strlen(s);
    return append_bytes(buf, len, cap, s, n);
}
/* Return the smaller of two size_t values. */
static size_t min_size(size_t a, size_t b) {
    return (b < a) ? b : a;
}
/* Replace embedded NUL bytes in the first n bytes of s with spaces so the
 * region can safely be treated as printable text. */
static void sanitize_text(char *s, size_t n) {
    size_t i = 0;
    while (i < n) {
        if (s[i] == '\0') {
            s[i] = ' ';
        }
        i++;
    }
}
/*
 * Format a journal realtime timestamp (microseconds since the Unix epoch)
 * into out as "YYYY-MM-DD HH:MM:SS" in local time.
 *
 * Fix: the original passed an UNCHECKED localtime_r() result to strftime()
 * — undefined behavior if the conversion fails — and left out untouched
 * when strftime() returned 0.  out is now always NUL-terminated.
 */
static void format_ts(char *out, size_t outsz, uint64_t usec) {
    if (outsz == 0) return;
    out[0] = '\0';
    time_t sec = (time_t)(usec / 1000000ULL);
    struct tm tm;
    if (localtime_r(&sec, &tm) == NULL) {
        snprintf(out, outsz, "unknown-time");  /* conversion failed */
        return;
    }
    if (strftime(out, outsz, "%Y-%m-%d %H:%M:%S", &tm) == 0) {
        out[0] = '\0';  /* buffer too small for the format */
    }
}
/* Given a raw journal field entry of the form "KEY=VALUE" (data, len),
 * return a pointer to VALUE and store its length in *vlen when KEY
 * matches key exactly; return NULL otherwise. */
static const char* field_value(const void *data, size_t len, const char *key, size_t *vlen) {
    const char *bytes = (const char *)data;
    size_t klen = strlen(key);
    /* Entry must be at least "KEY=" long. */
    if (len < klen + 1) return NULL;
    if (memcmp(bytes, key, klen) != 0) return NULL;
    if (bytes[klen] != '=') return NULL;
    *vlen = len - klen - 1;
    return bytes + klen + 1;
}
/*
 * Append one journal entry to the output buffer, formatted as
 * "[timestamp] ident: message\n".  Prefers the MESSAGE= field; when it is
 * absent, dumps a handful of raw KEY=VALUE fields instead.
 * Returns 0 on success, -1 on allocation failure.
 *
 * Fix: the fallback branch computed MAX_OUTPUT_BYTES - *len without first
 * checking *len < MAX_OUTPUT_BYTES; once *len passed the cap the unsigned
 * subtraction wrapped to a huge size_t, defeating the output limit.  The
 * remaining-budget calculation is now guarded in both branches.
 */
static int append_entry_line(sd_journal *j, char **buf, size_t *len, size_t *cap) {
    uint64_t usec = 0;
    (void)sd_journal_get_realtime_usec(j, &usec);  /* 0 on failure => epoch */
    char ts[32];
    format_ts(ts, sizeof(ts), usec);
    const void *data = NULL;
    size_t dlen = 0;
    /* MESSAGE= field, if present. */
    const char *message = NULL;
    size_t mlen = 0;
    int r = sd_journal_get_data(j, "MESSAGE", &data, &dlen);
    if (r >= 0) message = field_value(data, dlen, "MESSAGE", &mlen);
    /* Prefer SYSLOG_IDENTIFIER for the prefix, fall back to _COMM. */
    const char *ident = NULL;
    size_t ilen = 0;
    r = sd_journal_get_data(j, "SYSLOG_IDENTIFIER", &data, &dlen);
    if (r >= 0) {
        ident = field_value(data, dlen, "SYSLOG_IDENTIFIER", &ilen);
    } else if (sd_journal_get_data(j, "_COMM", &data, &dlen) >= 0) {
        ident = field_value(data, dlen, "_COMM", &ilen);
    }
    if (append_cstr(buf, len, cap, "[") < 0) return -1;
    if (append_cstr(buf, len, cap, ts) < 0) return -1;
    if (append_cstr(buf, len, cap, "] ") < 0) return -1;
    if (ident && ilen > 0) {
        if (append_bytes(buf, len, cap, ident, ilen) < 0) return -1;
        if (append_cstr(buf, len, cap, ": ") < 0) return -1;
    }
    /* Remaining budget before the global output cap; guarded against
     * unsigned underflow (previously only the MESSAGE branch was). */
    size_t remaining = (*len < (size_t)MAX_OUTPUT_BYTES)
                           ? (size_t)MAX_OUTPUT_BYTES - *len : 0;
    if (message && mlen > 0) {
        /* Copy so embedded NULs can be replaced before appending. */
        char *tmp = malloc(mlen);
        if (!tmp) return -1;
        memcpy(tmp, message, mlen);
        sanitize_text(tmp, mlen);
        int ok = append_bytes(buf, len, cap, tmp, min_size(mlen, remaining));
        free(tmp);
        if (ok < 0) return -1;
    } else {
        /* No MESSAGE=: emit a few raw KEY=VALUE fields instead. */
        const char *keys[] = {"PRIORITY","SYSLOG_IDENTIFIER","_COMM","_EXE","_CMDLINE","MESSAGE"};
        for (size_t i = 0; i < sizeof(keys)/sizeof(keys[0]); i++) {
            if (sd_journal_get_data(j, keys[i], &data, &dlen) < 0) continue;
            if (append_cstr(buf, len, cap, (i == 0 ? "" : " ")) < 0) return -1;
            remaining = (*len < (size_t)MAX_OUTPUT_BYTES)
                            ? (size_t)MAX_OUTPUT_BYTES - *len : 0;
            if (append_bytes(buf, len, cap, (const char*)data, min_size(dlen, remaining)) < 0) return -1;
        }
    }
    if (*len < MAX_OUTPUT_BYTES) {
        if (append_cstr(buf, len, cap, "\n") < 0) return -1;
    }
    return 0;
}
/* Return a heap-allocated empty string, or NULL on OOM.  Callers treat an
 * empty string as "no data / benign failure". */
static char* empty_output(void) {
    char *z = malloc(1);
    if (z) z[0] = '\0';
    return z;
}
/*
 * Collect all local-journal entries whose _PID matches pid into one
 * heap-allocated, NUL-terminated string (caller frees via journal_free()).
 * Returns an empty string for an invalid pid or any journal-open/match
 * failure, and NULL only on allocation failure.
 *
 * Improvement: the thrice-repeated `malloc(1); z[0]='\0'` pattern is
 * factored into the empty_output() helper above; behavior is unchanged.
 */
static char* journal_get_by_pid_impl(int pid) {
    if (pid <= 0) return empty_output();
    sd_journal *j = NULL;
    if (sd_journal_open(&j, SD_JOURNAL_LOCAL_ONLY) < 0) return empty_output();
    char match[64];
    snprintf(match, sizeof(match), "_PID=%d", pid);
    if (sd_journal_add_match(j, match, 0) < 0) {
        sd_journal_close(j);
        return empty_output();
    }
    sd_journal_seek_head(j);
    char *buf = NULL; size_t len = 0, cap = 0;
    int r;
    while ((r = sd_journal_next(j)) > 0) {
        if (len >= MAX_OUTPUT_BYTES) break;   /* output cap reached */
        if (append_entry_line(j, &buf, &len, &cap) < 0) {
            free(buf); sd_journal_close(j); return NULL;  /* OOM */
        }
    }
    if (len >= MAX_OUTPUT_BYTES) {
        const char *trunc = "[output truncated]\n";
        (void)append_bytes(&buf, &len, &cap, trunc, strlen(trunc));
    }
    /* No matching entries: return an empty string, not NULL. */
    if (!buf) { buf = malloc(1); if (!buf) { sd_journal_close(j); return NULL; } buf[0] = '\0'; }
    sd_journal_close(j);
    return buf;
}
/* Public ABI entry point (used via PHP FFI): fetch journal text for a PID.
 * The returned buffer must be released with journal_free(). */
#ifdef __GNUC__
__attribute__((visibility("default")))
#endif
char* journal_get_by_pid(int pid) { return journal_get_by_pid_impl(pid); }
/* Public ABI entry point: free a buffer returned by journal_get_by_pid().
 * Safe to call with NULL (free(NULL) is a no-op). */
#ifdef __GNUC__
__attribute__((visibility("default")))
#endif
void journal_free(char* p) { free(p); }
#ifdef BUILD_CLI
/* Parse a strictly positive 32-bit PID from a decimal string.
 * Stores the value in *out and returns 0 on success; returns -1 for
 * NULL/empty input, trailing garbage, out-of-range or non-positive values. */
static int parse_pid(const char *s, int *out) {
    if (s == NULL || *s == '\0') return -1;
    errno = 0;
    char *end = NULL;
    long value = strtol(s, &end, 10);
    int malformed = (errno != 0) || (end == s) || (*end != '\0');
    if (malformed || value <= 0 || value > 0x7fffffffL) return -1;
    *out = (int)value;
    return 0;
}
/* CLI entry point: `journalwrap <pid>` prints that PID's journal lines.
 * Exit codes: 0 success, 1 internal error, 2 usage/argument error. */
int main(int argc, char **argv) {
    if (argc != 2) {
        fprintf(stderr, "Usage: %s <pid>\n", argv[0]);
        return 2;
    }
    int pid = 0;
    if (parse_pid(argv[1], &pid) != 0) {
        fprintf(stderr, "Invalid pid\n");
        return 2;
    }
    char *out = journal_get_by_pid_impl(pid);
    if (out == NULL) {
        fprintf(stderr, "Out of memory or error\n");
        return 1;
    }
    fputs(out, stdout);
    free(out);
    return 0;
}
#endif

View File

@@ -0,0 +1,16 @@
{
# esmith template fragment: ensure the 'mailstats' configuration record
# has a DBPass property, generating a random 16-character password the
# first time the template is expanded.  Emits no text.
# NOTE(review): MIME::Base64 is imported but encode_base64 is never used
# in this fragment — confirm whether the import can be dropped.
use MIME::Base64 qw(encode_base64);
my $rec = $DB->get('mailstats') || $DB->new_record('mailstats', {type => 'report'});
my $pw = $rec->prop('DBPass');
# A password already exists: nothing to do.
return "" if $pw;
my $length = shift || 16;
# Alphanumerics plus punctuation; deliberately excludes quote characters
# so the value is safe to interpolate into generated config files.
my @chars = ('A'..'Z', 'a'..'z', 0..9, qw(! @ $ % ^ & * ? _ - + =));
$pw = '';
$pw .= $chars[rand @chars] for 1..$length;
$rec->set_prop('DBPass', $pw);
return ""
}

View File

@@ -0,0 +1,24 @@
{
# esmith template fragment: emit a bootstrap shell script that creates the
# mailstats database, the SummaryLogs table and a restricted DB user.
# The script is a no-op once /var/lib/mysql/mailstats exists.
my $db = $mailstats{DBName} || 'mailstats';
my $user = $mailstats{DBUser} || 'mailstats_rw';
my $pass = $mailstats{DBPass} || 'changeme';
# NOTE(review): $pass is interpolated into a single-quoted SQL literal in
# the heredoc below; the password generator avoids quote characters, but
# verify no other source can set DBPass to a value containing ' or \.
$OUT .= <<END
#! /bin/sh
if [ -d /var/lib/mysql/mailstats ]; then
exit
fi
/usr/bin/mariadb <<EOF
CREATE DATABASE $db DEFAULT CHARACTER SET utf8 COLLATE utf8_general_ci;
USE $db;
CREATE TABLE IF NOT EXISTS SummaryLogs (
id INT AUTO_INCREMENT PRIMARY KEY,
Date DATE,
Hour INT,
logData TEXT
);
CREATE USER $user@localhost IDENTIFIED BY '$pass';
GRANT SELECT, INSERT, UPDATE, DELETE ON $db.* TO $user@localhost;
FLUSH PRIVILEGES;
EOF
END
}

View File

@@ -1,97 +0,0 @@
CREATE DATABASE IF NOT EXISTS `mailstats`;
USE `mailstats`;
CREATE TABLE IF NOT EXISTS `ColumnStats` (
`ColumnStatsid` int(11) NOT NULL auto_increment,
`dateid` int(11) NOT NULL default '0',
`timeid` int(11) NOT NULL default '0',
`descr` varchar(20) NOT NULL default '',
`count` bigint(20) NOT NULL default '0',
`servername` varchar(30) NOT NULL default '',
PRIMARY KEY (`ColumnStatsid`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
CREATE TABLE IF NOT EXISTS `JunkMailStats` (
`JunkMailstatsid` int(11) NOT NULL auto_increment,
`dateid` int(11) NOT NULL default '0',
`user` varchar(12) NOT NULL default '',
`count` bigint(20) NOT NULL default '0',
`servername` varchar(30) default NULL,
PRIMARY KEY (`JunkMailstatsid`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
CREATE TABLE IF NOT EXISTS `SARules` (
`SARulesid` int(11) NOT NULL auto_increment,
`dateid` int(11) NOT NULL default '0',
`rule` varchar(50) NOT NULL default '',
`count` bigint(20) NOT NULL default '0',
`totalhits` bigint(20) NOT NULL default '0',
`servername` varchar(30) NOT NULL default '',
PRIMARY KEY (`SARulesid`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
CREATE TABLE IF NOT EXISTS `SAscores` (
`SAscoresid` int(11) NOT NULL auto_increment,
`dateid` int(11) NOT NULL default '0',
`acceptedcount` bigint(20) NOT NULL default '0',
`rejectedcount` bigint(20) NOT NULL default '0',
`hamcount` bigint(20) NOT NULL default '0',
`acceptedscore` decimal(20,2) NOT NULL default '0.00',
`rejectedscore` decimal(20,2) NOT NULL default '0.00',
`hamscore` decimal(20,2) NOT NULL default '0.00',
`totalsmtp` bigint(20) NOT NULL default '0',
`totalrecip` bigint(20) NOT NULL default '0',
`servername` varchar(30) NOT NULL default '',
PRIMARY KEY (`SAscoresid`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
CREATE TABLE IF NOT EXISTS `VirusStats` (
`VirusStatsid` int(11) NOT NULL auto_increment,
`dateid` int(11) NOT NULL default '0',
`descr` varchar(40) NOT NULL default '',
`count` bigint(20) NOT NULL default '0',
`servername` varchar(30) NOT NULL default '',
PRIMARY KEY (`VirusStatsid`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
CREATE TABLE IF NOT EXISTS `date` (
`dateid` int(11) NOT NULL auto_increment,
`date` date NOT NULL default '0000-00-00',
PRIMARY KEY (`dateid`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
CREATE TABLE IF NOT EXISTS `domains` (
`domainsid` int(11) NOT NULL auto_increment,
`dateid` int(11) NOT NULL default '0',
`domain` varchar(40) NOT NULL default '',
`type` varchar(10) NOT NULL default '',
`total` bigint(20) NOT NULL default '0',
`denied` bigint(20) NOT NULL default '0',
`xfererr` bigint(20) NOT NULL default '0',
`accept` bigint(20) NOT NULL default '0',
`servername` varchar(30) NOT NULL default '',
PRIMARY KEY (`domainsid`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
CREATE TABLE IF NOT EXISTS `qpsmtpdcodes` (
`qpsmtpdcodesid` int(11) NOT NULL auto_increment,
`dateid` int(11) NOT NULL default '0',
`reason` varchar(40) NOT NULL default '',
`count` bigint(20) NOT NULL default '0',
`servername` varchar(30) NOT NULL default '',
PRIMARY KEY (`qpsmtpdcodesid`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
CREATE TABLE IF NOT EXISTS `time` (
`timeid` int(11) NOT NULL auto_increment,
`time` time NOT NULL default '00:00:00',
PRIMARY KEY (`timeid`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
CREATE USER 'mailstats'@'localhost' IDENTIFIED BY 'mailstats';
GRANT ALL PRIVILEGES ON mailstats.* TO 'mailstats'@'localhost';
FLUSH PRIVILEGES;

View File

@@ -0,0 +1,24 @@
{
# esmith template fragment: render /etc/mailstats/db.php containing the
# DB connection parameters consumed by the mailstats PHP pages.
# Load SME::ConfigDB to read values from DB
my $cdb = esmith::ConfigDB->open() || die "Cannot open configuration DB\n";
# Get the fragment (report database definition)
# NOTE(review): if the 'mailstats' record is absent, $report is undef and
# the prop() calls below will die — confirm the record always exists here.
my $report = $cdb->get('mailstats');
my $dbhost = $report->prop('DBHost') || 'localhost';
# NOTE(review): $dbport is read but never emitted in the PHP array below —
# confirm whether a 'port' entry should be added on the PHP side.
my $dbport = $report->prop('DBPort') || '3306';
my $dbuser = $report->prop('DBUser') || 'mailstats_rw';
# Assume password is stored in a property 'DBPass'
# NOTE(review): values are interpolated into single-quoted PHP strings; a
# password containing ' or \ would corrupt the file.  The generator
# fragment avoids those characters — verify no other writer exists.
my $dbpass = $report->prop('DBPass') || 'changeme';
my $dbname = $report->key || 'mailstats';
$OUT = <<"END";
<?php
return [
'host' => '$dbhost',
'user' => '$dbuser',
'pass' => '$dbpass',
'name' => '$dbname',
];
END
}

View File

@@ -0,0 +1,7 @@
<?php
// Default DB credentials for the mailstats web pages; this file is
// normally overwritten by the /etc/e-smith template at configure time.
return [
'host' => 'localhost',
'user' => 'mailstats', //Should be a read-only account (mailstats-ro)
'pass' => 'mailstats', //Will be replaced by a random strong password
'name' => 'mailstats',
];

View File

@@ -208,3 +208,140 @@ p.cssvalid,p.htmlvalid {float:left;margin-right:20px}
.traffictable {border-collapse:collapse;width:98%} .traffictable {border-collapse:collapse;width:98%}
.divseeinbrowser{text-align:center;} .divseeinbrowser{text-align:center;}
.bordercollapse{border-collapse:collapse;} .bordercollapse{border-collapse:collapse;}
/* ==============================================
Summary Logs Section (scoped under .mailstats-summary)
============================================== */
.mailstats-summary .summary-container {
width: 100%;
overflow-x: auto;
font-size: 0.85vw;
}
/* Table styling */
.mailstats-summary .summary-table {
border-collapse: collapse;
width: 98%;
font-size: inherit;
}
.mailstats-summary .summary-table th {
text-align: left;
padding: 0.5em;
border-bottom: 2px solid #ddd;
background-color: #f8f8f8;
}
.mailstats-summary .summary-table td {
padding: 0.5em;
border-bottom: 1px solid #ddd;
word-break: break-word; /* Allows breaking long words at arbitrary points */
overflow-wrap: break-word; /* Modern standard for breaking long words */
hyphens: auto; /* Optionally adds hyphenation if supported */
}
/* Zebra striping */
.mailstats-summary .summary-table tbody tr:nth-child(even) {
background-color: #fafafa;
}
/* Pagination */
.mailstats-summary .pagination {
margin-top: 1em;
}
.mailstats-summary .pagination a {
text-decoration: none;
color: #0066cc;
padding: 0.3em 0.6em;
}
.mailstats-summary .pagination a:hover {
text-decoration: underline;
}
.mailstats-summary table.stripes {
border-collapse: collapse;
width: 95%;
overflow-x: auto;
margin: 0.6% auto;
}
/* Optional zebra striping */
.mailstats-summary table.stripes tbody tr:nth-child(even) {
background-color: #fafafa;
}
/* ==============================================
Log Detail Page (scoped under .mailstats-detail)
============================================== */
.mailstats-detail .detail-container {
width: 100%;
max-width: 1200px;
margin: 1em auto;
padding: 0 1em;
}
/* Preformatted log box */
.mailstats-detail .log {
white-space: pre-wrap;
word-wrap: break-word;
background: #111;
color: #eee;
padding: 1em;
border-radius: 6px;
font-family: monospace, monospace;
font-size: 0.75em;
line-height: 1.4;
overflow-x: auto;
}
/* Back link styling */
.mailstats-detail a {
color: #0066cc;
text-decoration: none;
}
.mailstats-detail a:hover {
text-decoration: underline;
}
/* ==============================================
Status header at top of table (scoped under emailstatus)
============================================== */
.emailstatus-wrapper {
font-family: Arial, sans-serif;
padding: 20px;
}
.emailstatus-header {
text-align: center;
margin-bottom: 20px;
}
.emailstatus-tablecontainer {
display: flex;
gap: 20px;
flex-wrap: wrap;
}
.emailstatus-table {
border-collapse: collapse;
min-width: 300px;
flex: 1 1 45%;
}
.emailstatus-table th {
background-color: #a9a9a9;
color: black;
text-align: left;
padding: 8px;
}
.emailstatus-table td {
padding: 8px;
border: 1px solid #ddd;
}
.emailstatus-table tr:nth-child(even) {
background-color: #f9f9f9;
}
@media (max-width: 768px) {
.emailstatus-tablecontainer {
flex-direction: column;
}
}

View File

@@ -1,51 +1,244 @@
<?php <?php
header('Content-Type: text/plain'); // Security headers
header('Content-Type: text/html; charset=UTF-8');
header("Content-Security-Policy: default-src 'self'; script-src 'none'; style-src 'self' 'unsafe-inline'; img-src 'self' data:; base-uri 'none'; object-src 'none'; frame-ancestors 'none'");
header('X-Content-Type-Options: nosniff');
header('Referrer-Policy: no-referrer');
header('Permissions-Policy: geolocation=(), microphone=(), camera=()');
header('Cache-Control: no-store, no-cache, must-revalidate, max-age=0');
header('Pragma: no-cache');
if (!empty($_SERVER['HTTPS']) && $_SERVER['HTTPS'] !== 'off') {
header('Strict-Transport-Security: max-age=31536000; includeSubDomains');
}
$input_param = isset($_GET['id']) ? $_GET['id'] : '9999'; function e($s) {
return htmlspecialchars((string)$s, ENT_QUOTES | ENT_SUBSTITUTE, 'UTF-8');
}
// Set the directory and file names // Configuration: env first, then fallback to optional file
$directory = "/opt/mailstats/logs"; $servername = getenv('MAILSTATS_DB_HOST') ?: 'localhost';
$files = ['current1', 'current2']; $username = getenv('MAILSTATS_DB_USER') ?: '';
$password = getenv('MAILSTATS_DB_PASS') ?: '';
$dbname = getenv('MAILSTATS_DB_NAME') ?: '';
function process_file($file_path, $input_param) { if ($username === '' || $password === '' || $dbname === '') {
$file = fopen($file_path, 'r'); $cfgPath = '/etc/mailstats/db.php'; // optional fallback config file
$match = "/ $input_param /"; if (is_readable($cfgPath)) {
$endmatch = "/cleaning up after $input_param/"; ob_start();
while (($line = fgets($file)) !== false) { $cfg = include $cfgPath;
// Check if the line contains the input_parameter ob_end_clean();
if (preg_match($match,$line) === 1) { $servername = $cfg['host'] ?? $servername;
echo $line; $username = $cfg['user'] ?? $username;
} elseif (preg_match($endmatch,$line) === 1) { $password = $cfg['pass'] ?? $password;
echo $line; $dbname = $cfg['name'] ?? $dbname;
exit(); }
}
if ($username === '' || $password === '' || $dbname === '') {
error_log('DB credentials missing (env and config file).');
http_response_code(500);
exit('Service temporarily unavailable.');
}
// Input validation: id
$id = isset($_GET['id']) ? filter_var($_GET['id'], FILTER_VALIDATE_INT) : null;
if ($id === false || $id === null || $id < 1) {
http_response_code(400);
exit('Invalid id');
}
// DB connect with exceptions
mysqli_report(MYSQLI_REPORT_ERROR | MYSQLI_REPORT_STRICT);
try {
$conn = new mysqli($servername, $username, $password, $dbname);
$conn->set_charset('utf8mb4');
} catch (mysqli_sql_exception $e) {
error_log('DB connect failed: ' . $e->getMessage());
http_response_code(500);
exit('Service temporarily unavailable.');
}
// Fetch the record and extract PID from JSON logData
try {
$stmt = $conn->prepare('SELECT id, logData FROM SummaryLogs WHERE id = ?');
$stmt->bind_param('i', $id);
$stmt->execute();
$res = $stmt->get_result();
$row = $res->fetch_assoc();
$stmt->close();
} catch (mysqli_sql_exception $e) {
error_log('Query failed: ' . $e->getMessage());
http_response_code(500);
exit('Service temporarily unavailable.');
}
if (!$row) {
http_response_code(404);
exit('Record not found');
}
$logData = $row['logData'];
$pid = null;
$data = json_decode($logData, true, 512, JSON_INVALID_UTF8_SUBSTITUTE);
if (is_array($data)) {
foreach (['id','pid', 'PID', 'Pid', 'process_id', 'ProcessId'] as $k) {
if (isset($data[$k]) && (is_int($data[$k]) || ctype_digit((string)$data[$k]))) {
$pid = (int)$data[$k];
break;
} }
} }
fclose($file);
} }
function tai64nToDate($tai64n) { if (!$pid || $pid < 1) {
// Check if the input TAI64N string is valid http_response_code(422);
if (preg_match('/^@([0-9a-f]{8})([0-9a-f]{8})$/', $tai64n, $matches)) { exit('PID not found in this record');
// First part: seconds since epoch }
$sec_hex = $matches[1];
// Second part: nanoseconds in hex
$nsec_hex = $matches[2];
// Convert hex to decimal // Journal retrieval using C wrapper
$seconds = hexdec($sec_hex); define('FFI_LIB', 'libjournalwrap.so'); // adjust if needed
$nanoseconds = hexdec($nsec_hex); define('WRAPPER_BIN', '/usr/bin/journalwrap'); // fallback executable path
define('MAX_OUTPUT_BYTES', 2_000_000); // 2MB safety cap
// Calculate the full timestamp in seconds function getJournalByPidViaFFI(int $pid): ?string {
$timestamp = $seconds + ($nanoseconds / 1e9); // Nanoseconds to seconds if (!extension_loaded('FFI')) {
return null;
// Format timestamp to 'Y-m-d H:i:s' }
return date('Y-m-d H:i:s', $timestamp); try {
} else { // Adjust the function signatures to match your wrapper
throw new InvalidArgumentException("Invalid TAI64N format."); $ffi = FFI::cdef("
char* journal_get_by_pid(int pid);
void journal_free(char* p);
", FFI_LIB);
$cstr = $ffi->journal_get_by_pid($pid);
if ($cstr === null) {
return '';
}
$out = FFI::string($cstr);
$ffi->journal_free($cstr);
return $out;
} catch (Throwable $e) {
error_log('FFI journal wrapper failed: ' . $e->getMessage());
return null;
} }
} }
chdir($directory);
foreach ($files as $file) { function getJournalByPidViaExec(int $pid): ?string {
process_file($file, $input_param); // Fallback to an external wrapper binary (must be safe and not use shell)
$cmd = WRAPPER_BIN . ' ' . (string)$pid;
$descriptorspec = [
0 => ['pipe', 'r'],
1 => ['pipe', 'w'],
2 => ['pipe', 'w'],
];
$pipes = [];
$proc = proc_open($cmd, $descriptorspec, $pipes, null, null, ['bypass_shell' => true]);
if (!\is_resource($proc)) {
error_log('Failed to start journal wrapper binary');
return null;
}
fclose($pipes[0]); // no stdin
stream_set_blocking($pipes[1], false);
stream_set_blocking($pipes[2], false);
$stdout = '';
$stderr = '';
$start = microtime(true);
$timeout = 10.0; // seconds
$readChunk = 65536;
while (true) {
$status = proc_get_status($proc);
$running = $status['running'];
$read = [$pipes[1], $pipes[2]];
$write = null;
$except = null;
$tv_sec = 0;
$tv_usec = 300000; // 300ms
stream_select($read, $write, $except, $tv_sec, $tv_usec);
foreach ($read as $r) {
if ($r === $pipes[1]) {
$chunk = fread($pipes[1], $readChunk);
if ($chunk !== false && $chunk !== '') {
$stdout .= $chunk;
}
} elseif ($r === $pipes[2]) {
$chunk = fread($pipes[2], $readChunk);
if ($chunk !== false && $chunk !== '') {
$stderr .= $chunk;
}
}
}
if (!$running) {
break;
}
if ((microtime(true) - $start) > $timeout) {
proc_terminate($proc);
$stderr .= "\n[terminated due to timeout]";
break;
}
if (strlen($stdout) + strlen($stderr) > MAX_OUTPUT_BYTES) {
proc_terminate($proc);
$stderr .= "\n[terminated due to output size limit]";
break;
}
}
foreach ($pipes as $p) {
if (is_resource($p)) {
fclose($p);
}
}
$exitCode = proc_close($proc);
if ($exitCode !== 0 && $stderr !== '') {
error_log('journal wrapper stderr: ' . $stderr);
}
return $stdout;
} }
$logs = getJournalByPidViaFFI($pid);
if ($logs === null) {
$logs = getJournalByPidViaExec($pid);
}
if ($logs === null) {
http_response_code(500);
exit('Unable to read journal for this PID');
}
// Safety cap to avoid rendering gigantic outputs
if (strlen($logs) > MAX_OUTPUT_BYTES) {
$logs = substr($logs, 0, MAX_OUTPUT_BYTES) . "\n[output truncated]";
}
// Done with DB
$conn->close();
?> ?>
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<title>Log details for PID <?= e($pid) ?> (record <?= e($id) ?>)</title>
<link rel="stylesheet" type="text/css" href="css/mailstats.css" />
<link rel="icon" type="image/x-icon" href="favicon.ico">
</head>
<body>
<div class="mailstats-detail">
<div class="detail-container">
<h1>Log details for PID <?= e($pid) ?> (record <?= e($id) ?>)</h1>
<p><a href="javascript:history.back()">Back</a></p>
<pre class="log"><?= e($logs) ?></pre>
</div>
</div>
</body>
</html>

Binary file not shown.

After

Width:  |  Height:  |  Size: 4.0 KiB

View File

@@ -1,102 +1,192 @@
<?php <?php
// Database configuration // Set security headers (must be sent before output)
$servername = "localhost"; header('Content-Type: text/html; charset=UTF-8');
$username = "mailstats"; header("Content-Security-Policy: default-src 'self'; script-src 'none'; style-src 'self' 'unsafe-inline'; img-src 'self' data:; base-uri 'none'; object-src 'none'; frame-ancestors 'none'");
$password = "mailstats"; header('X-Content-Type-Options: nosniff');
$dbname = "mailstats"; header('Referrer-Policy: no-referrer');
header('Permissions-Policy: geolocation=(), microphone=(), camera=()');
// Default date to yesterday header('Cache-Control: no-store, no-cache, must-revalidate, max-age=0');
$date = isset($_GET['date']) ? $_GET['date'] : date('Y-m-d', strtotime('-1 day')); header('Pragma: no-cache');
if (!empty($_SERVER['HTTPS']) && $_SERVER['HTTPS'] !== 'off') {
// Default hour to 99 (means all the hours) header('Strict-Transport-Security: max-age=31536000; includeSubDomains');
$hour = isset($_GET['hour']) ? $_GET['hour'] : 99;
// Create connection
$conn = new mysqli($servername, $username, $password, $dbname);
// Check connection
if ($conn->connect_error) {
die("Connection failed: " . $conn->connect_error);
} }
// Prepare and execute the query // Helper for safe HTML encoding
if ($hour == 99){ function e($s) {
$sql = "SELECT * FROM SummaryLogs WHERE Date = ?"; return htmlspecialchars((string)$s, ENT_QUOTES | ENT_SUBSTITUTE, 'UTF-8');
$stmt = $conn->prepare($sql); }
$stmt->bind_param("s", $date);
} else { // Configuration: read DB credentials from environment
$sql = "SELECT * FROM SummaryLogs WHERE Date = ? AND Hour = ?"; $servername = getenv('MAILSTATS_DB_HOST') ?: '';
$stmt = $conn->prepare($sql); $username = getenv('MAILSTATS_DB_USER') ?: '';
$stmt->bind_param("si", $date, $hour); $password = getenv('MAILSTATS_DB_PASS') ?: '';
$dbname = getenv('MAILSTATS_DB_NAME') ?: '';
// Otherwise try config in /etc/mailstats
if ($username === '' || $password === '' || $dbname === '') {
$cfgPath = '/etc/mailstats/db.php';
if (is_readable($cfgPath)) {
ob_start();
$cfg = include $cfgPath;
ob_end_clean();
$servername = $cfg['host'] ?? $servername ?: 'localhost';
$username = $cfg['user'] ?? $username;
$password = $cfg['pass'] ?? $password;
$dbname = $cfg['name'] ?? $dbname;
}
}
// Fail fast if credentials are not provided via environment
if ($username === '' || $password === '' || $dbname === '') {
error_log('Configuration error: DB credentials not set via environment.');
http_response_code(500);
exit('Service temporarily unavailable.');
}
// Robust input handling
$defaultDate = date('Y-m-d', strtotime('-1 day'));
$date = isset($_GET['date']) ? $_GET['date'] : $defaultDate;
if (!preg_match('/^\d{4}-\d{2}-\d{2}$/', $date)) {
http_response_code(400);
exit('Invalid date');
}
// hour: allow 023 or special 99 meaning “all hours”
$hour = isset($_GET['hour']) ? filter_var($_GET['hour'], FILTER_VALIDATE_INT) : 99;
if ($hour === false || ($hour !== 99 && ($hour < 0 || $hour > 23))) {
http_response_code(400);
exit('Invalid hour');
}
// Pagination
$page = isset($_GET['page']) ? filter_var($_GET['page'], FILTER_VALIDATE_INT) : 1;
if ($page === false || $page < 1) { $page = 1; }
$pageSize = isset($_GET['page_size']) ? filter_var($_GET['page_size'], FILTER_VALIDATE_INT) : 50;
if ($pageSize === false) { $pageSize = 50; }
// Bound page size to prevent huge result sets
if ($pageSize < 1) { $pageSize = 1; }
if ($pageSize > 100) { $pageSize = 100; }
$limit = $pageSize;
$offset = ($page - 1) * $pageSize;
// Use mysqli with exceptions and UTF-8
mysqli_report(MYSQLI_REPORT_ERROR | MYSQLI_REPORT_STRICT);
try {
$conn = new mysqli($servername, $username, $password, $dbname);
$conn->set_charset('utf8mb4');
} catch (mysqli_sql_exception $e) {
error_log('DB connect failed: ' . $e->getMessage());
http_response_code(500);
exit('Service temporarily unavailable.');
}
// Build WHERE clause and bind parameters safely
$where = 'Date = ?';
$bindTypesCount = 's';
$bindValuesCount = [$date];
if ($hour !== 99) {
$where .= ' AND Hour = ?';
$bindTypesCount .= 'i';
$bindValuesCount[] = $hour;
}
// Count query for total rows (for display/pagination info)
try {
$sqlCount = "SELECT COUNT(*) AS total FROM SummaryLogs WHERE $where";
$stmtCount = $conn->prepare($sqlCount);
$stmtCount->bind_param($bindTypesCount, ...$bindValuesCount);
$stmtCount->execute();
$resultCount = $stmtCount->get_result();
$rowCount = $resultCount->fetch_assoc();
$totalRows = (int)$rowCount['total'];
$stmtCount->close();
} catch (mysqli_sql_exception $e) {
error_log('Count query failed: ' . $e->getMessage());
http_response_code(500);
exit('Service temporarily unavailable.');
}
// Data query with ORDER and LIMIT/OFFSET
try {
$sql = "SELECT id, logData FROM SummaryLogs WHERE $where ORDER BY id DESC LIMIT ? OFFSET ?";
// Bind types: existing where types + limit (i) + offset (i)
$bindTypesData = $bindTypesCount . 'ii';
$bindValuesData = $bindValuesCount;
$bindValuesData[] = $limit;
$bindValuesData[] = $offset;
$stmt = $conn->prepare($sql);
$stmt->bind_param($bindTypesData, ...$bindValuesData);
$stmt->execute();
$result = $stmt->get_result();
} catch (mysqli_sql_exception $e) {
error_log('Data query failed: ' . $e->getMessage());
http_response_code(500);
exit('Service temporarily unavailable.');
} }
$stmt->execute();
$result = $stmt->get_result();
$result_count = $result->num_rows;
function generateLogDataTable($logData) { function generateLogDataTable($logData) {
$data = json_decode($logData, true); // Defensive decode with substitution for invalid UTF-8
if (is_null($data)) { $data = json_decode($logData, true, 512, JSON_INVALID_UTF8_SUBSTITUTE);
return "Invalid JSON data";
if (!is_array($data)) {
return '<em>Invalid JSON data</em>';
} }
//// Remove entries with the key "logterse" // Remove entries with key 'logterse' and entries with empty values
//if (isset($data['logterse'])) {
//unset($data['logterse']);
//}
// Remove entries with the key "logterse" and remove entries with empty values
foreach ($data as $key => $value) { foreach ($data as $key => $value) {
if ($key === 'logterse' || empty($value)) { if ($key === 'logterse' || $value === '' || $value === null) {
unset($data[$key]); unset($data[$key]);
} }
} }
// Handle adjacent duplicates by merging keys // Merge adjacent duplicates by value
$mergedData = []; $mergedData = [];
$previousValue = null; $previousValue = null;
foreach ($data as $key => $value) { foreach ($data as $key => $value) {
if ($value === $previousValue) { // Normalize non-scalar values for display
// Merge the current key with the previous key if (is_array($value) || is_object($value)) {
$value = json_encode($value, JSON_UNESCAPED_UNICODE | JSON_UNESCAPED_SLASHES);
}
$valueStr = (string)$value;
if ($valueStr === $previousValue) {
end($mergedData); end($mergedData);
$lastKey = key($mergedData); $lastKey = key($mergedData);
$newKey = "$lastKey/$key"; $newKey = $lastKey . '/' . $key;
$mergedData[$newKey] = $value; $mergedData[$newKey] = $valueStr;
// Remove the old entry
unset($mergedData[$lastKey]); unset($mergedData[$lastKey]);
} else { } else {
// Otherwise, add a new entry $mergedData[$key] = $valueStr;
$mergedData[$key] = $value;
} }
$previousValue = $value; $previousValue = $valueStr;
} }
// Optional truncation to keep rendering safe
$maxValueLen = 500;
foreach ($mergedData as $k => $v) {
if (mb_strlen($v, 'UTF-8') > $maxValueLen) {
$mergedData[$k] = mb_substr($v, 0, $maxValueLen, 'UTF-8') . '…';
}
}
$keys = array_keys($mergedData); $keys = array_keys($mergedData);
$values = array_values($mergedData); $values = array_values($mergedData);
$output = '<table class="stripes" style="border-collapse: collapse; width:95%;overflow-x:auto; margin: 0.6% auto 0.6% auto;"><tbody>'; $output = '<table class="mailstats-summary stripes"><tbody>';
#$output = '<table class="stripes" style="border-collapse: collapse; width:95%;overflow-x:auto; margin:2%"><tbody>';
// Divide keys and values into sets of 6 // Divide keys and values into sets of 6
$chunks = array_chunk($keys, 6); $chunks = array_chunk($keys, 6);
foreach ($chunks as $chunkIndex => $chunk) { foreach ($chunks as $chunkIndex => $chunk) {
if ($chunkIndex > 0) {
// Add spacing between different sets
#$output .= '<tr><td colspan="6" style="height: 1em;"></td></tr>';
}
$output .= '<tr>'; $output .= '<tr>';
foreach ($chunk as $key) { foreach ($chunk as $key) {
$output .= '<th>' . htmlspecialchars($key) . '</th>'; $output .= '<th>' . e($key) . '</th>';
} }
$output .= '</tr><tr>'; $output .= '</tr><tr>';
foreach ($chunk as $i => $key) { foreach ($chunk as $i => $key) {
$val = htmlspecialchars($values[$chunkIndex * 6+ $i]); $val = $values[$chunkIndex * 6 + $i];
if ($key == 'id'){ $output .= '<td>' . e($val) . '</td>';
$output .= '<td>' . "<a href='./ShowDetailedLogs.php?id=".$val."'</a>".$val."</td>";
} else {
$output .= '<td>' . $val . '</td>';
}
} }
$output .= '</tr>'; $output .= '</tr>';
} }
@@ -106,61 +196,89 @@ function generateLogDataTable($logData) {
} }
?> ?>
<!DOCTYPE html> <!DOCTYPE html>
<html lang="en"> <html lang="en">
<head> <head>
<meta charset="UTF-8"> <meta charset="UTF-8">
<link rel='stylesheet' type='text/css' href='css/mailstats.css' />
<title>Summary Logs</title> <title>Summary Logs</title>
<!-- <style> <link rel="stylesheet" type="text/css" href="css/mailstats.css" />
table { <link rel="icon" type="image/x-icon" href="favicon.ico">
xxwidth: 100%;
xxborder-collapse: collapse;
}
table, th, td {
xxborder: 1px solid black;
}
th, td {
xxpadding: 8px;
xxtext-align: left;
}
</style>
-->
</head> </head>
<body> <body>
<div style="width:100%;overflow-x:auto;font-size:0.726cqw">" <div class="mailstats-summary">
<h1>Summary Logs for Date: <?= htmlspecialchars($date) ?> <?= $hour == 99 ? 'for All Hours' : 'and Hour: ' . htmlspecialchars($hour) ?></h1> <div class="summary-container">
<h3>Found <?= $result_count ?> records.</h3> <h1>
<table style="border-collapse:collapse;width:98%"> Summary Logs for Date: <?= e($date) ?>
<thead> <?= $hour === 99 ? ' (All Hours)' : ' at Hour: ' . e($hour) ?>
<tr> </h1>
<th>Id</th> <?php
<!--<th>Date</th>--> $startRow = $totalRows > 0 ? ($offset + 1) : 0;
<!--<th>Hour</th>--> $endRow = min($offset + $limit, $totalRows);
<th>Log Data</th> ?>
</tr> <h3>Found <?= e($totalRows) ?> records. Showing <?= e($startRow) ?><?= e($endRow) ?>.</h3>
</thead>
<tbody> <table class="summary-table">
<?php if ($result->num_rows > 0): ?> <thead>
<?php while($row = $result->fetch_assoc()): ?>
<tr> <tr>
<td><?= htmlspecialchars($row['id']) ?></td> <th>Id</th>
<td><?= generateLogDataTable($row['logData']) ?></td> <th>Details</th>
<th>Log Data</th>
</tr> </tr>
<?php endwhile; ?> </thead>
<?php else: ?> <tbody>
<tr> <?php if ($result && $result->num_rows > 0): ?>
<td colspan="4">No records found for the specified date and hour.</td> <?php while ($row = $result->fetch_assoc()): ?>
</tr> <?php
<?php endif; ?> $id = (int)$row['id'];
</tbody> $detailUrl = './ShowDetailedLogs.php?id=' . rawurlencode((string)$id);
</table> ?>
<tr>
<td><?= e($id) ?></td>
<td><a href="<?= e($detailUrl) ?>">View details</a></td>
<td><?= generateLogDataTable($row['logData']) ?></td>
</tr>
<?php endwhile; ?>
<?php else: ?>
<tr>
<td colspan="3">No records found for the specified date and hour.</td>
</tr>
<?php endif; ?>
</tbody>
</table>
<?php
// Pagination
$baseParams = [
'date' => $date,
'hour' => $hour,
'page_size' => $pageSize
];
$prevPage = $page > 1 ? $page - 1 : null;
$nextPage = ($offset + $limit) < $totalRows ? $page + 1 : null;
?>
<div class="pagination">
<?php if ($prevPage !== null): ?>
<?php
$paramsPrev = $baseParams; $paramsPrev['page'] = $prevPage;
$urlPrev = '?' . http_build_query($paramsPrev, '', '&', PHP_QUERY_RFC3986);
?>
<a href="<?= e($urlPrev) ?>">&laquo; Previous</a>
<?php endif; ?>
<?php if ($nextPage !== null): ?>
<?php
$paramsNext = $baseParams; $paramsNext['page'] = $nextPage;
$urlNext = '?' . http_build_query($paramsNext, '', '&', PHP_QUERY_RFC3986);
?>
<?php if ($prevPage !== null): ?> | <?php endif; ?>
<a href="<?= e($urlNext) ?>">Next &raquo;</a>
<?php endif; ?>
</div>
</div>
</div> </div>
<?php <?php
// Close the connection if (isset($stmt) && $stmt instanceof mysqli_stmt) { $stmt->close(); }
$stmt->close(); if (isset($conn) && $conn instanceof mysqli) { $conn->close(); }
$conn->close();
?> ?>
</body> </body>
</html> </html>

View File

@@ -1,7 +1,7 @@
<div class="${classname}"> <div class="${classname}">
<h2>${title}</h2> <h2>${title}</h2>
<tal:block condition="threshold != 0"> <tal:block condition="threshold != 0">
<span class='greyed-out'>Display threshold set to ${threshold}%</span> <span class='greyed-out'>${threshold}</span>
</tal:block> </tal:block>
<tal:block condition="threshold == 0"> <tal:block condition="threshold == 0">
<br> <br>

View File

@@ -3,6 +3,7 @@
<meta charset="utf-8"> <meta charset="utf-8">
<title>SMEServer Mailstats</title> <title>SMEServer Mailstats</title>
<link rel='stylesheet' type='text/css' href='css/mailstats.css' /> <link rel='stylesheet' type='text/css' href='css/mailstats.css' />
<link rel="icon" type="image/x-icon" href="favicon.ico">
<!-- Check links --> <!-- Check links -->
<!--css here--> <!--css here-->
</head> </head>
@@ -16,9 +17,30 @@
<br /> <br />
<h2>${structure:title}</h2> <h2>${structure:title}</h2>
<br /> <br />
<div class="headerpanel"> <div class="emailstatus-wrapper">
<div class = "innerheaderpanel"> <h2 class="emailstatus-header">Email System Status</h2>
<!---Add in header information here --> <div class="emailstatus-tablecontainer">
<!-- Table 1 -->
<table class="emailstatus-table">
<thead>
<tr>
<th colspan="2">Security & Filtering</th>
</tr>
</thead>
<tbody>
<!---Add in table1 information here -->
</tbody>
</table>
<table class="emailstatus-table">
<thead>
<tr>
<th colspan="2">Mail Traffic Statistics</th>
</tr>
</thead>
<tbody>
<!---Add in table2 information here -->
</tbody>
</table>
</div> </div>
</div> </div>
<br /> <br />

View File

@@ -111,9 +111,11 @@ except ImportError:
logging.warning("Matplotlib is not installed - no graphs") logging.warning("Matplotlib is not installed - no graphs")
enable_graphs = False; enable_graphs = False;
Mailstats_version = '1.2' Mailstats_version = '1.3'
build_date_time = "2024-06-18 12:03:40OURCE" build_date_time = "2024-06-18 12:03:40OURCE"
build_date_time = build_date_time[:19] #Take out crap that sneaks in. #Take out the crap that sneaks in...
build_date_time = build_date_time[:19]
Mailstats_version = Mailstats_version[:6]
#if build_date_time == "2024-06-18 12:03:40OURCE": #if build_date_time == "2024-06-18 12:03:40OURCE":
# build_date_time = "Unknown" # build_date_time = "Unknown"
@@ -123,7 +125,6 @@ data_file_path = script_dir+'/../..' #back to the top
now = datetime.now() now = datetime.now()
yesterday = now - timedelta(days=1) yesterday = now - timedelta(days=1)
formatted_yesterday = yesterday.strftime("%Y-%m-%d") formatted_yesterday = yesterday.strftime("%Y-%m-%d")
#html_page_path = data_file_path+"/home/e-smith/files/ibays/mesdb/html/mailstats/"
html_page_dir = data_file_path+"/opt/mailstats/html/" html_page_dir = data_file_path+"/opt/mailstats/html/"
template_dir = data_file_path+"/opt/mailstats/templates/" template_dir = data_file_path+"/opt/mailstats/templates/"
logs_dir = data_file_path+"/opt/mailstats/logs/" logs_dir = data_file_path+"/opt/mailstats/logs/"
@@ -173,100 +174,145 @@ def replace_bracket_content(input_filename, output_filename):
def get_logs_from_Journalctl(date='yesterday'): def get_logs_from_Journalctl(date='yesterday'):
# JSON-pretty output example from journalctl # JSON-pretty output example from journalctl
# { # {
# "__CURSOR" : "s=21b4f015be0c4f1fb71ac439a8365ee7;i=385c;b=dd778625547f4883b572daf53ae93cd4;m=ca99d6d;t=62d6316802b05;x=71b24e9f19f3b99a", # "__CURSOR" : "s=21b4f015be0c4f1fb71ac439a8365ee7;i=385c;b=dd778625547f4883b572daf53ae93cd4;m=ca99d6d;t=62d6316802b05;x=71b24e9f19f3b99a",
# "__REALTIME_TIMESTAMP" : "1738753462774533", # "__REALTIME_TIMESTAMP" : "1738753462774533",
# "__MONOTONIC_TIMESTAMP" : "212442477", # "__MONOTONIC_TIMESTAMP" : "212442477",
# "_BOOT_ID" : "dd778625547f4883b572daf53ae93cd4", # "_BOOT_ID" : "dd778625547f4883b572daf53ae93cd4",
# "_MACHINE_ID" : "f20b7edad71a44e59f9e9b68d4870b19", # "_MACHINE_ID" : "f20b7edad71a44e59f9e9b68d4870b19",
# "PRIORITY" : "6", # "PRIORITY" : "6",
# "SYSLOG_FACILITY" : "3", # "SYSLOG_FACILITY" : "3",
# "_UID" : "0", # "_UID" : "0",
# "_GID" : "0", # "_GID" : "0",
# "_SYSTEMD_SLICE" : "system.slice", # "_SYSTEMD_SLICE" : "system.slice",
# "_CAP_EFFECTIVE" : "1ffffffffff", # "_CAP_EFFECTIVE" : "1ffffffffff",
# "_TRANSPORT" : "stdout", # "_TRANSPORT" : "stdout",
# "_COMM" : "openssl", # "_COMM" : "openssl",
# "_EXE" : "/usr/bin/openssl", # "_EXE" : "/usr/bin/openssl",
# "_HOSTNAME" : "sme11.thereadclan.me.uk", # "_HOSTNAME" : "sme11.thereadclan.me.uk",
# "_STREAM_ID" : "8bb0ef8920af4ae09b424a2e30abcdf7", # "_STREAM_ID" : "8bb0ef8920af4ae09b424a2e30abcdf7",
# "SYSLOG_IDENTIFIER" : "qpsmtpd-init", # "SYSLOG_IDENTIFIER" : "qpsmtpd-init",
# "MESSAGE" : "Generating DH parameters, 2048 bit long safe prime, generator 2", # "MESSAGE" : "Generating DH parameters, 2048 bit long safe prime, generator 2",
# "_PID" : "2850", # "_PID" : "2850",
# } # }
# and the return from here: # and the return from here:
# { # {
# '_TRANSPORT': 'stdout', 'PRIORITY': 6, 'SYSLOG_FACILITY': 3, '_CAP_EFFECTIVE': '0', '_SYSTEMD_SLICE': 'system.slice', # '_TRANSPORT': 'stdout', 'PRIORITY': 6, 'SYSLOG_FACILITY': 3, '_CAP_EFFECTIVE': '0', '_SYSTEMD_SLICE': 'system.slice',
# '_BOOT_ID': UUID('465c6202-36ac-4a8b-98e9-1581e8fec68f'), '_MACHINE_ID': UUID('f20b7eda-d71a-44e5-9f9e-9b68d4870b19'), # '_BOOT_ID': UUID('465c6202-36ac-4a8b-98e9-1581e8fec68f'), '_MACHINE_ID': UUID('f20b7eda-d71a-44e5-9f9e-9b68d4870b19'),
# '_HOSTNAME': 'sme11.thereadclan.me.uk', '_STREAM_ID': '06c860deea374544a2b561f55394d728', 'SYSLOG_IDENTIFIER': 'qpsmtpd-forkserver', # '_HOSTNAME': 'sme11.thereadclan.me.uk', '_STREAM_ID': '06c860deea374544a2b561f55394d728', 'SYSLOG_IDENTIFIER': 'qpsmtpd-forkserver',
# '_UID': 453, '_GID': 453, '_COMM': 'qpsmtpd-forkser', '_EXE': '/usr/bin/perl', # '_UID': 453, '_GID': 453, '_COMM': 'qpsmtpd-forkser', '_EXE': '/usr/bin/perl',
# '_CMDLINE': '/usr/bin/perl -Tw /usr/bin/qpsmtpd-forkserver -u qpsmtpd -l 0.0.0.0 -p 25 -c 40 -m 5', # '_CMDLINE': '/usr/bin/perl -Tw /usr/bin/qpsmtpd-forkserver -u qpsmtpd -l 0.0.0.0 -p 25 -c 40 -m 5',
# '_SYSTEMD_CGROUP': '/system.slice/qpsmtpd.service', '_SYSTEMD_UNIT': 'qpsmtpd.service', # '_SYSTEMD_CGROUP': '/system.slice/qpsmtpd.service', '_SYSTEMD_UNIT': 'qpsmtpd.service',
# '_SYSTEMD_INVOCATION_ID': 'a2b7889a307748daaeb60173d31c5e0f', '_PID': 93647, # '_SYSTEMD_INVOCATION_ID': 'a2b7889a307748daaeb60173d31c5e0f', '_PID': 93647,
# 'MESSAGE': '93647 Connection from localhost [127.0.0.1]', # 'MESSAGE': '93647 Connection from localhost [127.0.0.1]',
# '__REALTIME_TIMESTAMP': datetime.datetime(2025, 4, 2, 0, 1, 11, 668929), # '__REALTIME_TIMESTAMP': datetime.datetime(2025, 4, 2, 0, 1, 11, 668929),
# '__MONOTONIC_TIMESTAMP': journal.Monotonic(timestamp=datetime.timedelta(11, 53118, 613602), # '__MONOTONIC_TIMESTAMP': journal.Monotonic(timestamp=datetime.timedelta(11, 53118, 613602),
# bootid=UUID('465c6202-36ac-4a8b-98e9-1581e8fec68f')), # bootid=UUID('465c6202-36ac-4a8b-98e9-1581e8fec68f')),
# '__CURSOR': 's=21b4f015be0c4f1fb71ac439a8365ee7;i=66d2c;b=465c620236ac4a8b98e91581e8fec68f;m=e9a65ed862;t= # '__CURSOR': 's=21b4f015be0c4f1fb71ac439a8365ee7;i=66d2c;b=465c620236ac4a8b98e91581e8fec68f;m=e9a65ed862;t=
# } # }
""" """
Retrieve and parse journalctl logs for a specific date and units, Retrieve and parse journalctl logs for a specific date and units,
returning them as a sorted list of dictionaries. returning them as a sorted list of dictionaries.
""" """
try:
# Parse the input date to calculate the start and end of the day
if date.lower() == "yesterday":
target_date = datetime.now() - timedelta(days=1)
else:
target_date = datetime.strptime(date, "%Y-%m-%d")
# Define the time range for the specified date def to_us(ts):
since = target_date.strftime("%Y-%m-%d 00:00:00") # Convert a journal timestamp (datetime or int/string microseconds) to integer microseconds
until = target_date.strftime("%Y-%m-%d 23:59:59") if ts is None:
return None
if hasattr(ts, "timestamp"):
return int(ts.timestamp() * 1_000_000)
try:
return int(ts)
except Exception:
return None
# Convert times to microseconds for querying try:
since_microseconds = int(datetime.strptime(since, "%Y-%m-%d %H:%M:%S").timestamp() * 1_000_000) # Parse the input date to calculate start and end of the day
until_microseconds = int(datetime.strptime(until, "%Y-%m-%d %H:%M:%S").timestamp() * 1_000_000) if isinstance(date, str) and date.lower() == "yesterday":
target_date = datetime.now() - timedelta(days=1)
elif isinstance(date, datetime):
target_date = date
else:
# Supports either a datetime.date-like object (has year attr) or a string YYYY-MM-DD
try:
target_date = datetime(date.year, date.month, date.day)
except Exception:
target_date = datetime.strptime(str(date), "%Y-%m-%d")
# Open the systemd journal # Define the time range for the specified date
j = journal.Reader() since_dt = datetime(target_date.year, target_date.month, target_date.day, 0, 0, 0, 0)
until_dt = datetime(target_date.year, target_date.month, target_date.day, 23, 59, 59, 999999)
since_microseconds = int(since_dt.timestamp() * 1_000_000)
until_microseconds = int(until_dt.timestamp() * 1_000_000)
# Set filters for units # Open the systemd journal (system-only if supported)
j.add_match(_SYSTEMD_UNIT="qpsmtpd.service") try:
j.add_match(_SYSTEMD_UNIT="uqpsmtpd.service") j = journal.Reader(flags=journal.SYSTEM_ONLY)
j.add_match(_SYSTEMD_UNIT="sqpsmtpd.service") except Exception:
j = journal.Reader()
# Filter by time range # Set filters for units (multiple add_match on same field => OR)
j.seek_realtime(since_microseconds // 1_000_000) # Convert back to seconds for seeking j.add_match(_SYSTEMD_UNIT="qpsmtpd.service")
j.add_match(_SYSTEMD_UNIT="uqpsmtpd.service")
j.add_match(_SYSTEMD_UNIT="sqpsmtpd.service")
# Retrieve logs within the time range # Filter by time range: seek to the start of the interval
logs = [] j.seek_realtime(since_dt)
log_count = 0
error_count = 0
for entry in j:
try:
entry_timestamp = entry.get('__REALTIME_TIMESTAMP', None)
entry_microseconds = int(entry_timestamp.timestamp() * 1_000_000)
if entry_timestamp and since_microseconds <= entry_microseconds <= until_microseconds:
log_count += 1
# takeout ASCII Escape sequences from the message
entry['MESSAGE'] = strip_ansi_codes(entry['MESSAGE'])
logs.append(entry)
except Exception as e:
logging.warning(f"Error - log line: {log_count} {entry['_PID']} {entry['SYSLOG_IDENTIFIER']} : {e}")
error_count += 1
if error_count:
logging.info(f"Had {error_count} errors on journal import - probably non character bytes")
# Sort logs by __REALTIME_TIMESTAMP in ascending order
sorted_logs = sorted(logs, key=lambda x: x.get("__REALTIME_TIMESTAMP", 0))
return sorted_logs # Retrieve logs within the time range
logs = []
log_count = 0
error_count = 0
except Exception as e: for entry in j:
logging.error(f"Unexpected error: {e}") try:
return {} entry_timestamp = entry.get("__REALTIME_TIMESTAMP", None)
entry_microseconds = to_us(entry_timestamp)
if entry_microseconds is None:
continue
# Early stop once we pass the end of the window
if entry_microseconds > until_microseconds:
break
if entry_microseconds >= since_microseconds:
log_count += 1
# Strip ANSI escape sequences in MESSAGE (if present and is text/bytes)
try:
msg = entry.get("MESSAGE", "")
if isinstance(msg, (bytes, bytearray)):
msg = msg.decode("utf-8", "replace")
# Only call strip if ESC is present
if "\x1b" in msg:
msg = strip_ansi_codes(msg)
entry["MESSAGE"] = msg
except Exception as se:
# Keep original message, just note the issue at debug level
logging.debug(f"strip_ansi_codes failed: {se}")
logs.append(entry)
except Exception as e:
# Be defensive getting context fields to avoid raising inside logging
pid = entry.get("_PID", "?") if isinstance(entry, dict) else "?"
ident = entry.get("SYSLOG_IDENTIFIER", "?") if isinstance(entry, dict) else "?"
logging.warning(f"Error - log line: {log_count} {pid} {ident} : {e}")
error_count += 1
if error_count:
logging.info(f"Had {error_count} errors on journal import - probably non character bytes")
# Sort logs by __REALTIME_TIMESTAMP in ascending order (keep original behavior)
sorted_logs = sorted(logs, key=lambda x: to_us(x.get("__REALTIME_TIMESTAMP")) or 0)
logging.debug(f"Collected {len(sorted_logs)} entries for {since_dt.date()} "
f"between {since_dt} and {until_dt} (scanned {log_count} in-window)")
return sorted_logs
except Exception as e:
logging.error(f"Unexpected error: {e}")
return {}
def transform_to_dict(data, keys, iso_date): def transform_to_dict(data, keys, iso_date):
""" """
@@ -409,23 +455,28 @@ def create_graph(data_dict, graph_type="line", output_file="graph.png",iso_date=
# return data # return data
def save_summaries_to_db(cursor, conn, date_str, hour, parsed_data): def save_summaries_to_db(cursor, conn, date_str, hour, parsed_data):
# Convert parsed_data to JSON string
global count_records_to_db global count_records_to_db
json_data = json.dumps(parsed_data) json_data = json.dumps(parsed_data)
# Insert the record
insert_query = """ insert_query = """
INSERT INTO SummaryLogs (Date, Hour, logData) INSERT INTO SummaryLogs (Date, Hour, logData)
VALUES (%s, %s, %s) VALUES (%s, %s, %s)
""" """
try: try:
# Check if the cursor is open (pymysql has no explicit is_closed; handle by try/except)
cursor.execute(insert_query, (date_str, hour, json_data)) cursor.execute(insert_query, (date_str, hour, json_data))
conn.commit() conn.commit()
count_records_to_db += 1 count_records_to_db += 1
except pymysql.Error as err: except pymysql.Error as err:
logging.error(f"DB Error {date_str} {hour} : {err}") # Handle cursor closed or other DB errors
if 'closed' in str(err).lower():
logging.error(f"DB Error {date_str} {hour} : Cursor is closed. Check connection handling.")
else:
logging.error(f"DB Error {date_str} {hour} : {err}")
conn.rollback() conn.rollback()
except Exception as ex:
logging.error(f"Unexpected DB Error {date_str} {hour} : {ex}")
conn.rollback()
def is_running_under_thonny(): def is_running_under_thonny():
# Check for the 'THONNY_USER_DIR' environment variable # Check for the 'THONNY_USER_DIR' environment variable
@@ -785,7 +836,123 @@ def split_timestamp_and_data(log_entry: str) -> list:
rest_of_line = log_entry # If no match, return the whole line rest_of_line = log_entry # If no match, return the whole line
return [timestamp, rest_of_line] return [timestamp, rest_of_line]
def render_sub_table(table_title, table_headers, found_values, get_character=None, suppress_threshold=False): MIN_COUNT = 3 # Hide entries with count < 5
MAX_TOTAL_ROWS = 10 # Total rows INCLUDING "Other"
OTHER_TARGET_FRAC = 0.01 # Strictly less than 1%
OTHER_LABEL = 'Other'
SHOW_ALL = True # Set True to show all entries >= MIN_COUNT, no "Other" row
def select_rows_just_below(items, min_count=MIN_COUNT,
max_total_rows=MAX_TOTAL_ROWS,
other_target_frac=OTHER_TARGET_FRAC,
other_label=OTHER_LABEL, show_all=SHOW_ALL):
"""
Build rows with percentages of total (0..100).
- If show_all is True: show all entries with count >= min_count, no 'Other', ignore caps and 1% target.
- If show_all is False: pick as many top entries (count >= min_count) as needed so that
'Other' is strictly < other_target_frac (if possible), always include 'Other(n)',
and respect max_total_rows (including 'Other').
Output rows preserve original extra fields for selected entries.
The percent is written to field index 2 (replacing it if present, or appended if not).
"""
# Normalize items to a list while preserving original rows
def to_rows(seq):
if isinstance(seq, dict):
# Convert dict to rows without extras
return [(k, v) for k, v in seq.items()]
rows_ = []
for it in seq:
if isinstance(it, (tuple, list)) and len(it) >= 2:
rows_.append(tuple(it)) # store as tuple
else:
raise TypeError("Each item must be a (key, count, ...) tuple/list or a dict mapping key->count.")
return rows_
def set_percent(row, pct_value):
# Return a tuple like the input row but with percent inserted at index 2 (0..100 number, rounded)
pct_value = round(pct_value, 2)
r = list(row)
if len(r) >= 3:
r[2] = pct_value
else:
r.append(pct_value)
return tuple(r)
rows_in = to_rows(items)
total = sum(r[1] for r in rows_in)
if total == 0:
return ([(f"{other_label}(0)", 0, 0.0)] if not show_all else []), 0, "No data."
# Filter by min_count and sort by count desc
eligible = [r for r in rows_in if r[1] >= min_count]
eligible.sort(key=lambda r: r[1], reverse=True)
if show_all:
# Show all eligible rows, no 'Other', ignore caps/target; compute percent (0..100) per row
rows_out = [set_percent(r, (r[1] / total) * 100.0) for r in eligible]
return rows_out, total, None
#logging.info(f"{show_all}")
# Leave room for the "Other" row
max_top_cap = max(0, max_total_rows - 1)
# Find smallest number of top rows so that Other is strictly < target
cum = 0
needed_top = None
for i, r in enumerate(eligible, start=1):
cum += r[1]
other_frac = (total - cum) / total
if other_frac < other_target_frac:
needed_top = i
break
notes = []
if needed_top is None:
# Even after including all eligible, Other >= target
final_top = min(len(eligible), max_top_cap)
#if final_top < len(eligible):
#notes.append(f"Row cap prevents adding enough rows to push Other below {other_target_frac*100:.2f}%.")
#else:
#notes.append(f"Cannot push Other below {other_target_frac*100:.2f}% with MIN_COUNT={min_count}.")
else:
# Apply cap
if needed_top > max_top_cap:
final_top = max_top_cap
#notes.append(
# f"Row cap prevents reaching Other < {other_target_frac*100:.2f}%; "
# f"need {needed_top} rows but only {max_top_cap} allowed before Other."
#)
else:
final_top = needed_top
top = eligible[:final_top]
shown_sum = sum(r[1] for r in top)
other_count = total - shown_sum
other_percent = (other_count / total) * 100.0
# Count how many rows are aggregated into Other: everything not in 'top'
other_rows_count = len(rows_in) - len(top)
# Build output: preserve extras; write percent at index 2 as a numeric percent 0..100
rows_out = [set_percent(r, (r[1] / total) * 100.0) for r in top]
# Build the Other row with percent; no extra fields beyond the percent
rows_out.append((f"{other_label}({other_rows_count})", other_count, round(other_percent, 2)))
#if other_percent >= other_target_frac * 100.0:
#notes.append(
# f"Other is {other_percent:.2f}%, which is not strictly below {other_target_frac*100:.2f}% "
# f"(MIN_COUNT={min_count}, MAX_TOTAL_ROWS={max_total_rows})."
#)
return rows_out, total, " ".join(notes) if notes else None
def render_sub_table(table_title, table_headers, found_values, get_character=None, show_all=True):
#Check if any data provided #Check if any data provided
if len(found_values) != 0: if len(found_values) != 0:
# Get the total # Get the total
@@ -829,30 +996,36 @@ def render_sub_table(table_title, table_headers, found_values, get_character=Non
raise ValueError("found_values must be either a list of numbers or a list of dictionaries.") raise ValueError("found_values must be either a list of numbers or a list of dictionaries.")
else: else:
raise TypeError("found_values must be a dictionary or a list.") raise TypeError("found_values must be a dictionary or a list.")
# # Dynamic threshold calculation
# if not suppress_threshold:
# dynamic_threshold = max(1, 100 / (original_total**0.65)) if original_total > 0 else 0
# dynamic_threshold = round(dynamic_threshold,1)
# logging.debug(f"Threshold for {table_title} set to {dynamic_threshold}% ")
# else:
# dynamic_threshold=0
# absolute_floor = 10 # Minimum absolute value threshold
# # Filter results using early termination
# filtered_sub_result = []
# for row in sub_result:
# value = row[1]
# percentage = (value / original_total * 100) if original_total else 0
# # Exit condition: below both thresholds
# if percentage < dynamic_threshold or value < absolute_floor:
# break
# filtered_sub_result.append(row)
# sub_result = filtered_sub_result # Keep only significant rows
sub_result.sort(key=lambda x: float(x[1]), reverse=True) # Sort by percentage in descending order sub_result.sort(key=lambda x: float(x[1]), reverse=True) # Sort by percentage in descending order
if not show_all:
# Dynamic threshold calculation sub_result, total, note = select_rows_just_below(sub_result,show_all=False)
if not suppress_threshold:
dynamic_threshold = max(1, 100 / (original_total**0.5)) if original_total > 0 else 0
dynamic_threshold = round(dynamic_threshold,1)
logging.debug(f"Threshold for {table_title} set to {dynamic_threshold}% ")
else: else:
dynamic_threshold=0 note = "" #no threshold applied
absolute_floor = 50 # Minimum absolute value threshold total = original_total
# Filter results using early termination
filtered_sub_result = []
for row in sub_result:
value = row[1]
percentage = (value / original_total * 100) if original_total else 0
# Exit condition: below both thresholds
if percentage < dynamic_threshold and value < absolute_floor:
break
filtered_sub_result.append(row)
sub_result = filtered_sub_result # Keep only significant rows
sub_template_path = template_dir+'mailstats-sub-table.html.pt' sub_template_path = template_dir+'mailstats-sub-table.html.pt'
# Load the template # Load the template
@@ -865,7 +1038,7 @@ def render_sub_table(table_title, table_headers, found_values, get_character=Non
try: try:
rendered_html = template(array_2d=sub_result, column_headers=table_headers, rendered_html = template(array_2d=sub_result, column_headers=table_headers,
title=table_title, classname=get_first_word(table_title), title=table_title, classname=get_first_word(table_title),
threshold=dynamic_threshold) threshold=note)
except Exception as e: except Exception as e:
raise ValueError(f"{table_title}: A chameleon controller render error occurred: {e}") raise ValueError(f"{table_title}: A chameleon controller render error occurred: {e}")
except Exception as e: except Exception as e:
@@ -975,6 +1148,9 @@ def replace_between(text, start, end, replacement):
replaced_text = re.sub(pattern, replacement, text, flags=re.DOTALL) replaced_text = re.sub(pattern, replacement, text, flags=re.DOTALL)
return replaced_text return replaced_text
def assemble_heading_row(label,value):
return f"<tr><td>{label}</td><td>{value}</td><tr>"
def get_heading(): def get_heading():
# #
# Needs from anaytsis # Needs from anaytsis
@@ -992,44 +1168,50 @@ def get_heading():
# Clam Version/DB Count/Last DB update # Clam Version/DB Count/Last DB update
clam_output = subprocess.getoutput("freshclam -V") clam_output = subprocess.getoutput("freshclam -V")
clam_info = f"Clam Version/DB Count/Last DB update: {clam_output}" clam_info = assemble_heading_row("Clam Version/DB Count/Last DB update:", clam_output)
# SpamAssassin Version # SpamAssassin Version
sa_output = subprocess.getoutput("spamassassin -V") sa_output = subprocess.getoutput("spamassassin -V")
sa_info = f"SpamAssassin Version: {sa_output}" sa_info = assemble_heading_row("SpamAssassin Version: ",sa_output)
# Tag level and Reject level # Tag level and Reject level
tag_reject_info = f"Tag level: {SATagLevel}; Reject level: {SARejectLevel} {warnnoreject}" tag_reject_info = assemble_heading_row("Tag level:",SATagLevel)
tag_reject_info += assemble_heading_row("Reject level: ",f"{SARejectLevel} {warnnoreject}")
# SMTP connection stats # SMTP connection stats
smtp_stats = f"External SMTP connections accepted: {totalexternalsmtpsessions}\n"\ smtp_stats = assemble_heading_row("External SMTP connections accepted:",totalexternalsmtpsessions)
f"Internal SMTP connections accepted: {totalinternalsmtpsessions}" smtp_stats += assemble_heading_row("Internal SMTP connections accepted:",totalinternalsmtpsessions)
if len(connection_type_counts)>0: if len(connection_type_counts)>0:
for connection_type in connection_type_counts.keys(): for connection_type in connection_type_counts.keys():
smtp_stats += f"\nCount of {connection_type} connections: {connection_type_counts[connection_type]}" smtp_stats += assemble_heading_row(f"\nCount of {connection_type} connections:",connection_type_counts[connection_type])
if len(total_ports)>0: if len(total_ports)>0:
for port_number in total_ports.keys(): for port_number in total_ports.keys():
smtp_stats += f"\nCount of port {port_number} connections: {total_ports[port_number]}" smtp_stats += assemble_heading_row(f"\nCount of port {port_number} connections: ",total_ports[port_number])
smtp_stats = smtp_stats + f"\nEmails per hour: {emailperhour:.1f}/hr\n"\ rows = [
f"Average spam score (accepted): {spamavg or 0:.2f}\n"\ assemble_heading_row("Emails per hour:", f"{(emailperhour if emailperhour is not None else 0):.1f}/hr"),
f"Average spam score (rejected): {rejectspamavg or 0:.2f}\n"\ assemble_heading_row("Average spam score (accepted):", f"{(spamavg if spamavg is not None else 0):.2f}"),
f"Average ham score: {hamavg or 0:.2f}\n"\ assemble_heading_row("Average spam score (rejected):", f"{(rejectspamavg if rejectspamavg is not None else 0):.2f}"),
f"Number of DMARC reporting emails sent: {DMARCSendCount or 0} (not shown on table)" assemble_heading_row("Average ham score:", f"{(hamavg if hamavg is not None else 0):.2f}"),
assemble_heading_row("Number of DMARC reporting emails sent:", f"{DMARCSendCount if DMARCSendCount is not None else 0} (not shown on table)"),
]
smtp_stats += " ".join(rows) # or "\n".join(rows) if assemble_heading_row doesnt add its own newline
# DMARC approved emails # DMARC approved emails
dmarc_info = "" dmarc_info = ""
if hamcount != 0: if hamcount != 0:
dmarc_ok_percentage = DMARCOkCount * 100 / hamcount dmarc_ok_percentage = DMARCOkCount * 100 / hamcount
dmarc_info = f"Number of emails approved through DMARC: {DMARCOkCount or 0} ({dmarc_ok_percentage:.2f}% of Ham count)" dmarc_info = assemble_heading_row("Number of emails approved through DMARC:",f"{DMARCOkCount or 0} ({dmarc_ok_percentage:.2f}% of Ham count)")
# Accumulate all strings # Accumulate all strings
header_str = "\n".join([clam_info, sa_info, tag_reject_info, smtp_stats, dmarc_info]) #header_str = "<br />".join([clam_info, sa_info, tag_reject_info, smtp_stats, dmarc_info])
# switch newlines to <br /> # switch newlines to <br />
header_str = header_str.replace("\n","<br />") #header_str = header_str.replace("\n","<br />")
return header_str header_str1 = clam_info + sa_info + tag_reject_info
header_str2 = smtp_stats + dmarc_info
return header_str1,header_str2
def scan_mail_users(): def scan_mail_users():
# #
@@ -1128,10 +1310,62 @@ def display_keys_and_values(data):
raise ValueError("Input must be a list of dictionaries or a list of lists.") raise ValueError("Input must be a list of dictionaries or a list of lists.")
def extract_blacklist_domain(text, bl_lists=None):
    """
    Compare 'text' against comma-separated blacklist URL strings and
    return the first matching entry, or "" when nothing matches.

    Match is done on the exact hostname OR the base domain (eTLD+1),
    so 'black.uribl.com' will match text containing 'lookup.uribl.com'.

    Parameters:
        text: value scanned for a blacklist hostname (coerced to str;
            None becomes the empty string).
        bl_lists: optional iterable of comma-separated blacklist
            strings. Defaults to the module globals RBLList, SBLList
            and UBLList (set from the qpsmtpd configuration).
    """
    s = text if isinstance(text, str) else str(text or "")
    s_lower = s.lower()
    logging.debug(f"extract blacklist called:{text}")
    if bl_lists is None:
        # Backward-compatible default: use the configured global lists.
        bl_lists = [RBLList, SBLList, UBLList]
    combined = ",".join(bl_lists)

    def hostname_from(sval: str) -> str:
        # Reduce an entry to a bare hostname: strip scheme, path, port,
        # leading dots and a '*.' wildcard prefix.
        sval = (sval or "").strip().lower()
        if "://" in sval:
            # Strip scheme using simple split to avoid needing urlparse
            sval = sval.split("://", 1)[1]
        # Strip path and port if present
        sval = sval.split("/", 1)[0]
        sval = sval.split(":", 1)[0]
        # Remove leading wildcards/dots
        sval = sval.lstrip(".")
        if sval.startswith("*."):
            sval = sval[2:]
        return sval

    def base_domain(hostname: str) -> str:
        # Crude eTLD+1: special-case common UK second-level registries.
        parts = hostname.split(".")
        if len(parts) >= 3 and parts[-2] in ("co", "org", "gov", "ac") and parts[-1] == "uk":
            return ".".join(parts[-3:])
        if len(parts) >= 2:
            return ".".join(parts[-2:])
        return hostname

    def boundary_re(term: str):
        # Match term only when not embedded inside a larger domain label.
        return re.compile(r"(?<![A-Za-z0-9-])" + re.escape(term) + r"(?![A-Za-z0-9-])")

    for part in combined.split(","):
        entry = part.strip()
        logging.debug(f"Comparing: {entry}")
        if not entry:
            continue
        entry_host = hostname_from(entry)
        entry_base = base_domain(entry_host)
        # 1) Try matching the full entry host (e.g., black.uribl.com)
        if entry_host and boundary_re(entry_host).search(s_lower):
            return entry
        # 2) Fallback: match by base domain (e.g., uribl.com) to catch lookup.uribl.com, etc.
        if entry_base and boundary_re(entry_base).search(s_lower):
            return entry
    return ""
def set_log_level(level): def set_log_level(level):
"""Dynamically adjust logging level (e.g., 'DEBUG', 'INFO', 'ERROR').""" """Dynamically adjust logging level (e.g., 'DEBUG', 'INFO', 'ERROR')."""
@@ -1145,6 +1379,41 @@ def format_duration(seconds: float) -> str:
return str(timedelta(seconds=seconds)) return str(timedelta(seconds=seconds))
DB_CONFIG_PATH = '/etc/mailstats/db.php'

def parse_php_config(path):
    """Extract 'key' => 'value' pairs from a PHP config file.

    Reads the file as plain text and pulls out every single-quoted
    key/value pair with a regex (no PHP interpreter needed). Returns a
    dict of the pairs found; on any read/parse failure the error is
    logged and an empty dict is returned.
    """
    pair_re = re.compile(r"'(\w+)'\s*=>\s*'([^']*)'")
    try:
        with open(path, 'r') as handle:
            text = handle.read()
        return {m.group(1): m.group(2) for m in pair_re.finditer(text)}
    except Exception as exc:
        logging.error(f"Could not parse PHP config file: {exc}")
        return {}
def load_db_config():
    """Resolve MySQL connection settings for mailstats.

    The MAILSTATS_DB_HOST/USER/PASS/NAME environment variables take
    precedence; when user, password or database name is missing, the
    readable PHP config at DB_CONFIG_PATH fills in the gaps.

    Returns:
        (host, user, password, name) tuple.
    Raises:
        RuntimeError: when user, password or name cannot be determined
        from either source.
    """
    host = os.environ.get('MAILSTATS_DB_HOST', 'localhost')
    user = os.environ.get('MAILSTATS_DB_USER', '')
    password = os.environ.get('MAILSTATS_DB_PASS', '')
    name = os.environ.get('MAILSTATS_DB_NAME', '')
    if not (user and password and name):
        # Fall back to the shared PHP config used by the web pages.
        if os.path.isfile(DB_CONFIG_PATH) and os.access(DB_CONFIG_PATH, os.R_OK):
            cfg = parse_php_config(DB_CONFIG_PATH)
            host = cfg.get('host', host)
            user = cfg.get('user', user)
            password = cfg.get('pass', password)
            name = cfg.get('name', name)
        if not (user and password and name):
            logging.error('DB credentials missing (env and config file).')
            raise RuntimeError('DB credentials missing (env and config file)')
    return host, user, password, name
if __name__ == "__main__": if __name__ == "__main__":
start_time = datetime.now() start_time = datetime.now()
try: try:
@@ -1228,18 +1497,17 @@ if __name__ == "__main__":
count_records_to_db = 0; count_records_to_db = 0;
# Db save control # Db save control
saveData = get_value(ConfigDB,"mailstats","SaveDataToMySQL","no") == 'yes' or forceDbSave saveData = get_value(ConfigDB,"mailstats","SaveDataToMySQL","yes") == 'yes' or forceDbSave
logging.debug(f"Save Mailstats to DB set:{saveData} ") logging.debug(f"Save Mailstats to DB set:{saveData} ")
if saveData: if saveData:
# Connect to MySQL DB for saving # Database config retrieval
DBName = "mailstats" try:
DBHost = get_value(ConfigDB, 'mailstats', 'DBHost', "localhost") DBHost, DBUser, DBPassw, DBName = load_db_config()
DBPort = int(get_value(ConfigDB, 'mailstats', 'DBPort', "3306")) # Ensure port is an integer DBPort = 3306 # If you want configurability, load this from config too
DBPassw = 'mailstats' UnixSocket = "/var/lib/mysql/mysql.sock"
DBUser = 'mailstats' except RuntimeError as err:
UnixSocket = "/var/lib/mysql/mysql.sock" logging.error(f"Database config error: {err}")
saveData = False
# Try to establish a database connection # Try to establish a database connection
try: try:
conn = pymysql.connect( conn = pymysql.connect(
@@ -1249,7 +1517,7 @@ if __name__ == "__main__":
database=DBName, database=DBName,
port=DBPort, port=DBPort,
unix_socket=UnixSocket, unix_socket=UnixSocket,
cursorclass=pymysql.cursors.DictCursor # Optional: use DictCursor for dict output cursorclass=pymysql.cursors.DictCursor
) )
cursor = conn.cursor() cursor = conn.cursor()
# Check if the table exists before creating it # Check if the table exists before creating it
@@ -1257,47 +1525,50 @@ if __name__ == "__main__":
cursor.execute(check_table_query) cursor.execute(check_table_query)
table_exists = cursor.fetchone() table_exists = cursor.fetchone()
if not table_exists: if not table_exists:
# Create table if it doesn't exist
cursor.execute(""" cursor.execute("""
CREATE TABLE IF NOT EXISTS SummaryLogs ( CREATE TABLE IF NOT EXISTS SummaryLogs (
id INT AUTO_INCREMENT PRIMARY KEY, id INT AUTO_INCREMENT PRIMARY KEY,
Date DATE, Date DATE,
Hour INT, Hour INT,
logData TEXT logData TEXT
) )
""") """)
# Delete existing records for the given date # Delete existing records for the given date
try: try:
delete_query = """ delete_query = """
DELETE FROM SummaryLogs DELETE FROM SummaryLogs
WHERE Date = %s WHERE Date = %s
""" """
cursor.execute(delete_query, (analysis_date,)) # Don't forget the extra comma for tuple cursor.execute(delete_query, (analysis_date,))
# Get the number of records deleted
rows_deleted = cursor.rowcount rows_deleted = cursor.rowcount
if rows_deleted > 0: if rows_deleted > 0:
logging.debug(f"Deleted {rows_deleted} rows for {analysis_date} ") logging.debug(f"Deleted {rows_deleted} rows for {analysis_date}")
except pymysql.Error as e: except pymysql.Error as e:
logging.error(f"SQL Delete failed ({delete_query}) ({e}) ") logging.error(f"SQL Delete failed ({delete_query}) ({e})")
# Commit changes & close resources after all DB operations
conn.commit()
#cursor.close()
#conn.close()
except pymysql.Error as e: except pymysql.Error as e:
logging.error(f"Unable to connect to {DBName} on {DBHost} port {DBPort} error ({e}) ") logging.error(f"Unable to connect to {DBName} on {DBHost} port {DBPort} error ({e})")
saveData = False saveData = False
nolinks = not saveData nolinks = not saveData
# Not sure we need these... # Needed to identify blacklist used to reject emails.
# if (ConfigDB,"qpsmtpd","RHSBL").lower() == 'enabled': if get_value(ConfigDB,"qpsmtpd","RHSBL").lower() == 'enabled':
# RBLList = get_value(ConfigDB,"qpsmtpd","RBLList") RBLList = get_value(ConfigDB,"qpsmtpd","RBLList")
# else: else:
# RBLList = "" RBLList = ""
# if (ConfigDB,"qpsmtpd","RBLList").lower() == 'enabled': if get_value(ConfigDB,"qpsmtpd","DNSBL").lower() == 'enabled':
# SBLLIst = get_value(ConfigDB,"qpsmtpd","SBLLIst") SBLList = get_value(ConfigDB,"qpsmtpd","SBLList")
# else: else:
# RBLList = "" SBLList = ""
# if (ConfigDB,"qpsmtpd","RBLList").lower() == 'enabled': if get_value(ConfigDB,"qpsmtpd","URIBL").lower() == 'enabled':
# UBLList = get_value(ConfigDB,"qpsmtpd","UBLLIst") UBLList = get_value(ConfigDB,"qpsmtpd","UBLList")
# else: else:
# RBLList = "" UBLList = ""
FetchmailIP = '127.0.0.200'; #Apparent Ip address of fetchmail deliveries FetchmailIP = '127.0.0.200'; #Apparent Ip address of fetchmail deliveries
WebmailIP = '127.0.0.1'; #Apparent Ip of Webmail sender WebmailIP = '127.0.0.1'; #Apparent Ip of Webmail sender
@@ -1526,13 +1797,17 @@ if __name__ == "__main__":
rejReason = match.group(1) rejReason = match.group(1)
found_qpcodes[parsed_data['error-plugin']+"-"+rejReason] += 1 found_qpcodes[parsed_data['error-plugin']+"-"+rejReason] += 1
else: else:
found_qpcodes[parsed_data['action1']] += 1 if parsed_data['action1'] == "":
logging.warning(f"Found blank action1 {timestamp} {parsed_data['id']} {parsed_data['ip']} {parsed_data['sendurl']}")
else:
found_qpcodes[parsed_data['action1']] += 1
#Check for blacklist rejection #Check for blacklist rejection
error_plugin = parsed_data['error-plugin'].strip() error_plugin = parsed_data['error-plugin'].strip()
if error_plugin == 'rhsbl' or error_plugin == 'dnsbl': if error_plugin == 'rhsbl' or error_plugin == 'dnsbl':
blacklist_domain = extract_blacklist_domain(parsed_data['sender']) blacklist_domain = extract_blacklist_domain(parsed_data['sender'])
blacklist_found[blacklist_domain] += 1 if blacklist_domain:
blacklist_found[blacklist_domain] += 1
#Log the recipients and deny or accept and spam-tagged counts #Log the recipients and deny or accept and spam-tagged counts
# Try to find an existing record for the email # Try to find an existing record for the email
@@ -1561,6 +1836,8 @@ if __name__ == "__main__":
else: else:
email = None email = None
if email: if email:
if '@' in email:
email = email.lower()
record = next((item for item in recipients_found if item['email'] == email), None) record = next((item for item in recipients_found if item['email'] == email), None)
if not record: if not record:
# If email is not in the array, we add it # If email is not in the array, we add it
@@ -1673,6 +1950,7 @@ if __name__ == "__main__":
try: try:
match = geoip_pattern.match(data['MESSAGE']) match = geoip_pattern.match(data['MESSAGE'])
if match: if match:
logging.debug(f"Found bad country message {data['MESSAGE']} {match.group(1)} ")
j += 1 j += 1
country = match.group(1) country = match.group(1)
found_countries[country] += 1 found_countries[country] += 1
@@ -1770,15 +2048,17 @@ if __name__ == "__main__":
total_html = rendered_html total_html = rendered_html
# Add in the header information # Add in the header information
header_rendered_html = get_heading() header_rendered_html1,header_rendered_html2 = get_heading()
total_html = insert_string_after(total_html,header_rendered_html, "<!---Add in header information here -->") total_html = insert_string_after(total_html,header_rendered_html1, "<!---Add in table1 information here -->")
total_html = insert_string_after(total_html,header_rendered_html2, "<!---Add in table2 information here -->")
header_rendered_html = header_rendered_html1 + header_rendered_html2
#add in the subservient tables..(remeber they appear in the reverse order of below!) #add in the subservient tables..(remeber they appear in the reverse order of below!)
#virus codes #virus codes
virus_headers = ["Virus",'Count','Percent'] virus_headers = ["Virus",'Count','Percent']
virus_title = 'Viruses found' virus_title = 'Viruses found'
virus_rendered_html = render_sub_table(virus_title,virus_headers,found_viruses,suppress_threshold=True) virus_rendered_html = render_sub_table(virus_title,virus_headers,found_viruses)
# Add it to the total # Add it to the total
total_html = insert_string_after(total_html,virus_rendered_html, "<!---Add in sub tables here -->") total_html = insert_string_after(total_html,virus_rendered_html, "<!---Add in sub tables here -->")
@@ -1794,7 +2074,7 @@ if __name__ == "__main__":
junk_mail_count_headers = ['Username','Count', 'Percent'] junk_mail_count_headers = ['Username','Count', 'Percent']
junk_mail_counts = scan_mail_users() junk_mail_counts = scan_mail_users()
junk_mail_count_title = 'Junk mail counts' junk_mail_count_title = 'Junk mail counts'
junk_rendered_html = render_sub_table(junk_mail_count_title,junk_mail_count_headers,junk_mail_counts,suppress_threshold=True) junk_rendered_html = render_sub_table(junk_mail_count_title,junk_mail_count_headers,junk_mail_counts)
# Add it to the total # Add it to the total
total_html = insert_string_after(total_html,junk_rendered_html, "<!---Add in sub tables here -->") total_html = insert_string_after(total_html,junk_rendered_html, "<!---Add in sub tables here -->")
@@ -1802,21 +2082,21 @@ if __name__ == "__main__":
#Recipient counts #Recipient counts
recipient_count_headers = ["Email",'Queued','Rejected','Spam tagged','Accepted Percent'] recipient_count_headers = ["Email",'Queued','Rejected','Spam tagged','Accepted Percent']
recipient_count_title = 'Incoming email recipients' recipient_count_title = 'Incoming email recipients'
recipient_rendered_html = render_sub_table(recipient_count_title,recipient_count_headers,recipients_found,suppress_threshold=True) recipient_rendered_html = render_sub_table(recipient_count_title,recipient_count_headers,recipients_found)
# Add it to the total # Add it to the total
total_html = insert_string_after(total_html,recipient_rendered_html, "<!---Add in sub tables here -->") total_html = insert_string_after(total_html,recipient_rendered_html, "<!---Add in sub tables here -->")
#Geoip Country codes #Geoip Country codes
geoip_headers = ['Country','Count','Percent','Rejected?'] geoip_headers = ['Country','Count','Percent','Rejected?']
geoip_title = 'Geoip results' geoip_title = 'Geoip results'
geoip_rendered_html = render_sub_table(geoip_title,geoip_headers,found_countries,get_character_in_reject_list) geoip_rendered_html = render_sub_table(geoip_title,geoip_headers,found_countries,get_character_in_reject_list,show_all=False)
# Add it to the total # Add it to the total
total_html = insert_string_after(total_html,geoip_rendered_html, "<!---Add in sub tables here -->") total_html = insert_string_after(total_html,geoip_rendered_html, "<!---Add in sub tables here -->")
#Blacklist counts #Blacklist counts
blacklist_headers = ['URL','Count','Percent'] blacklist_headers = ['URL','Count','Percent']
blacklist_title = 'Blacklist used' blacklist_title = 'Blacklist used'
blacklist_rendered_html = render_sub_table(blacklist_title,blacklist_headers,blacklist_found,suppress_threshold=True) blacklist_rendered_html = render_sub_table(blacklist_title,blacklist_headers,blacklist_found)
# Add it to the total # Add it to the total
total_html = insert_string_after(total_html,blacklist_rendered_html, "<!---Add in sub tables here -->") total_html = insert_string_after(total_html,blacklist_rendered_html, "<!---Add in sub tables here -->")

15
root/usr/bin/runallmailstats.sh Executable file
View File

@@ -0,0 +1,15 @@
#!/bin/bash
# Run runmailstats.sh once for every day from the start of the qpsmtpd
# journal up to and including yesterday.

# Extract the earliest date from the journalctl header for qpsmtpd service
# NOTE(review): depends on journalctl printing a "Logs begin at ..." header
# line first — confirm against the systemd version in use.
earliest_date=$(journalctl -u qpsmtpd | head -n 1 | sed -n 's/.*Logs begin at [A-Za-z]* \([0-9-]*\).*/\1/p')

# Guard: an empty date would make the string comparison below always true,
# invoking runmailstats.sh with a blank argument and looping forever.
if [[ -z "$earliest_date" ]]; then
    echo "Could not determine earliest journal date for qpsmtpd" >&2
    exit 1
fi

# Get yesterday's date
yesterday=$(date -d 'yesterday' +%F)

current_date="$earliest_date"
# Loop from earliest date to yesterday (inclusive). ISO YYYY-MM-DD dates
# compare correctly as strings.
while [[ "$current_date" < "$yesterday" || "$current_date" == "$yesterday" ]]; do
    runmailstats.sh "$current_date"
    current_date=$(date -I -d "$current_date + 1 day")
done

View File

@@ -1,17 +0,0 @@
#!/bin/bash
# Daily mailstats driver: run the legacy perl report over the qpsmtpd log
# files, convert SME10-style logs, filter yesterday's entries and feed them
# to the python mailstats implementation.
#exec 1> >(logger -t $(basename $0)) 2>&1
perl /usr/bin/mailstats.pl /var/log/qpsmtpd/\@* /var/log/qpsmtpd/current /var/log/sqpsmtpd/\@* /var/log/sqpsmtpd/current
# and run new python one - start by copying and decoding log files
# NOTE(review): '%mm' is not a valid month directive ('%m' is); this value is
# only referenced by the commented-out grep below, so the assignment is
# effectively dead — it is redefined with '%b %d' before real use.
yesterday_date=$(date -d "yesterday" +'%mm %d')
#cd /var/log/qpsmtpd
#cat \@* current >/opt/mailstats/logs/current1 2>/dev/null
#cd /var/log/sqpsmtpd
#cat \@* current >/opt/mailstats/logs/current2 2>/dev/null
cd /opt/mailstats/logs
#cat current1 current2 2>/dev/null | /usr/local/bin/tai64nlocal | grep "$yesterday_date" > current1.log
python3 /usr/bin/mailstats-convert-log-sme10-to-sme11.py
# Keep only yesterday's lines ('%b %d', e.g. 'Sep 12') from the converted log.
yesterday_date=$(date -d "yesterday" +'%b %d')
cat output_log.txt | grep "$yesterday_date" | sort >current.log
ls -l
python3 /usr/bin/mailstats.py
echo "Done"

View File

@@ -77,7 +77,7 @@
<div class=dbwanted> <div class=dbwanted>
<!--
<h2 class='subh2'><%=l('mst_Details_for_connection_to_database')%></h2> <h2 class='subh2'><%=l('mst_Details_for_connection_to_database')%></h2>
<p><span class=label> <p><span class=label>
@@ -108,7 +108,7 @@
% param 'DBPassword' => $mst_data->{DBPassword} unless param 'DBPassword'; % param 'DBPassword' => $mst_data->{DBPassword} unless param 'DBPassword';
%=password_field 'DBPassword', class => 'pass13 sme-password', autocomplete => 'off' %=password_field 'DBPassword', class => 'pass13 sme-password', autocomplete => 'off'
</span></p> </span></p>
-->
</div> </div>

View File

@@ -6,13 +6,17 @@ Summary: Daily mail statistics for SME Server
%define name smeserver-mailstats %define name smeserver-mailstats
Name: %{name} Name: %{name}
%define version 11.1 %define version 11.1
%define release 4 %define release 10
%define full_version %{version}.%{release}
Version: %{version} Version: %{version}
Release: %{release}%{?dist} Release: %{release}%{?dist}
License: GPL License: GPL
Group: SME/addon Group: SME/addon
Source: %{name}-%{version}.tgz Source: %{name}-%{version}.tgz
%global _binaries_in_noarch_packages_terminate_build 0
%global debug_package %{nil}
BuildRoot: /var/tmp/%{name}-%{version}-%{release}-buildroot BuildRoot: /var/tmp/%{name}-%{version}-%{release}-buildroot
BuildArchitectures: noarch BuildArchitectures: noarch
Requires: smeserver-release => 9.0 Requires: smeserver-release => 9.0
@@ -25,16 +29,91 @@ Requires: python36
# So install as: dnf install smeserver-mailstats --enablerepo=epel,smecontribs # So install as: dnf install smeserver-mailstats --enablerepo=epel,smecontribs
Requires: html2text Requires: html2text
Requires: python3-chameleon Requires: python3-chameleon
Requires: python3-mysql Requires: python3-mysql
Requires: python3-matplotlib Requires: python3-matplotlib
Requires: python3-pip Requires: python3-pip
Requires: systemd-libs
AutoReqProv: no AutoReqProv: no
%description %description
A script that via cron.d e-mails mail statistics to admin on a daily basis. A script that via cron.d e-mails mail statistics to admin on a daily basis.
See http://www.contribs.org/bugzilla/show_bug.cgi?id=819 See https://wiki.koozali.org/mailstats
%prep
%setup
%build
perl createlinks
%install
/bin/rm -rf $RPM_BUILD_ROOT
(cd root ; /usr/bin/find . -depth -print | /bin/cpio -dump $RPM_BUILD_ROOT)
chmod +x $RPM_BUILD_ROOT/usr/bin/runmailstats.sh
now=$(date +"%Y-%m-%d %H:%M:%S")
# Replace placeholders in the Python program using sed
perl -pi -e 'if (!$done && s/^Mailstats_version *=.*/Mailstats_version = '\''%{full_version}'\'/') { $done = 1 }' $RPM_BUILD_ROOT/usr/bin/mailstats.py
perl -pi -e 'if (!$done && s/^build_date_time *=.*/build_date_time = "'"$now"'"/) { $done = 1 }' $RPM_BUILD_ROOT/usr/bin/mailstats.py
/bin/rm -f %{name}-%{version}-filelist
/sbin/e-smith/genfilelist --file '/etc/mailstats/db.php' 'attr(0640, root, apache)' $RPM_BUILD_ROOT | grep -v "\.pyc" | grep -v "\.pyo" > %{name}-%{version}-filelist
install -Dpm 0755 journalwrap %{buildroot}%{_bindir}/journalwrap
%pre
/usr/bin/pip3 install -q pymysql
/usr/bin/pip3 install -q numpy
/usr/bin/pip3 install -q pandas
%clean
/bin/rm -rf $RPM_BUILD_ROOT
%files -f %{name}-%{version}-filelist
%defattr(-,root,root)
#%attr(0640, root, apache) %config(noreplace) /etc/mailstats/db.php
%{_bindir}/journalwrap
#%{_libdir}/libjournalwrap.so
%post
/sbin/ldconfig
#Remove www from systemd-journal group as is potential security risk
gpasswd -d www systemd-journal
# and set setuid bit for c wrapper called from log detail web page
chmod u+s /usr/bin/journalwrap
%postun
/sbin/ldconfig
%changelog %changelog
* Fri Sep 12 2025 Brian Read <brianr@koozali.org> 11.1-10.sme
- Fix version and build date from spec file [SME: 13121]
* Fri Sep 12 2025 Brian Read <brianr@koozali.org> 11.1-8.sme
- Remove www from systemd-journal group and setuid bit in journal wrapper [SME: 13121]
* Fri Sep 12 2025 Brian Read <brianr@koozali.org> 11.1-7.sme
- Truncate Geoip table and add other category [SME: 13121]
- Cope with blank data in action1 [SME: 13121]
* Thu Sep 04 2025 Brian Read <brianr@koozali.org> 11.1-6.sme
- Add favicon to mailstats table, summary and detailed pages [SME: 13121]
- Bring DB config reading for mailstats itself inline with php summary and detailed logs - using /etc/mailstats/db.php [SME: 13121]
- Remove DB config fields from the SM2 config panel [SME: 13121]
- Arrange for password to be generated and mailstats user to be set with limited permissions [SME: 13121]
* Tue Sep 02 2025 Brian Read <brianr@koozali.org> 11.1-5.sme
- Speed up Journal access [SME: 13121]
- Fix missing blacklist URL [SME: 13121]
- Add extra security to php show summary page [SME: 13121]
- Fix up CSS for Summary Page [SME: 13121]
- Get Detail logs page working and prettyfy [SME: 13121]
- Add in C wrapper source code to interrogate journal [SME: 13121]
- Get permission and ownership right for /etc/mailstats/db.php [SME: 13121]
- Refactor main table header into two tables side by side [SME: 13121]
* Mon Sep 01 2025 Brian Read <brianr@koozali.org> 11.1-4.sme * Mon Sep 01 2025 Brian Read <brianr@koozali.org> 11.1-4.sme
- More fixes for Journal bytes instead of characters [SME: 13117] - More fixes for Journal bytes instead of characters [SME: 13117]
@@ -117,34 +196,3 @@ See http://www.contribs.org/bugzilla/show_bug.cgi?id=819
* Sat May 26 2012 Brian J read <brianr@bjsystems.co.uk> 1.0-1.sme * Sat May 26 2012 Brian J read <brianr@bjsystems.co.uk> 1.0-1.sme
- Initial version - Initial version
%prep
%setup
%build
perl createlinks
%install
/bin/rm -rf $RPM_BUILD_ROOT
(cd root ; /usr/bin/find . -depth -print | /bin/cpio -dump $RPM_BUILD_ROOT)
chmod +x $RPM_BUILD_ROOT/usr/bin/runmailstats.sh
# Define the placeholder and generate the current date and time
now=$(date +"%Y-%m-%d %H:%M:%S")
# Replace the placeholder in the Python program located at %{BUILDROOT}/usr/bin
sed -i "s|__BUILD_DATE_TIME__|$now|" $RPM_BUILD_ROOT/usr/bin/mailstats.py
/bin/rm -f %{name}-%{version}-filelist
/sbin/e-smith/genfilelist $RPM_BUILD_ROOT | grep -v "\.pyc" | grep -v "\.pyo" > %{name}-%{version}-filelist
%pre
/usr/bin/pip3 install -q pymysql
/usr/bin/pip3 install -q numpy
/usr/bin/pip3 install -q pandas
/usr/bin/pip3 install -q plotly
%clean
/bin/rm -rf $RPM_BUILD_ROOT
%files -f %{name}-%{version}-filelist
%defattr(-,root,root)