16 Commits

Author SHA1 Message Date
72e7f2a5c5 * Fri Sep 12 2025 Brian Read <brianr@koozali.org> 11.1-10.sme
- Fix version and build date from spec file  [SME: 13121]
2025-09-13 08:56:14 +01:00
248bbed240 * Fri Sep 12 2025 Brian Read <brianr@koozali.org> 11.1-9.sme
- Fix version and build date from spec file  [SME: 13121]
2025-09-12 18:20:15 +01:00
a9dbafc584 * Fri Sep 12 2025 Brian Read <brianr@koozali.org> 11.1-8.sme
- Remove www from systemd-journal group and setuid bit in journal wrapper [SME: 13121]
2025-09-12 12:23:48 +01:00
55cb7a6f05 * Fri Sep 12 2025 Brian Read <brianr@koozali.org> 11.1-7.sme
- Truncate GeoIP table and add an 'Other' category [SME: 13121]
- Cope with blank data in action1 [SME: 13121]
2025-09-12 11:26:35 +01:00
1b757b1336 * Thu Sep 04 2025 Brian Read <brianr@koozali.org> 11.1-6.sme
- Add favicon to mailstats table, summary and detailed pages [SME: 13121]
- Bring DB config reading for mailstats itself inline with php summary and detailed logs - using /etc/mailstats/db.php [SME: 13121]
- Remove DB config fields from the SM2 config panel [SME: 13121]
- Arrange for password to be generated and mailstats user to be set with limited permissions [SME: 13121]
2025-09-08 15:24:18 +01:00
52b33e166a Sort out DB params access for mailstats, remove DB config from SM2 2025-09-07 09:18:39 +01:00
88bc38adf3 Add favicon to table, summary and details webpages 2025-09-04 19:28:36 +01:00
b070554fdd Get journal api wrapper working for detailed logs 2025-09-04 13:17:44 +01:00
2dd3d234df Add in two tables on header, sort out permission and ownership of params file 2025-09-04 10:04:25 +01:00
d94bf8e033 Get detail logs page working - WIP 2025-09-03 11:00:00 +01:00
5deb31cd92 Extra security for php part of mailstats web 2025-09-02 11:23:48 +01:00
f86021b8c9 Fix missing blacklist URLs from report 2025-09-02 10:17:26 +01:00
a77cb094df Optimise journal access speeding up processing 2025-09-02 08:48:48 +01:00
d81543187f * Mon Sep 01 2025 Brian Read <brianr@koozali.org> 11.1-4.sme
- More fixes for Journal bytes instead of characters [SME: 13117]
2025-09-01 15:29:28 +01:00
76ca0f528c * Mon Sep 01 2025 Brian Read <brianr@koozali.org> 11.1-3.sme
- Sort out ASCII escape codes in return from journalctl API  [SME: 13117]
- Add in Status enabled to default for mailstats DB [SME: 13118]
2025-09-01 10:37:44 +01:00
1858edc41c First build with Koji 2025-08-30 11:08:40 +01:00
24 changed files with 1509 additions and 526 deletions

.gitignore (vendored; 7 lines changed)

@@ -2,14 +2,7 @@
*.log
*spec-20*
*.tgz
current.*
*.xz
current1
current2
*.html
*.txt
accounts
configuration
domains
hosts
*el8*

additional/journalwrap (new executable file; binary, not shown)


@@ -6,8 +6,9 @@ $event = 'smeserver-mailstats-update';
#see that the /etc/systemd/system-preset/49-koozali.preset is present for systemd integration on every yum update event
foreach my $file (qw(
/etc/systemd/system-preset/49-koozali.preset
/etc/e-smith/sql/init/99smeserver-mailstats.sql
/etc/systemd/system-preset/49-koozali.preset
/etc/mailstats/db.php
/etc/e-smith/sql/init/99mailstats
/etc/httpd/conf/httpd.conf
))
{
@@ -20,7 +21,7 @@ event_link('systemd-reload', $event, '50');
#event_link('action', $event, '30');
#services we need to restart
safe_symlink('restart', "root/etc/e-smith/events/$event/services2adjust/httpd-e-smith");
safe_symlink("restart", "root/etc/e-smith/events/$event/services2adjust/mysql.init");;
#and Server Manager panel link
#panel_link('somefunction', 'manager');
templates2events("/etc/e-smith/sql/init/99smeserver-mailstats.sql", "post-upgrade");
#templates2events("/etc/e-smith/sql/init/99smeserver-mailstats.sql", "post-upgrade");

journalwrap.c (new file, 179 lines)

@@ -0,0 +1,179 @@
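/*
 * journalwrap: a small libsystemd wrapper. journal_get_by_pid(pid) returns
 * all local journal entries matching the given _PID as one newline-separated
 * string ("[timestamp] identifier: message" per line), capped at
 * MAX_OUTPUT_BYTES; journal_free() releases the buffer. Building with
 * -DBUILD_CLI adds a standalone command-line entry point (see main() below).
 */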
#include <systemd/sd-journal.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <stdint.h>
#include <errno.h>
#include <time.h>
#ifndef MAX_OUTPUT_BYTES
#define MAX_OUTPUT_BYTES (2 * 1000 * 1000) // 2 MB
#endif
static int append_bytes(char **buf, size_t *len, size_t *cap, const char *src, size_t n) {
if (*len + n + 1 > *cap) {
size_t newcap = (*cap == 0) ? 8192 : *cap;
while (*len + n + 1 > newcap) {
newcap *= 2;
if (newcap > (size_t)(MAX_OUTPUT_BYTES + 65536)) {
newcap = (size_t)(MAX_OUTPUT_BYTES + 65536);
break;
}
}
char *nbuf = realloc(*buf, newcap);
if (!nbuf) return -1;
*buf = nbuf; *cap = newcap;
}
memcpy(*buf + *len, src, n);
*len += n;
(*buf)[*len] = '\0';
return 0;
}
static int append_cstr(char **buf, size_t *len, size_t *cap, const char *s) {
return append_bytes(buf, len, cap, s, strlen(s));
}
static size_t min_size(size_t a, size_t b) { return a < b ? a : b; }
static void sanitize_text(char *s, size_t n) {
for (size_t i = 0; i < n; i++) if (s[i] == '\0') s[i] = ' ';
}
static void format_ts(char *out, size_t outsz, uint64_t usec) {
time_t sec = (time_t)(usec / 1000000ULL);
struct tm tm;
localtime_r(&sec, &tm);
strftime(out, outsz, "%Y-%m-%d %H:%M:%S", &tm);
}
static const char* field_value(const void *data, size_t len, const char *key, size_t *vlen) {
size_t klen = strlen(key);
if (len < klen + 1) return NULL;
const char *p = (const char *)data;
if (memcmp(p, key, klen) != 0 || p[klen] != '=') return NULL;
*vlen = len - (klen + 1);
return p + klen + 1;
}
static int append_entry_line(sd_journal *j, char **buf, size_t *len, size_t *cap) {
uint64_t usec = 0;
(void)sd_journal_get_realtime_usec(j, &usec);
char ts[32];
format_ts(ts, sizeof(ts), usec);
const void *data = NULL;
size_t dlen = 0;
const char *message = NULL;
size_t mlen = 0;
int r = sd_journal_get_data(j, "MESSAGE", &data, &dlen);
if (r >= 0) message = field_value(data, dlen, "MESSAGE", &mlen);
const char *ident = NULL;
size_t ilen = 0;
r = sd_journal_get_data(j, "SYSLOG_IDENTIFIER", &data, &dlen);
if (r >= 0) {
ident = field_value(data, dlen, "SYSLOG_IDENTIFIER", &ilen);
} else if (sd_journal_get_data(j, "_COMM", &data, &dlen) >= 0) {
ident = field_value(data, dlen, "_COMM", &ilen);
}
if (append_cstr(buf, len, cap, "[") < 0) return -1;
if (append_cstr(buf, len, cap, ts) < 0) return -1;
if (append_cstr(buf, len, cap, "] ") < 0) return -1;
if (ident && ilen > 0) {
if (append_bytes(buf, len, cap, ident, ilen) < 0) return -1;
if (append_cstr(buf, len, cap, ": ") < 0) return -1;
}
if (message && mlen > 0) {
char *tmp = malloc(mlen);
if (!tmp) return -1;
memcpy(tmp, message, mlen);
sanitize_text(tmp, mlen);
size_t to_copy = min_size(mlen, (size_t)(MAX_OUTPUT_BYTES > *len ? MAX_OUTPUT_BYTES - *len : 0));
int ok = append_bytes(buf, len, cap, tmp, to_copy);
free(tmp);
if (ok < 0) return -1;
} else {
const char *keys[] = {"PRIORITY","SYSLOG_IDENTIFIER","_COMM","_EXE","_CMDLINE","MESSAGE"};
for (size_t i = 0; i < sizeof(keys)/sizeof(keys[0]); i++) {
if (sd_journal_get_data(j, keys[i], &data, &dlen) < 0) continue;
if (append_cstr(buf, len, cap, (i == 0 ? "" : " ")) < 0) return -1;
if (append_bytes(buf, len, cap, (const char*)data, min_size(dlen, (size_t)(MAX_OUTPUT_BYTES - *len))) < 0) return -1;
}
}
if (*len < MAX_OUTPUT_BYTES) {
if (append_cstr(buf, len, cap, "\n") < 0) return -1;
}
return 0;
}
static char* journal_get_by_pid_impl(int pid) {
if (pid <= 0) { char *z = malloc(1); if (z) z[0] = '\0'; return z; }
sd_journal *j = NULL;
if (sd_journal_open(&j, SD_JOURNAL_LOCAL_ONLY) < 0) {
char *z = malloc(1); if (z) z[0] = '\0'; return z;
}
char match[64];
snprintf(match, sizeof(match), "_PID=%d", pid);
if (sd_journal_add_match(j, match, 0) < 0) {
sd_journal_close(j);
char *z = malloc(1); if (z) z[0] = '\0'; return z;
}
sd_journal_seek_head(j);
char *buf = NULL; size_t len = 0, cap = 0;
int r;
while ((r = sd_journal_next(j)) > 0) {
if (len >= MAX_OUTPUT_BYTES) break;
if (append_entry_line(j, &buf, &len, &cap) < 0) {
free(buf); sd_journal_close(j); return NULL;
}
}
if (len >= MAX_OUTPUT_BYTES) {
const char *trunc = "[output truncated]\n";
(void)append_bytes(&buf, &len, &cap, trunc, strlen(trunc));
}
if (!buf) { buf = malloc(1); if (!buf) { sd_journal_close(j); return NULL; } buf[0] = '\0'; }
sd_journal_close(j);
return buf;
}
#ifdef __GNUC__
__attribute__((visibility("default")))
#endif
char* journal_get_by_pid(int pid) { return journal_get_by_pid_impl(pid); }
#ifdef __GNUC__
__attribute__((visibility("default")))
#endif
void journal_free(char* p) { free(p); }
#ifdef BUILD_CLI
static int parse_pid(const char *s, int *out) {
if (!s || !*s) return -1;
char *end = NULL;
errno = 0;
long v = strtol(s, &end, 10);
if (errno != 0 || end == s || *end != '\0' || v <= 0 || v > 0x7fffffffL) return -1;
*out = (int)v; return 0;
}
int main(int argc, char **argv) {
if (argc != 2) { fprintf(stderr, "Usage: %s <pid>\n", argv[0]); return 2; }
int pid = 0;
if (parse_pid(argv[1], &pid) != 0) { fprintf(stderr, "Invalid pid\n"); return 2; }
char *out = journal_get_by_pid_impl(pid);
if (!out) { fprintf(stderr, "Out of memory or error\n"); return 1; }
fputs(out, stdout);
free(out);
return 0;
}
#endif
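
A plausible way to build both forms of the wrapper (the shared library used by the PHP FFI path, and the CLI fallback binary) is sketched below; the flags and output names are assumptions chosen to match what ShowDetailedLogs.php expects, not taken from the spec file:

# shared library for the PHP FFI path (assumed name libjournalwrap.so)
gcc -O2 -fPIC -shared -o libjournalwrap.so journalwrap.c $(pkg-config --cflags --libs libsystemd)
# standalone CLI fallback (assumed install path /usr/bin/journalwrap)
gcc -O2 -DBUILD_CLI -o journalwrap journalwrap.c $(pkg-config --cflags --libs libsystemd)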


@@ -0,0 +1 @@
enabled


@@ -0,0 +1 @@
report


@@ -0,0 +1,16 @@
{
use MIME::Base64 qw(encode_base64);
my $rec = $DB->get('mailstats') || $DB->new_record('mailstats', {type => 'report'});
my $pw = $rec->prop('DBPass');
return "" if $pw;
my $length = shift || 16;
my @chars = ('A'..'Z', 'a'..'z', 0..9, qw(! @ $ % ^ & * ? _ - + =));
$pw = '';
$pw .= $chars[rand @chars] for 1..$length;
$rec->set_prop('DBPass', $pw);
return ""
}
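
Once this fragment has expanded, the generated password persists as the DBPass property on the mailstats configuration record; on an SME/Koozali system it can then be inspected with the standard e-smith tooling (assuming the usual CLI is installed):

config show mailstats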


@@ -0,0 +1,24 @@
{
my $db = $mailstats{DBName} || 'mailstats';
my $user = $mailstats{DBUser} || 'mailstats_rw';
my $pass = $mailstats{DBPass} || 'changeme';
$OUT .= <<END
#! /bin/sh
if [ -d /var/lib/mysql/mailstats ]; then
exit
fi
/usr/bin/mariadb <<EOF
CREATE DATABASE $db DEFAULT CHARACTER SET utf8 COLLATE utf8_general_ci;
USE $db;
CREATE TABLE IF NOT EXISTS SummaryLogs (
id INT AUTO_INCREMENT PRIMARY KEY,
Date DATE,
Hour INT,
logData TEXT
);
CREATE USER $user@localhost IDENTIFIED BY '$pass';
GRANT SELECT, INSERT, UPDATE, DELETE ON $db.* TO $user@localhost;
FLUSH PRIVILEGES;
EOF
END
}
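
After the event has run the generated script, a quick sanity check is to list the tables it created (a sketch assuming the mariadb client and the default database name):

/usr/bin/mariadb mailstats -e 'SHOW TABLES;'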


@@ -1,97 +0,0 @@
CREATE DATABASE IF NOT EXISTS `mailstats`;
USE `mailstats`;
CREATE TABLE IF NOT EXISTS `ColumnStats` (
`ColumnStatsid` int(11) NOT NULL auto_increment,
`dateid` int(11) NOT NULL default '0',
`timeid` int(11) NOT NULL default '0',
`descr` varchar(20) NOT NULL default '',
`count` bigint(20) NOT NULL default '0',
`servername` varchar(30) NOT NULL default '',
PRIMARY KEY (`ColumnStatsid`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
CREATE TABLE IF NOT EXISTS `JunkMailStats` (
`JunkMailstatsid` int(11) NOT NULL auto_increment,
`dateid` int(11) NOT NULL default '0',
`user` varchar(12) NOT NULL default '',
`count` bigint(20) NOT NULL default '0',
`servername` varchar(30) default NULL,
PRIMARY KEY (`JunkMailstatsid`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
CREATE TABLE IF NOT EXISTS `SARules` (
`SARulesid` int(11) NOT NULL auto_increment,
`dateid` int(11) NOT NULL default '0',
`rule` varchar(50) NOT NULL default '',
`count` bigint(20) NOT NULL default '0',
`totalhits` bigint(20) NOT NULL default '0',
`servername` varchar(30) NOT NULL default '',
PRIMARY KEY (`SARulesid`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
CREATE TABLE IF NOT EXISTS `SAscores` (
`SAscoresid` int(11) NOT NULL auto_increment,
`dateid` int(11) NOT NULL default '0',
`acceptedcount` bigint(20) NOT NULL default '0',
`rejectedcount` bigint(20) NOT NULL default '0',
`hamcount` bigint(20) NOT NULL default '0',
`acceptedscore` decimal(20,2) NOT NULL default '0.00',
`rejectedscore` decimal(20,2) NOT NULL default '0.00',
`hamscore` decimal(20,2) NOT NULL default '0.00',
`totalsmtp` bigint(20) NOT NULL default '0',
`totalrecip` bigint(20) NOT NULL default '0',
`servername` varchar(30) NOT NULL default '',
PRIMARY KEY (`SAscoresid`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
CREATE TABLE IF NOT EXISTS `VirusStats` (
`VirusStatsid` int(11) NOT NULL auto_increment,
`dateid` int(11) NOT NULL default '0',
`descr` varchar(40) NOT NULL default '',
`count` bigint(20) NOT NULL default '0',
`servername` varchar(30) NOT NULL default '',
PRIMARY KEY (`VirusStatsid`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
CREATE TABLE IF NOT EXISTS `date` (
`dateid` int(11) NOT NULL auto_increment,
`date` date NOT NULL default '0000-00-00',
PRIMARY KEY (`dateid`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
CREATE TABLE IF NOT EXISTS `domains` (
`domainsid` int(11) NOT NULL auto_increment,
`dateid` int(11) NOT NULL default '0',
`domain` varchar(40) NOT NULL default '',
`type` varchar(10) NOT NULL default '',
`total` bigint(20) NOT NULL default '0',
`denied` bigint(20) NOT NULL default '0',
`xfererr` bigint(20) NOT NULL default '0',
`accept` bigint(20) NOT NULL default '0',
`servername` varchar(30) NOT NULL default '',
PRIMARY KEY (`domainsid`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
CREATE TABLE IF NOT EXISTS `qpsmtpdcodes` (
`qpsmtpdcodesid` int(11) NOT NULL auto_increment,
`dateid` int(11) NOT NULL default '0',
`reason` varchar(40) NOT NULL default '',
`count` bigint(20) NOT NULL default '0',
`servername` varchar(30) NOT NULL default '',
PRIMARY KEY (`qpsmtpdcodesid`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
CREATE TABLE IF NOT EXISTS `time` (
`timeid` int(11) NOT NULL auto_increment,
`time` time NOT NULL default '00:00:00',
PRIMARY KEY (`timeid`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
CREATE USER 'mailstats'@'localhost' IDENTIFIED BY 'mailstats';
GRANT ALL PRIVILEGES ON mailstats.* TO 'mailstats'@'localhost';
FLUSH PRIVILEGES;


@@ -1,6 +1,6 @@
{
# mailstats
my $status = $mailstats{'Status'} || 'disabled';
my $status = $mailstats{'status'} || 'disabled';
if ($status eq 'enabled')
{


@@ -0,0 +1,24 @@
{
# Open esmith::ConfigDB to read values from the configuration DB
my $cdb = esmith::ConfigDB->open() || die "Cannot open configuration DB\n";
# Get the fragment (report database definition)
my $report = $cdb->get('mailstats');
my $dbhost = $report->prop('DBHost') || 'localhost';
my $dbport = $report->prop('DBPort') || '3306';
my $dbuser = $report->prop('DBUser') || 'mailstats_rw';
# Assume password is stored in a property 'DBPass'
my $dbpass = $report->prop('DBPass') || 'changeme';
my $dbname = $report->key || 'mailstats';
$OUT = <<"END";
<?php
return [
'host' => '$dbhost',
'user' => '$dbuser',
'pass' => '$dbpass',
'name' => '$dbname',
];
END
}


@@ -0,0 +1,7 @@
<?php
return [
'host' => 'localhost',
'user' => 'mailstats', //Should be mailstat-ro
'pass' => 'mailstats', //Will be a random strong password
'name' => 'mailstats',
];


@@ -207,4 +207,141 @@ p.cssvalid,p.htmlvalid {float:left;margin-right:20px}
.maindiv {width:100%;overflow-x:auto;font-size:1cqw}
.traffictable {border-collapse:collapse;width:98%}
.divseeinbrowser{text-align:center;}
.bordercollapse{border-collapse:collapse;}
.bordercollapse{border-collapse:collapse;}
/* ==============================================
Summary Logs Section (scoped under .mailstats-summary)
============================================== */
.mailstats-summary .summary-container {
width: 100%;
overflow-x: auto;
font-size: 0.85vw;
}
/* Table styling */
.mailstats-summary .summary-table {
border-collapse: collapse;
width: 98%;
font-size: inherit;
}
.mailstats-summary .summary-table th {
text-align: left;
padding: 0.5em;
border-bottom: 2px solid #ddd;
background-color: #f8f8f8;
}
.mailstats-summary .summary-table td {
padding: 0.5em;
border-bottom: 1px solid #ddd;
word-break: break-word; /* Allows breaking long words at arbitrary points */
overflow-wrap: break-word; /* Modern standard for breaking long words */
hyphens: auto; /* Optionally adds hyphenation if supported */
}
/* Zebra striping */
.mailstats-summary .summary-table tbody tr:nth-child(even) {
background-color: #fafafa;
}
/* Pagination */
.mailstats-summary .pagination {
margin-top: 1em;
}
.mailstats-summary .pagination a {
text-decoration: none;
color: #0066cc;
padding: 0.3em 0.6em;
}
.mailstats-summary .pagination a:hover {
text-decoration: underline;
}
.mailstats-summary table.stripes {
border-collapse: collapse;
width: 95%;
overflow-x: auto;
margin: 0.6% auto;
}
/* Optional zebra striping */
.mailstats-summary table.stripes tbody tr:nth-child(even) {
background-color: #fafafa;
}
/* ==============================================
Log Detail Page (scoped under .mailstats-detail)
============================================== */
.mailstats-detail .detail-container {
width: 100%;
max-width: 1200px;
margin: 1em auto;
padding: 0 1em;
}
/* Preformatted log box */
.mailstats-detail .log {
white-space: pre-wrap;
word-wrap: break-word;
background: #111;
color: #eee;
padding: 1em;
border-radius: 6px;
font-family: monospace, monospace;
font-size: 0.75em;
line-height: 1.4;
overflow-x: auto;
}
/* Back link styling */
.mailstats-detail a {
color: #0066cc;
text-decoration: none;
}
.mailstats-detail a:hover {
text-decoration: underline;
}
/* ==============================================
Status header at top of table (scoped under emailstatus)
============================================== */
.emailstatus-wrapper {
font-family: Arial, sans-serif;
padding: 20px;
}
.emailstatus-header {
text-align: center;
margin-bottom: 20px;
}
.emailstatus-tablecontainer {
display: flex;
gap: 20px;
flex-wrap: wrap;
}
.emailstatus-table {
border-collapse: collapse;
min-width: 300px;
flex: 1 1 45%;
}
.emailstatus-table th {
background-color: #a9a9a9;
color: black;
text-align: left;
padding: 8px;
}
.emailstatus-table td {
padding: 8px;
border: 1px solid #ddd;
}
.emailstatus-table tr:nth-child(even) {
background-color: #f9f9f9;
}
@media (max-width: 768px) {
.emailstatus-tablecontainer {
flex-direction: column;
}
}


@@ -1,51 +1,244 @@
<?php
header('Content-Type: text/plain');
// Security headers
header('Content-Type: text/html; charset=UTF-8');
header("Content-Security-Policy: default-src 'self'; script-src 'none'; style-src 'self' 'unsafe-inline'; img-src 'self' data:; base-uri 'none'; object-src 'none'; frame-ancestors 'none'");
header('X-Content-Type-Options: nosniff');
header('Referrer-Policy: no-referrer');
header('Permissions-Policy: geolocation=(), microphone=(), camera=()');
header('Cache-Control: no-store, no-cache, must-revalidate, max-age=0');
header('Pragma: no-cache');
if (!empty($_SERVER['HTTPS']) && $_SERVER['HTTPS'] !== 'off') {
header('Strict-Transport-Security: max-age=31536000; includeSubDomains');
}
$input_param = isset($_GET['id']) ? $_GET['id'] : '9999';
function e($s) {
return htmlspecialchars((string)$s, ENT_QUOTES | ENT_SUBSTITUTE, 'UTF-8');
}
// Set the directory and file names
$directory = "/opt/mailstats/logs";
$files = ['current1', 'current2'];
// Configuration: env first, then fallback to optional file
$servername = getenv('MAILSTATS_DB_HOST') ?: 'localhost';
$username = getenv('MAILSTATS_DB_USER') ?: '';
$password = getenv('MAILSTATS_DB_PASS') ?: '';
$dbname = getenv('MAILSTATS_DB_NAME') ?: '';
function process_file($file_path, $input_param) {
$file = fopen($file_path, 'r');
$match = "/ $input_param /";
$endmatch = "/cleaning up after $input_param/";
while (($line = fgets($file)) !== false) {
// Check if the line contains the input_parameter
if (preg_match($match,$line) === 1) {
echo $line;
} elseif (preg_match($endmatch,$line) === 1) {
echo $line;
exit();
if ($username === '' || $password === '' || $dbname === '') {
$cfgPath = '/etc/mailstats/db.php'; // optional fallback config file
if (is_readable($cfgPath)) {
ob_start();
$cfg = include $cfgPath;
ob_end_clean();
$servername = $cfg['host'] ?? $servername;
$username = $cfg['user'] ?? $username;
$password = $cfg['pass'] ?? $password;
$dbname = $cfg['name'] ?? $dbname;
}
}
if ($username === '' || $password === '' || $dbname === '') {
error_log('DB credentials missing (env and config file).');
http_response_code(500);
exit('Service temporarily unavailable.');
}
// Input validation: id
$id = isset($_GET['id']) ? filter_var($_GET['id'], FILTER_VALIDATE_INT) : null;
if ($id === false || $id === null || $id < 1) {
http_response_code(400);
exit('Invalid id');
}
// DB connect with exceptions
mysqli_report(MYSQLI_REPORT_ERROR | MYSQLI_REPORT_STRICT);
try {
$conn = new mysqli($servername, $username, $password, $dbname);
$conn->set_charset('utf8mb4');
} catch (mysqli_sql_exception $e) {
error_log('DB connect failed: ' . $e->getMessage());
http_response_code(500);
exit('Service temporarily unavailable.');
}
// Fetch the record and extract PID from JSON logData
try {
$stmt = $conn->prepare('SELECT id, logData FROM SummaryLogs WHERE id = ?');
$stmt->bind_param('i', $id);
$stmt->execute();
$res = $stmt->get_result();
$row = $res->fetch_assoc();
$stmt->close();
} catch (mysqli_sql_exception $e) {
error_log('Query failed: ' . $e->getMessage());
http_response_code(500);
exit('Service temporarily unavailable.');
}
if (!$row) {
http_response_code(404);
exit('Record not found');
}
$logData = $row['logData'];
$pid = null;
$data = json_decode($logData, true, 512, JSON_INVALID_UTF8_SUBSTITUTE);
if (is_array($data)) {
foreach (['id','pid', 'PID', 'Pid', 'process_id', 'ProcessId'] as $k) {
if (isset($data[$k]) && (is_int($data[$k]) || ctype_digit((string)$data[$k]))) {
$pid = (int)$data[$k];
break;
}
}
fclose($file);
}
function tai64nToDate($tai64n) {
// Check if the input TAI64N string is valid
if (preg_match('/^@([0-9a-f]{8})([0-9a-f]{8})$/', $tai64n, $matches)) {
// First part: seconds since epoch
$sec_hex = $matches[1];
// Second part: nanoseconds in hex
$nsec_hex = $matches[2];
if (!$pid || $pid < 1) {
http_response_code(422);
exit('PID not found in this record');
}
// Convert hex to decimal
$seconds = hexdec($sec_hex);
$nanoseconds = hexdec($nsec_hex);
// Calculate the full timestamp in seconds
$timestamp = $seconds + ($nanoseconds / 1e9); // Nanoseconds to seconds
// Format timestamp to 'Y-m-d H:i:s'
return date('Y-m-d H:i:s', $timestamp);
} else {
throw new InvalidArgumentException("Invalid TAI64N format.");
// Journal retrieval using C wrapper
define('FFI_LIB', 'libjournalwrap.so'); // adjust if needed
define('WRAPPER_BIN', '/usr/bin/journalwrap'); // fallback executable path
define('MAX_OUTPUT_BYTES', 2_000_000); // 2MB safety cap
function getJournalByPidViaFFI(int $pid): ?string {
if (!extension_loaded('FFI')) {
return null;
}
try {
// Adjust the function signatures to match your wrapper
$ffi = FFI::cdef("
char* journal_get_by_pid(int pid);
void journal_free(char* p);
", FFI_LIB);
$cstr = $ffi->journal_get_by_pid($pid);
if ($cstr === null) {
return '';
}
$out = FFI::string($cstr);
$ffi->journal_free($cstr);
return $out;
} catch (Throwable $e) {
error_log('FFI journal wrapper failed: ' . $e->getMessage());
return null;
}
}
chdir($directory);
foreach ($files as $file) {
process_file($file, $input_param);
function getJournalByPidViaExec(int $pid): ?string {
// Fallback to an external wrapper binary (must be safe and not use shell)
$cmd = WRAPPER_BIN . ' ' . (string)$pid;
$descriptorspec = [
0 => ['pipe', 'r'],
1 => ['pipe', 'w'],
2 => ['pipe', 'w'],
];
$pipes = [];
$proc = proc_open($cmd, $descriptorspec, $pipes, null, null, ['bypass_shell' => true]);
if (!\is_resource($proc)) {
error_log('Failed to start journal wrapper binary');
return null;
}
fclose($pipes[0]); // no stdin
stream_set_blocking($pipes[1], false);
stream_set_blocking($pipes[2], false);
$stdout = '';
$stderr = '';
$start = microtime(true);
$timeout = 10.0; // seconds
$readChunk = 65536;
while (true) {
$status = proc_get_status($proc);
$running = $status['running'];
$read = [$pipes[1], $pipes[2]];
$write = null;
$except = null;
$tv_sec = 0;
$tv_usec = 300000; // 300ms
stream_select($read, $write, $except, $tv_sec, $tv_usec);
foreach ($read as $r) {
if ($r === $pipes[1]) {
$chunk = fread($pipes[1], $readChunk);
if ($chunk !== false && $chunk !== '') {
$stdout .= $chunk;
}
} elseif ($r === $pipes[2]) {
$chunk = fread($pipes[2], $readChunk);
if ($chunk !== false && $chunk !== '') {
$stderr .= $chunk;
}
}
}
if (!$running) {
break;
}
if ((microtime(true) - $start) > $timeout) {
proc_terminate($proc);
$stderr .= "\n[terminated due to timeout]";
break;
}
if (strlen($stdout) + strlen($stderr) > MAX_OUTPUT_BYTES) {
proc_terminate($proc);
$stderr .= "\n[terminated due to output size limit]";
break;
}
}
foreach ($pipes as $p) {
if (is_resource($p)) {
fclose($p);
}
}
$exitCode = proc_close($proc);
if ($exitCode !== 0 && $stderr !== '') {
error_log('journal wrapper stderr: ' . $stderr);
}
return $stdout;
}
$logs = getJournalByPidViaFFI($pid);
if ($logs === null) {
$logs = getJournalByPidViaExec($pid);
}
if ($logs === null) {
http_response_code(500);
exit('Unable to read journal for this PID');
}
// Safety cap to avoid rendering gigantic outputs
if (strlen($logs) > MAX_OUTPUT_BYTES) {
$logs = substr($logs, 0, MAX_OUTPUT_BYTES) . "\n[output truncated]";
}
// Done with DB
$conn->close();
?>
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<title>Log details for PID <?= e($pid) ?> (record <?= e($id) ?>)</title>
<link rel="stylesheet" type="text/css" href="css/mailstats.css" />
<link rel="icon" type="image/x-icon" href="favicon.ico">
</head>
<body>
<div class="mailstats-detail">
<div class="detail-container">
<h1>Log details for PID <?= e($pid) ?> (record <?= e($id) ?>)</h1>
<p><a href="javascript:history.back()">Back</a></p>
<pre class="log"><?= e($logs) ?></pre>
</div>
</div>
</body>
</html>
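
A quick manual test of the detail page, assuming it is served from the mailstats web root (the URL path is an assumption; only the script name and the id parameter come from the code above):

curl -k 'https://localhost/mailstats/ShowDetailedLogs.php?id=1'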

Binary image file not shown (4.0 KiB).


@@ -1,102 +1,192 @@
<?php
// Database configuration
$servername = "localhost";
$username = "mailstats";
$password = "mailstats";
$dbname = "mailstats";
// Default date to yesterday
$date = isset($_GET['date']) ? $_GET['date'] : date('Y-m-d', strtotime('-1 day'));
// Default hour to 99 (means all the hours)
$hour = isset($_GET['hour']) ? $_GET['hour'] : 99;
// Create connection
$conn = new mysqli($servername, $username, $password, $dbname);
// Check connection
if ($conn->connect_error) {
die("Connection failed: " . $conn->connect_error);
// Set security headers (must be sent before output)
header('Content-Type: text/html; charset=UTF-8');
header("Content-Security-Policy: default-src 'self'; script-src 'none'; style-src 'self' 'unsafe-inline'; img-src 'self' data:; base-uri 'none'; object-src 'none'; frame-ancestors 'none'");
header('X-Content-Type-Options: nosniff');
header('Referrer-Policy: no-referrer');
header('Permissions-Policy: geolocation=(), microphone=(), camera=()');
header('Cache-Control: no-store, no-cache, must-revalidate, max-age=0');
header('Pragma: no-cache');
if (!empty($_SERVER['HTTPS']) && $_SERVER['HTTPS'] !== 'off') {
header('Strict-Transport-Security: max-age=31536000; includeSubDomains');
}
// Prepare and execute the query
if ($hour == 99){
$sql = "SELECT * FROM SummaryLogs WHERE Date = ?";
$stmt = $conn->prepare($sql);
$stmt->bind_param("s", $date);
} else {
$sql = "SELECT * FROM SummaryLogs WHERE Date = ? AND Hour = ?";
$stmt = $conn->prepare($sql);
$stmt->bind_param("si", $date, $hour);
// Helper for safe HTML encoding
function e($s) {
return htmlspecialchars((string)$s, ENT_QUOTES | ENT_SUBSTITUTE, 'UTF-8');
}
// Configuration: read DB credentials from environment
$servername = getenv('MAILSTATS_DB_HOST') ?: '';
$username = getenv('MAILSTATS_DB_USER') ?: '';
$password = getenv('MAILSTATS_DB_PASS') ?: '';
$dbname = getenv('MAILSTATS_DB_NAME') ?: '';
// Otherwise try config in /etc/mailstats
if ($username === '' || $password === '' || $dbname === '') {
$cfgPath = '/etc/mailstats/db.php';
if (is_readable($cfgPath)) {
ob_start();
$cfg = include $cfgPath;
ob_end_clean();
$servername = $cfg['host'] ?? $servername ?: 'localhost';
$username = $cfg['user'] ?? $username;
$password = $cfg['pass'] ?? $password;
$dbname = $cfg['name'] ?? $dbname;
}
}
// Fail fast if credentials are not provided via environment
if ($username === '' || $password === '' || $dbname === '') {
error_log('Configuration error: DB credentials not set via environment.');
http_response_code(500);
exit('Service temporarily unavailable.');
}
// Robust input handling
$defaultDate = date('Y-m-d', strtotime('-1 day'));
$date = isset($_GET['date']) ? $_GET['date'] : $defaultDate;
if (!preg_match('/^\d{4}-\d{2}-\d{2}$/', $date)) {
http_response_code(400);
exit('Invalid date');
}
// hour: allow 0–23 or the special value 99 meaning “all hours”
$hour = isset($_GET['hour']) ? filter_var($_GET['hour'], FILTER_VALIDATE_INT) : 99;
if ($hour === false || ($hour !== 99 && ($hour < 0 || $hour > 23))) {
http_response_code(400);
exit('Invalid hour');
}
// Pagination
$page = isset($_GET['page']) ? filter_var($_GET['page'], FILTER_VALIDATE_INT) : 1;
if ($page === false || $page < 1) { $page = 1; }
$pageSize = isset($_GET['page_size']) ? filter_var($_GET['page_size'], FILTER_VALIDATE_INT) : 50;
if ($pageSize === false) { $pageSize = 50; }
// Bound page size to prevent huge result sets
if ($pageSize < 1) { $pageSize = 1; }
if ($pageSize > 100) { $pageSize = 100; }
$limit = $pageSize;
$offset = ($page - 1) * $pageSize;
// Use mysqli with exceptions and UTF-8
mysqli_report(MYSQLI_REPORT_ERROR | MYSQLI_REPORT_STRICT);
try {
$conn = new mysqli($servername, $username, $password, $dbname);
$conn->set_charset('utf8mb4');
} catch (mysqli_sql_exception $e) {
error_log('DB connect failed: ' . $e->getMessage());
http_response_code(500);
exit('Service temporarily unavailable.');
}
// Build WHERE clause and bind parameters safely
$where = 'Date = ?';
$bindTypesCount = 's';
$bindValuesCount = [$date];
if ($hour !== 99) {
$where .= ' AND Hour = ?';
$bindTypesCount .= 'i';
$bindValuesCount[] = $hour;
}
// Count query for total rows (for display/pagination info)
try {
$sqlCount = "SELECT COUNT(*) AS total FROM SummaryLogs WHERE $where";
$stmtCount = $conn->prepare($sqlCount);
$stmtCount->bind_param($bindTypesCount, ...$bindValuesCount);
$stmtCount->execute();
$resultCount = $stmtCount->get_result();
$rowCount = $resultCount->fetch_assoc();
$totalRows = (int)$rowCount['total'];
$stmtCount->close();
} catch (mysqli_sql_exception $e) {
error_log('Count query failed: ' . $e->getMessage());
http_response_code(500);
exit('Service temporarily unavailable.');
}
// Data query with ORDER and LIMIT/OFFSET
try {
$sql = "SELECT id, logData FROM SummaryLogs WHERE $where ORDER BY id DESC LIMIT ? OFFSET ?";
// Bind types: existing where types + limit (i) + offset (i)
$bindTypesData = $bindTypesCount . 'ii';
$bindValuesData = $bindValuesCount;
$bindValuesData[] = $limit;
$bindValuesData[] = $offset;
$stmt = $conn->prepare($sql);
$stmt->bind_param($bindTypesData, ...$bindValuesData);
$stmt->execute();
$result = $stmt->get_result();
} catch (mysqli_sql_exception $e) {
error_log('Data query failed: ' . $e->getMessage());
http_response_code(500);
exit('Service temporarily unavailable.');
}
$stmt->execute();
$result = $stmt->get_result();
$result_count = $result->num_rows;
function generateLogDataTable($logData) {
$data = json_decode($logData, true);
if (is_null($data)) {
return "Invalid JSON data";
}
//// Remove entries with the key "logterse"
//if (isset($data['logterse'])) {
//unset($data['logterse']);
//}
// Defensive decode with substitution for invalid UTF-8
$data = json_decode($logData, true, 512, JSON_INVALID_UTF8_SUBSTITUTE);
// Remove entries with the key "logterse" and remove entries with empty values
if (!is_array($data)) {
return '<em>Invalid JSON data</em>';
}
// Remove entries with key 'logterse' and entries with empty values
foreach ($data as $key => $value) {
if ($key === 'logterse' || empty($value)) {
if ($key === 'logterse' || $value === '' || $value === null) {
unset($data[$key]);
}
}
// Handle adjacent duplicates by merging keys
// Merge adjacent duplicates by value
$mergedData = [];
$previousValue = null;
foreach ($data as $key => $value) {
if ($value === $previousValue) {
// Merge the current key with the previous key
// Normalize non-scalar values for display
if (is_array($value) || is_object($value)) {
$value = json_encode($value, JSON_UNESCAPED_UNICODE | JSON_UNESCAPED_SLASHES);
}
$valueStr = (string)$value;
if ($valueStr === $previousValue) {
end($mergedData);
$lastKey = key($mergedData);
$newKey = "$lastKey/$key";
$mergedData[$newKey] = $value;
// Remove the old entry
$newKey = $lastKey . '/' . $key;
$mergedData[$newKey] = $valueStr;
unset($mergedData[$lastKey]);
} else {
// Otherwise, add a new entry
$mergedData[$key] = $value;
$mergedData[$key] = $valueStr;
}
$previousValue = $valueStr;
}
// Optional truncation to keep rendering safe
$maxValueLen = 500;
foreach ($mergedData as $k => $v) {
if (mb_strlen($v, 'UTF-8') > $maxValueLen) {
$mergedData[$k] = mb_substr($v, 0, $maxValueLen, 'UTF-8') . '…';
}
$previousValue = $value;
}
$keys = array_keys($mergedData);
$values = array_values($mergedData);
$output = '<table class="stripes" style="border-collapse: collapse; width:95%;overflow-x:auto; margin: 0.6% auto 0.6% auto;"><tbody>';
#$output = '<table class="stripes" style="border-collapse: collapse; width:95%;overflow-x:auto; margin:2%"><tbody>';
$output = '<table class="mailstats-summary stripes"><tbody>';
// Divide keys and values into sets of 6
$chunks = array_chunk($keys, 6);
foreach ($chunks as $chunkIndex => $chunk) {
if ($chunkIndex > 0) {
// Add spacing between different sets
#$output .= '<tr><td colspan="6" style="height: 1em;"></td></tr>';
}
$output .= '<tr>';
foreach ($chunk as $key) {
$output .= '<th>' . htmlspecialchars($key) . '</th>';
$output .= '<th>' . e($key) . '</th>';
}
$output .= '</tr><tr>';
foreach ($chunk as $i => $key) {
$val = htmlspecialchars($values[$chunkIndex * 6+ $i]);
if ($key == 'id'){
$output .= '<td>' . "<a href='./ShowDetailedLogs.php?id=".$val."'</a>".$val."</td>";
} else {
$output .= '<td>' . $val . '</td>';
}
$val = $values[$chunkIndex * 6 + $i];
$output .= '<td>' . e($val) . '</td>';
}
$output .= '</tr>';
}
@@ -106,61 +196,89 @@ function generateLogDataTable($logData) {
}
?>
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<link rel='stylesheet' type='text/css' href='css/mailstats.css' />
<title>Summary Logs</title>
<!-- <style>
table {
xxwidth: 100%;
xxborder-collapse: collapse;
}
table, th, td {
xxborder: 1px solid black;
}
th, td {
xxpadding: 8px;
xxtext-align: left;
}
</style>
-->
<link rel="stylesheet" type="text/css" href="css/mailstats.css" />
<link rel="icon" type="image/x-icon" href="favicon.ico">
</head>
<body>
<div style="width:100%;overflow-x:auto;font-size:0.726cqw">"
<h1>Summary Logs for Date: <?= htmlspecialchars($date) ?> <?= $hour == 99 ? 'for All Hours' : 'and Hour: ' . htmlspecialchars($hour) ?></h1>
<h3>Found <?= $result_count ?> records.</h3>
<table style="border-collapse:collapse;width:98%">
<thead>
<tr>
<th>Id</th>
<!--<th>Date</th>-->
<!--<th>Hour</th>-->
<th>Log Data</th>
</tr>
</thead>
<tbody>
<?php if ($result->num_rows > 0): ?>
<?php while($row = $result->fetch_assoc()): ?>
<div class="mailstats-summary">
<div class="summary-container">
<h1>
Summary Logs for Date: <?= e($date) ?>
<?= $hour === 99 ? ' (All Hours)' : ' at Hour: ' . e($hour) ?>
</h1>
<?php
$startRow = $totalRows > 0 ? ($offset + 1) : 0;
$endRow = min($offset + $limit, $totalRows);
?>
<h3>Found <?= e($totalRows) ?> records. Showing <?= e($startRow) ?>–<?= e($endRow) ?>.</h3>
<table class="summary-table">
<thead>
<tr>
<td><?= htmlspecialchars($row['id']) ?></td>
<td><?= generateLogDataTable($row['logData']) ?></td>
<th>Id</th>
<th>Details</th>
<th>Log Data</th>
</tr>
<?php endwhile; ?>
<?php else: ?>
<tr>
<td colspan="4">No records found for the specified date and hour.</td>
</tr>
<?php endif; ?>
</tbody>
</table>
</thead>
<tbody>
<?php if ($result && $result->num_rows > 0): ?>
<?php while ($row = $result->fetch_assoc()): ?>
<?php
$id = (int)$row['id'];
$detailUrl = './ShowDetailedLogs.php?id=' . rawurlencode((string)$id);
?>
<tr>
<td><?= e($id) ?></td>
<td><a href="<?= e($detailUrl) ?>">View details</a></td>
<td><?= generateLogDataTable($row['logData']) ?></td>
</tr>
<?php endwhile; ?>
<?php else: ?>
<tr>
<td colspan="3">No records found for the specified date and hour.</td>
</tr>
<?php endif; ?>
</tbody>
</table>
<?php
// Pagination
$baseParams = [
'date' => $date,
'hour' => $hour,
'page_size' => $pageSize
];
$prevPage = $page > 1 ? $page - 1 : null;
$nextPage = ($offset + $limit) < $totalRows ? $page + 1 : null;
?>
<div class="pagination">
<?php if ($prevPage !== null): ?>
<?php
$paramsPrev = $baseParams; $paramsPrev['page'] = $prevPage;
$urlPrev = '?' . http_build_query($paramsPrev, '', '&', PHP_QUERY_RFC3986);
?>
<a href="<?= e($urlPrev) ?>">&laquo; Previous</a>
<?php endif; ?>
<?php if ($nextPage !== null): ?>
<?php
$paramsNext = $baseParams; $paramsNext['page'] = $nextPage;
$urlNext = '?' . http_build_query($paramsNext, '', '&', PHP_QUERY_RFC3986);
?>
<?php if ($prevPage !== null): ?> | <?php endif; ?>
<a href="<?= e($urlNext) ?>">Next &raquo;</a>
<?php endif; ?>
</div>
</div>
</div>
<?php
// Close the connection
$stmt->close();
$conn->close();
if (isset($stmt) && $stmt instanceof mysqli_stmt) { $stmt->close(); }
if (isset($conn) && $conn instanceof mysqli) { $conn->close(); }
?>
</body>
</html>
</html>
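
The summary page accepts date, hour, page and page_size query parameters, all validated above, so a typical request ends with a query string like the following (values are illustrative):

?date=2025-09-11&hour=99&page=1&page_size=50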


@@ -1,7 +1,7 @@
<div class="${classname}">
<h2>${title}</h2>
<tal:block condition="threshold != 0">
<span class='greyed-out'>Display threshold set to ${threshold}%</span>
<span class='greyed-out'>${threshold}</span>
</tal:block>
<tal:block condition="threshold == 0">
<br>


@@ -3,6 +3,7 @@
<meta charset="utf-8">
<title>SMEServer Mailstats</title>
<link rel='stylesheet' type='text/css' href='css/mailstats.css' />
<link rel="icon" type="image/x-icon" href="favicon.ico">
<!-- Check links -->
<!--css here-->
</head>
@@ -16,9 +17,30 @@
<br />
<h2>${structure:title}</h2>
<br />
<div class="headerpanel">
<div class = "innerheaderpanel">
<!---Add in header information here -->
<div class="emailstatus-wrapper">
<h2 class="emailstatus-header">Email System Status</h2>
<div class="emailstatus-tablecontainer">
<!-- Table 1 -->
<table class="emailstatus-table">
<thead>
<tr>
<th colspan="2">Security & Filtering</th>
</tr>
</thead>
<tbody>
<!---Add in table1 information here -->
</tbody>
</table>
<table class="emailstatus-table">
<thead>
<tr>
<th colspan="2">Mail Traffic Statistics</th>
</tr>
</thead>
<tbody>
<!---Add in table2 information here -->
</tbody>
</table>
</div>
</div>
<br />


@@ -85,8 +85,8 @@ import argparse
import tempfile
#import mysql.connector
import numpy as np
import plotly.graph_objects as go
import plotly.express as px
#import plotly.graph_objects as go
#import plotly.express as px
import colorsys
import pymysql
import json
@@ -108,12 +108,14 @@ enable_graphs = True; #This could be a DB entry if required.
try:
import matplotlib.pyplot as plt
except ImportError:
logging.debug("Matplotlib is not installed - no graphs")
logging.warning("Matplotlib is not installed - no graphs")
enable_graphs = False;
Mailstats_version = '1.2'
Mailstats_version = '1.3'
build_date_time = "2024-06-18 12:03:40OURCE"
build_date_time = build_date_time[:19] #Take out crap that sneaks in.
#Take out the crap that sneaks in...
build_date_time = build_date_time[:19]
Mailstats_version = Mailstats_version[:6]
#if build_date_time == "2024-06-18 12:03:40OURCE":
# build_date_time = "Unknown"
@@ -123,7 +125,6 @@ data_file_path = script_dir+'/../..' #back to the top
now = datetime.now()
yesterday = now - timedelta(days=1)
formatted_yesterday = yesterday.strftime("%Y-%m-%d")
#html_page_path = data_file_path+"/home/e-smith/files/ibays/mesdb/html/mailstats/"
html_page_dir = data_file_path+"/opt/mailstats/html/"
template_dir = data_file_path+"/opt/mailstats/templates/"
logs_dir = data_file_path+"/opt/mailstats/logs/"
@@ -150,6 +151,10 @@ PERCENT = TOTALS + 1
ColTotals = 24
ColPercent = 25
def strip_ansi_codes(text):
ansi_escape = re.compile(r'\x1b\[[0-9;]*m')
return ansi_escape.sub('', text)
def replace_bracket_content(input_filename, output_filename):
import re
@@ -169,90 +174,145 @@ def replace_bracket_content(input_filename, output_filename):
def get_logs_from_Journalctl(date='yesterday'):
# JSON-pretty output example from journalctl
# {
# "__CURSOR" : "s=21b4f015be0c4f1fb71ac439a8365ee7;i=385c;b=dd778625547f4883b572daf53ae93cd4;m=ca99d6d;t=62d6316802b05;x=71b24e9f19f3b99a",
# "__REALTIME_TIMESTAMP" : "1738753462774533",
# "__MONOTONIC_TIMESTAMP" : "212442477",
# "_BOOT_ID" : "dd778625547f4883b572daf53ae93cd4",
# "_MACHINE_ID" : "f20b7edad71a44e59f9e9b68d4870b19",
# "PRIORITY" : "6",
# "SYSLOG_FACILITY" : "3",
# "_UID" : "0",
# "_GID" : "0",
# "_SYSTEMD_SLICE" : "system.slice",
# "_CAP_EFFECTIVE" : "1ffffffffff",
# "_TRANSPORT" : "stdout",
# "_COMM" : "openssl",
# "_EXE" : "/usr/bin/openssl",
# "_HOSTNAME" : "sme11.thereadclan.me.uk",
# "_STREAM_ID" : "8bb0ef8920af4ae09b424a2e30abcdf7",
# "SYSLOG_IDENTIFIER" : "qpsmtpd-init",
# "MESSAGE" : "Generating DH parameters, 2048 bit long safe prime, generator 2",
# "_PID" : "2850",
# }
# and the return from here:
# {
# '_TRANSPORT': 'stdout', 'PRIORITY': 6, 'SYSLOG_FACILITY': 3, '_CAP_EFFECTIVE': '0', '_SYSTEMD_SLICE': 'system.slice',
# '_BOOT_ID': UUID('465c6202-36ac-4a8b-98e9-1581e8fec68f'), '_MACHINE_ID': UUID('f20b7eda-d71a-44e5-9f9e-9b68d4870b19'),
# '_HOSTNAME': 'sme11.thereadclan.me.uk', '_STREAM_ID': '06c860deea374544a2b561f55394d728', 'SYSLOG_IDENTIFIER': 'qpsmtpd-forkserver',
# '_UID': 453, '_GID': 453, '_COMM': 'qpsmtpd-forkser', '_EXE': '/usr/bin/perl',
# '_CMDLINE': '/usr/bin/perl -Tw /usr/bin/qpsmtpd-forkserver -u qpsmtpd -l 0.0.0.0 -p 25 -c 40 -m 5',
# '_SYSTEMD_CGROUP': '/system.slice/qpsmtpd.service', '_SYSTEMD_UNIT': 'qpsmtpd.service',
# '_SYSTEMD_INVOCATION_ID': 'a2b7889a307748daaeb60173d31c5e0f', '_PID': 93647,
# 'MESSAGE': '93647 Connection from localhost [127.0.0.1]',
# '__REALTIME_TIMESTAMP': datetime.datetime(2025, 4, 2, 0, 1, 11, 668929),
# '__MONOTONIC_TIMESTAMP': journal.Monotonic(timestamp=datetime.timedelta(11, 53118, 613602),
# bootid=UUID('465c6202-36ac-4a8b-98e9-1581e8fec68f')),
# '__CURSOR': 's=21b4f015be0c4f1fb71ac439a8365ee7;i=66d2c;b=465c620236ac4a8b98e91581e8fec68f;m=e9a65ed862;t=
# }
"""
Retrieve and parse journalctl logs for a specific date and units,
returning them as a sorted list of dictionaries.
"""
try:
# Parse the input date to calculate the start and end of the day
if date.lower() == "yesterday":
target_date = datetime.now() - timedelta(days=1)
else:
target_date = datetime.strptime(date, "%Y-%m-%d")
# Define the time range for the specified date
since = target_date.strftime("%Y-%m-%d 00:00:00")
until = target_date.strftime("%Y-%m-%d 23:59:59")
# Convert times to microseconds for querying
since_microseconds = int(datetime.strptime(since, "%Y-%m-%d %H:%M:%S").timestamp() * 1_000_000)
until_microseconds = int(datetime.strptime(until, "%Y-%m-%d %H:%M:%S").timestamp() * 1_000_000)
# Open the systemd journal
j = journal.Reader()
# Set filters for units
j.add_match(_SYSTEMD_UNIT="qpsmtpd.service")
j.add_match(_SYSTEMD_UNIT="uqpsmtpd.service")
j.add_match(_SYSTEMD_UNIT="sqpsmtpd.service")
# Filter by time range
j.seek_realtime(since_microseconds // 1_000_000) # Convert back to seconds for seeking
# Retrieve logs within the time range
logs = []
for entry in j:
entry_timestamp = entry.get('__REALTIME_TIMESTAMP', None)
entry_microseconds = int(entry_timestamp.timestamp() * 1_000_000)
if entry_timestamp and since_microseconds <= entry_microseconds <= until_microseconds:
logs.append(entry)
# Sort logs by __REALTIME_TIMESTAMP in ascending order
sorted_logs = sorted(logs, key=lambda x: x.get("__REALTIME_TIMESTAMP", 0))
return sorted_logs
# JSON-pretty output example from journalctl
# {
# "__CURSOR" : "s=21b4f015be0c4f1fb71ac439a8365ee7;i=385c;b=dd778625547f4883b572daf53ae93cd4;m=ca99d6d;t=62d6316802b05;x=71b24e9f19f3b99a",
# "__REALTIME_TIMESTAMP" : "1738753462774533",
# "__MONOTONIC_TIMESTAMP" : "212442477",
# "_BOOT_ID" : "dd778625547f4883b572daf53ae93cd4",
# "_MACHINE_ID" : "f20b7edad71a44e59f9e9b68d4870b19",
# "PRIORITY" : "6",
# "SYSLOG_FACILITY" : "3",
# "_UID" : "0",
# "_GID" : "0",
# "_SYSTEMD_SLICE" : "system.slice",
# "_CAP_EFFECTIVE" : "1ffffffffff",
# "_TRANSPORT" : "stdout",
# "_COMM" : "openssl",
# "_EXE" : "/usr/bin/openssl",
# "_HOSTNAME" : "sme11.thereadclan.me.uk",
# "_STREAM_ID" : "8bb0ef8920af4ae09b424a2e30abcdf7",
# "SYSLOG_IDENTIFIER" : "qpsmtpd-init",
# "MESSAGE" : "Generating DH parameters, 2048 bit long safe prime, generator 2",
# "_PID" : "2850",
# }
# and the return from here:
# {
# '_TRANSPORT': 'stdout', 'PRIORITY': 6, 'SYSLOG_FACILITY': 3, '_CAP_EFFECTIVE': '0', '_SYSTEMD_SLICE': 'system.slice',
# '_BOOT_ID': UUID('465c6202-36ac-4a8b-98e9-1581e8fec68f'), '_MACHINE_ID': UUID('f20b7eda-d71a-44e5-9f9e-9b68d4870b19'),
# '_HOSTNAME': 'sme11.thereadclan.me.uk', '_STREAM_ID': '06c860deea374544a2b561f55394d728', 'SYSLOG_IDENTIFIER': 'qpsmtpd-forkserver',
# '_UID': 453, '_GID': 453, '_COMM': 'qpsmtpd-forkser', '_EXE': '/usr/bin/perl',
# '_CMDLINE': '/usr/bin/perl -Tw /usr/bin/qpsmtpd-forkserver -u qpsmtpd -l 0.0.0.0 -p 25 -c 40 -m 5',
# '_SYSTEMD_CGROUP': '/system.slice/qpsmtpd.service', '_SYSTEMD_UNIT': 'qpsmtpd.service',
# '_SYSTEMD_INVOCATION_ID': 'a2b7889a307748daaeb60173d31c5e0f', '_PID': 93647,
# 'MESSAGE': '93647 Connection from localhost [127.0.0.1]',
# '__REALTIME_TIMESTAMP': datetime.datetime(2025, 4, 2, 0, 1, 11, 668929),
# '__MONOTONIC_TIMESTAMP': journal.Monotonic(timestamp=datetime.timedelta(11, 53118, 613602),
# bootid=UUID('465c6202-36ac-4a8b-98e9-1581e8fec68f')),
# '__CURSOR': 's=21b4f015be0c4f1fb71ac439a8365ee7;i=66d2c;b=465c620236ac4a8b98e91581e8fec68f;m=e9a65ed862;t=
# }
"""
Retrieve and parse journalctl logs for a specific date and units,
returning them as a sorted list of dictionaries.
"""
except Exception as e:
logging.error(f"Unexpected error: {e}")
return {}
def to_us(ts):
# Convert a journal timestamp (datetime or int/string microseconds) to integer microseconds
if ts is None:
return None
if hasattr(ts, "timestamp"):
return int(ts.timestamp() * 1_000_000)
try:
return int(ts)
except Exception:
return None
try:
# Parse the input date to calculate start and end of the day
if isinstance(date, str) and date.lower() == "yesterday":
target_date = datetime.now() - timedelta(days=1)
elif isinstance(date, datetime):
target_date = date
else:
# Supports either a datetime.date-like object (has year attr) or a string YYYY-MM-DD
try:
target_date = datetime(date.year, date.month, date.day)
except Exception:
target_date = datetime.strptime(str(date), "%Y-%m-%d")
# Define the time range for the specified date
since_dt = datetime(target_date.year, target_date.month, target_date.day, 0, 0, 0, 0)
until_dt = datetime(target_date.year, target_date.month, target_date.day, 23, 59, 59, 999999)
since_microseconds = int(since_dt.timestamp() * 1_000_000)
until_microseconds = int(until_dt.timestamp() * 1_000_000)
# Open the systemd journal (system-only if supported)
try:
j = journal.Reader(flags=journal.SYSTEM_ONLY)
except Exception:
j = journal.Reader()
# Set filters for units (multiple add_match on same field => OR)
j.add_match(_SYSTEMD_UNIT="qpsmtpd.service")
j.add_match(_SYSTEMD_UNIT="uqpsmtpd.service")
j.add_match(_SYSTEMD_UNIT="sqpsmtpd.service")
# Filter by time range: seek to the start of the interval
j.seek_realtime(since_dt)
# Retrieve logs within the time range
logs = []
log_count = 0
error_count = 0
for entry in j:
try:
entry_timestamp = entry.get("__REALTIME_TIMESTAMP", None)
entry_microseconds = to_us(entry_timestamp)
if entry_microseconds is None:
continue
# Early stop once we pass the end of the window
if entry_microseconds > until_microseconds:
break
if entry_microseconds >= since_microseconds:
log_count += 1
# Strip ANSI escape sequences in MESSAGE (if present and is text/bytes)
try:
msg = entry.get("MESSAGE", "")
if isinstance(msg, (bytes, bytearray)):
msg = msg.decode("utf-8", "replace")
# Only call strip if ESC is present
if "\x1b" in msg:
msg = strip_ansi_codes(msg)
entry["MESSAGE"] = msg
except Exception as se:
# Keep original message, just note the issue at debug level
logging.debug(f"strip_ansi_codes failed: {se}")
logs.append(entry)
except Exception as e:
# Be defensive getting context fields to avoid raising inside logging
pid = entry.get("_PID", "?") if isinstance(entry, dict) else "?"
ident = entry.get("SYSLOG_IDENTIFIER", "?") if isinstance(entry, dict) else "?"
logging.warning(f"Error - log line: {log_count} {pid} {ident} : {e}")
error_count += 1
if error_count:
logging.info(f"Had {error_count} errors on journal import - probably non character bytes")
# Sort logs by __REALTIME_TIMESTAMP in ascending order (keep original behavior)
sorted_logs = sorted(logs, key=lambda x: to_us(x.get("__REALTIME_TIMESTAMP")) or 0)
logging.debug(f"Collected {len(sorted_logs)} entries for {since_dt.date()} "
f"between {since_dt} and {until_dt} (scanned {log_count} in-window)")
return sorted_logs
except Exception as e:
logging.error(f"Unexpected error: {e}")
return {}
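# Minimal usage sketch (hypothetical, assuming this module's imports):
#   logs = get_logs_from_Journalctl("yesterday")   # or an explicit "YYYY-MM-DD"
#   for entry in logs[:3]:
#       print(entry.get("__REALTIME_TIMESTAMP"), entry.get("MESSAGE"))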
def transform_to_dict(data, keys, iso_date):
"""
@@ -395,23 +455,28 @@ def create_graph(data_dict, graph_type="line", output_file="graph.png",iso_date=
# return data
def save_summaries_to_db(cursor, conn, date_str, hour, parsed_data):
# Convert parsed_data to JSON string
global count_records_to_db
json_data = json.dumps(parsed_data)
# Insert the record
insert_query = """
INSERT INTO SummaryLogs (Date, Hour, logData)
VALUES (%s, %s, %s)
"""
try:
# Check if the cursor is open (pymysql has no explicit is_closed; handle by try/except)
cursor.execute(insert_query, (date_str, hour, json_data))
conn.commit()
count_records_to_db += 1
except pymysql.Error as err:
logging.error(f"DB Error {date_str} {hour} : {err}")
# Handle cursor closed or other DB errors
if 'closed' in str(err).lower():
logging.error(f"DB Error {date_str} {hour} : Cursor is closed. Check connection handling.")
else:
logging.error(f"DB Error {date_str} {hour} : {err}")
conn.rollback()
except Exception as ex:
logging.error(f"Unexpected DB Error {date_str} {hour} : {ex}")
conn.rollback()
def is_running_under_thonny():
# Check for the 'THONNY_USER_DIR' environment variable
@@ -588,32 +653,49 @@ def parse_data(data):
# and mapping:
try:
return_dict = {
'sme': fields[0].strip() if len(fields) > 0 else None,
'qpsmtpd': fields[1].strip() if len(fields) > 1 else None,
'id': fields[2].strip() if len(fields) > 2 else None,
'action': fields[3].strip() if len(fields) > 3 else None, #5
'logterse': fields[4].strip() if len(fields) > 4 else None,
'ip': fields[5].strip() if len(fields) > 5 else None,
'sendurl': fields[6].strip() if len(fields) > 6 else None, #1
'sendurl1': fields[7].strip() if len(fields) > 7 else None, #2
'from-email': fields[8].strip() if len(fields) > 8 else None, #3
'error-reason': fields[8].strip() if len(fields) > 9 else None, #3
'to-email': fields[9].strip() if len(fields) > 9 else None, #4
'error-plugin': fields[10].strip() if len(fields) > 10 else None, #5
'action1': fields[10].strip() if len(fields) > 10 else None, #5
'error-number' : fields[11].strip() if len(fields) > 11 else None, #6
'sender': fields[12].strip() if len(fields) > 12 else None, #7
'virus': fields[12].strip() if len(fields) > 12 else None, #7
'error-msg' :fields[13].strip() if len(fields) > 13 else None, #7
'spam-status': fields[13].strip() if len(fields) > 13 else None, #8
'error-result': fields[14].strip() if len(fields) > 14 else None,#8
'sme': fields[0].strip() if len(fields) > 0 else "",
'qpsmtpd': fields[1].strip() if len(fields) > 1 else "",
'id': fields[2].strip() if len(fields) > 2 else "",
'action': fields[3].strip() if len(fields) > 3 else "", #5
'logterse': fields[4].strip() if len(fields) > 4 else "",
'ip': fields[5].strip() if len(fields) > 5 else "",
'sendurl': fields[6].strip() if len(fields) > 6 else "", #1
'sendurl1': fields[7].strip() if len(fields) > 7 else "", #2
'from-email': fields[8].strip() if len(fields) > 8 else "", #3
'error-reason': fields[8].strip() if len(fields) > 9 else "", #3
'to-email': fields[9].strip() if len(fields) > 9 else "", #4
'error-plugin': fields[10].strip() if len(fields) > 10 else "", #5
'action1': fields[10].strip() if len(fields) > 10 else "", #5
'error-number' : fields[11].strip() if len(fields) > 11 else "", #6
'sender': fields[12].strip() if len(fields) > 12 else "", #7
'virus': fields[12].strip() if len(fields) > 12 else "", #7
'error-msg' :fields[13].strip() if len(fields) > 13 else "", #7
'spam-status': fields[13].strip() if len(fields) > 13 else "", #8
'error-result': fields[14].strip() if len(fields) > 14 else "",#8
# Add more fields as necessary
}
except:
logging.error(f"error:len:{len(fields)}")
return_dict = {}
return_dict = create_empty_return()
return return_dict
def safe_strip(lst, index):
if 0 <= index < len(lst):
value = lst[index]
if value is not None:
return value.strip()
return ""
def create_empty_return():
# Return dictionary with all keys, values None
keys = [
'sme', 'qpsmtpd', 'id', 'action', 'logterse', 'ip', 'sendurl', 'sendurl1',
'from-email', 'error-reason', 'to-email', 'error-plugin', 'action1', 'error-number',
'sender', 'virus', 'error-msg', 'spam-status', 'error-result'
]
return {key: "" for key in keys}
# def count_entries_by_hour(log_entries):
# hourly_counts = defaultdict(int)
# for entry in log_entries:
@@ -753,8 +835,124 @@ def split_timestamp_and_data(log_entry: str) -> list:
timestamp = None
rest_of_line = log_entry # If no match, return the whole line
return [timestamp, rest_of_line]
def render_sub_table(table_title, table_headers, found_values, get_character=None, suppress_threshold=False):
MIN_COUNT = 3 # Hide entries with count < MIN_COUNT
MAX_TOTAL_ROWS = 10 # Total rows INCLUDING "Other"
OTHER_TARGET_FRAC = 0.01 # Strictly less than 1%
OTHER_LABEL = 'Other'
SHOW_ALL = True # Set True to show all entries >= MIN_COUNT, no "Other" row
def select_rows_just_below(items, min_count=MIN_COUNT,
max_total_rows=MAX_TOTAL_ROWS,
other_target_frac=OTHER_TARGET_FRAC,
other_label=OTHER_LABEL, show_all=SHOW_ALL):
"""
Build rows with percentages of total (0..100).
- If show_all is True: show all entries with count >= min_count, no 'Other', ignore caps and 1% target.
- If show_all is False: pick as many top entries (count >= min_count) as needed so that
'Other' is strictly < other_target_frac (if possible), always include 'Other(n)',
and respect max_total_rows (including 'Other').
Output rows preserve original extra fields for selected entries.
The percent is written to field index 2 (replacing it if present, or appended if not).
"""
# Normalize items to a list while preserving original rows
def to_rows(seq):
if isinstance(seq, dict):
# Convert dict to rows without extras
return [(k, v) for k, v in seq.items()]
rows_ = []
for it in seq:
if isinstance(it, (tuple, list)) and len(it) >= 2:
rows_.append(tuple(it)) # store as tuple
else:
raise TypeError("Each item must be a (key, count, ...) tuple/list or a dict mapping key->count.")
return rows_
def set_percent(row, pct_value):
# Return a tuple like the input row but with percent inserted at index 2 (0..100 number, rounded)
pct_value = round(pct_value, 2)
r = list(row)
if len(r) >= 3:
r[2] = pct_value
else:
r.append(pct_value)
return tuple(r)
rows_in = to_rows(items)
total = sum(r[1] for r in rows_in)
if total == 0:
return ([(f"{other_label}(0)", 0, 0.0)] if not show_all else []), 0, "No data."
# Filter by min_count and sort by count desc
eligible = [r for r in rows_in if r[1] >= min_count]
eligible.sort(key=lambda r: r[1], reverse=True)
if show_all:
# Show all eligible rows, no 'Other', ignore caps/target; compute percent (0..100) per row
rows_out = [set_percent(r, (r[1] / total) * 100.0) for r in eligible]
return rows_out, total, None
#logging.info(f"{show_all}")
# Leave room for the "Other" row
max_top_cap = max(0, max_total_rows - 1)
# Find smallest number of top rows so that Other is strictly < target
cum = 0
needed_top = None
for i, r in enumerate(eligible, start=1):
cum += r[1]
other_frac = (total - cum) / total
if other_frac < other_target_frac:
needed_top = i
break
notes = []
if needed_top is None:
# Even after including all eligible, Other >= target
final_top = min(len(eligible), max_top_cap)
#if final_top < len(eligible):
#notes.append(f"Row cap prevents adding enough rows to push Other below {other_target_frac*100:.2f}%.")
#else:
#notes.append(f"Cannot push Other below {other_target_frac*100:.2f}% with MIN_COUNT={min_count}.")
else:
# Apply cap
if needed_top > max_top_cap:
final_top = max_top_cap
#notes.append(
# f"Row cap prevents reaching Other < {other_target_frac*100:.2f}%; "
# f"need {needed_top} rows but only {max_top_cap} allowed before Other."
#)
else:
final_top = needed_top
top = eligible[:final_top]
shown_sum = sum(r[1] for r in top)
other_count = total - shown_sum
other_percent = (other_count / total) * 100.0
# Count how many rows are aggregated into Other: everything not in 'top'
other_rows_count = len(rows_in) - len(top)
# Build output: preserve extras; write percent at index 2 as a numeric percent 0..100
rows_out = [set_percent(r, (r[1] / total) * 100.0) for r in top]
# Build the Other row with percent; no extra fields beyond the percent
rows_out.append((f"{other_label}({other_rows_count})", other_count, round(other_percent, 2)))
#if other_percent >= other_target_frac * 100.0:
#notes.append(
# f"Other is {other_percent:.2f}%, which is not strictly below {other_target_frac*100:.2f}% "
# f"(MIN_COUNT={min_count}, MAX_TOTAL_ROWS={max_total_rows})."
#)
return rows_out, total, " ".join(notes) if notes else None
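# A minimal sketch of the selection behaviour on made-up data (this assumes
# MIN_COUNT <= 15 and MAX_TOTAL_ROWS >= 4; both constants are defined above):
#   demo = [("US", 950), ("DE", 30), ("FR", 15), ("XX", 5)]
#   rows, total, note = select_rows_just_below(demo, show_all=False)
#   # rows  -> [('US', 950, 95.0), ('DE', 30, 3.0), ('FR', 15, 1.5),
#   #           ('Other(1)', 5, 0.5)]   # remainder folded once Other < 1%
#   # total -> 1000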
def render_sub_table(table_title, table_headers, found_values, get_character=None, show_all=True):
# Check if any data was provided
if len(found_values) != 0:
# Get the total
@@ -798,31 +996,37 @@ def render_sub_table(table_title, table_headers, found_values, get_character=Non
raise ValueError("found_values must be either a list of numbers or a list of dictionaries.")
else:
raise TypeError("found_values must be a dictionary or a list.")
sub_result.sort(key=lambda x: float(x[1]), reverse=True) # Sort by percentage in descending order
# Dynamic threshold calculation
if not suppress_threshold:
dynamic_threshold = max(1, 100 / (original_total**0.5)) if original_total > 0 else 0
dynamic_threshold = round(dynamic_threshold,1)
logging.debug(f"Threshold for {table_title} set to {dynamic_threshold}% ")
else:
dynamic_threshold=0
absolute_floor = 50 # Minimum absolute value threshold
# # Dynamic threshold calculation
# if not suppress_threshold:
# dynamic_threshold = max(1, 100 / (original_total**0.65)) if original_total > 0 else 0
# dynamic_threshold = round(dynamic_threshold,1)
# logging.debug(f"Threshold for {table_title} set to {dynamic_threshold}% ")
# else:
# dynamic_threshold=0
# absolute_floor = 10 # Minimum absolute value threshold
# Filter results using early termination
filtered_sub_result = []
for row in sub_result:
value = row[1]
percentage = (value / original_total * 100) if original_total else 0
# # Filter results using early termination
# filtered_sub_result = []
# for row in sub_result:
# value = row[1]
# percentage = (value / original_total * 100) if original_total else 0
# Exit condition: below both thresholds
if percentage < dynamic_threshold and value < absolute_floor:
break
# # Exit condition: below both thresholds
# if percentage < dynamic_threshold or value < absolute_floor:
# break
filtered_sub_result.append(row)
# filtered_sub_result.append(row)
sub_result = filtered_sub_result # Keep only significant rows
# sub_result = filtered_sub_result # Keep only significant rows
sub_result.sort(key=lambda x: float(x[1]), reverse=True)  # Sort by count in descending order
if not show_all:
sub_result, total, note = select_rows_just_below(sub_result,show_all=False)
else:
note = "" #no threshold applied
total = original_total
sub_template_path = template_dir+'mailstats-sub-table.html.pt'
# Load the template
with open(sub_template_path, 'r') as template_file:
@@ -834,7 +1038,7 @@ def render_sub_table(table_title, table_headers, found_values, get_character=Non
try:
rendered_html = template(array_2d=sub_result, column_headers=table_headers,
title=table_title, classname=get_first_word(table_title),
threshold=dynamic_threshold)
threshold=note)
except Exception as e:
raise ValueError(f"{table_title}: A chameleon controller render error occurred: {e}")
except Exception as e:
@@ -944,6 +1148,9 @@ def replace_between(text, start, end, replacement):
replaced_text = re.sub(pattern, replacement, text, flags=re.DOTALL)
return replaced_text
def assemble_heading_row(label,value):
return f"<tr><td>{label}</td><td>{value}</td><tr>"
def get_heading():
#
# Needed from the analysis:
@@ -961,44 +1168,50 @@ def get_heading():
# Clam Version/DB Count/Last DB update
clam_output = subprocess.getoutput("freshclam -V")
clam_info = f"Clam Version/DB Count/Last DB update: {clam_output}"
clam_info = assemble_heading_row("Clam Version/DB Count/Last DB update:", clam_output)
# SpamAssassin Version
sa_output = subprocess.getoutput("spamassassin -V")
sa_info = f"SpamAssassin Version: {sa_output}"
sa_info = assemble_heading_row("SpamAssassin Version: ",sa_output)
# Tag level and Reject level
tag_reject_info = f"Tag level: {SATagLevel}; Reject level: {SARejectLevel} {warnnoreject}"
tag_reject_info = assemble_heading_row("Tag level:",SATagLevel)
tag_reject_info += assemble_heading_row("Reject level: ",f"{SARejectLevel} {warnnoreject}")
# SMTP connection stats
smtp_stats = f"External SMTP connections accepted: {totalexternalsmtpsessions}\n"\
f"Internal SMTP connections accepted: {totalinternalsmtpsessions}"
smtp_stats = assemble_heading_row("External SMTP connections accepted:",totalexternalsmtpsessions)
smtp_stats += assemble_heading_row("Internal SMTP connections accepted:",totalinternalsmtpsessions)
if len(connection_type_counts)>0:
for connection_type in connection_type_counts.keys():
smtp_stats += f"\nCount of {connection_type} connections: {connection_type_counts[connection_type]}"
smtp_stats += assemble_heading_row(f"\nCount of {connection_type} connections:",connection_type_counts[connection_type])
if len(total_ports)>0:
for port_number in total_ports.keys():
smtp_stats += f"\nCount of port {port_number} connections: {total_ports[port_number]}"
smtp_stats += assemble_heading_row(f"\nCount of port {port_number} connections: ",total_ports[port_number])
smtp_stats = smtp_stats + f"\nEmails per hour: {emailperhour:.1f}/hr\n"\
f"Average spam score (accepted): {spamavg or 0:.2f}\n"\
f"Average spam score (rejected): {rejectspamavg or 0:.2f}\n"\
f"Average ham score: {hamavg or 0:.2f}\n"\
f"Number of DMARC reporting emails sent: {DMARCSendCount or 0} (not shown on table)"
rows = [
assemble_heading_row("Emails per hour:", f"{(emailperhour if emailperhour is not None else 0):.1f}/hr"),
assemble_heading_row("Average spam score (accepted):", f"{(spamavg if spamavg is not None else 0):.2f}"),
assemble_heading_row("Average spam score (rejected):", f"{(rejectspamavg if rejectspamavg is not None else 0):.2f}"),
assemble_heading_row("Average ham score:", f"{(hamavg if hamavg is not None else 0):.2f}"),
assemble_heading_row("Number of DMARC reporting emails sent:", f"{DMARCSendCount if DMARCSendCount is not None else 0} (not shown on table)"),
]
smtp_stats += " ".join(rows) # or "\n".join(rows) if assemble_heading_row doesnt add its own newline
# DMARC approved emails
dmarc_info = ""
if hamcount != 0:
dmarc_ok_percentage = DMARCOkCount * 100 / hamcount
dmarc_info = f"Number of emails approved through DMARC: {DMARCOkCount or 0} ({dmarc_ok_percentage:.2f}% of Ham count)"
dmarc_info = assemble_heading_row("Number of emails approved through DMARC:",f"{DMARCOkCount or 0} ({dmarc_ok_percentage:.2f}% of Ham count)")
# Accumulate all strings
header_str = "\n".join([clam_info, sa_info, tag_reject_info, smtp_stats, dmarc_info])
#header_str = "<br />".join([clam_info, sa_info, tag_reject_info, smtp_stats, dmarc_info])
# switch newlines to <br />
header_str = header_str.replace("\n","<br />")
return header_str
#header_str = header_str.replace("\n","<br />")
header_str1 = clam_info + sa_info + tag_reject_info
header_str2 = smtp_stats + dmarc_info
return header_str1,header_str2
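# (header_str1 carries the version and level rows, header_str2 the connection
# and score rows; they are inserted at the separate "table1"/"table2" markers
# in the report template further below.)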
def scan_mail_users():
#
@@ -1097,11 +1310,63 @@ def display_keys_and_values(data):
raise ValueError("Input must be a list of dictionaries or a list of lists.")
def extract_blacklist_domain(text):
match = re.search(r'http://www\.surbl\.org', text)
if match:
return "www.surbl.org"
return None
"""
Compare 'text' against comma-separated URL strings from global vars
RBLList, SBLList, and UBLList. Return the first matching entry or "".
Match is done on exact hostname substring OR the base domain (eTLD+1),
so 'black.uribl.com' will match text containing 'lookup.uribl.com'.
"""
s = text if isinstance(text, str) else str(text or "")
s_lower = s.lower()
logging.debug(f"extract blacklist called:{text}")
combined = ",".join([RBLList, SBLList, UBLList])
def hostname_from(sval: str) -> str:
sval = (sval or "").strip().lower()
if "://" in sval:
# Strip scheme using simple split to avoid needing urlparse
sval = sval.split("://", 1)[1]
# Strip path and port if present
sval = sval.split("/", 1)[0]
sval = sval.split(":", 1)[0]
# Remove leading wildcards/dots
sval = sval.lstrip(".")
if sval.startswith("*."):
sval = sval[2:]
return sval
def base_domain(hostname: str) -> str:
parts = hostname.split(".")
if len(parts) >= 3 and parts[-2] in ("co", "org", "gov", "ac") and parts[-1] == "uk":
return ".".join(parts[-3:])
if len(parts) >= 2:
return ".".join(parts[-2:])
return hostname
def boundary_re(term: str):
# Match term when not part of a larger domain label
return re.compile(r"(?<![A-Za-z0-9-])" + re.escape(term) + r"(?![A-Za-z0-9-])")
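    # e.g. boundary_re("uribl.com") matches within "lookup.uribl.com" but not
    # within "myuribl.com" or "uribl.company": the term may not extend a label.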
for part in combined.split(","):
entry = part.strip()
logging.debug(f"Comparing: {entry}")
if not entry:
continue
entry_host = hostname_from(entry)
entry_base = base_domain(entry_host)
# 1) Try matching the full entry host (e.g., black.uribl.com)
if entry_host and boundary_re(entry_host).search(s_lower):
return entry
# 2) Fallback: match by base domain (e.g., uribl.com) to catch lookup.uribl.com, etc.
if entry_base and boundary_re(entry_base).search(s_lower):
return entry
return ""
def set_log_level(level):
"""Dynamically adjust logging level (e.g., 'DEBUG', 'INFO', 'ERROR')."""
numeric_level = getattr(logging, level.upper(), None)
@@ -1114,6 +1379,41 @@ def format_duration(seconds: float) -> str:
return str(timedelta(seconds=seconds))
DB_CONFIG_PATH = '/etc/mailstats/db.php'
def parse_php_config(path):
# Read file as text and extract key-value pairs using regex
try:
with open(path, 'r') as f:
content = f.read()
cfg = {}
for match in re.finditer(r"'(\w+)'\s*=>\s*'([^']*)'", content):
cfg[match.group(1)] = match.group(2)
return cfg
except Exception as e:
logging.error(f"Could not parse PHP config file: {e}")
return {}
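# Illustrative only (the exact contents of db.php are an assumption here):
#   <?php $db = ['host' => 'localhost', 'user' => 'mailstats',
#                'pass' => 'secret',    'name' => 'mailstats'];
# parses to {'host': 'localhost', 'user': 'mailstats',
#            'pass': 'secret', 'name': 'mailstats'}.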
def load_db_config():
db_host = os.environ.get('MAILSTATS_DB_HOST', 'localhost')
db_user = os.environ.get('MAILSTATS_DB_USER', '')
db_pass = os.environ.get('MAILSTATS_DB_PASS', '')
db_name = os.environ.get('MAILSTATS_DB_NAME', '')
if db_user == '' or db_pass == '' or db_name == '':
if os.path.isfile(DB_CONFIG_PATH) and os.access(DB_CONFIG_PATH, os.R_OK):
cfg = parse_php_config(DB_CONFIG_PATH)
db_host = cfg.get('host', db_host)
db_user = cfg.get('user', db_user)
db_pass = cfg.get('pass', db_pass)
db_name = cfg.get('name', db_name)
if db_user == '' or db_pass == '' or db_name == '':
logging.error('DB credentials missing (env and config file).')
raise RuntimeError('DB credentials missing (env and config file)')
return db_host, db_user, db_pass, db_name
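# Lookup order: the MAILSTATS_DB_* environment variables win, then
# /etc/mailstats/db.php as the fallback; anything still missing raises so the
# caller can disable DB saving instead of running half-configured.
#   db_host, db_user, db_pass, db_name = load_db_config()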
if __name__ == "__main__":
start_time = datetime.now()
try:
@@ -1197,18 +1497,17 @@ if __name__ == "__main__":
count_records_to_db = 0
# Db save control
saveData = get_value(ConfigDB,"mailstats","SaveDataToMySQL","no") == 'yes' or forceDbSave
saveData = get_value(ConfigDB,"mailstats","SaveDataToMySQL","yes") == 'yes' or forceDbSave
logging.debug(f"Save Mailstats to DB set:{saveData} ")
if saveData:
# Connect to MySQL DB for saving
DBName = "mailstats"
DBHost = get_value(ConfigDB, 'mailstats', 'DBHost', "localhost")
DBPort = int(get_value(ConfigDB, 'mailstats', 'DBPort', "3306")) # Ensure port is an integer
DBPassw = 'mailstats'
DBUser = 'mailstats'
UnixSocket = "/var/lib/mysql/mysql.sock"
# Database config retrieval
try:
DBHost, DBUser, DBPassw, DBName = load_db_config()
DBPort = 3306 # If you want configurability, load this from config too
UnixSocket = "/var/lib/mysql/mysql.sock"
except RuntimeError as err:
logging.error(f"Database config error: {err}")
saveData = False
# Try to establish a database connection
try:
conn = pymysql.connect(
@@ -1218,7 +1517,7 @@ if __name__ == "__main__":
database=DBName,
port=DBPort,
unix_socket=UnixSocket,
cursorclass=pymysql.cursors.DictCursor # Optional: use DictCursor for dict output
cursorclass=pymysql.cursors.DictCursor
)
cursor = conn.cursor()
# Check if the table exists before creating it
@@ -1226,47 +1525,50 @@ if __name__ == "__main__":
cursor.execute(check_table_query)
table_exists = cursor.fetchone()
if not table_exists:
# Create table if it doesn't exist
cursor.execute("""
CREATE TABLE IF NOT EXISTS SummaryLogs (
id INT AUTO_INCREMENT PRIMARY KEY,
Date DATE,
Hour INT,
logData TEXT
)
CREATE TABLE IF NOT EXISTS SummaryLogs (
id INT AUTO_INCREMENT PRIMARY KEY,
Date DATE,
Hour INT,
logData TEXT
)
""")
# Delete existing records for the given date
try:
delete_query = """
DELETE FROM SummaryLogs
WHERE Date = %s
DELETE FROM SummaryLogs
WHERE Date = %s
"""
cursor.execute(delete_query, (analysis_date,)) # Don't forget the extra comma for tuple
# Get the number of records deleted
cursor.execute(delete_query, (analysis_date,))
rows_deleted = cursor.rowcount
if rows_deleted > 0:
logging.debug(f"Deleted {rows_deleted} rows for {analysis_date} ")
logging.debug(f"Deleted {rows_deleted} rows for {analysis_date}")
except pymysql.Error as e:
logging.error(f"SQL Delete failed ({delete_query}) ({e}) ")
logging.error(f"SQL Delete failed ({delete_query}) ({e})")
# Commit changes & close resources after all DB operations
conn.commit()
#cursor.close()
#conn.close()
except pymysql.Error as e:
logging.error(f"Unable to connect to {DBName} on {DBHost} port {DBPort} error ({e}) ")
logging.error(f"Unable to connect to {DBName} on {DBHost} port {DBPort} error ({e})")
saveData = False
nolinks = not saveData
# Not sure we need these...
# if (ConfigDB,"qpsmtpd","RHSBL").lower() == 'enabled':
# RBLList = get_value(ConfigDB,"qpsmtpd","RBLList")
# else:
# RBLList = ""
# if (ConfigDB,"qpsmtpd","RBLList").lower() == 'enabled':
# SBLLIst = get_value(ConfigDB,"qpsmtpd","SBLLIst")
# else:
# RBLList = ""
# if (ConfigDB,"qpsmtpd","RBLList").lower() == 'enabled':
# UBLList = get_value(ConfigDB,"qpsmtpd","UBLLIst")
# else:
# RBLList = ""
# Needed to identify blacklist used to reject emails.
if get_value(ConfigDB,"qpsmtpd","RHSBL").lower() == 'enabled':
RBLList = get_value(ConfigDB,"qpsmtpd","RBLList")
else:
RBLList = ""
if get_value(ConfigDB,"qpsmtpd","DNSBL").lower() == 'enabled':
SBLList = get_value(ConfigDB,"qpsmtpd","SBLList")
else:
SBLList = ""
if get_value(ConfigDB,"qpsmtpd","URIBL").lower() == 'enabled':
UBLList = get_value(ConfigDB,"qpsmtpd","UBLList")
else:
UBLList = ""
FetchmailIP = '127.0.0.200'  # Apparent IP address of fetchmail deliveries
WebmailIP = '127.0.0.1'  # Apparent IP of the webmail sender
@@ -1495,13 +1797,17 @@ if __name__ == "__main__":
rejReason = match.group(1)
found_qpcodes[parsed_data['error-plugin']+"-"+rejReason] += 1
else:
found_qpcodes[parsed_data['action1']] += 1
if parsed_data['action1'] == "":
logging.warning(f"Found blank action1 {timestamp} {parsed_data['id']} {parsed_data['ip']} {parsed_data['sendurl']}")
else:
found_qpcodes[parsed_data['action1']] += 1
#Check for blacklist rejection
error_plugin = parsed_data['error-plugin'].strip()
if error_plugin == 'rhsbl' or error_plugin == 'dnsbl':
blacklist_domain = extract_blacklist_domain(parsed_data['sender'])
blacklist_found[blacklist_domain] += 1
if blacklist_domain:
blacklist_found[blacklist_domain] += 1
#Log the recipients and deny or accept and spam-tagged counts
# Try to find an existing record for the email
@@ -1530,6 +1836,8 @@ if __name__ == "__main__":
else:
email = None
if email:
if '@' in email:
email = email.lower()
record = next((item for item in recipients_found if item['email'] == email), None)
if not record:
# If email is not in the array, we add it
@@ -1642,6 +1950,7 @@ if __name__ == "__main__":
try:
match = geoip_pattern.match(data['MESSAGE'])
if match:
logging.debug(f"Found bad country message {data['MESSAGE']} {match.group(1)} ")
j += 1
country = match.group(1)
found_countries[country] += 1
@@ -1739,15 +2048,17 @@ if __name__ == "__main__":
total_html = rendered_html
# Add in the header information
header_rendered_html = get_heading()
total_html = insert_string_after(total_html,header_rendered_html, "<!---Add in header information here -->")
header_rendered_html1,header_rendered_html2 = get_heading()
total_html = insert_string_after(total_html,header_rendered_html1, "<!---Add in table1 information here -->")
total_html = insert_string_after(total_html,header_rendered_html2, "<!---Add in table2 information here -->")
header_rendered_html = header_rendered_html1 + header_rendered_html2
# Add in the subservient tables (remember they appear in the reverse order of below!)
#virus codes
virus_headers = ["Virus",'Count','Percent']
virus_title = 'Viruses found'
virus_rendered_html = render_sub_table(virus_title,virus_headers,found_viruses,suppress_threshold=True)
virus_rendered_html = render_sub_table(virus_title,virus_headers,found_viruses)
# Add it to the total
total_html = insert_string_after(total_html,virus_rendered_html, "<!---Add in sub tables here -->")
@@ -1763,7 +2074,7 @@ if __name__ == "__main__":
junk_mail_count_headers = ['Username','Count', 'Percent']
junk_mail_counts = scan_mail_users()
junk_mail_count_title = 'Junk mail counts'
junk_rendered_html = render_sub_table(junk_mail_count_title,junk_mail_count_headers,junk_mail_counts,suppress_threshold=True)
junk_rendered_html = render_sub_table(junk_mail_count_title,junk_mail_count_headers,junk_mail_counts)
# Add it to the total
total_html = insert_string_after(total_html,junk_rendered_html, "<!---Add in sub tables here -->")
@@ -1771,21 +2082,21 @@ if __name__ == "__main__":
#Recipient counts
recipient_count_headers = ["Email",'Queued','Rejected','Spam tagged','Accepted Percent']
recipient_count_title = 'Incoming email recipients'
recipient_rendered_html = render_sub_table(recipient_count_title,recipient_count_headers,recipients_found,suppress_threshold=True)
recipient_rendered_html = render_sub_table(recipient_count_title,recipient_count_headers,recipients_found)
# Add it to the total
total_html = insert_string_after(total_html,recipient_rendered_html, "<!---Add in sub tables here -->")
#Geoip Country codes
geoip_headers = ['Country','Count','Percent','Rejected?']
geoip_title = 'Geoip results'
geoip_rendered_html = render_sub_table(geoip_title,geoip_headers,found_countries,get_character_in_reject_list)
geoip_rendered_html = render_sub_table(geoip_title,geoip_headers,found_countries,get_character_in_reject_list,show_all=False)
# Add it to the total
total_html = insert_string_after(total_html,geoip_rendered_html, "<!---Add in sub tables here -->")
#Blacklist counts
blacklist_headers = ['URL','Count','Percent']
blacklist_title = 'Blacklist used'
blacklist_rendered_html = render_sub_table(blacklist_title,blacklist_headers,blacklist_found,suppress_threshold=True)
blacklist_rendered_html = render_sub_table(blacklist_title,blacklist_headers,blacklist_found)
# Add it to the total
total_html = insert_string_after(total_html,blacklist_rendered_html, "<!---Add in sub tables here -->")

15
root/usr/bin/runallmailstats.sh Executable file
View File

@@ -0,0 +1,15 @@
#!/bin/bash
# Extract the earliest date from the journalctl header for qpsmtpd service
earliest_date=$(journalctl -u qpsmtpd | head -n 1 | sed -n 's/.*Logs begin at [A-Za-z]* \([0-9-]*\).*/\1/p')
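# (Assumes a header of the form "-- Logs begin at Fri 2025-08-29 ... --";
#  newer systemd builds print "Journal begins at" instead, in which case the
#  sed above yields an empty date.)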
# Get yesterday's date
yesterday=$(date -d 'yesterday' +%F)
current_date="$earliest_date"
# Loop from earliest date to yesterday
while [[ "$current_date" < "$yesterday" || "$current_date" == "$yesterday" ]]; do
runmailstats.sh "$current_date"
current_date=$(date -I -d "$current_date + 1 day")
done

View File

@@ -1,17 +0,0 @@
#!/bin/bash
#exec 1> >(logger -t $(basename $0)) 2>&1
perl /usr/bin/mailstats.pl /var/log/qpsmtpd/\@* /var/log/qpsmtpd/current /var/log/sqpsmtpd/\@* /var/log/sqpsmtpd/current
# and run new python one - start by copying and decoding log files
yesterday_date=$(date -d "yesterday" +'%mm %d')
#cd /var/log/qpsmtpd
#cat \@* current >/opt/mailstats/logs/current1 2>/dev/null
#cd /var/log/sqpsmtpd
#cat \@* current >/opt/mailstats/logs/current2 2>/dev/null
cd /opt/mailstats/logs
#cat current1 current2 2>/dev/null | /usr/local/bin/tai64nlocal | grep "$yesterday_date" > current1.log
python3 /usr/bin/mailstats-convert-log-sme10-to-sme11.py
yesterday_date=$(date -d "yesterday" +'%b %d')
cat output_log.txt | grep "$yesterday_date" | sort >current.log
ls -l
python3 /usr/bin/mailstats.py
echo "Done"

View File

@@ -77,7 +77,7 @@
<div class=dbwanted>
<!--
<h2 class='subh2'><%=l('mst_Details_for_connection_to_database')%></h2>
<p><span class=label>
@@ -108,7 +108,7 @@
% param 'DBPassword' => $mst_data->{DBPassword} unless param 'DBPassword';
%=password_field 'DBPassword', class => 'pass13 sme-password', autocomplete => 'off'
</span></p>
-->
</div>

View File

@@ -6,13 +6,17 @@ Summary: Daily mail statistics for SME Server
%define name smeserver-mailstats
Name: %{name}
%define version 11.1
%define release 2
%define release 10
%define full_version %{version}.%{release}
Version: %{version}
Release: %{release}%{?dist}
License: GPL
Group: SME/addon
Source: %{name}-%{version}.tgz
%global _binaries_in_noarch_packages_terminate_build 0
%global debug_package %{nil}
BuildRoot: /var/tmp/%{name}-%{version}-%{release}-buildroot
BuildArchitectures: noarch
Requires: smeserver-release => 9.0
@@ -25,21 +29,103 @@ Requires: python36
# So install as: dnf install smeserver-mailstats --enablerepo=epel,smecontribs
Requires: html2text
Requires: python3-chameleon
Requires: python3-mysql
Requires: python3-matplotlib
Requires: python3-mysql
Requires: python3-matplotlib
Requires: python3-pip
Requires: systemd-libs
AutoReqProv: no
%description
A script, run daily via cron.d, that e-mails mail statistics to the admin user.
See http://www.contribs.org/bugzilla/show_bug.cgi?id=819
See https://wiki.koozali.org/mailstats
%prep
%setup
%build
perl createlinks
%install
/bin/rm -rf $RPM_BUILD_ROOT
(cd root ; /usr/bin/find . -depth -print | /bin/cpio -dump $RPM_BUILD_ROOT)
chmod +x $RPM_BUILD_ROOT/usr/bin/runmailstats.sh
now=$(date +"%Y-%m-%d %H:%M:%S")
# Replace placeholders in the Python program using sed
perl -pi -e 'if (!$done && s/^Mailstats_version *=.*/Mailstats_version = '\''%{full_version}'\'/') { $done = 1 }' $RPM_BUILD_ROOT/usr/bin/mailstats.py
perl -pi -e 'if (!$done && s/^build_date_time *=.*/build_date_time = "'"$now"'"/) { $done = 1 }' $RPM_BUILD_ROOT/usr/bin/mailstats.py
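# The two substitutions above turn the placeholders into lines like
# (values illustrative):
#   Mailstats_version = '11.1.10'
#   build_date_time = "2025-09-12 18:20:15"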
/bin/rm -f %{name}-%{version}-filelist
/sbin/e-smith/genfilelist --file '/etc/mailstats/db.php' 'attr(0640, root, apache)' $RPM_BUILD_ROOT | grep -v "\.pyc" | grep -v "\.pyo" > %{name}-%{version}-filelist
install -Dpm 0755 journalwrap %{buildroot}%{_bindir}/journalwrap
%pre
/usr/bin/pip3 install -q pymysql
/usr/bin/pip3 install -q numpy
/usr/bin/pip3 install -q pandas
%clean
/bin/rm -rf $RPM_BUILD_ROOT
%files -f %{name}-%{version}-filelist
%defattr(-,root,root)
#%attr(0640, root, apache) %config(noreplace) /etc/mailstats/db.php
%{_bindir}/journalwrap
#%{_libdir}/libjournalwrap.so
%post
/sbin/ldconfig
# Remove www from the systemd-journal group as it is a potential security risk
gpasswd -d www systemd-journal
# and set the setuid bit on the C wrapper called from the log detail web page
chmod u+s /usr/bin/journalwrap
%postun
/sbin/ldconfig
%changelog
* Sun Apr 06 2025 Brian Read <brianr@koozali.org> 11.2-2.sme
- Add in SM2 panel [SME: ]
* Fri Sep 12 2025 Brian Read <brianr@koozali.org> 11.1-10.sme
- Fix version and build date from spec file [SME: 13121]
* Mon Dec 30 2024 Brian Read <brianr@koozali.org> 11.2-1.sme
- Update mailstats.pl to accommodate change in log format for SME11 [SME: 12841]
* Fri Sep 12 2025 Brian Read <brianr@koozali.org> 11.1-8.sme
- Remove www from systemd-journal group and setuid bit in journal wrapper [SME: 13121]
* Fri Sep 12 2025 Brian Read <brianr@koozali.org> 11.1-7.sme
- Truncate Geoip table and add other category [SME: 13121]
- Cope with blank data in action1 [SME: 13121]
* Thu Sep 04 2025 Brian Read <brianr@koozali.org> 11.1-6.sme
- Add favicon to mailstats table, summary and detailed pages [SME: 13121]
- Bring DB config reading for mailstats itself inline with php summary and detailed logs - using /etc/mailstats/db.php [SME: 13121]
- Remove DB config fields from the SM2 config panel [SME: 13121]
- Arrange for password to be generated and mailstats user to be set with limited permissions [SME: 13121]
* Tue Sep 02 2025 Brian Read <brianr@koozali.org> 11.1-5.sme
- Speed up Journal access [SME: 13121]
- Fix missing blacklist URL [SME: 13121]
- Add extra security to php show summary page [SME: 13121]
- Fix up CSS for Summary Page [SME: 13121]
- Get Detail logs page working and prettify [SME: 13121]
- Add in C wrapper source code to interrogate journal [SME: 13121]
- Get permission and ownership right for /etc/mailstats/db.php [SME: 13121]
- Refactor main table header into two tables side by side [SME: 13121]
* Mon Sep 01 2025 Brian Read <brianr@koozali.org> 11.1-4.sme
- More fixes for Journal bytes instead of characters [SME: 13117]
* Mon Sep 01 2025 Brian Read <brianr@koozali.org> 11.1-3.sme
- Sort out ASCII escape codes in return from journalctl API [SME: 13117]
- Add in Status enabled to default for mailstats DB [SME: 13118]
* Sun Apr 06 2025 Brian Read <brianr@koozali.org> 11.1-2.sme
- First build on Koji - and Add in SM2 panel [SME: 13116]
* Mon Dec 30 2024 Brian Read <brianr@koozali.org> 11.1-1.sme
- Update mailstats.py to accommodate change in log format for SME11 [SME: 12841]
* Fri Jun 07 2024 Brian Read <brianr@koozali.org> 1.1-18.sme
- Pull in python re-write from SME11 dev [SME: ]
@@ -110,34 +196,3 @@ See http://www.contribs.org/bugzilla/show_bug.cgi?id=819
* Sat May 26 2012 Brian J read <brianr@bjsystems.co.uk> 1.0-1.sme
- Initial version
%prep
%setup
%build
perl createlinks
%install
/bin/rm -rf $RPM_BUILD_ROOT
(cd root ; /usr/bin/find . -depth -print | /bin/cpio -dump $RPM_BUILD_ROOT)
chmod +x $RPM_BUILD_ROOT/usr/bin/runmailstats.sh
# Define the placeholder and generate the current date and time
now=$(date +"%Y-%m-%d %H:%M:%S")
# Replace the placeholder in the Python program located at %{BUILDROOT}/usr/bin
sed -i "s|__BUILD_DATE_TIME__|$now|" $RPM_BUILD_ROOT/usr/bin/mailstats.py
/bin/rm -f %{name}-%{version}-filelist
/sbin/e-smith/genfilelist $RPM_BUILD_ROOT | grep -v "\.pyc" | grep -v "\.pyo" > %{name}-%{version}-filelist
%pre
/usr/bin/pip3 install -q pymysql
/usr/bin/pip3 install -q numpy
/usr/bin/pip3 install -q pandas
/usr/bin/pip3 install -q plotly
%clean
/bin/rm -rf $RPM_BUILD_ROOT
%files -f %{name}-%{version}-filelist
%defattr(-,root,root)