#!/usr/bin/env python3
"""
FeedToRocket.py
Unified feeder to post Bugzilla, Koji, Wiki and Gitea events to Rocket.Chat webhooks.
Usage examples:
python FeedToRocket.py
python FeedToRocket.py --sleep 5 --log-level DEBUG
python FeedToRocket.py --one-off --feeds wiki,gitea
python FeedToRocket.py --empty-db
"""
import argparse
import logging
import os
import sqlite3
import socket
import time
import json
import re
from typing import Optional, Dict, Any, List
import requests
import feedparser
import xml.etree.ElementTree as ET
from bs4 import BeautifulSoup
from dotenv import load_dotenv
# ---------------------------
# Load .env variables
# ---------------------------
load_dotenv()
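# The settings below come from a .env file next to this script (or the process
# environment). A minimal sketch of that file -- the webhook URLs are placeholders,
# substitute your own Rocket.Chat incoming-webhook URLs:
#
#   BUGZILLA_CHAT_URL=https://chat.example.org/hooks/<id>/<token>
#   KOJI_CHAT_URL=https://chat.example.org/hooks/<id>/<token>
#   WIKI_CHAT_URL=https://chat.example.org/hooks/<id>/<token>
#   GITEA_CHAT_URL=https://chat.example.org/hooks/<id>/<token>
#   TEST_CHAT_URL=https://chat.example.org/hooks/<id>/<token>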
# ---------------------------
# Configuration (from .env)
# ---------------------------
FEED_CONFIG = {
"bugzilla": {
"enabled": True,
"type": "bugzilla",
"domain": "bugs.koozali.org",
"feed_path": "/buglist.cgi?chfield=%5BBug%20creation%5D&chfieldfrom=7d&ctype=atom&title=Bugs%20reported%20in%20the%20last%207%20days",
#"chat_url": os.getenv("TEST_CHAT_URL"),
chat_url": os.getenv("BUGZILLA_CHAT_URL"),
"filter_field": "status",
"filter_value": "open",
"bypass_filter": True,
},
"koji": {
"enabled": True,
"type": "koji",
"domain": "koji.koozali.org",
"feed_path": "/koji/recentbuilds?feed=rss",
"chat_url": os.getenv("KOJI_CHAT_URL")
#"chat_url": os.getenv("TEST_CHAT_URL")
},
"wiki": {
"enabled": True,
"type": "wiki",
"domain": "wiki.koozali.org",
"feed_path": "/api.php?hidebots=1&urlversion=2&days=7&limit=50&action=feedrecentchanges&feedformat=rss",
#"chat_url": os.getenv("TEST_CHAT_URL")
"chat_url": os.getenv("WIKI_CHAT_URL")
}
}
GITEA_ORGS = ["smecontribs", "smeserver", "smedev"]
for org in GITEA_ORGS:
FEED_CONFIG[f"gitea_{org}"] = {
"enabled": True,
"type": "gitea",
"feed_url": f"https://src.koozali.org/{org}.atom",
"org": org, # ✅ this line ensures process_gitea knows the org name
#"chat_url": os.getenv("TEST_CHAT_URL"),
"chat_url": os.getenv("GITEA_CHAT_URL")
}
# ---------------------------
# Logging configuration
# ---------------------------
DEFAULT_LOG_FILENAME = "FeedToRocket.log"
DEFAULT_LOG_DIR = "/var/log"
LOG_FORMAT = "%(asctime)s - %(levelname)s - %(name)s - %(message)s"
def init_logging(log_level_str: str) -> str:
log_level = getattr(logging, log_level_str.upper(), logging.INFO)
preferred_path = os.path.join(DEFAULT_LOG_DIR, DEFAULT_LOG_FILENAME)
fallback_path = os.path.join(".", DEFAULT_LOG_FILENAME)
log_file = preferred_path
try:
os.makedirs(os.path.dirname(preferred_path), exist_ok=True)
logging.basicConfig(filename=preferred_path, level=log_level, format=LOG_FORMAT)
logging.getLogger().info("Logging initialized at %s", preferred_path)
except PermissionError:
log_file = fallback_path
logging.basicConfig(filename=fallback_path, level=log_level, format=LOG_FORMAT)
logging.getLogger().info("Permission denied for %s. Logging initialized at %s", preferred_path, fallback_path)
console = logging.StreamHandler()
console.setLevel(log_level)
console.setFormatter(logging.Formatter(LOG_FORMAT))
logging.getLogger().addHandler(console)
return log_file
# ---------------------------
# Database helpers
# ---------------------------
DB_PATH = "sent_items.db"
def setup_database():
conn = sqlite3.connect(DB_PATH)
cursor = conn.cursor()
cursor.execute('''
CREATE TABLE IF NOT EXISTS sent_items (
feed_name TEXT,
item_id TEXT,
PRIMARY KEY(feed_name, item_id)
)
''')
conn.commit()
conn.close()
def clear_database():
conn = sqlite3.connect(DB_PATH)
cursor = conn.cursor()
cursor.execute('DELETE FROM sent_items')
conn.commit()
conn.close()
logging.getLogger().info("Cleared the sent_items database")
def has_been_sent(feed_name: str, item_id: str) -> bool:
conn = sqlite3.connect(DB_PATH)
cursor = conn.cursor()
cursor.execute('SELECT 1 FROM sent_items WHERE feed_name = ? AND item_id = ?', (feed_name, item_id))
exists = cursor.fetchone() is not None
conn.close()
return exists
def mark_as_sent(feed_name: str, item_id: str):
conn = sqlite3.connect(DB_PATH)
cursor = conn.cursor()
cursor.execute('INSERT OR IGNORE INTO sent_items (feed_name, item_id) VALUES (?, ?)', (feed_name, item_id))
conn.commit()
conn.close()
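# Dedup keys stored per feed (sketch; see each process_* function below):
#   ("bugzilla",    "<bug_id>:<status>")   - resent if the status changes
#   ("koji",        "<build_id>")
#   ("wiki",        "<entry.id>")
#   ("gitea_<org>", "<entry.id>")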
# ---------------------------
# Utilities
# ---------------------------
def get_ip_address(domain: str, retries: int = 10, delay: int = 1) -> str:
for attempt in range(1, retries + 1):
try:
ip_address = socket.gethostbyname(domain)
logging.info("Resolved %s -> %s", domain, ip_address)
return ip_address
except socket.gaierror:
logging.warning("Attempt %d failed to resolve %s, retrying...", attempt, domain)
time.sleep(delay)
raise RuntimeError(f"Unable to resolve domain '{domain}' after {retries} attempts.")
def send_to_rocket_chat(alias: str, text: str, attachments: Optional[List[Dict[str, Any]]], chat_url: str):
if not chat_url:
logging.warning("No chat URL for alias %s; skipping message.", alias)
return
payload = {"alias": alias, "text": text, "attachments": attachments or []}
try:
r = requests.post(chat_url, json=payload, timeout=10)
if r.status_code == 200:
logging.info("%s: message sent to Rocket.Chat", alias)
else:
logging.error("%s: Rocket.Chat returned %d - %s", alias, r.status_code, r.text)
except Exception as e:
logging.exception("%s: failed to send to Rocket.Chat: %s", alias, e)
def send_startup_message(feed_name: str, chat_url: str):
send_to_rocket_chat(feed_name.capitalize(), f"{feed_name.capitalize()} integration started successfully.", None, chat_url)
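# Sketch of the JSON body POSTed to the Rocket.Chat incoming webhook (mirrors the
# payload built in send_to_rocket_chat above; values are illustrative):
#   {
#     "alias": "Koji",
#     "text": "Build Notification - ID: ...",
#     "attachments": [{"title": "...", "title_link": "https://...", "color": "#764FA5"}]
#   }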
# ---------------------------
# Feed Processors
# ---------------------------
# Bugzilla
def parse_bugzilla_summary(summary: str):
    summary = summary.replace("&lt;", "<").replace("&gt;", ">").replace("&amp;", "&")
try:
root = ET.fromstring(summary)
except Exception as e:
logging.warning("Bugzilla parse error: %s", e)
return "", "", "", "", ""
status = reported_by = last_changed = product = component = ""
for row in root.findall('.//tr'):
if len(row) < 2: continue
field = (row[0].text or "").strip()
value = (row[1].text or "").strip()
if field == "Status": status = value
elif field == "ReportedByName": reported_by = value
elif field == "Last changed date": last_changed = value
elif field == "Product": product = value
elif field == "Component": component = value
return status, reported_by, last_changed, product, component
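# The Bugzilla Atom <summary> embeds an HTML table of field/value rows; after the
# entity un-escaping above, the parser expects markup roughly like this
# (abridged sketch, values invented):
#   <table>
#     <tr><td>Status</td><td>NEW</td></tr>
#     <tr><td>ReportedByName</td><td>someuser</td></tr>
#     <tr><td>Product</td><td>SME Server</td></tr>
#   </table>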
def process_bugzilla(conf, one_shot, domain_cache):
domain = conf["domain"]
if domain not in domain_cache:
domain_cache[domain] = get_ip_address(domain)
feed_url = f"http://{domain_cache[domain]}{conf['feed_path']}"
headers = {"Host": domain, "Referer": f"https://{domain}/"}
feed = feedparser.parse(requests.get(feed_url, headers=headers, timeout=10).content)
for entry in feed.entries:
summary = getattr(entry, "summary", "")
status, reported_by, last_changed, product, component = parse_bugzilla_summary(summary)
bug_id = str(entry.id).split("=")[-1]
status_key = status.lower() if status else "unknown"
if conf.get("bypass_filter") or (getattr(entry, conf.get("filter_field", ""), "").lower() == conf.get("filter_value", "")):
key = f"{bug_id}:{status_key}"
if not has_been_sent("bugzilla", key):
text = f"*Bug ID:* {bug_id} | *Status:* {status} | *Reporter:* {reported_by}\nProduct: {product}, Component: {component}, Last Changed: {last_changed}"
send_to_rocket_chat("Bugzilla", text, [{"title": entry.title, "title_link": entry.link, "color": "#764FA5"}], conf["chat_url"])
mark_as_sent("bugzilla", key)
# Koji
def extract_koji_changelog(link, host):
try:
r = requests.get(link, headers={"Host": host}, timeout=10)
soup = BeautifulSoup(r.text, "html.parser")
td = soup.find("td", class_="changelog")
if not td:
return ""
lines = [ln.strip() for ln in td.get_text().splitlines() if ln.strip()]
return "\n".join(lines[:5])
except Exception as e:
logging.warning("Koji changelog fetch failed: %s", e)
return ""
def process_koji(conf, one_shot, domain_cache):
domain = conf["domain"]
if domain not in domain_cache:
domain_cache[domain] = get_ip_address(domain)
url = f"http://{domain_cache[domain]}{conf['feed_path']}"
feed = feedparser.parse(requests.get(url, headers={"Host": domain}, timeout=10).content)
for entry in feed.entries:
title = entry.title.strip()
build_id = title.split(":")[1].split(",")[0].strip() if ":" in title else entry.id
if not has_been_sent("koji", build_id):
link = entry.link #.replace(domain, domain_cache[domain])
changelog = extract_koji_changelog(link, domain)
text = f"Build Notification - ID: {build_id}\n{changelog}"
send_to_rocket_chat("Koji", text, [{"title": title, "title_link": entry.link, "color": "#764FA5"}], conf["chat_url"])
mark_as_sent("koji", build_id)
# Wiki
def process_wiki(conf, one_shot, domain_cache):
domain = conf["domain"]
if domain not in domain_cache:
domain_cache[domain] = get_ip_address(domain)
url = f"http://{domain_cache[domain]}{conf['feed_path']}"
feed = feedparser.parse(requests.get(url, headers={"Host": domain}, timeout=10).content)
logging.debug(f"Wiki feed:{feed}")
for entry in feed.entries:
wiki_id = entry.id
if not has_been_sent("wiki", wiki_id):
text = f"*Title:* {entry.title}\n*Date:* {entry.published}\n*Author:* {entry.author}\nLink: {entry.link}"
send_to_rocket_chat("Wiki", text, [{"title": entry.title, "title_link": entry.link, "color": "#764FA5"}], conf["chat_url"])
mark_as_sent("wiki", wiki_id)
# Gitea (Atom feed)
# --- Gitea helpers (shared by processor + selftest) ---
def _gitea_clean_author(a: str) -> str:
if not a:
return ""
a = re.sub(r"\S*noreply\S*", "", a) # drop noreply
a = re.sub(r"mailto:\S+", "", a) # drop mailto
a = re.sub(r"<.*?>", "", a) # drop angle-bracketed emails
a = re.sub(r"\S+@\S+", "", a) # drop raw emails that might slip in
return a.strip()
def _gitea_text_only(html_snippet: str) -> str:
return BeautifulSoup(html_snippet or "", "html.parser").get_text(" ", strip=True)
def _gitea_first_two_links(html_snippet: str, base_host: str):
"""
Return (diff_link, diff_text, repo_link, repo_text) from an HTML snippet.
Only consider anchors pointing to src.koozali.org, make them absolute, ensure distinct.
"""
from urllib.parse import urljoin
diff_link = diff_text = repo_link = repo_text = None
soup = BeautifulSoup(html_snippet or "", "html.parser")
anchors = []
for a in soup.find_all("a"):
href = (a.get("href") or "").strip()
if "src.koozali.org" in href:
anchors.append((urljoin(base_host, href), a.get_text(strip=True)))
if anchors:
diff_link, diff_text = anchors[0]
if len(anchors) > 1:
repo_link, repo_text = anchors[-1]
else:
repo_link, repo_text = diff_link, diff_text
return diff_link, diff_text, repo_link, repo_text
def _gitea_build_attachment(org_name: str,
title_html: str,
summary_html: str,
content_html: str,
entry_link: str,
updated: str,
author_raw: str,
base_host: str) -> dict:
"""
Build a single Rocket.Chat attachment dict:
title: org_name
title_link: diff (or repo/entry_link)
text: compact body with Date, Author, and 'Diff | Repo' (each once)
"""
import html
from urllib.parse import urljoin
author = _gitea_clean_author(author_raw)
    # Prefer links from the title HTML, then fall back to summary/content
diff_link, diff_text, repo_link, repo_text = _gitea_first_two_links(title_html, base_host)
if not diff_link or not repo_link:
alt_diff, alt_dt, alt_repo, alt_rt = _gitea_first_two_links(summary_html or content_html, base_host)
diff_link = diff_link or alt_diff
diff_text = diff_text or alt_dt
repo_link = repo_link or alt_repo
repo_text = repo_text or alt_rt
# If still no repo_link, try to derive from entry_link
if not repo_link and entry_link:
entry_link = urljoin(base_host, entry_link)
m = re.search(r"^(https?://[^/]+/[^/]+/[^/]+)", entry_link)
if m:
repo_link = m.group(1)
repo_text = "/".join(repo_link.rstrip("/").split("/")[-2:])
# Guard against any accidental email capture in repo_text
if repo_text and "@" in repo_text:
repo_text = None
if (not repo_text) and repo_link:
repo_text = "/".join(repo_link.rstrip("/").split("/")[-2:])
    # Compact action line: clean readable text from the title HTML
title_text = html.unescape(_gitea_text_only(title_html))
action_line = f"{org_name}: {title_text}"
# Links shown once; both only if distinct
link_parts = []
if diff_link:
link_parts.append(f"[Diff]({diff_link})")
if repo_link and (repo_link != diff_link):
link_parts.append(f"[Repo]({repo_link})")
lines = [action_line]
if updated:
lines.append(f"Date: {updated}")
author = author.strip()
if author:
lines.append(f"Author: {author}")
if link_parts:
lines.append(" | ".join(link_parts))
text = "\n\n".join(lines)
title_link = diff_link or repo_link or urljoin(base_host, entry_link or "")
return {
"title": f"{org_name}",
"title_link": title_link,
"text": text,
"color": "#764FA5",
"collapsed": False
}
def process_gitea(conf, one_shot, domain_cache):
"""
Gitea Atom -> Rocket.Chat (compact, using attachment fields):
Fields shown:
      - Action: <org>: <title text>
      - Message: <commit message> (if available)
      - Date: <updated> (if available)
      - Author: <author> (if available)
- Links: [Diff](...) | [Repo](...) (Diff is compare/commit/branch/tag; Repo is repo home)
Diff priority: compare > commit > branch > tag
For tag-only entries, resolves tag -> commit (and previous tag) via Gitea API
to generate a commit or compare diff link.
Expects helpers elsewhere:
- has_been_sent(key, id)
- mark_as_sent(key, id)
- send_to_rocket_chat(username, text, attachments, webhook_url)
"""
import re
import html
import json
import logging
from urllib.parse import urljoin
import requests
import feedparser
from bs4 import BeautifulSoup
# ---- Constants / config --------------------------------------------------------
base_host = "https://src.koozali.org"
GITEA_API_BASE = f"{base_host}/api/v1"
feed_url = conf.get("feed_url")
chat_url = conf.get("chat_url")
    org_match = re.search(r"src\.koozali\.org/([^/.]+)", conf.get("feed_url", ""))
    org_name = conf.get("org") or (org_match.group(1) if org_match else "unknown")
log = logging.getLogger(f"gitea.{org_name}")
if not feed_url or not chat_url:
log.warning("Skipping Gitea: feed URL or chat URL missing (org=%s)", org_name)
return
# ---- Helpers ------------------------------------------------------------------
def clean_author(a: str) -> str:
if not a:
return ""
a = re.sub(r"\S*noreply\S*", "", a) # drop noreply
a = re.sub(r"mailto:\S+", "", a) # drop mailto
a = re.sub(r"<.*?>", "", a) # drop angle-brackets
a = re.sub(r"\S+@\S+", "", a) # drop raw emails
return a.strip()
def text_only(html_snippet: str) -> str:
return html.unescape(BeautifulSoup(html_snippet or "", "html.parser").get_text(" ", strip=True))
def _sanitize_url(u: str) -> str:
u = (u or "").strip()
# Trim artifacts from scraping
u = re.sub(r'[">\')\]]+$', '', u) # closing quotes/brackets/parens
u = re.sub(r'[.,;:]+$', '', u) # trailing punctuation
return u
def _normalize_url(u: str) -> str:
return urljoin(base_host, _sanitize_url(u))
def _collect_links(entry):
"""
Return absolute src.koozali.org URLs with texts from:
- title (anchors)
- summary or content (anchors)
- entry.link (raw)
- entry.id (raw; after colon)
- PLUS: any raw URLs in summary/content (non-anchors) inc. relative paths
De-dupes by URL.
"""
links = []
def add_from_anchors(snippet):
if not snippet:
return
soup = BeautifulSoup(snippet, "html.parser")
for a in soup.find_all("a"):
href = _sanitize_url(a.get("href") or "")
if "src.koozali.org" in href:
links.append((_normalize_url(href), a.get_text(strip=True)))
def add_from_raw(text):
if not text:
return
# absolute URLs
for m in re.finditer(r"https?://src\.koozali\.org[^\s)>\]\"']+", text):
links.append((_normalize_url(m.group(0)), ""))
# relative (common org prefixes or any /compare/)
for m in re.finditer(r"(?:^|\s)(/[^ \t\n\r)>\]\"']+)", text):
candidate = m.group(1)
if candidate.startswith(("/smeserver/", "/smecontribs/", "/common/")) or "/compare/" in candidate:
links.append((_normalize_url(candidate), ""))
title_html = getattr(entry, "title", "")
summary_html = getattr(entry, "summary", "")
content_html = ""
if getattr(entry, "content", None):
content_html = entry.content[0].get("value", "")
# Anchors
add_from_anchors(title_html)
add_from_anchors(summary_html or content_html)
# entry.link
elink = getattr(entry, "link", "")
if "src.koozali.org" in elink:
links.append((_normalize_url(elink), ""))
# URL inside id (e.g., "123: https://...")
eid = getattr(entry, "id", "")
m = re.search(r"https?://[^ \t\n\r]+", eid)
if m and "src.koozali.org" in m.group(0):
links.append((_normalize_url(m.group(0)), ""))
# Raw text scan
add_from_raw(summary_html)
add_from_raw(content_html)
# De-dupe by URL
seen = set()
out = []
for u, t in links:
if "src.koozali.org" not in u:
continue
if u not in seen:
seen.add(u)
out.append((u, t))
return out
def _classify(links):
"""Split into compare/commit/branch/tag and detect repo homes."""
compares, commits, branches, tags, repos = [], [], [], [], []
for url, txt in links:
if "/compare/" in url:
compares.append((url, txt or "compare"))
elif "/commit/" in url:
sha = url.rsplit("/", 1)[-1]
commits.append((url, txt or (sha[:7] if sha else "commit")))
elif "/src/branch/" in url:
br = url.rsplit("/", 1)[-1]
branches.append((url, txt or br))
elif "/src/tag/" in url:
tg = url.rsplit("/", 1)[-1]
tags.append((url, txt or tg))
# Repo home: https://host/org/repo
m = re.match(r"^https?://[^/]+/[^/]+/[^/]+/?$", url)
if m:
home = m.group(0).rstrip("/")
repos.append((home, "/".join(home.split("/")[-2:])))
return compares, commits, branches, tags, repos
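    # e.g. a "pushed to master" entry typically classifies as: commits=[.../commit/<sha>],
    # branches=[.../src/branch/master], repos=[https://src.koozali.org/<owner>/<repo>];
    # a tag push yields tags=[.../src/tag/<tag>] and the same repo home.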
def _extract_commit_message(summary_html: str, content_html: str) -> str:
"""
Pull a compact one-line commit message from summary/content.
- Convert to text with newlines preserved
- Drop a leading line that is just a SHA
- Return first meaningful non-empty line (trimmed to ~200 chars)
"""
soup = BeautifulSoup((summary_html or content_html) or "", "html.parser")
txt = soup.get_text("\n", strip=True)
if not txt:
return ""
lines = [ln.strip() for ln in txt.splitlines()]
if not lines:
return ""
sha_like = re.compile(r"^[0-9a-f]{7,40}$", re.I)
if lines and sha_like.match(lines[0]):
lines = lines[1:]
for ln in lines:
if not ln:
continue
# Skip pure mailto lines
if "mailto:" in ln.lower():
continue
# Compact whitespace and trim length
msg = re.sub(r"\s+", " ", ln).strip()
if msg:
return (msg[:200] + "…") if len(msg) > 200 else msg
return ""
# ---- Gitea API helpers (for tag-only upgrade) ---------------------------------
def _gitea_api_get_tags(owner: str, repo: str, timeout=10):
"""
Return list of tags (dicts) from Gitea:
[{"name": "v1.2.3", "commit": {"sha": "...", "url": "..."}}, ...]
On error, return [].
"""
url = f"{GITEA_API_BASE}/repos/{owner}/{repo}/tags"
try:
r = requests.get(url, timeout=timeout)
r.raise_for_status()
data = r.json()
if isinstance(data, list):
return data
except Exception as e:
log.warning("Gitea API tags fetch failed for %s/%s: %s", owner, repo, e)
return []
def _resolve_tag_to_commit(owner: str, repo: str, tag_name: str, cache: dict):
"""
Map tag_name -> commit SHA via cache/api:
cache['owner/repo'][tag_name] = sha
"""
repo_key = f"{owner}/{repo}"
cache.setdefault(repo_key, {})
if tag_name in cache[repo_key]:
return cache[repo_key][tag_name]
tags = _gitea_api_get_tags(owner, repo)
for t in tags:
if t.get("name") == tag_name:
sha = (t.get("commit") or {}).get("sha")
if sha:
cache[repo_key][tag_name] = sha
return sha
return None
def _find_previous_tag(owner: str, repo: str, tag_name: str):
"""
Best-effort previous tag:
- Use /tags list order (typically newest-first). If current at i, pick i+1 if exists.
- Else first different tag.
"""
tags = _gitea_api_get_tags(owner, repo)
names = [t.get("name") for t in tags if t.get("name")]
if not names:
return None
try:
i = names.index(tag_name)
if i + 1 < len(names):
return names[i + 1]
except ValueError:
pass
for n in names:
if n != tag_name:
return n
return None
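    # Sketch of the tag-only upgrade performed below: an entry whose only link is
    #   https://src.koozali.org/<owner>/<repo>/src/tag/<tag>
    # becomes a compare diff  .../<owner>/<repo>/compare/<prev_tag>...<tag>  when a
    # previous tag exists, otherwise a commit diff  .../<owner>/<repo>/commit/<sha>.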
# ---- Fetch + parse feed -------------------------------------------------------
try:
log.info("Fetching Atom from %s", feed_url)
resp = requests.get(feed_url, timeout=15)
resp.raise_for_status()
feed = feedparser.parse(resp.content)
except Exception as e:
log.warning("Fetch/parse failed: %s", e)
return
sent_key = f"gitea_{org_name}"
# Cache for tag->sha resolution across entries
tag_sha_cache = domain_cache.setdefault("gitea_tag_sha_cache", {})
for entry in feed.entries:
try:
entry_id = getattr(entry, "id", None)
if not entry_id:
continue
if has_been_sent(sent_key, entry_id):
continue
title_html = getattr(entry, "title", "")
summary_html = getattr(entry, "summary", "")
content_html = ""
if getattr(entry, "content", None):
content_html = entry.content[0].get("value", "")
updated = getattr(entry, "updated", "")
author_raw = getattr(entry, "author", "")
# Collect + classify URLs
all_links = _collect_links(entry)
compares, commits, branches, tags, repos = _classify(all_links)
log.debug(
"Entry id=%s title='%s' links: compares=%s commits=%s branches=%s tags=%s repos=%s",
entry_id,
text_only(title_html)[:120],
[u for u, _ in compares],
[u for u, _ in commits],
[u for u, _ in branches],
[u for u, _ in tags],
[u for u, _ in repos],
)
# Repo: explicit home if present, else derive from any URL
if repos:
repo_link, _repo_text = repos[0]
else:
repo_link = None
for url, _ in (compares + commits + branches + tags):
m = re.match(r"^(https?://[^/]+/[^/]+/[^/]+)", url)
if m:
repo_link = m.group(1)
break
# Diff: compare > commit > branch > tag
diff_link = None
diff_kind = "-"
if compares:
diff_link, _ = compares[0]
diff_kind = "compare"
elif commits:
diff_link, _ = commits[0]
diff_kind = "commit"
elif branches:
diff_link, _ = branches[0]
diff_kind = "branch"
elif tags:
diff_link, _ = tags[0]
diff_kind = "tag"
            # Repo name (used in the attachment title); refined below when derivable
            repo = ""
# Tag-only upgrade: try to turn tag page into commit or compare
if diff_kind == "tag" and diff_link:
try:
m = re.match(r"^https?://[^/]+/([^/]+)/([^/]+)/src/tag/([^/]+)$", diff_link)
if m:
owner, repo, tag_name = m.group(1), m.group(2), m.group(3)
sha = _resolve_tag_to_commit(owner, repo, tag_name, tag_sha_cache)
if sha:
prev_tag = _find_previous_tag(owner, repo, tag_name)
if prev_tag:
diff_link = f"{base_host}/{owner}/{repo}/compare/{prev_tag}...{tag_name}"
diff_kind = "compare"
else:
diff_link = f"{base_host}/{owner}/{repo}/commit/{sha}"
diff_kind = "commit"
log.debug("Upgraded tag-only diff to %s: %s", diff_kind, diff_link)
else:
log.debug("Could not resolve tag '%s' for %s/%s; keeping tag URL", tag_name, owner, repo)
except Exception:
log.exception("Error upgrading tag-only entry to a commit/compare diff")
            # If the tag-upgrade path did not set a repo name, derive it from repo_link
            if not repo and repo_link:
                repo = repo_link.rstrip("/").split("/")[-1]
# Clean author & title; extract commit message
author = clean_author(author_raw)
title_text = text_only(title_html)
commit_msg = _extract_commit_message(summary_html, content_html)
# ---- Build fields -----------------------------------------------------
fields = []
fields.append({"title": "*Action*", "value": f"{org_name}: {title_text}", "short": False})
if commit_msg:
fields.append({"title": "*Message*", "value": commit_msg, "short": False})
if updated:
fields.append({"title": "*Date*", "value": updated, "short": True})
if author:
fields.append({"title": "*Author*", "value": author, "short": True})
link_parts = []
if diff_link:
link_parts.append(f"[Diff]({diff_link})")
if repo_link and (repo_link != diff_link):
link_parts.append(f"[Repo]({repo_link})")
if link_parts:
fields.append({"title": "*Links*", "value": " | ".join(link_parts), "short": False})
# Title click-through prefers diff, else repo
title_link = diff_link or repo_link
attachment = {
"title": f"{org_name}/{repo}",
"title_link": title_link,
"text": "", # we use fields for layout
"fields": fields, # <<<<<<<<<<<<<<<<<<<<<<<<
"color": "#764FA5",
"collapsed": False,
}
# Send & mark
send_to_rocket_chat(f"Gitea-{org_name}", "", [attachment], chat_url)
mark_as_sent(sent_key, entry_id)
log.info(
"Sent: diffKind=%s diff=%s repo=%s title='%s'",
diff_kind, (diff_link or "-"), (repo_link or "-"),
title_text[:160],
)
except Exception:
log.exception("Error processing Gitea entry for org '%s'", org_name)
# --- Self-test: runs against embedded sample Atom and prints attachments (no sending) ---
SMESERVER_SAMPLE_ATOM = r"""
Feed of "smeserver"
https://src.koozali.org/smeserver
2025-10-29T12:41:45+01:00
brianr pushed tag <a href="https://src.koozali.org/smeserver/smeserver-manager/src/tag/11_0_0-124_el8_sme">11_0_0-124_el8_sme</a> to <a href="https://src.koozali.org/smeserver/smeserver-manager">smeserver/smeserver-manager</a>
2025-10-24T13:41:59+02:00
84593: https://src.koozali.org/smeserver/smeserver-manager/src/tag/11_0_0-124_el8_sme
brianr
brianr@noreply.koozali.org
brianr pushed to <a href="https://src.koozali.org/smeserver/smeserver-manager/src/branch/master">master</a> at <a href="https://src.koozali.org/smeserver/smeserver-manager">smeserver/smeserver-manager</a>
2025-10-24T13:41:58+02:00
84586: https://src.koozali.org/smeserver/smeserver-manager/commit/8e270ef3fd973ef27d0087fcfa02f614c1e13676
<a href="https://src.koozali.org/smeserver/smeserver-manager/commit/8e270ef3fd973ef27d0087fcfa02f614c1e13676" rel="nofollow">8e270ef3fd973ef27d0087fcfa02f614c1e13676</a>
* Fri Oct 24 2025 Brian Read <<a href="mailto:brianr@koozali.org" data-markdown-generated-content="" rel="nofollow">brianr@koozali.org</a>> 11.0.0-124.sme
<a href="https://src.koozali.org/smeserver/smeserver-manager/commit/8e270ef3fd973ef27d0087fcfa02f614c1e13676">8e270ef3fd973ef27d0087fcfa02f614c1e13676</a>
* Fri Oct 24 2025 Brian Read <<a href="mailto:brianr@koozali.org" data-markdown-generated-content="">brianr@koozali.org</a>> 11.0.0-124.sme
brianr
brianr@noreply.koozali.org
brianr pushed tag <a href="https://src.koozali.org/smeserver/smeserver-manager/src/tag/11_0_0-123_el8_sme">11_0_0-123_el8_sme</a> to <a href="https://src.koozali.org/smeserver/smeserver-manager">smeserver/smeserver-manager</a>
2025-10-24T11:50:58+02:00
84579: https://src.koozali.org/smeserver/smeserver-manager/src/tag/11_0_0-123_el8_sme
brianr
brianr@noreply.koozali.org
brianr pushed to <a href="https://src.koozali.org/smeserver/smeserver-manager/src/branch/master">master</a> at <a href="https://src.koozali.org/smeserver/smeserver-manager">smeserver/smeserver-manager</a>
2025-10-24T11:50:56+02:00
84572: https://src.koozali.org/smeserver/smeserver-manager/commit/a04097bf5a2fefe78aa3d324bf4d9d9ce90f31ad
<a href="https://src.koozali.org/smeserver/smeserver-manager/commit/a04097bf5a2fefe78aa3d324bf4d9d9ce90f31ad" rel="nofollow">a04097bf5a2fefe78aa3d324bf4d9d9ce90f31ad</a>
* Fri Oct 24 2025 Brian Read <<a href="mailto:brianr@koozali.org" data-markdown-generated-content="" rel="nofollow">brianr@koozali.org</a>> 11.0.0-123.sme
<a href="https://src.koozali.org/smeserver/smeserver-manager/commit/a04097bf5a2fefe78aa3d324bf4d9d9ce90f31ad">a04097bf5a2fefe78aa3d324bf4d9d9ce90f31ad</a>
* Fri Oct 24 2025 Brian Read <<a href="mailto:brianr@koozali.org" data-markdown-generated-content="">brianr@koozali.org</a>> 11.0.0-123.sme
brianr
brianr@noreply.koozali.org
brianr pushed tag <a href="https://src.koozali.org/smeserver/smeserver-manager-jsquery/src/tag/11_0_0-11_el8_sme">11_0_0-11_el8_sme</a> to <a href="https://src.koozali.org/smeserver/smeserver-manager-jsquery">smeserver/smeserver-manager-jsquery</a>
2025-10-24T11:41:20+02:00
84565: https://src.koozali.org/smeserver/smeserver-manager-jsquery/src/tag/11_0_0-11_el8_sme
brianr
brianr@noreply.koozali.org
brianr pushed to <a href="https://src.koozali.org/smeserver/smeserver-manager-jsquery/src/branch/master">master</a> at <a href="https://src.koozali.org/smeserver/smeserver-manager-jsquery">smeserver/smeserver-manager-jsquery</a>
2025-10-24T11:41:20+02:00
84558: https://src.koozali.org/smeserver/smeserver-manager-jsquery/commit/e026aa17369953a482be3ecb1338f39cada6d03a
<a href="https://src.koozali.org/smeserver/smeserver-manager-jsquery/commit/e026aa17369953a482be3ecb1338f39cada6d03a" rel="nofollow">e026aa17369953a482be3ecb1338f39cada6d03a</a>
* Thu Oct 23 2025 Brian Read <<a href="mailto:brianr@koozali.org" data-markdown-generated-content="" rel="nofollow">brianr@koozali.org</a>> 11.0.0-11.sme
<a href="https://src.koozali.org/smeserver/smeserver-manager-jsquery/commit/e026aa17369953a482be3ecb1338f39cada6d03a">e026aa17369953a482be3ecb1338f39cada6d03a</a>
* Thu Oct 23 2025 Brian Read <<a href="mailto:brianr@koozali.org" data-markdown-generated-content="">brianr@koozali.org</a>> 11.0.0-11.sme
brianr
brianr@noreply.koozali.org
jpp pushed to <a href="https://src.koozali.org/smeserver/common/src/branch/master">master</a> at <a href="https://src.koozali.org/smeserver/common">smeserver/common</a>
2025-10-23T18:20:23+02:00
83886: /smeserver/common/compare/507cc753ec53612b622047344a527314158808a3...8d5535b58b89c2ab8757842325a687c73022f317
<a href="https://src.koozali.org/smeserver/common/commit/8d5535b58b89c2ab8757842325a687c73022f317" rel="nofollow">8d5535b58b89c2ab8757842325a687c73022f317</a>
filter ARCHIVEFILE to only get archives ending with z
<a href="https://src.koozali.org/smeserver/common/commit/63f19e99973fe9d254394b3fbd08a57f14c8d7f4" rel="nofollow">63f19e99973fe9d254394b3fbd08a57f14c8d7f4</a>
add info
<a href="https://src.koozali.org/smeserver/common/commit/8d5535b58b89c2ab8757842325a687c73022f317">8d5535b58b89c2ab8757842325a687c73022f317</a>
filter ARCHIVEFILE to only get archives ending with z
<a href="https://src.koozali.org/smeserver/common/commit/63f19e99973fe9d254394b3fbd08a57f14c8d7f4">63f19e99973fe9d254394b3fbd08a57f14c8d7f4</a>
add info
jpp
jpp@noreply.koozali.org
brianr pushed to <a href="https://src.koozali.org/smeserver/smeserver-manager/src/branch/master">master</a> at <a href="https://src.koozali.org/smeserver/smeserver-manager">smeserver/smeserver-manager</a>
2025-10-22T10:44:34+02:00
83707: https://src.koozali.org/smeserver/smeserver-manager/commit/9437dd792a2117ba41c433881fef4a915acfcc2c
<a href="https://src.koozali.org/smeserver/smeserver-manager/commit/9437dd792a2117ba41c433881fef4a915acfcc2c" rel="nofollow">9437dd792a2117ba41c433881fef4a915acfcc2c</a>
html comment closure leaks onto panel
<a href="https://src.koozali.org/smeserver/smeserver-manager/commit/9437dd792a2117ba41c433881fef4a915acfcc2c">9437dd792a2117ba41c433881fef4a915acfcc2c</a>
html comment closure leaks onto panel
brianr
brianr@noreply.koozali.org
brianr pushed tag <a href="https://src.koozali.org/smeserver/smeserver-manager/src/tag/11_0_0-122_el8_sme">11_0_0-122_el8_sme</a> to <a href="https://src.koozali.org/smeserver/smeserver-manager">smeserver/smeserver-manager</a>
2025-10-21T20:28:14+02:00
83700: https://src.koozali.org/smeserver/smeserver-manager/src/tag/11_0_0-122_el8_sme
brianr
brianr@noreply.koozali.org
brianr pushed to <a href="https://src.koozali.org/smeserver/smeserver-manager/src/branch/master">master</a> at <a href="https://src.koozali.org/smeserver/smeserver-manager">smeserver/smeserver-manager</a>
2025-10-21T20:28:12+02:00
83693: https://src.koozali.org/smeserver/smeserver-manager/commit/f03d82ebf746e38a7678f1ee82ab754bb20da9eb
<a href="https://src.koozali.org/smeserver/smeserver-manager/commit/f03d82ebf746e38a7678f1ee82ab754bb20da9eb" rel="nofollow">f03d82ebf746e38a7678f1ee82ab754bb20da9eb</a>
* Tue Oct 21 2025 Brian Read <<a href="mailto:brianr@koozali.org" data-markdown-generated-content="" rel="nofollow">brianr@koozali.org</a>> 11.0.0-122.sme
<a href="https://src.koozali.org/smeserver/smeserver-manager/commit/f03d82ebf746e38a7678f1ee82ab754bb20da9eb">f03d82ebf746e38a7678f1ee82ab754bb20da9eb</a>
* Tue Oct 21 2025 Brian Read <<a href="mailto:brianr@koozali.org" data-markdown-generated-content="">brianr@koozali.org</a>> 11.0.0-122.sme
brianr
brianr@noreply.koozali.org
jcrisp pushed tag <a href="https://src.koozali.org/smeserver/smeserver-certificates/src/tag/11_0-11_el8_sme">11_0-11_el8_sme</a> to <a href="https://src.koozali.org/smeserver/smeserver-certificates">smeserver/smeserver-certificates</a>
2025-10-15T17:10:17+02:00
83634: https://src.koozali.org/smeserver/smeserver-certificates/src/tag/11_0-11_el8_sme
jcrisp
jcrisp@noreply.koozali.org
jcrisp pushed to <a href="https://src.koozali.org/smeserver/smeserver-certificates/src/branch/master">master</a> at <a href="https://src.koozali.org/smeserver/smeserver-certificates">smeserver/smeserver-certificates</a>
2025-10-15T17:05:40+02:00
83627: https://src.koozali.org/smeserver/smeserver-certificates/commit/73ef48ef5f0bfb5e9e990b4f6b0948f662a3f9b4
<a href="https://src.koozali.org/smeserver/smeserver-certificates/commit/73ef48ef5f0bfb5e9e990b4f6b0948f662a3f9b4" rel="nofollow">73ef48ef5f0bfb5e9e990b4f6b0948f662a3f9b4</a>
Fix typo
<a href="https://src.koozali.org/smeserver/smeserver-certificates/commit/73ef48ef5f0bfb5e9e990b4f6b0948f662a3f9b4">73ef48ef5f0bfb5e9e990b4f6b0948f662a3f9b4</a>
Fix typo
jcrisp
jcrisp@noreply.koozali.org
brianr pushed tag <a href="https://src.koozali.org/smeserver/smeserver-update/src/tag/11_0_0-31_el8_sme">11_0_0-31_el8_sme</a> to <a href="https://src.koozali.org/smeserver/smeserver-update">smeserver/smeserver-update</a>
2025-10-06T19:34:20+02:00
81653: https://src.koozali.org/smeserver/smeserver-update/src/tag/11_0_0-31_el8_sme
brianr
brianr@noreply.koozali.org
brianr pushed to <a href="https://src.koozali.org/smeserver/smeserver-update/src/branch/master">master</a> at <a href="https://src.koozali.org/smeserver/smeserver-update">smeserver/smeserver-update</a>
2025-10-06T19:34:15+02:00
81646: https://src.koozali.org/smeserver/smeserver-update/commit/27485c3952d2aa55de468a70cc5f69939580bb87
<a href="https://src.koozali.org/smeserver/smeserver-update/commit/27485c3952d2aa55de468a70cc5f69939580bb87" rel="nofollow">27485c3952d2aa55de468a70cc5f69939580bb87</a>
* Mon Oct 06 2025 Brian Read <<a href="mailto:brianr@koozali.org" data-markdown-generated-content="" rel="nofollow">brianr@koozali.org</a>> 11.0.0-31.sme
<a href="https://src.koozali.org/smeserver/smeserver-update/commit/27485c3952d2aa55de468a70cc5f69939580bb87">27485c3952d2aa55de468a70cc5f69939580bb87</a>
* Mon Oct 06 2025 Brian Read <<a href="mailto:brianr@koozali.org" data-markdown-generated-content="">brianr@koozali.org</a>> 11.0.0-31.sme
brianr
brianr@noreply.koozali.org
jpp pushed to <a href="https://src.koozali.org/smeserver/smeserver-ntp/src/branch/master">master</a> at <a href="https://src.koozali.org/smeserver/smeserver-ntp">smeserver/smeserver-ntp</a>
2025-10-03T21:50:22+02:00
81202: https://src.koozali.org/smeserver/smeserver-ntp/commit/8879d29ca50e750e0890f11d4c0d405954ae7c2e
<a href="https://src.koozali.org/smeserver/smeserver-ntp/commit/8879d29ca50e750e0890f11d4c0d405954ae7c2e" rel="nofollow">8879d29ca50e750e0890f11d4c0d405954ae7c2e</a>
typo in changelog
<a href="https://src.koozali.org/smeserver/smeserver-ntp/commit/8879d29ca50e750e0890f11d4c0d405954ae7c2e">8879d29ca50e750e0890f11d4c0d405954ae7c2e</a>
typo in changelog
jpp
jpp@noreply.koozali.org
jpp pushed tag <a href="https://src.koozali.org/smeserver/smeserver-ntp/src/tag/11_0_0-8_el8_sme">11_0_0-8_el8_sme</a> to <a href="https://src.koozali.org/smeserver/smeserver-ntp">smeserver/smeserver-ntp</a>
2025-10-03T21:48:25+02:00
81195: https://src.koozali.org/smeserver/smeserver-ntp/src/tag/11_0_0-8_el8_sme
jpp
jpp@noreply.koozali.org
jpp pushed to <a href="https://src.koozali.org/smeserver/smeserver-ntp/src/branch/master">master</a> at <a href="https://src.koozali.org/smeserver/smeserver-ntp">smeserver/smeserver-ntp</a>
2025-10-03T21:47:56+02:00
81188: https://src.koozali.org/smeserver/smeserver-ntp/commit/6d07479bf668b0f0be4ae705841b3208fa9ee233
<a href="https://src.koozali.org/smeserver/smeserver-ntp/commit/6d07479bf668b0f0be4ae705841b3208fa9ee233" rel="nofollow">6d07479bf668b0f0be4ae705841b3208fa9ee233</a>
* Fri Oct 03 2025 Jean-Philippe Pialasse <<a href="mailto:jpp@koozali.org" data-markdown-generated-content="" rel="nofollow">jpp@koozali.org</a>> 11.0.0-8.sme
<a href="https://src.koozali.org/smeserver/smeserver-ntp/commit/6d07479bf668b0f0be4ae705841b3208fa9ee233">6d07479bf668b0f0be4ae705841b3208fa9ee233</a>
* Fri Oct 03 2025 Jean-Philippe Pialasse <<a href="mailto:jpp@koozali.org" data-markdown-generated-content="">jpp@koozali.org</a>> 11.0.0-8.sme
jpp
jpp@noreply.koozali.org
jpp pushed tag <a href="https://src.koozali.org/smeserver/smeserver-update/src/tag/11_0_0-30_el8_sme">11_0_0-30_el8_sme</a> to <a href="https://src.koozali.org/smeserver/smeserver-update">smeserver/smeserver-update</a>
2025-10-03T15:17:43+02:00
81125: https://src.koozali.org/smeserver/smeserver-update/src/tag/11_0_0-30_el8_sme
jpp
jpp@noreply.koozali.org
jpp pushed to <a href="https://src.koozali.org/smeserver/smeserver-update/src/branch/master">master</a> at <a href="https://src.koozali.org/smeserver/smeserver-update">smeserver/smeserver-update</a>
2025-10-03T15:17:15+02:00
81118: https://src.koozali.org/smeserver/smeserver-update/commit/8e29af1670c27e91ff9846f7c677b8e1e943e94a
<a href="https://src.koozali.org/smeserver/smeserver-update/commit/8e29af1670c27e91ff9846f7c677b8e1e943e94a" rel="nofollow">8e29af1670c27e91ff9846f7c677b8e1e943e94a</a>
* Thu Oct 02 2025 Jean-Philippe Pialasse <<a href="mailto:jpp@koozali.org" data-markdown-generated-content="" rel="nofollow">jpp@koozali.org</a>> 11.0.0-30.sme
<a href="https://src.koozali.org/smeserver/smeserver-update/commit/8e29af1670c27e91ff9846f7c677b8e1e943e94a">8e29af1670c27e91ff9846f7c677b8e1e943e94a</a>
* Thu Oct 02 2025 Jean-Philippe Pialasse <<a href="mailto:jpp@koozali.org" data-markdown-generated-content="">jpp@koozali.org</a>> 11.0.0-30.sme
jpp
jpp@noreply.koozali.org
jpp pushed tag <a href="https://src.koozali.org/smeserver/smeserver-update/src/tag/11_0_0-29_el8_sme">11_0_0-29_el8_sme</a> to <a href="https://src.koozali.org/smeserver/smeserver-update">smeserver/smeserver-update</a>
2025-10-02T15:51:27+02:00
81083: https://src.koozali.org/smeserver/smeserver-update/src/tag/11_0_0-29_el8_sme
jpp
jpp@noreply.koozali.org
jpp pushed to <a href="https://src.koozali.org/smeserver/smeserver-update/src/branch/master">master</a> at <a href="https://src.koozali.org/smeserver/smeserver-update">smeserver/smeserver-update</a>
2025-10-02T15:49:58+02:00
81076: https://src.koozali.org/smeserver/smeserver-update/commit/37f6399569c303a9a64de84bd39b836b1de78598
<a href="https://src.koozali.org/smeserver/smeserver-update/commit/37f6399569c303a9a64de84bd39b836b1de78598" rel="nofollow">37f6399569c303a9a64de84bd39b836b1de78598</a>
* Thu Oct 02 2025 Jean-Philippe Pialasse <<a href="mailto:jpp@koozali.org" data-markdown-generated-content="" rel="nofollow">jpp@koozali.org</a>> 11.0.0-29.sme
<a href="https://src.koozali.org/smeserver/smeserver-update/commit/37f6399569c303a9a64de84bd39b836b1de78598">37f6399569c303a9a64de84bd39b836b1de78598</a>
* Thu Oct 02 2025 Jean-Philippe Pialasse <<a href="mailto:jpp@koozali.org" data-markdown-generated-content="">jpp@koozali.org</a>> 11.0.0-29.sme
jpp
jpp@noreply.koozali.org
brianr pushed tag <a href="https://src.koozali.org/smeserver/smeserver-manager/src/tag/11_0_0-121_el8_sme">11_0_0-121_el8_sme</a> to <a href="https://src.koozali.org/smeserver/smeserver-manager">smeserver/smeserver-manager</a>
2025-09-27T13:30:07+02:00
80913: https://src.koozali.org/smeserver/smeserver-manager/src/tag/11_0_0-121_el8_sme
brianr
brianr@noreply.koozali.org
brianr pushed to <a href="https://src.koozali.org/smeserver/smeserver-manager/src/branch/master">master</a> at <a href="https://src.koozali.org/smeserver/smeserver-manager">smeserver/smeserver-manager</a>
2025-09-27T13:30:05+02:00
80906: https://src.koozali.org/smeserver/smeserver-manager/commit/de2f78a0892214b8c1fdd2de7b2eed177d398aac
<a href="https://src.koozali.org/smeserver/smeserver-manager/commit/de2f78a0892214b8c1fdd2de7b2eed177d398aac" rel="nofollow">de2f78a0892214b8c1fdd2de7b2eed177d398aac</a>
* Sat Sep 27 2025 Brian Read <<a href="mailto:brianr@koozali.org" data-markdown-generated-content="" rel="nofollow">brianr@koozali.org</a>> 11.0.0-121.sme
<a href="https://src.koozali.org/smeserver/smeserver-manager/commit/de2f78a0892214b8c1fdd2de7b2eed177d398aac">de2f78a0892214b8c1fdd2de7b2eed177d398aac</a>
* Sat Sep 27 2025 Brian Read <<a href="mailto:brianr@koozali.org" data-markdown-generated-content="">brianr@koozali.org</a>> 11.0.0-121.sme
brianr
brianr@noreply.koozali.org
jpp pushed tag <a href="https://src.koozali.org/smeserver/smeserver-proftpd/src/tag/11_0_0-12_el8_sme">11_0_0-12_el8_sme</a> to <a href="https://src.koozali.org/smeserver/smeserver-proftpd">smeserver/smeserver-proftpd</a>
2025-09-26T18:49:52+02:00
80885: https://src.koozali.org/smeserver/smeserver-proftpd/src/tag/11_0_0-12_el8_sme
jpp
jpp@noreply.koozali.org
jpp pushed to <a href="https://src.koozali.org/smeserver/smeserver-proftpd/src/branch/master">master</a> at <a href="https://src.koozali.org/smeserver/smeserver-proftpd">smeserver/smeserver-proftpd</a>
2025-09-26T18:49:43+02:00
80878: https://src.koozali.org/smeserver/smeserver-proftpd/commit/ed837ffb760943d12a6462c35c7c9f48176b91d8
<a href="https://src.koozali.org/smeserver/smeserver-proftpd/commit/ed837ffb760943d12a6462c35c7c9f48176b91d8" rel="nofollow">ed837ffb760943d12a6462c35c7c9f48176b91d8</a>
* Fri Sep 26 2025 Jean-Philippe Pialasse <<a href="mailto:jpp@koozali.org" data-markdown-generated-content="" rel="nofollow">jpp@koozali.org</a>> 11.0.0-12.sme
<a href="https://src.koozali.org/smeserver/smeserver-proftpd/commit/ed837ffb760943d12a6462c35c7c9f48176b91d8">ed837ffb760943d12a6462c35c7c9f48176b91d8</a>
* Fri Sep 26 2025 Jean-Philippe Pialasse <<a href="mailto:jpp@koozali.org" data-markdown-generated-content="">jpp@koozali.org</a>> 11.0.0-12.sme
jpp
jpp@noreply.koozali.org
brianr pushed tag <a href="https://src.koozali.org/smeserver/smeserver-manager/src/tag/11_0_0-120_el8_sme">11_0_0-120_el8_sme</a> to <a href="https://src.koozali.org/smeserver/smeserver-manager">smeserver/smeserver-manager</a>
2025-09-25T19:45:32+02:00
80752: https://src.koozali.org/smeserver/smeserver-manager/src/tag/11_0_0-120_el8_sme
brianr
brianr@noreply.koozali.org
brianr pushed to <a href="https://src.koozali.org/smeserver/smeserver-manager/src/branch/master">master</a> at <a href="https://src.koozali.org/smeserver/smeserver-manager">smeserver/smeserver-manager</a>
2025-09-25T19:45:30+02:00
80745: https://src.koozali.org/smeserver/smeserver-manager/commit/b838d9252a7d6fd52e9dc3e9c6fc91f4f8d726d7
<a href="https://src.koozali.org/smeserver/smeserver-manager/commit/b838d9252a7d6fd52e9dc3e9c6fc91f4f8d726d7" rel="nofollow">b838d9252a7d6fd52e9dc3e9c6fc91f4f8d726d7</a>
* Thu Sep 25 2025 Brian Read <<a href="mailto:brianr@koozali.org" data-markdown-generated-content="" rel="nofollow">brianr@koozali.org</a>> 11.0.0-120.sme
<a href="https://src.koozali.org/smeserver/smeserver-manager/commit/b838d9252a7d6fd52e9dc3e9c6fc91f4f8d726d7">b838d9252a7d6fd52e9dc3e9c6fc91f4f8d726d7</a>
* Thu Sep 25 2025 Brian Read <<a href="mailto:brianr@koozali.org" data-markdown-generated-content="">brianr@koozali.org</a>> 11.0.0-120.sme
brianr
brianr@noreply.koozali.org
brianr pushed tag <a href="https://src.koozali.org/smeserver/smeserver-manager/src/tag/11_0_0-119_el8_sme">11_0_0-119_el8_sme</a> to <a href="https://src.koozali.org/smeserver/smeserver-manager">smeserver/smeserver-manager</a>
2025-09-25T16:42:44+02:00
80708: https://src.koozali.org/smeserver/smeserver-manager/src/tag/11_0_0-119_el8_sme
brianr
brianr@noreply.koozali.org
brianr pushed to <a href="https://src.koozali.org/smeserver/smeserver-manager/src/branch/master">master</a> at <a href="https://src.koozali.org/smeserver/smeserver-manager">smeserver/smeserver-manager</a>
2025-09-25T16:42:39+02:00
80701: https://src.koozali.org/smeserver/smeserver-manager/commit/9c9ab9186966b5ba893528a79531bc608c42ac7b
<a href="https://src.koozali.org/smeserver/smeserver-manager/commit/9c9ab9186966b5ba893528a79531bc608c42ac7b" rel="nofollow">9c9ab9186966b5ba893528a79531bc608c42ac7b</a>
* Thu Sep 25 2025 Brian Read <<a href="mailto:brianr@koozali.org" data-markdown-generated-content="" rel="nofollow">brianr@koozali.org</a>> 11.0.0-119.sme
<a href="https://src.koozali.org/smeserver/smeserver-manager/commit/9c9ab9186966b5ba893528a79531bc608c42ac7b">9c9ab9186966b5ba893528a79531bc608c42ac7b</a>
* Thu Sep 25 2025 Brian Read <<a href="mailto:brianr@koozali.org" data-markdown-generated-content="">brianr@koozali.org</a>> 11.0.0-119.sme
brianr
brianr@noreply.koozali.org
jpp pushed tag <a href="https://src.koozali.org/smeserver/smeserver-proftpd/src/tag/11_0_0-11_el8_sme">11_0_0-11_el8_sme</a> to <a href="https://src.koozali.org/smeserver/smeserver-proftpd">smeserver/smeserver-proftpd</a>
2025-09-25T16:32:37+02:00
80694: https://src.koozali.org/smeserver/smeserver-proftpd/src/tag/11_0_0-11_el8_sme
jpp
jpp@noreply.koozali.org
"""
def selftest_gitea_sample(org_name="smeserver", log_level="DEBUG"):
"""
Parse the embedded sample Atom and print the Rocket.Chat attachment JSON
that would be sent, one per entry. Does NOT call the webhook.
"""
logger = logging.getLogger("gitea_selftest")
logger.setLevel(getattr(logging, log_level.upper(), logging.DEBUG))
base_host = "https://src.koozali.org"
feed = feedparser.parse(SMESERVER_SAMPLE_ATOM)
out = []
for entry in feed.entries:
title_html = getattr(entry, "title", "")
summary_html = getattr(entry, "summary", "")
content_html = ""
if getattr(entry, "content", None):
content_html = entry.content[0].get("value", "")
updated = getattr(entry, "updated", "")
author_raw = getattr(entry, "author", "")
entry_link = getattr(entry, "link", "")
att = _gitea_build_attachment(
org_name=org_name,
title_html=title_html,
summary_html=summary_html,
content_html=content_html,
entry_link=entry_link,
updated=updated,
author_raw=author_raw,
base_host=base_host,
)
out.append(att)
# Pretty print to console & log
for i, att in enumerate(out, 1):
logger.debug("Attachment %d: %s", i, json.dumps(att, ensure_ascii=False))
print(f"\n--- Attachment {i} ---")
print(json.dumps(att, ensure_ascii=False, indent=2))
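# Run the selftest (no network access, nothing is posted to Rocket.Chat) with:
#   python FeedToRocket.py --selftest-gitea --log-level DEBUG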
# ---------------------------
# Main loop
# ---------------------------
def main():
parser = argparse.ArgumentParser(description="Unified Feed -> Rocket.Chat notifier")
parser.add_argument("--sleep", type=int, default=1, help="Minutes to sleep between polls")
parser.add_argument("--one-off", action="store_true", help="Run once then exit")
parser.add_argument("--empty-db", action="store_true", help="Clear DB before start")
parser.add_argument("--feeds", type=str, default="", help="Comma-separated subset of feeds")
parser.add_argument("--log-level", type=str, default="INFO", help="Logging level")
parser.add_argument("--selftest-gitea", action="store_true",
help="Run built-in Gitea parser selftest using the embedded sample Atom (no network, no send)")
args = parser.parse_args()
log_path = init_logging(args.log_level)
logging.info("FeedToRocket starting (log: %s)", log_path)
logging.debug("Loaded feeds: %s", FEED_CONFIG)
if args.selftest_gitea:
logging.info("Running Gitea selftest against embedded sample Atom…")
selftest_gitea_sample(org_name="smeserver", log_level=args.log_level)
return
setup_database()
if args.empty_db:
clear_database()
selected = {f.strip() for f in args.feeds.split(",") if f.strip()} if args.feeds else set()
domain_cache = {}
for name, conf in FEED_CONFIG.items():
if not conf.get("enabled"): continue
if selected and name not in selected: continue
send_startup_message(name, conf.get("chat_url"))
processors = {"bugzilla": process_bugzilla, "koji": process_koji, "wiki": process_wiki, "gitea": process_gitea}
sleep_sec = max(1, args.sleep) * 60
while True:
start = time.time()
for name, conf in FEED_CONFIG.items():
if not conf.get("enabled"): continue
if selected and name not in selected: continue
try:
proc = processors[conf["type"]]
proc(conf, args.one_off, domain_cache)
except Exception as e:
logging.exception("Feed %s failed: %s", name, e)
if args.one_off:
logging.info("One-off mode complete; exiting.")
break
elapsed = time.time() - start
time.sleep(max(1, sleep_sec - elapsed))
if __name__ == "__main__":
main()