Compare commits
6 Commits
v0.9.3...3e1255ccdc

Commits:
- 3e1255ccdc
- 3bb33ca379
- 7fe2a1de00
- 8f5813b39c
- 17357da659
- e4fc11405a

38  .chglog/CHANGELOG.tpl.md  Executable file

@@ -0,0 +1,38 @@
{{ with index .Versions 0 }}
<a name="{{ .Tag.Name }}"></a>
## {{ if .Tag.Previous }}[{{ .Tag.Name }}]({{ $.Info.RepositoryURL }}/compare/{{ .Tag.Previous.Name }}...{{ .Tag.Name }}){{ else }}{{ .Tag.Name }}{{ end }} ({{ datetime "2006-01-02" .Tag.Date }})

{{ range .CommitGroups -}}
### {{ .Title }}

{{ range .Commits -}}
* {{ if .Scope }}**{{ .Scope }}:** {{ end }}{{ .Subject }} ([{{ .Hash.Short }}]({{ $.Info.RepositoryURL }}/commit/{{ .Hash.Short }}))
{{ end }}
{{ end -}}

{{- if .RevertCommits -}}
### Reverts

{{ range .RevertCommits -}}
* {{ .Revert.Header }} ([{{ .Hash.Short }}]({{ $.Info.RepositoryURL }}/commit/{{ .Hash.Short }}))
{{ end }}
{{ end -}}

{{- if .MergeCommits -}}
### Pull Requests

{{ range .MergeCommits -}}
* {{ .Header }} ([{{ .Hash.Short }}]({{ $.Info.RepositoryURL }}/commit/{{ .Hash.Short }}))
{{ end }}
{{ end -}}

{{- if .NoteGroups -}}
{{ range .NoteGroups -}}
### {{ .Title }}

{{ range .Notes }}
{{ .Body }}
{{ end }}
{{ end -}}
{{ end -}}
{{ end }}

37  .chglog/config.yml  Executable file

@@ -0,0 +1,37 @@
style: github
template: CHANGELOG.tpl.md

info:
  title: CHANGELOG
  repository_url: https://dev.ksite.de/ralf.kirchner/BlueMastoFeed

options:
  commits:
    sort_by: "date" # Optional, default is OK too
    exclude_merge_commits: false

  commit_groups:
    group_by: "Type"
    title_maps:
      feat: Features
      fix: Bug Fixes
      perf: Performance Improvements
      refactor: Code Refactoring
      docs: Documentation
      chore: Maintenance
      test: Tests
      build: Build System
      ci: Continuous Integration
      style: Code Style

  header:
    pattern: "^(\\w*)(?:\\(([\\w\\$\\.\\-\\*\\s]*)\\))?\\:\\s(.*)$"
    pattern_maps:
      - Type
      - Scope
      - Subject

  notes:
    keywords:
      - BREAKING CHANGE
      - DEPRECATED
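
Editor's note: the `header.pattern` regex above is what splits a conventional-commit subject into the three `pattern_maps` fields (Type, Scope, Subject), which `commit_groups.title_maps` then turns into changelog sections. A quick, self-contained check of that behaviour (illustrative only; the commit subjects are made up):

```python
import re

# Header pattern from .chglog/config.yml, unescaped from its YAML double-quoted form.
HEADER_RE = re.compile(r"^(\w*)(?:\(([\w\$\.\-\*\s]*)\))?\:\s(.*)$")

for subject in ("feat(feed): add tag support", "fix: handle empty OG data", "docs: update README"):
    match = HEADER_RE.match(subject)
    if match:
        commit_type, scope, text = match.groups()
        print(commit_type, scope, text)
# feat feed add tag support
# fix None handle empty OG data
# docs None update README
```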

100  .gitea/workflows/release.yml  Executable file

@@ -0,0 +1,100 @@
name: Create Release

on:
  push:
    tags:
      - 'v*' # Only on tags like v1.0.0, v2.0.0

jobs:
  release:
    runs-on: ubuntu-latest

    steps:
      - name: Enable debug output
        run: set -x

      - name: Checkout full history including tags
        uses: actions/checkout@v3
        with:
          fetch-depth: 0
          fetch-tags: true

      - name: Show environment variables for debugging
        run: |
          echo "GIT_REMOTE_URL=$(git config --get remote.origin.url)"
          echo "GITHUB_REF=$GITHUB_REF"

      - name: Extract OWNER and REPO from git remote URL
        id: repo-info
        run: |
          REMOTE_URL=$(git config --get remote.origin.url)
          OWNER=$(echo "$REMOTE_URL" | sed -E 's#.*/([^/]+)/([^/]+)(\.git)?#\1#')
          REPO=$(echo "$REMOTE_URL" | sed -E 's#.*/([^/]+)/([^/]+)(\.git)?#\2#')
          echo "OWNER=$OWNER" >> $GITHUB_ENV
          echo "REPO=$REPO" >> $GITHUB_ENV

      - name: Install git-chglog binary (no Go needed)
        run: |
          GIT_CHGLOG_VERSION="0.15.1"
          curl -sSL "https://github.com/git-chglog/git-chglog/releases/download/v${GIT_CHGLOG_VERSION}/git-chglog_${GIT_CHGLOG_VERSION}_linux_amd64.tar.gz" -o git-chglog.tar.gz
          tar -xzf git-chglog.tar.gz
          chmod +x git-chglog
          sudo mv git-chglog /usr/local/bin/

      - name: Determine current and previous tag
        id: tags
        run: |
          CURRENT_TAG="${GITHUB_REF##*/}"
          PREVIOUS_TAG=$(git describe --tags --abbrev=0 "${CURRENT_TAG}^" 2>/dev/null || true)

          echo "CURRENT_TAG=$CURRENT_TAG"
          echo "PREVIOUS_TAG=$PREVIOUS_TAG"

          echo "CURRENT_TAG=$CURRENT_TAG" >> $GITHUB_ENV
          echo "PREVIOUS_TAG=$PREVIOUS_TAG" >> $GITHUB_ENV

      - name: Generate CHANGELOG.md
        run: |
          # Optional: full changelog (not used for the release body)
          git-chglog -o CHANGELOG.md

          # Only the relevant section between the two tags
          if [ -n "$PREVIOUS_TAG" ]; then
            git-chglog "$PREVIOUS_TAG..$CURRENT_TAG" > RELEASE_BODY.md
          else
            git-chglog "$CURRENT_TAG" > RELEASE_BODY.md
          fi

          echo "Release changelog content:"
          cat RELEASE_BODY.md

      - name: Replace issue references with Markdown links
        env:
          OWNER: ${{ env.OWNER }}
          REPO: ${{ env.REPO }}
        run: |
          sed -i -E "s/([^\\[])#([0-9]+)/\1[#\2](https:\/\/dev.ksite.de\/${OWNER}\/${REPO}\/issues\/\2)/g" RELEASE_BODY.md

      - name: Create Gitea Release via API
        env:
          TOKEN: ${{ secrets.TOKEN }}
          OWNER: ${{ env.OWNER }}
          REPO: ${{ env.REPO }}
          CURRENT_TAG: ${{ env.CURRENT_TAG }}
        run: |
          # Base64-encode and safely escape for JSON
          BODY=$(base64 -w0 RELEASE_BODY.md)
          DECODED_BODY=$(echo "$BODY" | base64 -d | jq -Rs .)

          echo "Creating release for tag $CURRENT_TAG"

          curl -s -X POST "https://dev.ksite.de/api/v1/repos/${OWNER}/${REPO}/releases" \
            -H "Content-Type: application/json" \
            -H "Authorization: token $TOKEN" \
            -d @- <<EOF
          {
            "tag_name": "${CURRENT_TAG}",
            "name": "${REPO} ${CURRENT_TAG}",
            "body": ${DECODED_BODY}
          }
          EOF
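
Editor's note: the "Replace issue references with Markdown links" step rewrites bare references such as #42 in RELEASE_BODY.md into Markdown links pointing at the repository's issue tracker. A rough Python equivalent of that sed expression, as an illustrative sketch only (the function name is invented here; the workflow itself uses sed):

```python
import re

def link_issue_refs(text: str, owner: str, repo: str, base_url: str = "https://dev.ksite.de") -> str:
    # Rewrite "#123" into "[#123](<base_url>/<owner>/<repo>/issues/123)".
    # Like the sed pattern, this requires one preceding character that is not "[",
    # so references that are already Markdown links are left alone.
    return re.sub(
        r"([^\[])#(\d+)",
        rf"\1[#\2]({base_url}/{owner}/{repo}/issues/\2)",
        text,
    )

print(link_issue_refs("Fix feed parsing #42", "ralf.kirchner", "BlueMastoFeed"))
# Fix feed parsing [#42](https://dev.ksite.de/ralf.kirchner/BlueMastoFeed/issues/42)
```

Note that both patterns require a preceding character, so a bare reference at the very beginning of the text is left as-is; that matches the behaviour of the sed call above.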

9  .gitignore  vendored

@@ -2,3 +2,12 @@
data/*
!data/.gitkeep

# Config & meta
CHANGELOG.md
ENVIRONMENT.md

# IDEs / Editor
.vscode/
.idea/
.DS_Store

Dockerfile

@@ -1,5 +1,5 @@
FROM python:3.11-slim
LABEL version="0.9.0"
LABEL version="0.9.6"

RUN apt-get update && apt-get install -y curl && apt-get clean && rm -rf /var/lib/apt/lists/*

@@ -15,3 +15,5 @@ HEALTHCHECK --interval=1m --timeout=5s --start-period=10s --retries=3 \

EXPOSE 8000

15  README.md

@@ -2,7 +2,19 @@

**BlueMastoFeed** is a Docker-based tool that periodically reads an RSS feed and automatically publishes new entries to **Mastodon** and **Bluesky**.

It checks whether an entry has already been posted and stores that information locally in a file (`/data/seen_posts.txt`). Optionally, OpenGraph data (title, preview image, etc.) of the linked pages is extracted to make the posts more appealing.


## Features

- Reads the RSS feed at a regular interval
- Filters posts by age (`MAX_POST_AGE_DAYS`)
- Prevents duplicate posts using a persisted list of seen IDs
- Posts to:
  - ✅ Mastodon
  - ✅ Bluesky
  - ✅ Both (configurable via `.env`)
- Optional email notification on success or error
- Health check endpoint on port 8000
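
Editor's note: the health check endpoint mentioned above answers HTTP GET requests on `/health`. A minimal way to probe it from Python, assuming the container's port 8000 is published on localhost (illustrative only):

```python
import requests

# Probe the /health endpoint served by the container on port 8000.
resp = requests.get("http://localhost:8000/health", timeout=5)
print(resp.status_code)  # expected: 200 while the service is up
```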

@@ -87,6 +99,7 @@ The following environment variables control the container's behaviour. They can…
| ----------------------- | ------------------------------------------------------------ | -------------------------- | -------------- |
| `FEED_URL` | URL of the RSS or Atom feed | `https://example.com/feed` | _required_ |
| `MAX_POST_AGE_DAYS` | Maximum age (in days) a post may have and still be published | `0` = today's posts only | `0` |
| `POST_TARGETS` | Target platform(s): `mastodon`, `bluesky`, `both` | `mastodon` = Mastodon only | `both` |
| `MASTODON_API_BASE_URL` | Base URL of your Mastodon instance | `https://mastodon.social` | _required_ |
| `MASTODON_ACCESS_TOKEN` | Access token for the Mastodon API | `abc123...` | _required_ |
| `BSKY_IDENTIFIER` | Bluesky handle | `name.bsky.social` | _required_ |

194  bluemastofeed.py

@@ -1,11 +1,12 @@
import os
import time
import feedparser
import json
import logging
import requests
import threading
import smtplib
import re
import unicodedata
from bs4 import BeautifulSoup
from io import BytesIO
from mastodon import Mastodon
@@ -18,6 +19,7 @@ from dateutil import parser as date_parser
from datetime import datetime, timezone, timedelta

load_dotenv()

FEED_URL = os.getenv("FEED_URL")
SEEN_POSTS_FILE = "/data/seen_posts.txt"
MASTODON_BASE_URL = os.getenv("MASTODON_API_BASE_URL")
@@ -25,6 +27,7 @@ MASTODON_TOKEN = os.getenv("MASTODON_ACCESS_TOKEN")
BSKY_HANDLE = os.getenv("BSKY_IDENTIFIER")
BSKY_PASSWORD = os.getenv("BSKY_PASSWORD")
MAX_POST_AGE_DAYS = int(os.getenv("MAX_POST_AGE_DAYS", 0))
POST_TARGETS = os.getenv("POST_TARGETS", "both").lower()

logger = logging.getLogger()
logger.setLevel(logging.INFO)
@@ -35,7 +38,6 @@ logger.addHandler(handler)


class HealthHandler(BaseHTTPRequestHandler):
    """Handles HTTP GET requests for the health check endpoint."""
    def do_GET(self):
        if self.path == "/health":
            self.send_response(200)
@@ -46,26 +48,99 @@ class HealthHandler(BaseHTTPRequestHandler):
            self.end_headers()

    def log_message(self, format, *args):
        """Suppress default HTTP request logging."""
        pass


def start_health_server():
    """Starts the health check HTTP server in a background thread."""
    server = HTTPServer(("0.0.0.0", 8000), HealthHandler)
    thread = threading.Thread(target=server.serve_forever, daemon=True)
    thread.start()
    logger.info("Healthcheck server is running on port 8000.")
    logger.info(f"💡 Healthcheck server running on port 8000.")


def should_send_email(on_success: bool):
    """Determines whether to send a status email based on mode and success."""
    mode = os.getenv("EMAIL_MODE", "errors").lower()
    return (mode == "all") or (mode == "errors" and not on_success)


def extract_facets_utf8(text: str):
    import re
    facets = []

    def get_byte_range(char_start, char_end):
        byte_start = len(text[:char_start].encode("utf-8"))
        byte_end = len(text[:char_end].encode("utf-8"))
        return byte_start, byte_end

    # Hashtags
    for match in re.finditer(r"#(\w+)", text):
        tag = match.group(1)
        char_start, char_end = match.span()
        byte_start, byte_end = get_byte_range(char_start, char_end)

        facets.append({
            "index": {"byteStart": byte_start, "byteEnd": byte_end},
            "features": [{
                "$type": "app.bsky.richtext.facet#tag",
                "tag": tag
            }]
        })

    # Links
    for match in re.finditer(r"https?://[^\s]+", text):
        url = match.group(0)
        char_start, char_end = match.span()
        byte_start, byte_end = get_byte_range(char_start, char_end)

        facets.append({
            "index": {"byteStart": byte_start, "byteEnd": byte_end},
            "features": [{
                "$type": "app.bsky.richtext.facet#link",
                "uri": url
            }]
        })

    return facets


def generate_email_html(status: str, title: str, link: str, error_message: str = None) -> str:
    color = "#2e7d32" if status == "success" else "#d32f2f"
    bg_color = "#f5f5f5" if status == "success" else "#fff3f3"
    border_color = "#ccc" if status == "success" else "#e57373"
    emoji = "✅" if status == "success" else "❌"
    heading = "Post Published" if status == "success" else "Error Posting Entry"
    meta = "This is an automated success notification." if status == "success" else "Please check logs or configuration."

    error_html = f"""
    <p><strong>Error:</strong></p>
    <div class=\"error\">{error_message}</div>
    """ if error_message else ""

    return f"""
    <html>
    <head>
      <style>
        body {{ font-family: 'Courier New', monospace; background-color: {bg_color}; color: #333; padding: 20px; }}
        .container {{ background-color: #ffffff; border: 1px solid {border_color}; border-radius: 8px; padding: 20px; max-width: 600px; margin: auto; }}
        h2 {{ color: {color}; }}
        a {{ color: #1a73e8; text-decoration: none; }}
        .error {{ font-family: monospace; background-color: #fce4ec; padding: 10px; border-radius: 4px; color: #b71c1c; }}
        .meta {{ font-size: 14px; color: #777; }}
      </style>
    </head>
    <body>
      <div class=\"container\">
        <h2>{emoji} {heading}</h2>
        <p><strong>Title:</strong><br>{title}</p>
        <p><strong>Link:</strong><br><a href=\"{link}\">{link}</a></p>
        {error_html}
        <p class=\"meta\">{meta}</p>
      </div>
    </body>
    </html>
    """


def send_status_email(subject, html_content):
    """Sends a formatted HTML email with the given subject and content."""
    try:
        smtp_host = os.getenv("SMTP_HOST")
        smtp_port = int(os.getenv("SMTP_PORT", 587))
@@ -84,13 +159,13 @@ def send_status_email(subject, html_content):
        server.starttls()
        server.login(smtp_user, smtp_password)
        server.sendmail(email_from, email_to, msg.as_string())
        logger.info("Status email sent.")

        logger.info(f"✅ Status email sent successfully.")
    except Exception as e:
        logger.error(f"Error sending status email: {e}")
        logger.error(f"❌ Error sending email: {e}")


def load_seen_ids():
    """Loads the set of already seen post IDs from file."""
    os.makedirs(os.path.dirname(SEEN_POSTS_FILE), exist_ok=True)
    if not os.path.exists(SEEN_POSTS_FILE):
        open(SEEN_POSTS_FILE, "w").close()
@@ -99,19 +174,20 @@ def load_seen_ids():


def save_seen_id(post_id):
    """Appends a new post ID to the seen posts file."""
    with open(SEEN_POSTS_FILE, "a") as f:
        f.write(post_id + "\n")


def post_to_mastodon(message):
    """Posts a message to Mastodon."""
def post_to_mastodon(title, link, tags):
    mastodon = Mastodon(access_token=MASTODON_TOKEN, api_base_url=MASTODON_BASE_URL)
    hashtags = " ".join(f"#{tag}" for tag in tags) if tags else ""
    message = f"{title}\n\n{link}"
    if hashtags:
        message += f"\n\n{hashtags}"
    mastodon.toot(message)


def fetch_og_data(url):
    """Fetches Open Graph title and image URL from a web page."""
    try:
        resp = requests.get(url, timeout=10)
        resp.raise_for_status()
@@ -122,20 +198,25 @@ def fetch_og_data(url):
        image_url = og_image["content"] if og_image and og_image.has_attr("content") else None
        return title, image_url
    except Exception as e:
        logger.error(f"Error loading OG data: {e}")
        logger.error(f"❌ Error fetching OG data: {e}")
        return None, None


def post_to_bluesky(message, link):
    """Posts a message and optional preview to Bluesky."""
def post_to_bluesky(title, link, tags):
    client = Client()
    client.login(BSKY_HANDLE, BSKY_PASSWORD)

    title, image_url = fetch_og_data(link)
    text = title or message
    hashtags = " ".join(f"#{tag}" for tag in tags) if tags else ""
    message = f"{title}\n\n{link}"
    if hashtags:
        message += f"\n\n{hashtags}"

    if title and image_url:
        try:
    facets = extract_facets_utf8(message)  # <-- NEW

    # Try the OG preview first
    try:
        og_title, image_url = fetch_og_data(link)
        if og_title and image_url:
            embed = {
                "$type": "app.bsky.embed.external",
                "external": {
@@ -156,18 +237,19 @@ def post_to_bluesky(message, link):
                    blob = client.upload_blob(BytesIO(img_resp.content))
                    embed["external"]["thumb"] = blob.blob

            client.send_post(text=text, embed=embed)
            logger.info("Posted with OG preview.")
            client.send_post(text=message, embed=embed, facets=facets)  # <-- facets here
            logger.info(f"✅ Posted to Bluesky with preview.")
            return
    except Exception as e:
        logger.error(f"Error uploading OG preview: {e}")
    except Exception as e:
        logger.error(f"❌ Error uploading preview to Bluesky: {e}")

    # Fallback: text only, but with facets
    client.send_post(text=message, facets=facets)  # <-- facets here
    logger.info(f"💡 Posted to Bluesky without preview.")

    client.send_post(f"{text}\n{link}")
    logger.info("Posted without preview.")


def extract_post_date(entry):
    """Extracts the oldest available date from various RSS date fields."""
    date_fields = [
        entry.get("published"),
        entry.get("updated"),
@@ -185,13 +267,12 @@
            dt = dt.replace(tzinfo=timezone.utc)
            dates.append(dt)
        except Exception as e:
            logger.warning(f"⚠️ Cannot parse date field: {d} ({e})")
            logger.warning(f"⚠️ Could not parse date: {d} ({e})")

    return min(dates) if dates else datetime.now(timezone.utc)


def main():
    """Main function to process feed entries and post new items."""
    seen_ids = load_seen_ids()
    feed = feedparser.parse(FEED_URL)
    now = datetime.now(timezone.utc)
@@ -206,37 +287,60 @@ def main():
        age = now - post_date
        age_days = age.days
        age_hours = age.seconds // 3600
        logger.info(f"Post '{entry.get('title', '').strip()}' is {age_days} days and {age_hours} hours old.")
        #logger.info(f"Post '{entry.get('title', '').strip()}' is {age_days} days and {age_hours} hours old.")

        if post_date < now - max_age:
            logger.info(f"⏩ Skipping old post (older than {MAX_POST_AGE_DAYS} days): {post_id}")
            logger.info(f"⏩ Skipping old post ({MAX_POST_AGE_DAYS}+ days): {post_id}")
            continue

        title = entry.get("title", "").strip()
        link = entry.get("link", "").strip()
        message = link

        logger.info(f"New post: {title}")
        def sanitize_tag(tag):
            tag = tag.lower()
            tag = unicodedata.normalize("NFKD", tag).encode("ascii", "ignore").decode("ascii")
            tag = re.sub(r"\W+", "", tag)
            return tag

        tags = []
        if "tags" in entry:
            raw_tags = [
                tag.get("term") if isinstance(tag, dict) else getattr(tag, "term", None)
                for tag in entry.tags
            ]
            tags = [sanitize_tag(t) for t in raw_tags if t]

        if tags:
            hashtags = " ".join(f"#{tag}" for tag in tags)
            message = f"{link} {hashtags}"
        else:
            message = link

        logger.info(f"💡 New post found: {title}")

        try:
            post_to_mastodon(message)
            time.sleep(2)
            post_to_bluesky(message, link)
            if POST_TARGETS in ("mastodon", "both"):
                post_to_mastodon(title, link, tags)
                time.sleep(2)

            if POST_TARGETS in ("bluesky", "both"):
                post_to_bluesky(title, link, tags)

            save_seen_id(post_id)
            logger.info("✅ Successfully posted.")
            logger.info(f"✅ Post successfully published.")

            if should_send_email(on_success=True):
                send_status_email(
                    f"✅ Successfully posted: {title}",
                    f"<html><body><h2>Post successfully published</h2><p><b>Title:</b> {title}</p><p><b>Link:</b> <a href='{link}'>{link}</a></p></body></html>"
                    f"✅ Post published: {title}",
                    generate_email_html("success", title, link)
                )

        except Exception as e:
            logger.error(f"❌ Error posting: {e}")
            logger.error(f"❌ Posting failed: {e}")
            if should_send_email(on_success=False):
                send_status_email(
                    f"❌ Error posting: {title}",
                    f"<html><body><h2>Error posting</h2><p><b>Title:</b> {title}</p><p><b>Link:</b> <a href='{link}'>{link}</a></p><p><b>Error message:</b> {str(e)}</p></body></html>"
                    generate_email_html("error", title, link, str(e))
                )

        time.sleep(5)
@@ -244,7 +348,7 @@ def main():

if __name__ == "__main__":
    INTERVAL_MINUTES = int(os.getenv("INTERVAL_MINUTES", 30))
    logger.info(f"Start feed check every {INTERVAL_MINUTES} minutes.")
    logger.info(f"🔁 Starting feed check every {INTERVAL_MINUTES} minutes.")

    start_health_server()
@@ -252,7 +356,7 @@ if __name__ == "__main__":
    try:
        main()
    except Exception as e:
        logger.error(f"Error in main execution: {e}")
        logger.info(f"Wait {INTERVAL_MINUTES} minutes until next execution...")
        logger.error(f"Unhandled error during execution: {e}")
        logger.info(f"⏳ Waiting {INTERVAL_MINUTES} minutes until next run...")
    time.sleep(INTERVAL_MINUTES * 60)
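
Editor's note on the new `extract_facets_utf8` helper above: Bluesky's richtext facets index into the UTF-8 byte stream, so the helper converts the character offsets reported by `re` matches into byte offsets. A small, self-contained illustration of why the two differ (not part of the diff):

```python
# Character offsets and UTF-8 byte offsets diverge once multi-byte characters appear.
text = "Neuer Beitrag 🚀 https://example.com"
url = "https://example.com"

char_start = text.index(url)                         # offset in characters
byte_start = len(text[:char_start].encode("utf-8"))  # offset in UTF-8 bytes

print(char_start, byte_start)  # 16 19 -> the rocket emoji is 1 character but 4 bytes
```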

3  env

@@ -9,6 +9,9 @@ MASTODON_ACCESS_TOKEN=your_mastodon_access_token
BSKY_IDENTIFIER=your_handle.bsky.social
BSKY_PASSWORD=your_bluesky_password

# possible values: mastodon, bluesky, both
POST_TARGETS=both

# interval in minutes between feed checks
INTERVAL_MINUTES=30

@@ -5,3 +5,4 @@ python-dotenv
beautifulsoup4
python-dateutil
requests