Compare commits
3 commits: 7fe2a1de00, 8f5813b39c, 17357da659
.chglog/CHANGELOG.tpl.md (new executable file, 38 lines)

@@ -0,0 +1,38 @@
{{ with index .Versions 0 }}
<a name="{{ .Tag.Name }}"></a>
## {{ if .Tag.Previous }}[{{ .Tag.Name }}]({{ $.Info.RepositoryURL }}/compare/{{ .Tag.Previous.Name }}...{{ .Tag.Name }}){{ else }}{{ .Tag.Name }}{{ end }} ({{ datetime "2006-01-02" .Tag.Date }})

{{ range .CommitGroups -}}
### {{ .Title }}

{{ range .Commits -}}
* {{ if .Scope }}**{{ .Scope }}:** {{ end }}{{ .Subject }} ([{{ .Hash.Short }}]({{ $.Info.RepositoryURL }}/commit/{{ .Hash.Short }}))
{{ end }}
{{ end -}}

{{- if .RevertCommits -}}
### Reverts

{{ range .RevertCommits -}}
* {{ .Revert.Header }} ([{{ .Hash.Short }}]({{ $.Info.RepositoryURL }}/commit/{{ .Hash.Short }}))
{{ end }}
{{ end -}}

{{- if .MergeCommits -}}
### Pull Requests

{{ range .MergeCommits -}}
* {{ .Header }} ([{{ .Hash.Short }}]({{ $.Info.RepositoryURL }}/commit/{{ .Hash.Short }}))
{{ end }}
{{ end -}}

{{- if .NoteGroups -}}
{{ range .NoteGroups -}}
### {{ .Title }}

{{ range .Notes }}
{{ .Body }}
{{ end }}
{{ end -}}
{{ end -}}
{{ end }}
.chglog/config.yml (new executable file, 37 lines)

@@ -0,0 +1,37 @@
style: github
template: CHANGELOG.tpl.md

info:
  title: CHANGELOG
  repository_url: https://dev.ksite.de/ralf.kirchner/BlueMastoFeed

options:
  commits:
    sort_by: "date" # Optional, default is OK too
    exclude_merge_commits: false

  commit_groups:
    group_by: "Type"
    title_maps:
      feat: Features
      fix: Bug Fixes
      perf: Performance Improvements
      refactor: Code Refactoring
      docs: Documentation
      chore: Maintenance
      test: Tests
      build: Build System
      ci: Continuous Integration
      style: Code Style

  header:
    pattern: "^(\\w*)(?:\\(([\\w\\$\\.\\-\\*\\s]*)\\))?\\:\\s(.*)$"
    pattern_maps:
      - Type
      - Scope
      - Subject

  notes:
    keywords:
      - BREAKING CHANGE
      - DEPRECATED
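For reference, the header.pattern above is what splits a conventional-commit subject line into the Type, Scope, and Subject that pattern_maps refers to. A minimal Python sketch of the same regex (the commit subjects are made-up examples, not from this repository):

import re

# Same pattern as header.pattern in .chglog/config.yml, unescaped for Python.
# Group 1 -> Type, group 2 -> Scope, group 3 -> Subject (see pattern_maps).
HEADER_PATTERN = re.compile(r"^(\w*)(?:\(([\w\$\.\-\*\s]*)\))?\:\s(.*)$")

# Hypothetical commit subjects, for illustration only.
for subject in ("feat(feed): add hashtag support", "fix: handle missing OG image"):
    match = HEADER_PATTERN.match(subject)
    if match:
        commit_type, scope, subject_text = match.groups()
        print(commit_type, scope, subject_text)

Commits whose Type maps to an entry in title_maps are grouped under that heading in the generated changelog; anything else is dropped from the grouped sections.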
.gitea/workflows/release.yml (new executable file, 100 lines)

@@ -0,0 +1,100 @@
name: Create Release

on:
  push:
    tags:
      - 'v*' # Only for tags like v1.0.0, v2.0.0

jobs:
  release:
    runs-on: ubuntu-latest

    steps:
      - name: Enable debug output
        run: set -x

      - name: Checkout full history including tags
        uses: actions/checkout@v3
        with:
          fetch-depth: 0
          fetch-tags: true

      - name: Show environment variables for debugging
        run: |
          echo "GIT_REMOTE_URL=$(git config --get remote.origin.url)"
          echo "GITHUB_REF=$GITHUB_REF"

      - name: Extract OWNER and REPO from git remote URL
        id: repo-info
        run: |
          REMOTE_URL=$(git config --get remote.origin.url)
          OWNER=$(echo "$REMOTE_URL" | sed -E 's#.*/([^/]+)/([^/]+)(\.git)?#\1#')
          REPO=$(echo "$REMOTE_URL" | sed -E 's#.*/([^/]+)/([^/]+)(\.git)?#\2#')
          echo "OWNER=$OWNER" >> $GITHUB_ENV
          echo "REPO=$REPO" >> $GITHUB_ENV

      - name: Install git-chglog binary (no Go needed)
        run: |
          GIT_CHGLOG_VERSION="0.15.1"
          curl -sSL "https://github.com/git-chglog/git-chglog/releases/download/v${GIT_CHGLOG_VERSION}/git-chglog_${GIT_CHGLOG_VERSION}_linux_amd64.tar.gz" -o git-chglog.tar.gz
          tar -xzf git-chglog.tar.gz
          chmod +x git-chglog
          sudo mv git-chglog /usr/local/bin/

      - name: Determine current and previous tag
        id: tags
        run: |
          CURRENT_TAG="${GITHUB_REF##*/}"
          PREVIOUS_TAG=$(git describe --tags --abbrev=0 "${CURRENT_TAG}^" 2>/dev/null || true)

          echo "CURRENT_TAG=$CURRENT_TAG"
          echo "PREVIOUS_TAG=$PREVIOUS_TAG"

          echo "CURRENT_TAG=$CURRENT_TAG" >> $GITHUB_ENV
          echo "PREVIOUS_TAG=$PREVIOUS_TAG" >> $GITHUB_ENV

      - name: Generate CHANGELOG.md
        run: |
          # Optional: full changelog (not used for the release body)
          git-chglog -o CHANGELOG.md

          # Only the relevant section between the two tags
          if [ -n "$PREVIOUS_TAG" ]; then
            git-chglog "$PREVIOUS_TAG..$CURRENT_TAG" > RELEASE_BODY.md
          else
            git-chglog "$CURRENT_TAG" > RELEASE_BODY.md
          fi

          echo "Release changelog content:"
          cat RELEASE_BODY.md

      - name: Replace issue references with Markdown links
        env:
          OWNER: ${{ env.OWNER }}
          REPO: ${{ env.REPO }}
        run: |
          sed -i -E "s/([^\\[])#([0-9]+)/\1[#\2](https:\/\/dev.ksite.de\/${OWNER}\/${REPO}\/issues\/\2)/g" RELEASE_BODY.md

      - name: Create Gitea Release via API
        env:
          TOKEN: ${{ secrets.TOKEN }}
          OWNER: ${{ env.OWNER }}
          REPO: ${{ env.REPO }}
          CURRENT_TAG: ${{ env.CURRENT_TAG }}
        run: |
          # Base64-encode and safely escape for JSON
          BODY=$(base64 -w0 RELEASE_BODY.md)
          DECODED_BODY=$(echo "$BODY" | base64 -d | jq -Rs .)

          echo "Creating release for tag $CURRENT_TAG"

          curl -s -X POST "https://dev.ksite.de/api/v1/repos/${OWNER}/${REPO}/releases" \
            -H "Content-Type: application/json" \
            -H "Authorization: token $TOKEN" \
            -d @- <<EOF
          {
            "tag_name": "${CURRENT_TAG}",
            "name": "${REPO} ${CURRENT_TAG}",
            "body": ${DECODED_BODY}
          }
          EOF
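The final step is a plain Gitea REST call. The same request can be sketched in Python with requests (already a project dependency); a JSON serializer also makes the base64/jq escaping dance unnecessary. OWNER, REPO, and the tag below are placeholders taken from the repository URL in config.yml, not read at runtime as the workflow does, and the TOKEN environment variable stands in for secrets.TOKEN:

import os
import requests

# Sketch of the "Create Gitea Release via API" step, under assumed values.
BASE_URL = "https://dev.ksite.de"
OWNER = "ralf.kirchner"   # the workflow derives this from the git remote URL
REPO = "BlueMastoFeed"
TAG = "v0.9.6"            # hypothetical tag; the workflow uses GITHUB_REF

with open("RELEASE_BODY.md", encoding="utf-8") as f:
    body = f.read()

resp = requests.post(
    f"{BASE_URL}/api/v1/repos/{OWNER}/{REPO}/releases",
    headers={"Authorization": f"token {os.environ['TOKEN']}"},  # same token as secrets.TOKEN
    json={"tag_name": TAG, "name": f"{REPO} {TAG}", "body": body},
    timeout=30,
)
resp.raise_for_status()
print("Created release:", resp.json().get("html_url"))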
.gitignore (vendored, 9 lines added)

@@ -2,3 +2,12 @@
 data/*
 !data/.gitkeep
 
+
+# Config & meta
+CHANGELOG.md
+ENVIRONMENT.md
+
+# IDEs / Editor
+.vscode/
+.idea/
+.DS_Store
Dockerfile

@@ -1,5 +1,5 @@
 FROM python:3.11-slim
-LABEL version="0.9.0"
+LABEL version="0.9.6"
 
 RUN apt-get update && apt-get install -y curl && apt-get clean && rm -rf /var/lib/apt/lists/*
 
@@ -15,3 +15,5 @@ HEALTHCHECK --interval=1m --timeout=5s --start-period=10s --retries=3 \
 
 EXPOSE 8000
 
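The Docker HEALTHCHECK probes the /health endpoint that the Python health server (HealthHandler below) serves on port 8000. A hedged one-off check of the same endpoint from the host, assuming the container's port 8000 is published locally:

import requests

# Assumes the container's port 8000 is mapped to localhost:8000.
resp = requests.get("http://localhost:8000/health", timeout=5)
print(resp.status_code)  # 200 when the feed bot is up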
bluemastofeed.py (124 changed lines)

@@ -1,7 +1,6 @@
 import os
 import time
 import feedparser
-import json
 import logging
 import requests
 import threading
@@ -18,6 +17,7 @@ from dateutil import parser as date_parser
 from datetime import datetime, timezone, timedelta
 
 load_dotenv()
 
 FEED_URL = os.getenv("FEED_URL")
 SEEN_POSTS_FILE = "/data/seen_posts.txt"
 MASTODON_BASE_URL = os.getenv("MASTODON_API_BASE_URL")
@@ -34,8 +34,8 @@ formatter = logging.Formatter('%(asctime)s - %(levelname)s - %(message)s')
 handler.setFormatter(formatter)
 logger.addHandler(handler)
 
 
 class HealthHandler(BaseHTTPRequestHandler):
-    """Handles HTTP GET requests for the health check endpoint."""
     def do_GET(self):
         if self.path == "/health":
             self.send_response(200)
@@ -46,23 +46,60 @@ class HealthHandler(BaseHTTPRequestHandler):
             self.end_headers()
 
     def log_message(self, format, *args):
-        """Suppress default HTTP request logging."""
         pass
 
 
 def start_health_server():
-    """Starts the health check HTTP server in a background thread."""
     server = HTTPServer(("0.0.0.0", 8000), HealthHandler)
     thread = threading.Thread(target=server.serve_forever, daemon=True)
     thread.start()
-    logger.info("Healthcheck server is running on port 8000.")
+    logger.info(f"💡 Healthcheck server running on port 8000.")
 
 
 def should_send_email(on_success: bool):
-    """Determines whether to send a status email based on mode and success."""
     mode = os.getenv("EMAIL_MODE", "errors").lower()
     return (mode == "all") or (mode == "errors" and not on_success)
 
 
+def generate_email_html(status: str, title: str, link: str, error_message: str = None) -> str:
+    color = "#2e7d32" if status == "success" else "#d32f2f"
+    bg_color = "#f5f5f5" if status == "success" else "#fff3f3"
+    border_color = "#ccc" if status == "success" else "#e57373"
+    emoji = "✅" if status == "success" else "❌"
+    heading = "Post Published" if status == "success" else "Error Posting Entry"
+    meta = "This is an automated success notification." if status == "success" else "Please check logs or configuration."
+
+    error_html = f"""
+    <p><strong>Error:</strong></p>
+    <div class=\"error\">{error_message}</div>
+    """ if error_message else ""
+
+    return f"""
+    <html>
+    <head>
+    <style>
+        body {{ font-family: 'Courier New', monospace; background-color: {bg_color}; color: #333; padding: 20px; }}
+        .container {{ background-color: #ffffff; border: 1px solid {border_color}; border-radius: 8px; padding: 20px; max-width: 600px; margin: auto; }}
+        h2 {{ color: {color}; }}
+        a {{ color: #1a73e8; text-decoration: none; }}
+        .error {{ font-family: monospace; background-color: #fce4ec; padding: 10px; border-radius: 4px; color: #b71c1c; }}
+        .meta {{ font-size: 14px; color: #777; }}
+    </style>
+    </head>
+    <body>
+    <div class=\"container\">
+        <h2>{emoji} {heading}</h2>
+        <p><strong>Title:</strong><br>{title}</p>
+        <p><strong>Link:</strong><br><a href=\"{link}\">{link}</a></p>
+        {error_html}
+        <p class=\"meta\">{meta}</p>
+    </div>
+    </body>
+    </html>
+    """
+
+
 def send_status_email(subject, html_content):
-    """Sends a formatted HTML email with the given subject and content."""
     try:
         smtp_host = os.getenv("SMTP_HOST")
         smtp_port = int(os.getenv("SMTP_PORT", 587))
@@ -81,30 +118,35 @@ def send_status_email(subject, html_content):
         server.starttls()
         server.login(smtp_user, smtp_password)
         server.sendmail(email_from, email_to, msg.as_string())
-        logger.info("Status email sent.")
+        logger.info(f"✅ Status email sent successfully.")
     except Exception as e:
-        logger.error(f"Error sending status email: {e}")
+        logger.error(f"❌ Error sending email: {e}")
 
 
 def load_seen_ids():
-    """Loads the set of already seen post IDs from file."""
     os.makedirs(os.path.dirname(SEEN_POSTS_FILE), exist_ok=True)
     if not os.path.exists(SEEN_POSTS_FILE):
         open(SEEN_POSTS_FILE, "w").close()
     with open(SEEN_POSTS_FILE, "r") as f:
         return set(line.strip() for line in f)
 
 
 def save_seen_id(post_id):
-    """Appends a new post ID to the seen posts file."""
     with open(SEEN_POSTS_FILE, "a") as f:
         f.write(post_id + "\n")
 
-def post_to_mastodon(message):
-    """Posts a message to Mastodon."""
+def post_to_mastodon(title, link, tags):
     mastodon = Mastodon(access_token=MASTODON_TOKEN, api_base_url=MASTODON_BASE_URL)
+    hashtags = " ".join(f"#{tag}" for tag in tags) if tags else ""
+    message = f"{title}\n\n{link}"
+    if hashtags:
+        message += f"\n\n{hashtags}"
     mastodon.toot(message)
 
 
 def fetch_og_data(url):
-    """Fetches Open Graph title and image URL from a web page."""
     try:
         resp = requests.get(url, timeout=10)
         resp.raise_for_status()
@@ -115,11 +157,11 @@ def fetch_og_data(url):
         image_url = og_image["content"] if og_image and og_image.has_attr("content") else None
         return title, image_url
     except Exception as e:
-        logger.error(f"Error loading OG data: {e}")
+        logger.error(f"❌ Error fetching OG data: {e}")
         return None, None
 
 
 def post_to_bluesky(message, link):
-    """Posts a message and optional preview to Bluesky."""
     client = Client()
     client.login(BSKY_HANDLE, BSKY_PASSWORD)
 
@@ -149,16 +191,16 @@ def post_to_bluesky(message, link):
                 embed["external"]["thumb"] = blob.blob
 
             client.send_post(text=text, embed=embed)
-            logger.info("Posted to Bluesky with OG preview.")
+            logger.info(f"✅ Posted to Bluesky with preview.")
             return
         except Exception as e:
-            logger.error(f"Error uploading OG preview: {e}")
+            logger.error(f"❌ Error uploading preview to Bluesky: {e}")
 
     client.send_post(f"{text}\n{link}")
-    logger.info("Posted to Bluesky without preview.")
+    logger.info(f"💡 Posted to Bluesky without preview.")
 
 
 def extract_post_date(entry):
-    """Extracts the oldest available date from various RSS date fields."""
     date_fields = [
         entry.get("published"),
         entry.get("updated"),
@@ -176,12 +218,12 @@ def extract_post_date(entry):
                 dt = dt.replace(tzinfo=timezone.utc)
             dates.append(dt)
         except Exception as e:
-            logger.warning(f"⚠️ Cannot parse date field: {d} ({e})")
+            logger.warning(f"⚠️ Could not parse date: {d} ({e})")
 
     return min(dates) if dates else datetime.now(timezone.utc)
 
 
 def main():
-    """Main function to process feed entries and post new items."""
     seen_ids = load_seen_ids()
     feed = feedparser.parse(FEED_URL)
     now = datetime.now(timezone.utc)
@@ -196,48 +238,58 @@ def main():
         age = now - post_date
         age_days = age.days
         age_hours = age.seconds // 3600
-        logger.info(f"Post '{entry.get('title', '').strip()}' is {age_days} days and {age_hours} hours old.")
+        #logger.info(f"Post '{entry.get('title', '').strip()}' is {age_days} days and {age_hours} hours old.")
 
         if post_date < now - max_age:
-            logger.info(f"⏩ Skipping old post (older than {MAX_POST_AGE_DAYS} days): {post_id}")
+            logger.info(f"⏩ Skipping old post ({MAX_POST_AGE_DAYS}+ days): {post_id}")
             continue
 
         title = entry.get("title", "").strip()
         link = entry.get("link", "").strip()
-        message = link
 
-        logger.info(f"New post: {title}")
+        tags = []
+        if "tags" in entry:
+            tags = [tag["term"] for tag in entry.tags if "term" in tag]
+
+        if tags:
+            hashtags = " ".join(f"#{tag}" for tag in tags)
+            message = f"{link} {hashtags}"
+        else:
+            message = link
+
+        logger.info(f"💡 New post found: {title}")
 
         try:
             if POST_TARGETS in ("mastodon", "both"):
-                post_to_mastodon(message)
+                post_to_mastodon(title, link, tags)
                 time.sleep(2)
 
             if POST_TARGETS in ("bluesky", "both"):
-                post_to_bluesky(message, link)
+                post_to_bluesky(f"{title}\n{link}", link)
 
             save_seen_id(post_id)
-            logger.info("✅ Successfully posted.")
+            logger.info(f"✅ Post successfully published.")
 
             if should_send_email(on_success=True):
                 send_status_email(
-                    f"✅ Successfully posted: {title}",
-                    f"<html><body><h2>Post successfully published</h2><p><b>Title:</b> {title}</p><p><b>Link:</b> <a href='{link}'>{link}</a></p></body></html>"
+                    f"✅ Post published: {title}",
+                    generate_email_html("success", title, link)
                 )
 
         except Exception as e:
-            logger.error(f"❌ Error posting: {e}")
+            logger.error(f"❌ Posting failed: {e}")
            if should_send_email(on_success=False):
                 send_status_email(
                     f"❌ Error posting: {title}",
-                    f"<html><body><h2>Error posting</h2><p><b>Title:</b> {title}</p><p><b>Link:</b> <a href='{link}'>{link}</a></p><p><b>Error message:</b> {str(e)}</p></body></html>"
+                    generate_email_html("error", title, link, str(e))
                )
 
             time.sleep(5)
 
 
 if __name__ == "__main__":
     INTERVAL_MINUTES = int(os.getenv("INTERVAL_MINUTES", 30))
-    logger.info(f"Start feed check every {INTERVAL_MINUTES} minutes.")
+    logger.info(f"🔁 Starting feed check every {INTERVAL_MINUTES} minutes.")
 
     start_health_server()
 
@@ -245,7 +297,7 @@ if __name__ == "__main__":
         try:
             main()
         except Exception as e:
-            logger.error(f"Error in main execution: {e}")
+            logger.error(f"Unhandled error during execution: {e}")
-        logger.info(f"Wait {INTERVAL_MINUTES} minutes until next execution...")
+        logger.info(f"⏳ Waiting {INTERVAL_MINUTES} minutes until next run...")
         time.sleep(INTERVAL_MINUTES * 60)
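The EMAIL_MODE handling is easy to miss in the diff above; a small standalone sketch of the same should_send_email logic shows how the two modes behave:

import os

def should_send_email(on_success: bool) -> bool:
    # Same logic as in bluemastofeed.py: "all" mails on every post,
    # "errors" (the default) only mails when posting fails.
    mode = os.getenv("EMAIL_MODE", "errors").lower()
    return (mode == "all") or (mode == "errors" and not on_success)

os.environ["EMAIL_MODE"] = "errors"
print(should_send_email(on_success=True))   # False - successes are silent
print(should_send_email(on_success=False))  # True  - failures are mailed

os.environ["EMAIL_MODE"] = "all"
print(should_send_email(on_success=True))   # True  - every post is mailed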
requirements.txt

@@ -5,3 +5,4 @@ python-dotenv
 beautifulsoup4
 python-dateutil
 requests