|
|
|
@ -33,24 +33,34 @@ POST_TARGETS = os.getenv("POST_TARGETS", "both").lower()
|
|
|
|
|
|
|
|
|
|
# Logger setup
|
|
|
|
|
logger = logging.getLogger()
|
|
|
|
|
logger.setLevel(logging.INFO)
|
|
|
|
|
log_level = os.getenv("LOG_LEVEL", "INFO").upper() # Enable DEBUG level via env variable
|
|
|
|
|
logger.setLevel(getattr(logging, log_level, logging.INFO))
|
|
|
|
|
handler = logging.StreamHandler()
|
|
|
|
|
formatter = logging.Formatter('%(asctime)s - %(levelname)s - %(message)s')
|
|
|
|
|
handler.setFormatter(formatter)
|
|
|
|
|
logger.addHandler(handler)
|
|
|
|
|
|
|
|
|
|
# Healthcheck server
|
|
|
|
|
# Configuration overview (INFO level)
# Logged once at startup so the effective configuration is visible in the logs.
logger.info(f"📡 Feed URL: {FEED_URL}")
logger.info(f"📤 Posting targets: {POST_TARGETS}")
logger.info(f"🕒 Max post age: {MAX_POST_AGE_DAYS} days")
logger.info(f"📨 Email mode: {os.getenv('EMAIL_MODE', 'errors')}")
# NOTE(review): dumping the entire environment at DEBUG level can leak
# secrets (API tokens, SMTP credentials) into log output — confirm this
# is acceptable before enabling LOG_LEVEL=DEBUG in production.
logger.debug(f"🛠 Full environment variables: {dict(os.environ)}")  # DEBUG: All environment variables
|
|
|
|
|
|
|
|
|
|
# Healthcheck server handler
class HealthHandler(BaseHTTPRequestHandler):
    """Minimal HTTP handler backing the container healthcheck endpoint.

    GET /health answers 200 with body ``OK``; every other path answers 404.
    Default per-request access logging is suppressed to keep the app log clean.
    """

    def do_GET(self):
        # Guard clause: anything other than the healthcheck path is unknown.
        if self.path != "/health":
            self.send_response(404)
            self.end_headers()
            return
        self.send_response(200)
        self.end_headers()
        self.wfile.write(b"OK")
        logger.debug("🏥 Healthcheck requested and responded OK.")  # DEBUG Healthcheck log

    def log_message(self, format, *args):
        # Suppress default HTTP request logging
        pass
|
|
|
|
|
|
|
|
|
|
def start_health_server():
|
|
|
|
@ -59,7 +69,7 @@ def start_health_server():
|
|
|
|
|
thread.start()
|
|
|
|
|
logger.info(f"✨ Healthcheck server running on port 8000.")
|
|
|
|
|
|
|
|
|
|
# Email helper
|
|
|
|
|
# Email helper functions
|
|
|
|
|
|
|
|
|
|
def should_send_email(on_success: bool):
|
|
|
|
|
mode = os.getenv("EMAIL_MODE", "errors").lower()
|
|
|
|
@ -74,7 +84,7 @@ def generate_email_html(status: str, title: str, link: str, error_message: str =
|
|
|
|
|
meta = "This is an automated success notification." if status == "success" else "Please check logs or configuration."
|
|
|
|
|
|
|
|
|
|
error_html = f"""
|
|
|
|
|
<p><strong>Error:</strong></p>
|
|
|
|
|
<p><strong>Error Details:</strong></p>
|
|
|
|
|
<div class=\"error\">{error_message}</div>
|
|
|
|
|
""" if error_message else ""
|
|
|
|
|
|
|
|
|
@ -111,6 +121,8 @@ def send_status_email(subject, html_content):
|
|
|
|
|
email_from = os.getenv("EMAIL_FROM")
|
|
|
|
|
email_to = os.getenv("EMAIL_TO")
|
|
|
|
|
|
|
|
|
|
logger.debug(f"📧 Preparing to send email to {email_to} with subject: {subject}") # DEBUG
|
|
|
|
|
|
|
|
|
|
msg = MIMEMultipart("alternative")
|
|
|
|
|
msg["Subject"] = subject
|
|
|
|
|
msg["From"] = email_from
|
|
|
|
@ -123,8 +135,8 @@ def send_status_email(subject, html_content):
|
|
|
|
|
server.sendmail(email_from, email_to, msg.as_string())
|
|
|
|
|
|
|
|
|
|
logger.info(f"✅ Status email sent successfully.")
|
|
|
|
|
except Exception as e:
|
|
|
|
|
logger.error(f"❌ Error sending email: {e}")
|
|
|
|
|
except Exception:
|
|
|
|
|
logger.exception(f"❌ Error sending email:") # Full stack trace on error
|
|
|
|
|
|
|
|
|
|
# Utility functions
|
|
|
|
|
|
|
|
|
@ -136,6 +148,7 @@ def extract_facets_utf8(text: str):
|
|
|
|
|
byte_end = len(text[:char_end].encode("utf-8"))
|
|
|
|
|
return byte_start, byte_end
|
|
|
|
|
|
|
|
|
|
# Extract hashtags
|
|
|
|
|
for match in re.finditer(r"#(\w+)", text):
|
|
|
|
|
tag = match.group(1)
|
|
|
|
|
byte_start, byte_end = get_byte_range(*match.span())
|
|
|
|
@ -144,6 +157,7 @@ def extract_facets_utf8(text: str):
|
|
|
|
|
"features": [{"$type": "app.bsky.richtext.facet#tag", "tag": tag}]
|
|
|
|
|
})
|
|
|
|
|
|
|
|
|
|
# Extract links
|
|
|
|
|
for match in re.finditer(r"https?://[^\s]+", text):
|
|
|
|
|
url = match.group(0)
|
|
|
|
|
byte_start, byte_end = get_byte_range(*match.span())
|
|
|
|
@ -152,6 +166,7 @@ def extract_facets_utf8(text: str):
|
|
|
|
|
"features": [{"$type": "app.bsky.richtext.facet#link", "uri": url}]
|
|
|
|
|
})
|
|
|
|
|
|
|
|
|
|
logger.debug(f"🏷 Extracted facets: {facets}") # DEBUG
|
|
|
|
|
return facets
|
|
|
|
|
|
|
|
|
|
def load_seen_ids():
|
|
|
|
@ -159,11 +174,14 @@ def load_seen_ids():
|
|
|
|
|
if not os.path.exists(SEEN_POSTS_FILE):
|
|
|
|
|
open(SEEN_POSTS_FILE, "w").close()
|
|
|
|
|
with open(SEEN_POSTS_FILE, "r") as f:
|
|
|
|
|
return set(line.strip() for line in f)
|
|
|
|
|
seen = set(line.strip() for line in f)
|
|
|
|
|
logger.debug(f"🗂 Loaded {len(seen)} seen post IDs.") # DEBUG
|
|
|
|
|
return seen
|
|
|
|
|
|
|
|
|
|
def save_seen_id(post_id):
    """Append *post_id* to the seen-posts file so it is skipped on later runs."""
    with open(SEEN_POSTS_FILE, "a") as seen_file:
        seen_file.write(f"{post_id}\n")
    logger.debug(f"📝 Saved post ID as seen: {post_id}")  # DEBUG
|
|
|
|
|
|
|
|
|
|
def post_to_mastodon(title, link, tags):
|
|
|
|
|
mastodon = Mastodon(access_token=MASTODON_TOKEN, api_base_url=MASTODON_BASE_URL)
|
|
|
|
@ -171,10 +189,13 @@ def post_to_mastodon(title, link, tags):
|
|
|
|
|
message = f"{title}\n\n{link}"
|
|
|
|
|
if hashtags:
|
|
|
|
|
message += f"\n\n{hashtags}"
|
|
|
|
|
logger.debug(f"🐘 Posting to Mastodon: {message}") # DEBUG
|
|
|
|
|
mastodon.toot(message)
|
|
|
|
|
logger.info("✅ Posted to Mastodon.")
|
|
|
|
|
|
|
|
|
|
def fetch_og_data(url):
|
|
|
|
|
try:
|
|
|
|
|
logger.debug(f"🔍 Fetching OpenGraph data from {url}") # DEBUG
|
|
|
|
|
resp = requests.get(url, timeout=10)
|
|
|
|
|
resp.raise_for_status()
|
|
|
|
|
soup = BeautifulSoup(resp.text, "html.parser")
|
|
|
|
@ -182,13 +203,16 @@ def fetch_og_data(url):
|
|
|
|
|
og_image = soup.find("meta", property="og:image")
|
|
|
|
|
title = og_title["content"] if og_title and og_title.has_attr("content") else None
|
|
|
|
|
image_url = og_image["content"] if og_image and og_image.has_attr("content") else None
|
|
|
|
|
if not title or not image_url:
|
|
|
|
|
logger.debug(f"⚠️ OpenGraph data incomplete. Title: {title}, Image: {image_url}") # DEBUG
|
|
|
|
|
return title, image_url
|
|
|
|
|
except Exception as e:
|
|
|
|
|
logger.error(f"❌ Error fetching OG data: {e}")
|
|
|
|
|
except Exception:
|
|
|
|
|
logger.exception(f"❌ Error fetching OpenGraph data:")
|
|
|
|
|
return None, None
|
|
|
|
|
|
|
|
|
|
def post_to_bluesky(title, link, tags):
|
|
|
|
|
client = Client()
|
|
|
|
|
logger.debug(f"🔑 Logging in to Bluesky as {BSKY_HANDLE}") # DEBUG
|
|
|
|
|
client.login(BSKY_HANDLE, BSKY_PASSWORD)
|
|
|
|
|
|
|
|
|
|
hashtags = " ".join(f"#{tag}" for tag in tags) if tags else ""
|
|
|
|
@ -210,6 +234,7 @@ def post_to_bluesky(title, link, tags):
|
|
|
|
|
"thumb": {"$type": "blob", "ref": None, "mimeType": "", "size": 0}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
logger.debug(f"📸 Attempting to upload preview image from: {image_url}") # DEBUG
|
|
|
|
|
img_resp = requests.get(image_url, timeout=10)
|
|
|
|
|
img_resp.raise_for_status()
|
|
|
|
|
blob = client.upload_blob(BytesIO(img_resp.content))
|
|
|
|
@ -217,8 +242,8 @@ def post_to_bluesky(title, link, tags):
|
|
|
|
|
client.send_post(text=message, embed=embed, facets=facets)
|
|
|
|
|
logger.info(f"✅ Posted to Bluesky with preview.")
|
|
|
|
|
return
|
|
|
|
|
except Exception as e:
|
|
|
|
|
logger.error(f"❌ Error uploading preview to Bluesky: {e}")
|
|
|
|
|
except Exception:
|
|
|
|
|
logger.exception(f"❌ Error uploading preview to Bluesky:")
|
|
|
|
|
|
|
|
|
|
client.send_post(text=message, facets=facets)
|
|
|
|
|
logger.info(f"💡 Posted to Bluesky without preview.")
|
|
|
|
@ -235,7 +260,9 @@ def extract_post_date(entry):
|
|
|
|
|
dates.append(dt)
|
|
|
|
|
except Exception as e:
|
|
|
|
|
logger.warning(f"⚠️ Could not parse date: {d} ({e})")
|
|
|
|
|
return min(dates) if dates else datetime.now(timezone.utc)
|
|
|
|
|
chosen_date = min(dates) if dates else datetime.now(timezone.utc)
|
|
|
|
|
logger.debug(f"📅 Extracted post date: {chosen_date}") # DEBUG
|
|
|
|
|
return chosen_date
|
|
|
|
|
|
|
|
|
|
def main():
|
|
|
|
|
seen_ids = load_seen_ids()
|
|
|
|
@ -243,14 +270,19 @@ def main():
|
|
|
|
|
now = datetime.now(timezone.utc)
|
|
|
|
|
max_age = timedelta(days=MAX_POST_AGE_DAYS)
|
|
|
|
|
|
|
|
|
|
logger.debug(f"📰 Number of feed entries found: {len(feed.entries)}") # DEBUG
|
|
|
|
|
|
|
|
|
|
for entry in feed.entries:
|
|
|
|
|
post_id = entry.get("id") or entry.get("link")
|
|
|
|
|
logger.debug(f"🆔 Checking post ID: {post_id}") # DEBUG
|
|
|
|
|
|
|
|
|
|
if post_id in seen_ids:
|
|
|
|
|
logger.debug(f"⏭️ Post already processed: {post_id}") # DEBUG
|
|
|
|
|
continue
|
|
|
|
|
|
|
|
|
|
post_date = extract_post_date(entry)
|
|
|
|
|
if post_date < now - max_age:
|
|
|
|
|
logger.info(f"⏩ Skipping old post ({MAX_POST_AGE_DAYS}+ days): {post_id}")
|
|
|
|
|
logger.info(f"⏩ Skipping old post (older than {MAX_POST_AGE_DAYS} days): {post_id}")
|
|
|
|
|
continue
|
|
|
|
|
|
|
|
|
|
title = entry.get("title", "").strip()
|
|
|
|
@ -267,6 +299,8 @@ def main():
|
|
|
|
|
raw_tags = [tag.get("term") if isinstance(tag, dict) else getattr(tag, "term", None) for tag in entry.tags]
|
|
|
|
|
tags = [sanitize_tag(t) for t in raw_tags if t]
|
|
|
|
|
|
|
|
|
|
logger.debug(f"🏷 Extracted tags: {tags}") # DEBUG
|
|
|
|
|
|
|
|
|
|
logger.info(f"💡 New post found: {title}")
|
|
|
|
|
|
|
|
|
|
try:
|
|
|
|
@ -283,10 +317,10 @@ def main():
|
|
|
|
|
if should_send_email(on_success=True):
|
|
|
|
|
send_status_email(f"✅ Post published: {title}", generate_email_html("success", title, link))
|
|
|
|
|
|
|
|
|
|
except Exception as e:
|
|
|
|
|
logger.error(f"❌ Posting failed: {e}")
|
|
|
|
|
except Exception:
|
|
|
|
|
logger.exception(f"❌ Posting failed for post: {post_id}")
|
|
|
|
|
if should_send_email(on_success=False):
|
|
|
|
|
send_status_email(f"❌ Error posting: {title}", generate_email_html("error", title, link, str(e)))
|
|
|
|
|
send_status_email(f"❌ Error posting: {title}", generate_email_html("error", title, link, str(Exception)))
|
|
|
|
|
|
|
|
|
|
time.sleep(5)
|
|
|
|
|
|
|
|
|
@ -297,8 +331,8 @@ if __name__ == "__main__":
|
|
|
|
|
while True:
|
|
|
|
|
try:
|
|
|
|
|
main()
|
|
|
|
|
except Exception as e:
|
|
|
|
|
logger.error(f"Unhandled error during execution: {e}")
|
|
|
|
|
except Exception:
|
|
|
|
|
logger.exception("Unhandled error during execution:")
|
|
|
|
|
logger.info(f"⏳ Waiting {INTERVAL_MINUTES} minutes until next run...")
|
|
|
|
|
time.sleep(INTERVAL_MINUTES * 60)
|
|
|
|
|
|
|
|
|
|