diff --git a/.gitignore b/.gitignore
index fed12ea..f8ec04c 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,7 +1,3 @@
 *.conf
 *.txt
-log/
-*.bak
-bin/
-lib/
-*.cfg
\ No newline at end of file
+log/
\ No newline at end of file
diff --git a/discorss.py b/discorss.py
index be76e69..7e49771 100755
--- a/discorss.py
+++ b/discorss.py
@@ -12,48 +12,35 @@
 import requests
 import feedparser
-import hashlib
 from pathlib import Path
 import json
 import time
 import os
 import re
 
-config_dir = os.environ.get('XDG_CONFIG_HOME')
-if config_dir is None:
-    config_file_path = r"~/.config/discorss/discorss.conf"
-    config_dir = r"~/.config/discorss"
-else:
-    config_file_path = config_dir + r"/discorss/discorss.conf"
+config_file_path = r"/etc/discorss.conf"
+# config_file_path = r"discorss.conf"
 
 log_file_path = r"/var/log/discorss"
 # log_file_path = r"./log"
 log_file_name = r"/app.log"
 # Yes, I know you "can't parse HTML with regex", but
 # just watch me.
 html_filter = re.compile(r"\<\/?([A-Za-z \"\=])*\>")
-success_codes = ["200", "201", "202", "203", "204", "205", "206"]
+success_codes = ['200', '201', '202', '203', '204', '205', '206']
 
 
-# This function gets and formats the brief excerpt that goes in the embed
-# Different feeds put summaries in different fields, so we pick the best
-# one and limit it to 150 characters.
-# TODO: make the character limit smarter, as to split at a natural point
 def get_description(feed):
     try:
         temporary_string = str(feed.entries[0]["summary_detail"]["value"])
         temporary_string = html_filter.sub("", temporary_string)
         desc = (
-            temporary_string[:150]
-            if len(temporary_string) > 150
-            else temporary_string
+            temporary_string[:150] if len(temporary_string) > 150 else temporary_string
         )
     except KeyError:
         temporary_string = str(feed.entries[0]["description"])
         temporary_string = html_filter.sub("", temporary_string)
         desc = (
-            temporary_string[:150]
-            if len(temporary_string) > 150
-            else temporary_string
+            temporary_string[:150] if len(temporary_string) > 150 else temporary_string
        )
     return desc
 
@@ -61,41 +48,24 @@ def get_description(feed):
 def main():
     os.environ["TZ"] = "America/Toronto"
     time.tzset()
-    # Check for log and config files/paths, create empty directories if needed
     try:
         Path(log_file_path).mkdir(parents=True, exist_ok=True)
     except FileExistsError:
-        print("The logfile path {} already exists and is not a directory!".format(log_file_path))
+        print("This path already exists and is not a directory!")
+    # Load and read the config file
     if not Path(config_file_path).exists():
-        print("No config file at {}! Snarf.\n{} was created for you.".format(config_file_path, config_dir))
-        Path(config_file_path).mkdir(parents=True, exist_ok=True)
+        print("No config file! Snarf.\nDirectories were created for you.")
         return
     with open(config_file_path, "r") as config_file:
         app_config = json.load(config_file)
     now = time.mktime(time.localtime())
-    try:
-        last_check = app_config["lastupdate"]
-    except KeyError:
-        last_check = (
-            now - 21600
-        )  # first run, no lastupdate, check up to 6 hours ago
-    for i, hook in enumerate(app_config["feeds"]):
+    last_check = app_config["lastupdate"]
+    for hook in app_config["feeds"]:
         # Get the feed
         feed = feedparser.parse(hook["url"])
         published_time = time.mktime(feed.entries[0]["published_parsed"])
         published_time = published_time + hook["offset"]
         print("Parsing feed {}...".format(hook["name"]))
-        # Hash the title of the latest post and use that to determine if it's been posted
-        new_hash = hashlib.sha3_512(
-            bytes(feed.entries[0]["title"], "utf-8")
-        ).hexdigest()
-        try:
-            if hook["lasthash"] != new_hash:
-                app_config["feeds"][i]["lasthash"] = new_hash
-            else:
-                continue
-        except KeyError:
-            app_config["feeds"][i]["lasthash"] = new_hash
     # Generate the webhook
     webhook = {
         "embeds": [
@@ -107,10 +77,7 @@ def main():
                     "name": "DiscoRSS",
                     # "url": "https://git.frzn.dev/amr/discorss",
                 },
-                "author": {
-                    "name": str(hook["name"]),
-                    "url": str(hook["siteurl"]),
-                },
+                "author": {"name": str(hook["name"]), "url": str(hook["siteurl"])},
                "fields": [
                    {
                        "name": "Excerpt from post:",
@@ -126,8 +93,8 @@ def main():
        "content-type": "application/json",
    }
    webhook_string = json.dumps(webhook)
-
-    if published_time > last_check:
+    # print(webhook_string)
+    if published_time > last_check and published_time < now:
        r = requests.post(
            hook["webhook"], data=webhook_string, headers=custom_header
        )
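
Note: after this change the script reads /etc/discorss.conf with json.load() and expects a top-level "lastupdate" timestamp (now required, since the KeyError fallback was removed) plus a "feeds" list whose entries carry the "name", "url", "siteurl", "offset", and "webhook" keys used above. A minimal sketch of that file follows; the feed name, URLs, webhook URL, and timestamp are illustrative placeholders, not values from the repository:

    {
        "lastupdate": 1700000000,
        "feeds": [
            {
                "name": "Example Blog",
                "url": "https://example.com/feed.xml",
                "siteurl": "https://example.com",
                "webhook": "https://discord.com/api/webhooks/<id>/<token>",
                "offset": 0
            }
        ]
    }

"lastupdate" is compared against, and "offset" is added to, time.mktime() results in the script, so both are in seconds: a Unix timestamp and a per-feed publication-time correction, respectively.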