FEATURE: Added logging, finally!

Now the log_dir and log_file_path actually do something useful.
This commit is contained in:
A.M. Rowsell 2025-02-25 20:56:37 -05:00
parent 3def57a933
commit a263f5cb93
Signed by: amr
GPG key ID: 0B6E2D8375CF79A9

View file

@@ -13,6 +13,7 @@
import requests
import feedparser
import hashlib
import logging
from pathlib import Path
import json
import time
@@ -58,10 +59,11 @@ def get_description(feed):
def setupPaths():
global app_config
global logger
# Check for log and config files/paths, create empty directories if needed
# TODO: make this cleaner
if not Path(log_file_path).exists():
print("No log file path exists. Yark! We'll try and make {}...", log_dir)
if not Path(log_dir).exists():
print("No log file path exists. Yark! We'll try and make {}...".format(log_dir))
try:
Path(log_dir).mkdir(parents=True, exist_ok=True)
except FileExistsError:
@@ -85,6 +87,15 @@ def setupPaths():
# Loading the config file
with open(config_file_path, "r") as config_file:
app_config = json.load(config_file)
# Set up logging
logger = logging.getLogger(__name__)
logging.basicConfig(
filename=str(log_dir + log_file_path),
encoding="utf-8",
level=logging.INFO,
datefmt="%m/%d/%Y %H:%M:%S",
format="%(asctime)s: %(levelname)s: %(message)s",
)
return
@@ -99,24 +110,28 @@ def main():
last_check = now - 21600 # first run, no lastupdate, check up to 6 hours ago
for i, hook in enumerate(app_config["feeds"]):
# Get the feed
print("Parsing feed {}...".format(hook["name"]))
logger.info("Parsing feed %s...", hook["name"])
feeds = feedparser.parse(hook["url"])
latest_post = []
prev_best = 0
for feed in feeds:
for feed in feeds["entries"]:
try:
bad_time = False
published_time = time.mktime(feed["published_parsed"])
published_time = published_time + hook["offset"]
except KeyError:
published_time = feed["published"]
print(published_time)
sys.exit(254)
published_time = time.mktime(feed["updated_parsed"])
bad_time = True
if published_time > prev_best:
latest_post = feed
prev_best = published_time
else:
continue
if bad_time is True:
logger.warning(
"Feed %s doesn't supply a published time, using updated time instead",
hook["name"],
)
# Hash the title of the latest post and use that to determine if it's been posted
new_hash = hashlib.sha3_512(bytes(latest_post["title"], "utf-8")).hexdigest()
try:
@@ -126,7 +141,16 @@
continue
except KeyError:
app_config["feeds"][i]["lasthash"] = new_hash
logger.info(
"Feed %s has no existing hash, likely a new feed!", hook["name"]
)
# Generate the webhook
logger.info(
"Publishing webhook for %s. Last check was %d, now is %d",
hook["name"],
last_check,
now,
)
webhook = {
"embeds": [
{
@@ -157,9 +181,10 @@
}
webhook_string = json.dumps(webhook)
if published_time > last_check:
r = requests.post(
hook["webhook"], data=webhook_string, headers=custom_header
r = requests.post(hook["webhook"], data=webhook_string, headers=custom_header)
if r.status_code not in success_codes:
logger.error(
"Error %d while trying to post %s", r.status_code, hook["webhook"]
)
if r.status_code not in success_codes:
print(