Compare commits: ec88faa437 ... d15578f7b2

7 commits (newest first):
d15578f7b2
6305604056
5c91dccc9e
a188f8ee5d
87193d0f94
bd693f6f42
8683d08d1c
2 changed files with 51 additions and 14 deletions
.gitignore (vendored): 6 changes

@@ -1,3 +1,7 @@
 *.conf
 *.txt
-log/
+log/
+*.bak
+bin/
+lib/
+*.cfg
discorss.py: 59 changes
@@ -12,35 +12,48 @@
 import requests
 import feedparser
 import hashlib
 from pathlib import Path
 import json
 import time
 import os
 import re
 
-config_file_path = r"/etc/discorss.conf"
-# config_file_path = r"discorss.conf"
+config_dir = os.environ.get('XDG_CONFIG_HOME')
+if config_dir is None:
+    config_file_path = r"~/.config/discorss/discorss.conf"
+    config_dir = r"~/.config/discorss"
+else:
+    config_file_path = config_dir + r"/discorss/discorss.conf"
 log_file_path = r"/var/log/discorss"
 # log_file_path = r"./log"
 log_file_name = r"/app.log"
 # Yes, I know you "can't parse HTML with regex", but
 # just watch me.
 html_filter = re.compile(r"\<\/?([A-Za-z \"\=])*\>")
-success_codes = ['200', '201', '202', '203', '204', '205', '206']
+success_codes = ["200", "201", "202", "203", "204", "205", "206"]
 
+
+# This function gets and formats the brief excerpt that goes in the embed
+# Different feeds put summaries in different fields, so we pick the best
+# one and limit it to 150 characters.
+# TODO: make the character limit smarter, as to split at a natural point
 def get_description(feed):
     try:
         temporary_string = str(feed.entries[0]["summary_detail"]["value"])
         temporary_string = html_filter.sub("", temporary_string)
         desc = (
-            temporary_string[:150] if len(temporary_string) > 150 else temporary_string
+            temporary_string[:150]
+            if len(temporary_string) > 150
+            else temporary_string
         )
     except KeyError:
         temporary_string = str(feed.entries[0]["description"])
         temporary_string = html_filter.sub("", temporary_string)
         desc = (
-            temporary_string[:150] if len(temporary_string) > 150 else temporary_string
+            temporary_string[:150]
+            if len(temporary_string) > 150
+            else temporary_string
         )
     return desc
 
 
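A note on the config lookup added in this hunk: open() and Path() do not expand "~" by themselves, so the fallback paths above are used literally unless expanded first. Below is a minimal sketch of the same resolution with an explicit expanduser() step; the expanduser() calls are an illustrative addition, not code from this diff.

import os

config_dir = os.environ.get("XDG_CONFIG_HOME")
if config_dir is None:
    # Fallback per the diff, expanded so open() can actually find the file
    config_dir = os.path.expanduser(r"~/.config/discorss")
    config_file_path = config_dir + r"/discorss.conf"
else:
    config_file_path = config_dir + r"/discorss/discorss.conf"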
@@ -48,24 +61,41 @@ def get_description(feed):
 def main():
     os.environ["TZ"] = "America/Toronto"
     time.tzset()
     # Check for log and config files/paths, create empty directories if needed
     try:
         Path(log_file_path).mkdir(parents=True, exist_ok=True)
     except FileExistsError:
-        print("This path already exists and is not a directory!")
-    # Load and read the config file
+        print("The logfile path {} already exists and is not a directory!".format(log_file_path))
     if not Path(config_file_path).exists():
-        print("No config file! Snarf. Directories were created for you.")
+        print("No config file at {}! Snarf.\n{} was created for you.".format(config_file_path, config_dir))
         Path(config_file_path).mkdir(parents=True, exist_ok=True)
         return
     with open(config_file_path, "r") as config_file:
         app_config = json.load(config_file)
     now = time.mktime(time.localtime())
-    last_check = app_config["lastupdate"]
-    for hook in app_config["feeds"]:
+    try:
+        last_check = app_config["lastupdate"]
+    except KeyError:
+        last_check = (
+            now - 21600
+        )  # first run, no lastupdate, check up to 6 hours ago
+    for i, hook in enumerate(app_config["feeds"]):
         # Get the feed
         feed = feedparser.parse(hook["url"])
         published_time = time.mktime(feed.entries[0]["published_parsed"])
+        published_time = published_time + hook["offset"]
+        print("Parsing feed {}...".format(hook["name"]))
+        # Hash the title of the latest post and use that to determine if it's been posted
+        new_hash = hashlib.sha3_512(
+            bytes(feed.entries[0]["title"], "utf-8")
+        ).hexdigest()
+        try:
+            if hook["lasthash"] != new_hash:
+                app_config["feeds"][i]["lasthash"] = new_hash
+            else:
+                continue
+        except KeyError:
+            app_config["feeds"][i]["lasthash"] = new_hash
         # Generate the webhook
         webhook = {
             "embeds": [
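The de-duplication added in this hunk hashes the newest entry title with SHA3-512 and compares it to the stored lasthash, skipping the feed when the newest post has already been seen (a missing lasthash counts as new). A minimal sketch of the same rule; is_new_post is a hypothetical helper name, not a function in this diff.

import hashlib

def is_new_post(hook, title):
    # hook is one element of app_config["feeds"]; "lasthash" may be
    # absent, which is the KeyError branch in the diff above.
    new_hash = hashlib.sha3_512(bytes(title, "utf-8")).hexdigest()
    seen = hook.get("lasthash")
    hook["lasthash"] = new_hash
    return seen != new_hash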
@@ -77,7 +107,10 @@ def main():
                     "name": "DiscoRSS",
                     # "url": "https://git.frzn.dev/amr/discorss",
                 },
-                "author": {"name": str(hook["name"]), "url": str(hook["siteurl"])},
+                "author": {
+                    "name": str(hook["name"]),
+                    "url": str(hook["siteurl"]),
+                },
                 "fields": [
                     {
                         "name": "Excerpt from post:",
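This hunk only reformats the author field (one line split into a multi-line dict literal) with no behavioral change. For orientation, a trimmed sketch of one embed object as the surrounding code appears to assemble it; the fields value is an assumption, since only the author block is visible in this hunk.

embed = {
    "author": {
        "name": str(hook["name"]),
        "url": str(hook["siteurl"]),
    },
    "fields": [
        # value assumed: the excerpt likely comes from get_description() above
        {"name": "Excerpt from post:", "value": get_description(feed)},
    ],
}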
@@ -93,8 +126,8 @@ def main():
             "content-type": "application/json",
         }
         webhook_string = json.dumps(webhook)
         # print(webhook_string)
-        if published_time > last_check and published_time < now:
+        if published_time > last_check:
             r = requests.post(
                 hook["webhook"], data=webhook_string, headers=custom_header
             )
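With the range check relaxed to published_time > last_check, the POST now fires for any entry newer than the last run. A minimal sketch of the final request, reusing the webhook, hook, and custom_header names from main(); the status check is an illustrative use of the success_codes constant defined at the top of discorss.py, not code from this diff.

import json
import requests

webhook_string = json.dumps(webhook)
r = requests.post(hook["webhook"], data=webhook_string, headers=custom_header)
if str(r.status_code) not in success_codes:
    # success_codes holds status codes as strings, hence str() here
    print("Webhook for {} failed with status {}".format(hook["name"], r.status_code))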