helm/bin/prowl-rss

#!/usr/bin/env python
# Checks to see if certain RSS feeds (listed in ~/.prowl-rss/feeds) have been
# updated recently, and if so, send a prowl notification.
# It stores read guids in ~/.prowl-rss/guids.
#
# Intended to be run via cron or on-demand.
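#
# Each line of the feeds file is a feed URL, optionally followed by a space
# and a regular expression; when a pattern is given, only entries whose titles
# match it trigger a notification. Example line (URL is illustrative):
#   http://example.com/feed.xml ^Release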
from __future__ import print_function
import httplib2
import urllib
import feedparser
import time
import sys
import os
import re
###
API_FILE = os.path.expanduser("~") + "/.prowl-apikey"
GUID_FILE = os.path.expanduser("~") + "/.prowl-rss/guids"
FEEDS_FILE = os.path.expanduser("~") + "/.prowl-rss/feeds"
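# All three files must already exist: the apikey file holds the Prowl API key
# on a single line, and the guids file (one guid per line, appended to below)
# tracks entries that have already been sent.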
API_DOMAIN = 'https://prowl.weks.net/publicapi'
# Pulled out of prowlpy @ https://github.com/omgmovieslol/prowlpy/tree/master
def prowl_post(api, app=None, event=None, desc=None, priority=0):
    # Create the http object
    h = httplib2.Http()
    # Set User-Agent
    headers = {'User-Agent': "Prowlpy/%s" % "0.41"}
    # Perform the request and get the response headers and content
    data = {
        'apikey': api,
        'application': app,
        'event': event,
        'description': desc,
        'priority': priority
    }
    headers["Content-type"] = "application/x-www-form-urlencoded"
    resp, content = h.request("%s/add/" % API_DOMAIN, "POST", headers=headers,
                              body=urllib.urlencode(data))
    if resp['status'] == '200':
        return True
    elif resp['status'] == '401':
        raise Exception("Auth Failed: %s" % content)
    else:
        raise Exception("Failed")
###
try:
    guids = [line.strip() for line in open(GUID_FILE)]
    conf = [line.strip() for line in open(FEEDS_FILE)]

    with open(API_FILE) as f:
        apikey = f.read().strip()

    feeds = conf

    ##
    fo = open(GUID_FILE, "a")
    for feed in feeds:
        # Each feeds line is "<url>" or "<url> <title regex>".
        f = feed.split(' ', 1)
        xml = feedparser.parse(f[0])
        for e in xml.entries:
            if e.guid not in guids:
                # Skip entries whose titles don't match the optional filter.
                if len(f) > 1 and not re.match(f[1], e.title):
                    continue
                prowl_post(apikey, "RSS", e.title, "{0} - {1} <{2}>".format(e.title, xml.feed.title, e.link))
                # Record the guid so the entry isn't notified again.
                fo.write(e.guid + "\n")
                guids.append(e.guid)
                print("Found: {0} => {1}".format(e.title, e.link))
            else:
                print("Ignored: {0} => {1}".format(e.title, e.link))
    fo.close()
except IOError as e:
    print("{0} file couldn't be found/opened".format(e.filename))
    sys.exit(1)
except Exception as msg:
    print("Error: {0}".format(msg))