Black formatting.

This commit is contained in:
Samuel Clay 2024-04-24 09:43:56 -04:00
parent 0d0231ea99
commit d1dafe7606
326 changed files with 27062 additions and 20265 deletions

View file

@ -14,24 +14,31 @@ def get_host_ips_from_group(group_name):
:param inventory_base_path: Base path to the inventory directories. Defaults to the path in ansible.cfg. :param inventory_base_path: Base path to the inventory directories. Defaults to the path in ansible.cfg.
:return: A list of IP addresses belonging to the specified group. :return: A list of IP addresses belonging to the specified group.
""" """
cmd = ['ansible-inventory', '-i', '/srv/newsblur/ansible/inventories/hetzner.ini', '-i', '/srv/newsblur/ansible/inventories/hetzner.yml', '--list'] cmd = [
"ansible-inventory",
"-i",
"/srv/newsblur/ansible/inventories/hetzner.ini",
"-i",
"/srv/newsblur/ansible/inventories/hetzner.yml",
"--list",
]
try: try:
# Execute the ansible-inventory command # Execute the ansible-inventory command
result = subprocess.run(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True, check=True) result = subprocess.run(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True, check=True)
# Parse the JSON output from ansible-inventory # Parse the JSON output from ansible-inventory
inventory_data = json.loads(result.stdout) inventory_data = json.loads(result.stdout)
host_ips = [] host_ips = []
# Check if the group exists # Check if the group exists
if group_name in inventory_data: if group_name in inventory_data:
# Get the list of hosts in the specified group # Get the list of hosts in the specified group
if 'hosts' in inventory_data[group_name]: if "hosts" in inventory_data[group_name]:
for host in inventory_data[group_name]['hosts']: for host in inventory_data[group_name]["hosts"]:
# Fetch the host details, specifically looking for the ansible_host variable for the IP # Fetch the host details, specifically looking for the ansible_host variable for the IP
host_vars = inventory_data['_meta']['hostvars'][host] host_vars = inventory_data["_meta"]["hostvars"][host]
ip_address = host_vars.get('ansible_host', None) ip_address = host_vars.get("ansible_host", None)
if ip_address: if ip_address:
host_ips.append(ip_address) host_ips.append(ip_address)
else: else:
@ -50,16 +57,19 @@ TOKEN_FILE = "/srv/secrets-newsblur/keys/digital_ocean.token"
with open(TOKEN_FILE) as f: with open(TOKEN_FILE) as f:
token = f.read().strip() token = f.read().strip()
os.environ['DO_API_TOKEN'] = token os.environ["DO_API_TOKEN"] = token
manager = digitalocean.Manager(token=token) manager = digitalocean.Manager(token=token)
my_droplets = manager.get_all_droplets() my_droplets = manager.get_all_droplets()
consul_manager_droplets = [d for d in my_droplets if "db-consul" in d.name] consul_manager_droplets = [d for d in my_droplets if "db-consul" in d.name]
# Use ansible-inventory to get the consul-manager ip # Use ansible-inventory to get the consul-manager ip
group_name = 'hconsul' group_name = "hconsul"
hetzner_hosts = get_host_ips_from_group(group_name) hetzner_hosts = get_host_ips_from_group(group_name)
consul_manager_ip_address = ','.join([f"\"{droplet.ip_address}\"" for droplet in consul_manager_droplets] + [f"\"{host}\"" for host in hetzner_hosts]) consul_manager_ip_address = ",".join(
[f'"{droplet.ip_address}"' for droplet in consul_manager_droplets]
+ [f'"{host}"' for host in hetzner_hosts]
)
print(consul_manager_ip_address) print(consul_manager_ip_address)

View file

@ -1,12 +1,13 @@
#!/srv/newsblur/venv/newsblur3/bin/python #!/srv/newsblur/venv/newsblur3/bin/python
import sys import sys
sys.path.append('/srv/newsblur')
sys.path.append("/srv/newsblur")
from newsblur_web import settings from newsblur_web import settings
username = settings.DATABASES['default']['USER'] username = settings.DATABASES["default"]["USER"]
password = settings.DATABASES['default']['PASSWORD'] password = settings.DATABASES["default"]["PASSWORD"]
if sys.argv[1] =='postgres_credentials': if sys.argv[1] == "postgres_credentials":
print(f"{username}:{password}") print(f"{username}:{password}")
if sys.argv[1] =='s3_bucket': if sys.argv[1] == "s3_bucket":
print(settings.S3_BACKUP_BUCKET) print(settings.S3_BACKUP_BUCKET)

View file

@ -3,6 +3,7 @@ import time
import digitalocean import digitalocean
import subprocess import subprocess
def test_ssh(drop): def test_ssh(drop):
droplet_ip_address = drop.ip_address droplet_ip_address = drop.ip_address
result = subprocess.call(f"ssh -o StrictHostKeyChecking=no root@{droplet_ip_address} ls", shell=True) result = subprocess.call(f"ssh -o StrictHostKeyChecking=no root@{droplet_ip_address} ls", shell=True)
@ -10,6 +11,7 @@ def test_ssh(drop):
return True return True
return False return False
TOKEN_FILE = "/srv/secrets-newsblur/keys/digital_ocean.token" TOKEN_FILE = "/srv/secrets-newsblur/keys/digital_ocean.token"
droplet_name = sys.argv[1] droplet_name = sys.argv[1]
@ -25,7 +27,7 @@ ssh_works = False
while not ssh_works: while not ssh_works:
if timer > timeout: if timer > timeout:
raise Exception(f"The {droplet_name} droplet was not created.") raise Exception(f"The {droplet_name} droplet was not created.")
droplets = [drop for drop in manager.get_all_droplets() if drop.name == droplet_name] droplets = [drop for drop in manager.get_all_droplets() if drop.name == droplet_name]
if droplets: if droplets:
droplet = droplets[0] droplet = droplets[0]
@ -33,4 +35,4 @@ while not ssh_works:
ssh_works = test_ssh(droplet) ssh_works = test_ssh(droplet)
time.sleep(3) time.sleep(3)
timer += 3 timer += 3
print("Success!") print("Success!")

View file

@ -8,7 +8,7 @@ import digitalocean
OLD = False OLD = False
# Set env var OLD=1 to use existing servers # Set env var OLD=1 to use existing servers
if os.environ.get('OLD', False): if os.environ.get("OLD", False):
OLD = True OLD = True
if OLD: if OLD:
@ -17,7 +17,7 @@ else:
TOKEN_FILE = "/srv/secrets-newsblur/keys/digital_ocean.token" TOKEN_FILE = "/srv/secrets-newsblur/keys/digital_ocean.token"
try: try:
api_token = open(TOKEN_FILE, 'r').read().strip() api_token = open(TOKEN_FILE, "r").read().strip()
except IOError: except IOError:
print(f" ---> Missing Digital Ocean API token: {TOKEN_FILE}") print(f" ---> Missing Digital Ocean API token: {TOKEN_FILE}")
exit() exit()
@ -25,20 +25,20 @@ except IOError:
outfile = f"/srv/newsblur/ansible/inventories/digital_ocean{'.old' if OLD else ''}.ini" outfile = f"/srv/newsblur/ansible/inventories/digital_ocean{'.old' if OLD else ''}.ini"
# Install from https://github.com/do-community/do-ansible-inventory/releases # Install from https://github.com/do-community/do-ansible-inventory/releases
ansible_inventory_cmd = f'do-ansible-inventory -t {api_token} --out {outfile}' ansible_inventory_cmd = f"do-ansible-inventory -t {api_token} --out {outfile}"
subprocess.call(ansible_inventory_cmd, shell=True) subprocess.call(ansible_inventory_cmd, shell=True)
with open(outfile, 'r') as original: with open(outfile, "r") as original:
data = original.read() data = original.read()
with open(outfile, 'w') as modified: with open(outfile, "w") as modified:
modified.write("127.0.0.1 ansible_connection=local\n" + data) modified.write("127.0.0.1 ansible_connection=local\n" + data)
exit() # Too many requests if we run the below code exit() # Too many requests if we run the below code
do = digitalocean.Manager(token=api_token) do = digitalocean.Manager(token=api_token)
droplets = do.get_all_droplets() droplets = do.get_all_droplets()
print("\n ---> Checking droplets: %s\n" % (' '.join([d.name for d in droplets]))) print("\n ---> Checking droplets: %s\n" % (" ".join([d.name for d in droplets])))
def check_droplets_created(): def check_droplets_created():
@ -46,8 +46,8 @@ def check_droplets_created():
droplets = do.get_all_droplets() droplets = do.get_all_droplets()
for instance in droplets: for instance in droplets:
if instance.status == 'new': if instance.status == "new":
print(".", end=' ') print(".", end=" ")
sys.stdout.flush() sys.stdout.flush()
i += 1 i += 1
time.sleep(i) time.sleep(i)
@ -56,6 +56,7 @@ def check_droplets_created():
print(" ---> All booted!") print(" ---> All booted!")
return True return True
i = 0 i = 0
while True: while True:
if check_droplets_created(): if check_droplets_created():

View file

@ -13,342 +13,318 @@ API_URL = "https://www.newsblur.com/"
# API_URL = "https://nb.local.host:8000/" # API_URL = "https://nb.local.host:8000/"
class request(): class request:
opener = urllib.request.build_opener(urllib.request.HTTPCookieProcessor(http.cookiejar.CookieJar())) opener = urllib.request.build_opener(urllib.request.HTTPCookieProcessor(http.cookiejar.CookieJar()))
def __init__(self, endpoint=None, method='get'): def __init__(self, endpoint=None, method="get"):
self.endpoint = endpoint self.endpoint = endpoint
self.method = method self.method = method
def __call__(self, func): def __call__(self, func):
def wrapped(*args, **kwargs): def wrapped(*args, **kwargs):
params = func(*args, **kwargs) or {} params = func(*args, **kwargs) or {}
url = self.endpoint if self.endpoint else params.pop('url') url = self.endpoint if self.endpoint else params.pop("url")
params = urllib.parse.urlencode(params) params = urllib.parse.urlencode(params)
url = "%s%s" % (API_URL, url) url = "%s%s" % (API_URL, url)
response = self.opener.open(url, params).read() response = self.opener.open(url, params).read()
return json.loads(response) return json.loads(response)
return wrapped return wrapped
class API: class API:
@request("api/login", method="post")
@request('api/login', method='post')
def login(self, username, password): def login(self, username, password):
''' """
Login as an existing user. Login as an existing user.
If a user has no password set, you cannot just send any old password. If a user has no password set, you cannot just send any old password.
Required parameters, username and password, must be of string type. Required parameters, username and password, must be of string type.
''' """
return { return {"username": username, "password": password}
'username': username,
'password': password
}
@request('api/logout') @request("api/logout")
def logout(self): def logout(self):
''' """
Logout the currently logged in user. Logout the currently logged in user.
''' """
return return
@request('api/signup') @request("api/signup")
def signup(self, username, password, email): def signup(self, username, password, email):
''' """
Create a new user. Create a new user.
All three required parameters must be of type string. All three required parameters must be of type string.
''' """
return { return {"signup_username": username, "signup_password": password, "signup_email": email}
'signup_username': username,
'signup_password': password,
'signup_email': email
}
@request('rss_feeds/search_feed') @request("rss_feeds/search_feed")
def search_feed(self, address, offset=0): def search_feed(self, address, offset=0):
''' """
Retrieve information about a feed from its website or RSS address. Retrieve information about a feed from its website or RSS address.
Parameter address must be of type string while parameter offset must be an integer. Parameter address must be of type string while parameter offset must be an integer.
Will return a feed. Will return a feed.
''' """
return { return {"address": address, "offset": offset}
'address': address,
'offset': offset
}
@request('reader/feeds') @request("reader/feeds")
def feeds(self, include_favicons=True, flat=False): def feeds(self, include_favicons=True, flat=False):
''' """
Retrieve a list of feeds to which a user is actively subscribed. Retrieve a list of feeds to which a user is actively subscribed.
Includes the 3 unread counts (positive, neutral, negative), as well as optional favicons. Includes the 3 unread counts (positive, neutral, negative), as well as optional favicons.
''' """
return { return {"include_favicons": include_favicons, "flat": flat}
'include_favicons': include_favicons,
'flat': flat
}
@request('reader/favicons') @request("reader/favicons")
def favicons(self, feeds=None): def favicons(self, feeds=None):
''' """
Retrieve a list of favicons for a list of feeds. Retrieve a list of favicons for a list of feeds.
Used when combined with /reader/feeds and include_favicons=false, so the feeds request contains far less data. Used when combined with /reader/feeds and include_favicons=false, so the feeds request contains far less data.
Useful for mobile devices, but requires a second request. Useful for mobile devices, but requires a second request.
''' """
data = [] data = []
for feed in feeds: for feed in feeds:
data.append( ("feeds", feed) ) data.append(("feeds", feed))
return data return data
@request() @request()
def page(self, feed_id): def page(self, feed_id):
''' """
Retrieve the original page from a single feed. Retrieve the original page from a single feed.
''' """
return { return {"url": "reader/page/%s" % feed_id}
'url': 'reader/page/%s' % feed_id
}
@request() @request()
def feed(self, feed_id, page=1): def feed(self, feed_id, page=1):
''' """
Retrieve the stories from a single feed. Retrieve the stories from a single feed.
''' """
return { return {
'url': 'reader/feed/%s' % feed_id, "url": "reader/feed/%s" % feed_id,
'page': page, "page": page,
} }
@request('reader/refresh_feeds') @request("reader/refresh_feeds")
def refresh_feeds(self): def refresh_feeds(self):
''' """
Up-to-the-second unread counts for each active feed. Up-to-the-second unread counts for each active feed.
Poll for these counts no more than once a minute. Poll for these counts no more than once a minute.
''' """
return return
@request('reader/feeds_trainer') @request("reader/feeds_trainer")
def feeds_trainer(self, feed_id=None): def feeds_trainer(self, feed_id=None):
''' """
Retrieves all popular and known intelligence classifiers. Retrieves all popular and known intelligence classifiers.
Also includes user's own classifiers. Also includes user's own classifiers.
''' """
return { return {
'feed_id': feed_id, "feed_id": feed_id,
} }
@request() @request()
def statistics(self, feed_id=None): def statistics(self, feed_id=None):
''' """
If you only want a user's classifiers, use /classifiers/:id. If you only want a user's classifiers, use /classifiers/:id.
Omit the feed_id to get all classifiers for all subscriptions. Omit the feed_id to get all classifiers for all subscriptions.
''' """
return { return {"url": "rss_feeds/statistics/%d" % feed_id}
'url': 'rss_feeds/statistics/%d' % feed_id
} @request("rss_feeds/feed_autocomplete")
@request('rss_feeds/feed_autocomplete')
def feed_autocomplete(self, term): def feed_autocomplete(self, term):
''' """
Get a list of feeds that contain a search phrase. Get a list of feeds that contain a search phrase.
Searches by feed address, feed url, and feed title, in that order. Searches by feed address, feed url, and feed title, in that order.
Will only show sites with 2+ subscribers. Will only show sites with 2+ subscribers.
''' """
return { return {"term": term}
'term': term
}
@request('reader/starred_stories') @request("reader/starred_stories")
def starred_stories(self, page=1): def starred_stories(self, page=1):
''' """
Retrieve a user's starred stories. Retrieve a user's starred stories.
''' """
return { return {
'page': page, "page": page,
} }
@request('reader/river_stories') @request("reader/river_stories")
def river_stories(self, feeds, page=1, read_stories_count=0): def river_stories(self, feeds, page=1, read_stories_count=0):
''' """
Retrieve stories from a collection of feeds. This is known as the River of News. Retrieve stories from a collection of feeds. This is known as the River of News.
Stories are ordered in reverse chronological order. Stories are ordered in reverse chronological order.
`read_stories_count` is the number of stories that have been read in this `read_stories_count` is the number of stories that have been read in this
continuation, so NewsBlur can efficiently skip those stories when retrieving continuation, so NewsBlur can efficiently skip those stories when retrieving
new stories. Takes an array of feed ids. new stories. Takes an array of feed ids.
''' """
data = [ ('page', page), ('read_stories_count', read_stories_count) ] data = [("page", page), ("read_stories_count", read_stories_count)]
for feed in feeds: for feed in feeds:
data.append( ("feeds", feed) ) data.append(("feeds", feed))
return data return data
@request('reader/mark_story_hashes_as_read') @request("reader/mark_story_hashes_as_read")
def mark_story_hashes_as_read(self, story_hashes): def mark_story_hashes_as_read(self, story_hashes):
''' """
Mark stories as read using their unique story_hash. Mark stories as read using their unique story_hash.
''' """
data = [] data = []
for hash in story_hashes: for hash in story_hashes:
data.append( ("story_hash", hash) ) data.append(("story_hash", hash))
return data return data
@request('reader/mark_story_as_read') @request("reader/mark_story_as_read")
def mark_story_as_read(self, feed_id, story_ids): def mark_story_as_read(self, feed_id, story_ids):
''' """
Mark stories as read. Mark stories as read.
Multiple story ids can be sent at once. Multiple story ids can be sent at once.
Each story must be from the same feed. Each story must be from the same feed.
Takes an array of story ids. Takes an array of story ids.
''' """
data = [ ('feed_id', feed_id) ] data = [("feed_id", feed_id)]
for story_id in story_ids: for story_id in story_ids:
data.append( ("story_id", story_id) ) data.append(("story_id", story_id))
return data return data
@request('reader/mark_story_as_starred') @request("reader/mark_story_as_starred")
def mark_story_as_starred(self, feed_id, story_id): def mark_story_as_starred(self, feed_id, story_id):
''' """
Mark a story as starred (saved). Mark a story as starred (saved).
''' """
return { return {
'feed_id': feed_id, "feed_id": feed_id,
'story_id': story_id, "story_id": story_id,
} }
@request('reader/mark_all_as_read') @request("reader/mark_all_as_read")
def mark_all_as_read(self, days=0): def mark_all_as_read(self, days=0):
''' """
Mark all stories in a feed or list of feeds as read. Mark all stories in a feed or list of feeds as read.
''' """
return { return {
'days': days, "days": days,
} }
@request('reader/add_url') @request("reader/add_url")
def add_url(self, url, folder=''): def add_url(self, url, folder=""):
''' """
Add a feed by its URL. Add a feed by its URL.
Can be either the RSS feed or the website itself. Can be either the RSS feed or the website itself.
''' """
return { return {
'url': url, "url": url,
'folder': folder, "folder": folder,
} }
@request('reader/add_folder') @request("reader/add_folder")
def add_folder(self, folder, parent_folder=''): def add_folder(self, folder, parent_folder=""):
''' """
Add a new folder. Add a new folder.
''' """
return { return {
'folder': folder, "folder": folder,
'parent_folder': parent_folder, "parent_folder": parent_folder,
} }
@request('reader/rename_feed') @request("reader/rename_feed")
def rename_feed(self, feed_id, feed_title): def rename_feed(self, feed_id, feed_title):
''' """
Rename a feed title. Only the current user will see the new title. Rename a feed title. Only the current user will see the new title.
''' """
return { return {
'feed_id': feed_id, "feed_id": feed_id,
'feed_title': feed_title, "feed_title": feed_title,
} }
@request('reader/delete_feed') @request("reader/delete_feed")
def delete_feed(self, feed_id, in_folder): def delete_feed(self, feed_id, in_folder):
''' """
Unsubscribe from a feed. Removes it from the folder. Unsubscribe from a feed. Removes it from the folder.
Set the in_folder parameter to remove a feed from the correct Set the in_folder parameter to remove a feed from the correct
folder, in case the user is subscribed to the feed in multiple folders. folder, in case the user is subscribed to the feed in multiple folders.
''' """
return { return {
'feed_id': feed_id, "feed_id": feed_id,
'in_folder': in_folder, "in_folder": in_folder,
} }
@request('reader/rename_folder') @request("reader/rename_folder")
def rename_folder(self, folder_to_rename, new_folder_name, in_folder): def rename_folder(self, folder_to_rename, new_folder_name, in_folder):
''' """
Rename a folder. Rename a folder.
''' """
return { return {
'folder_to_rename': folder_to_rename, "folder_to_rename": folder_to_rename,
'new_folder_name': new_folder_name, "new_folder_name": new_folder_name,
'in_folder': in_folder, "in_folder": in_folder,
} }
@request('reader/delete_folder') @request("reader/delete_folder")
def delete_folder(self, folder_to_delete, in_folder): def delete_folder(self, folder_to_delete, in_folder):
''' """
Delete a folder and unsubscribe from all feeds inside. Delete a folder and unsubscribe from all feeds inside.
''' """
return { return {
'folder_to_delete': folder_to_delete, "folder_to_delete": folder_to_delete,
'in_folder': in_folder, "in_folder": in_folder,
} }
@request('reader/mark_feed_as_read') @request("reader/mark_feed_as_read")
def mark_feed_as_read(self, feed_ids): def mark_feed_as_read(self, feed_ids):
''' """
Mark a list of feeds as read. Mark a list of feeds as read.
Takes an array of feeds. Takes an array of feeds.
''' """
data = [] data = []
for feed in feed_ids: for feed in feed_ids:
data.append( ("feed_id", feed) ) data.append(("feed_id", feed))
return data return data
@request('reader/save_feed_order') @request("reader/save_feed_order")
def save_feed_order(self, folders): def save_feed_order(self, folders):
''' """
Reorder feeds and move them around between folders. Reorder feeds and move them around between folders.
The entire folder structure needs to be serialized. The entire folder structure needs to be serialized.
''' """
return { return {
'folders': folders, "folders": folders,
} }
@request() @request()
def classifier(self, feed_id): def classifier(self, feed_id):
''' """
Get the intelligence classifiers for a user's site. Get the intelligence classifiers for a user's site.
Only includes the user's own classifiers. Only includes the user's own classifiers.
Use /reader/feeds_trainer for popular classifiers. Use /reader/feeds_trainer for popular classifiers.
''' """
return { return {
'url': '/classifier/%d' % feed_id, "url": "/classifier/%d" % feed_id,
} }
@request('classifier/save') @request("classifier/save")
def classifier_save(self, like_type, dislike_type, remove_like_type, remove_dislike_type): def classifier_save(self, like_type, dislike_type, remove_like_type, remove_dislike_type):
''' """
Save intelligence classifiers (tags, titles, authors, and the feed) for a feed. Save intelligence classifiers (tags, titles, authors, and the feed) for a feed.
TODO: Make this usable. TODO: Make this usable.
''' """
raise NotImplemented raise NotImplemented
@request("import/opml_export")
@request('import/opml_export')
def opml_export(self): def opml_export(self):
''' """
Download a backup of feeds and folders as an OPML file. Download a backup of feeds and folders as an OPML file.
Contains folders and feeds in XML; useful for importing in another RSS reader. Contains folders and feeds in XML; useful for importing in another RSS reader.
''' """
return return
@request('import/opml_upload')
def opml_upload(self, opml_file):
'''
Upload an OPML file.
'''
f = open(opml_file)
return {
'file': f
}
@request("import/opml_upload")
def opml_upload(self, opml_file):
"""
Upload an OPML file.
"""
f = open(opml_file)
return {"file": f}

View file

@ -2,8 +2,8 @@ from apps.analyzer.models import Category, FeatureCategory
from django.db.models.aggregates import Sum from django.db.models.aggregates import Sum
import math import math
class Classifier: class Classifier:
def __init__(self, user, feed, phrases): def __init__(self, user, feed, phrases):
self.user = user self.user = user
self.feed = feed self.feed = feed
@ -11,7 +11,7 @@ class Classifier:
def get_features(self, doc): def get_features(self, doc):
found = {} found = {}
for phrase in self.phrases: for phrase in self.phrases:
if phrase in doc: if phrase in doc:
if phrase in found: if phrase in found:
@ -20,36 +20,40 @@ class Classifier:
found[phrase] = 1 found[phrase] = 1
return found return found
def increment_feature(self, feature, category): def increment_feature(self, feature, category):
count = self.feature_count(feature,category) count = self.feature_count(feature, category)
if count==0: if count == 0:
fc = FeatureCategory(user=self.user, feed=self.feed, feature=feature, category=category, count=1) fc = FeatureCategory(user=self.user, feed=self.feed, feature=feature, category=category, count=1)
fc.save() fc.save()
else: else:
fc = FeatureCategory.objects.get(user=self.user, feed=self.feed, feature=feature, category=category) fc = FeatureCategory.objects.get(
user=self.user, feed=self.feed, feature=feature, category=category
)
fc.count = count + 1 fc.count = count + 1
fc.save() fc.save()
def feature_count(self, feature, category): def feature_count(self, feature, category):
if isinstance(category, Category): if isinstance(category, Category):
category = category.category category = category.category
try: try:
feature_count = FeatureCategory.objects.get(user=self.user, feed=self.feed, feature=feature, category=category) feature_count = FeatureCategory.objects.get(
user=self.user, feed=self.feed, feature=feature, category=category
)
except FeatureCategory.DoesNotExist: except FeatureCategory.DoesNotExist:
return 0 return 0
else: else:
return float(feature_count.count) return float(feature_count.count)
def increment_category(self,category): def increment_category(self, category):
count = self.category_count(category) count = self.category_count(category)
if count==0: if count == 0:
category = Category(user=self.user, feed=self.feed, category=category, count=1) category = Category(user=self.user, feed=self.feed, category=category, count=1)
category.save() category.save()
else: else:
category = Category.objects.get(user=self.user, feed=self.feed, category=category) category = Category.objects.get(user=self.user, feed=self.feed, category=category)
category.count = count+1 category.count = count + 1
category.save() category.save()
def category_count(self, category): def category_count(self, category):
@ -68,12 +72,12 @@ class Classifier:
return categories return categories
def totalcount(self): def totalcount(self):
categories = Category.objects.filter(user=self.user, feed=self.feed).aggregate(sum=Sum('count')) categories = Category.objects.filter(user=self.user, feed=self.feed).aggregate(sum=Sum("count"))
return categories['sum'] return categories["sum"]
def train(self, item, category): def train(self, item, category):
features = self.get_features(item) features = self.get_features(item)
# Increment the count for every feature with this category # Increment the count for every feature with this category
for feature in features: for feature in features:
self.increment_feature(feature, category) self.increment_feature(feature, category)
@ -84,7 +88,7 @@ class Classifier:
def feature_probability(self, feature, category): def feature_probability(self, feature, category):
if self.category_count(category) == 0: if self.category_count(category) == 0:
return 0 return 0
# The total number of times this feature appeared in this # The total number of times this feature appeared in this
# category divided by the total number of items in this category # category divided by the total number of items in this category
return self.feature_count(feature, category) / self.category_count(category) return self.feature_count(feature, category) / self.category_count(category)
@ -96,21 +100,20 @@ class Classifier:
totals = sum([self.feature_count(feature, c) for c in self.categories()]) totals = sum([self.feature_count(feature, c) for c in self.categories()])
# Calculate the weighted average # Calculate the weighted average
bp = ((weight*ap) + (totals*basic_prob)) / (weight+totals) bp = ((weight * ap) + (totals * basic_prob)) / (weight + totals)
print(feature, category, basic_prob, totals, bp) print(feature, category, basic_prob, totals, bp)
return bp return bp
class FisherClassifier(Classifier): class FisherClassifier(Classifier):
def __init__(self, user, feed, phrases): def __init__(self, user, feed, phrases):
Classifier.__init__(self, user, feed, phrases) Classifier.__init__(self, user, feed, phrases)
self.minimums = {} self.minimums = {}
def category_probability(self, feature, category): def category_probability(self, feature, category):
# The frequency of this feature in this category # The frequency of this feature in this category
clf = self.feature_probability(feature, category) clf = self.feature_probability(feature, category)
if clf==0: if clf == 0:
return 0 return 0
# The frequency of this feature in all the categories # The frequency of this feature in all the categories
@ -119,54 +122,53 @@ class FisherClassifier(Classifier):
# The probability is the frequency in this category divided by # The probability is the frequency in this category divided by
# the overall frequency # the overall frequency
p = clf / freqsum p = clf / freqsum
return p return p
def fisher_probability(self, item, category): def fisher_probability(self, item, category):
# Multiply all the probabilities together # Multiply all the probabilities together
p = .5 p = 0.5
features = self.get_features(item) features = self.get_features(item)
if features: if features:
p = 1 p = 1
for feature in features: for feature in features:
p *= (self.weighted_probability(feature, category, self.category_probability)) p *= self.weighted_probability(feature, category, self.category_probability)
# Take the natural log and multiply by -2 # Take the natural log and multiply by -2
fscore = -2*math.log(p) fscore = -2 * math.log(p)
# Use the inverse chi2 function to get a probability # Use the inverse chi2 function to get a probability
return self.invchi2(fscore,len(features)*2) return self.invchi2(fscore, len(features) * 2)
def invchi2(self, chi, df): def invchi2(self, chi, df):
m = chi / 2.0 m = chi / 2.0
sum = term = math.exp(-m) sum = term = math.exp(-m)
for i in range(1, df//2): for i in range(1, df // 2):
term *= m / i term *= m / i
sum += term sum += term
return min(sum, 1.0) return min(sum, 1.0)
def setminimum(self, category, min): def setminimum(self, category, min):
self.minimums[category] = min self.minimums[category] = min
def getminimum(self, category): def getminimum(self, category):
if category not in self.minimums: if category not in self.minimums:
return 0 return 0
return self.minimums[category] return self.minimums[category]
def classify(self,item,default=None): def classify(self, item, default=None):
# Loop through looking for the best result # Loop through looking for the best result
best = default best = default
max = 0.0 max = 0.0
print(self.categories(), item) print(self.categories(), item)
for category in self.categories(): for category in self.categories():
p=self.fisher_probability(item, category) p = self.fisher_probability(item, category)
# Make sure it exceeds its minimum # Make sure it exceeds its minimum
if p > self.getminimum(category) and p > max: if p > self.getminimum(category) and p > max:
best = category best = category
max = p max = p
return best return best

View file

@ -6,36 +6,38 @@ import datetime
import re import re
import math import math
def entry_features(self, entry):
    """Extract classifier features from a feed entry dict.

    *entry* must provide 'title', 'summary' and 'publisher' strings.
    Returns a dict of feature-name -> 1 covering title words, summary
    words, summary word pairs, the publisher, and a virtual UPPERCASE
    flag when the summary is mostly shouting.
    """
    # \W+ (not \W*): a pattern that can match the empty string makes
    # re.split break the text into single characters on Python 3.7+.
    splitter = re.compile("\\W+")
    f = {}

    # Extract the title words and annotate
    titlewords = [s.lower() for s in splitter.split(entry["title"]) if len(s) > 2 and len(s) < 20]
    for w in titlewords:
        f["Title:" + w] = 1

    # Extract the summary words, keeping the raw casing so all-caps words
    # can still be detected after lowercasing below.
    rawwords = [s for s in splitter.split(entry["summary"]) if len(s) > 2 and len(s) < 20]
    summarywords = [s.lower() for s in rawwords]

    # Count uppercase words
    uc = 0
    for i in range(len(summarywords)):
        w = summarywords[i]
        f[w] = 1
        if rawwords[i].isupper():
            uc += 1

        # Get word pairs in summary as features (slice i:i+2 so it actually
        # spans two words; the original i:i+1 produced a single word)
        if i < len(summarywords) - 1:
            twowords = " ".join(summarywords[i : i + 2])
            f[twowords] = 1

    # Keep creator and publisher whole
    f["Publisher:" + entry["publisher"]] = 1

    # UPPERCASE is a virtual word flagging too much shouting; guard the
    # division so an empty summary doesn't raise ZeroDivisionError.
    if summarywords and float(uc) / len(summarywords) > 0.3:
        f["UPPERCASE"] = 1

    return f

View file

@ -8,25 +8,22 @@ from django.contrib.auth.models import User
from apps.profile.models import change_password, blank_authenticate, MGiftCode from apps.profile.models import change_password, blank_authenticate, MGiftCode
from apps.social.models import MSocialProfile from apps.social.models import MSocialProfile
class PopularityQueryForm(forms.Form):
    """Request form for the keyword-popularity spreadsheet email.

    Both fields are declared required=False so that blank values surface
    through the clean_* hooks below with friendlier messages.
    The original no-op __init__ (which only delegated to super) has been
    removed.
    """

    email = forms.CharField(widget=forms.TextInput(), label="Your email address", required=False)
    query = forms.CharField(widget=forms.TextInput(), label="Keywords", required=False)

    def clean_email(self):
        """Reject a blank email address with a user-facing message."""
        if not self.cleaned_data["email"]:
            raise forms.ValidationError("Please enter in an email address.")
        return self.cleaned_data["email"]

    def clean_query(self):
        """Reject a blank keyword query with a user-facing message."""
        if not self.cleaned_data["query"]:
            raise forms.ValidationError("Please enter in a keyword search query.")
        return self.cleaned_data["query"]

View file

@ -9,226 +9,234 @@ from nltk import FreqDist
def lgammln(xx):
    """Natural log of the gamma function evaluated at *xx*.

    Series approximation adapted from "Numerical Recipes in C"
    (originally via stats.py by strang@nmr.mgh.harvard.edu).
    """
    series = [76.18009173, -86.50532033, 24.01409822, -1.231739516, 0.120858003e-2, -0.536382e-5]
    x = xx - 1.0
    tmp = x + 5.5
    tmp -= (x + 0.5) * log(tmp)
    # Term j of the series is divided by (xx + j); no need to mutate x.
    ser = 1.0
    for j, c in enumerate(series):
        ser += c / (x + j + 1)
    return log(2.50662827465 * ser) - tmp
def log_sum(log_a, log_b):
    """Return log(exp(log_a) + exp(log_b)) without underflow.

    The smaller operand is folded into the larger so exp() only ever
    sees a non-positive argument.
    """
    lo, hi = sorted((log_a, log_b))
    return hi + log(1 + exp(lo - hi))
def log_normalize(dist):
    """Normalize a list of log-probabilities in place so exp(dist) sums to 1.

    Returns the same list for chaining.

    Rewritten for Python 3: the original depended on the py2 builtins
    `reduce` and `xrange`. The normalizer is a max-shifted log-sum-exp,
    which is numerically stable for very negative log values.
    """
    hi = max(dist)
    normalizer = hi + log(sum(exp(d - hi) for d in dist))
    for ii in range(len(dist)):
        dist[ii] -= normalizer
    return dist
def log_sample(dist):
    """Sample an index from *dist*, a list of unnormalized log-probabilities.

    The list is normalized in place (via log_normalize) and an index is
    drawn by inverse-CDF sampling with a uniform cutoff.
    """
    cutoff = random()
    dist = log_normalize(dist)

    current = 0
    for ii in range(len(dist)):  # py3: range, not xrange
        current += exp(dist[ii])
        if current >= cutoff:
            return ii

    # `assert False` would be stripped under `python -O`; raise instead so
    # a numerically impossible fall-through is always reported.
    raise RuntimeError("Didn't choose anything: %f %f" % (cutoff, current))
def create_data(stories, lang="english", doc_limit=-1, delimiter=""):
    """Tokenize story content into bag-of-words documents for the LDA sampler.

    :param stories: queryset of MStory-like objects with compressed content.
    :param lang: stopword language passed to nltk (was previously ignored —
        stopwords were hard-coded to "english").
    :param doc_limit: stop once more than this many documents are collected
        (<= 0 means no limit).
    :param delimiter: optional string used to split each story into sections.
    :return: dict mapping "title-index" keys to lists of filtered tokens.
    """
    from nltk.tokenize.treebank import TreebankWordTokenizer

    tokenizer = TreebankWordTokenizer()

    from nltk.corpus import stopwords

    stop = stopwords.words(lang)

    from string import ascii_lowercase

    docs = {}
    print("Found %i stories" % stories.count())
    for story in stories:
        text = zlib.decompress(story.story_content_z)
        # Strip markup and lowercase everything before tokenizing.
        text = "".join(BeautifulSoup(text, features="lxml").findAll(text=True)).lower()
        if delimiter:
            sections = text.split(delimiter)
        else:
            sections = [text]

        if doc_limit > 0 and len(docs) > doc_limit:
            print("Passed doc limit %i" % len(docs))
            break
        print(story.story_title, len(sections))

        for jj in range(len(sections)):  # py3: range, not xrange
            # Keep tokens that are not stopwords and contain only lowercase
            # ascii letters (min(...) is falsy if any char falls outside).
            docs["%s-%i" % (story.story_title, jj)] = [
                x
                for x in tokenizer.tokenize(sections[jj])
                if (not x in stop) and (min(y in ascii_lowercase for y in x))
            ]
    return docs
class LdaSampler:
    """Collapsed Gibbs sampler for Latent Dirichlet Allocation.

    Ported to Python 3: every `xrange` is now `range`, and print_topics no
    longer slices dict.keys() directly (a TypeError on py3).
    """

    def __init__(self, num_topics, doc_smoothing=0.1, topic_smoothing=0.01):
        self._docs = defaultdict(FreqDist)    # per-document topic counts
        self._topics = defaultdict(FreqDist)  # per-topic word counts
        self._K = num_topics
        self._state = None                    # doc -> position -> topic assignment

        self._alpha = doc_smoothing           # document-topic smoothing
        self._lambda = topic_smoothing        # topic-word smoothing

    def optimize_hyperparameters(self, samples=5, step=3.0):
        """Slice-sample new values for alpha and lambda in log space."""
        rawParam = [log(self._alpha), log(self._lambda)]

        for ii in range(samples):
            lp_old = self.lhood(self._alpha, self._lambda)
            lp_new = log(random()) + lp_old
            print("OLD: %f\tNEW: %f at (%f, %f)" % (lp_old, lp_new, self._alpha, self._lambda))

            l = [x - random() * step for x in rawParam]
            r = [x + step for x in rawParam]

            for jj in range(100):
                rawParamNew = [l[x] + random() * (r[x] - l[x]) for x in range(len(rawParam))]
                trial_alpha, trial_lambda = [exp(x) for x in rawParamNew]
                lp_test = self.lhood(trial_alpha, trial_lambda)

                if lp_test > lp_new:
                    print(jj)
                    self._alpha = exp(rawParamNew[0])
                    self._lambda = exp(rawParamNew[1])
                    self._alpha_sum = self._alpha * self._K
                    self._lambda_sum = self._lambda * self._W
                    rawParam = [log(self._alpha), log(self._lambda)]
                    break
                else:
                    # Shrink the slice interval toward the current point.
                    for dd in range(len(rawParamNew)):
                        if rawParamNew[dd] < rawParam[dd]:
                            l[dd] = rawParamNew[dd]
                        else:
                            r[dd] = rawParamNew[dd]
                        assert l[dd] <= rawParam[dd]
                        assert r[dd] >= rawParam[dd]

            print("\nNew hyperparameters (%i): %f %f" % (jj, self._alpha, self._lambda))

    def lhood(self, doc_smoothing, voc_smoothing):
        """Joint log-likelihood of the current assignments under the
        given smoothing parameters (Dirichlet-multinomial form)."""
        doc_sum = doc_smoothing * self._K
        voc_sum = voc_smoothing * self._W

        val = 0.0
        val += lgammln(doc_sum) * len(self._docs)
        val -= lgammln(doc_smoothing) * self._K * len(self._docs)
        for ii in self._docs:
            for jj in range(self._K):
                val += lgammln(doc_smoothing + self._docs[ii][jj])
            val -= lgammln(doc_sum + self._docs[ii].N())

        val += lgammln(voc_sum) * self._K
        val -= lgammln(voc_smoothing) * self._W * self._K
        for ii in self._topics:
            for jj in self._vocab:
                val += lgammln(voc_smoothing + self._topics[ii][jj])
            val -= lgammln(voc_sum + self._topics[ii].N())
        return val

    def initialize(self, data):
        """
        Data should be keyed by doc-id, values should be iterable
        """
        self._alpha_sum = self._alpha * self._K
        self._state = defaultdict(dict)

        self._vocab = set([])
        for dd in data:
            for ww in range(len(data[dd])):
                # Learn all the words we'll see
                self._vocab.add(data[dd][ww])
                # Initialize the state to unassigned
                self._state[dd][ww] = -1

        self._W = len(self._vocab)
        self._lambda_sum = float(self._W) * self._lambda
        self._data = data
        print("Initialized vocab of size %i" % len(self._vocab))

    def prob(self, doc, word, topic):
        """Unnormalized log-probability of assigning *word* in *doc* to *topic*."""
        val = log(self._docs[doc][topic] + self._alpha)
        # The per-document normalizer is constant across topics, so it is
        # omitted (it cancels in the sampling distribution).
        val += log(self._topics[topic][word] + self._lambda)
        val -= log(self._topics[topic].N() + self._lambda_sum)
        return val

    def sample_word(self, doc, position):
        """Resample the topic assignment for one word occurrence."""
        word = self._data[doc][position]

        old_topic = self._state[doc][position]
        if old_topic != -1:
            self.change_count(doc, word, old_topic, -1)

        probs = [self.prob(doc, self._data[doc][position], x) for x in range(self._K)]
        new_topic = log_sample(probs)

        self.change_count(doc, word, new_topic, 1)
        self._state[doc][position] = new_topic

    def change_count(self, doc, word, topic, delta):
        """Adjust both count tables by *delta* for one (doc, word, topic)."""
        # NOTE(review): FreqDist.inc is the legacy nltk API — confirm the
        # installed nltk still provides it.
        self._docs[doc].inc(topic, delta)
        self._topics[topic].inc(word, delta)

    def sample(self, iterations=100, hyper_delay=10):
        """Run Gibbs sweeps; re-optimize hyperparameters every *hyper_delay* iterations."""
        assert self._state
        for ii in range(iterations):
            for dd in self._data:
                for ww in range(len(self._data[dd])):
                    self.sample_word(dd, ww)
            print("Iteration %i %f" % (ii, self.lhood(self._alpha, self._lambda)))
            if hyper_delay >= 0 and ii % hyper_delay == 0:
                self.optimize_hyperparameters()

    def print_topics(self, num_words=15):
        """Print the first *num_words* words tracked for each topic."""
        for ii in self._topics:
            # list(...) before slicing: dict views aren't sliceable on py3.
            print("%i:%s\n" % (ii, "\t".join(list(self._topics[ii])[:num_words])))
if __name__ == "__main__":
    # Ad-hoc experiment: fit a 5-topic LDA sampler over one feed's stories.
    stories = MStory.objects(story_feed_id=199)
    documents = create_data(stories, doc_limit=250, delimiter="")
    sampler = LdaSampler(5)
    sampler.initialize(documents)
    sampler.sample(50)
    sampler.print_topics()

View file

@ -6,34 +6,49 @@ import django.db.models.deletion
class Migration(migrations.Migration):
    # Initial schema for the analyzer app: two per-user counter tables.

    initial = True

    dependencies = [
        ("rss_feeds", "0001_initial"),
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        # Category: per-user, per-feed category counts.
        migrations.CreateModel(
            name="Category",
            fields=[
                ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
                ("category", models.CharField(max_length=255)),
                ("count", models.IntegerField(default=0)),
                ("feed", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="rss_feeds.Feed")),
                ("user", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
        ),
        # FeatureCategory: per-user, per-feed feature/category co-occurrence counts.
        migrations.CreateModel(
            name="FeatureCategory",
            fields=[
                ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
                ("feature", models.CharField(max_length=255)),
                ("category", models.CharField(max_length=255)),
                ("count", models.IntegerField(default=0)),
                ("feed", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="rss_feeds.Feed")),
                ("user", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
        ),
    ]

View file

@ -10,24 +10,26 @@ from apps.rss_feeds.models import Feed
from apps.analyzer.tasks import EmailPopularityQuery from apps.analyzer.tasks import EmailPopularityQuery
from utils import log as logging from utils import log as logging
class FeatureCategory(models.Model):
    """Per-user, per-feed counter pairing one extracted feature with one category."""

    user = models.ForeignKey(User, on_delete=models.CASCADE)
    feed = models.ForeignKey(Feed, on_delete=models.CASCADE)
    feature = models.CharField(max_length=255)
    category = models.CharField(max_length=255)
    count = models.IntegerField(default=0)

    def __str__(self):
        return "%s - %s (%s)" % (self.feature, self.category, self.count)
class Category(models.Model):
    """Per-user, per-feed count of how often a category was observed."""

    user = models.ForeignKey(User, on_delete=models.CASCADE)
    feed = models.ForeignKey(Feed, on_delete=models.CASCADE)
    category = models.CharField(max_length=255)
    count = models.IntegerField(default=0)

    def __str__(self):
        return "%s (%s)" % (self.category, self.count)
class MPopularityQuery(mongo.Document): class MPopularityQuery(mongo.Document):
@ -35,55 +37,53 @@ class MPopularityQuery(mongo.Document):
query = mongo.StringField() query = mongo.StringField()
is_emailed = mongo.BooleanField() is_emailed = mongo.BooleanField()
creation_date = mongo.DateTimeField(default=datetime.datetime.now) creation_date = mongo.DateTimeField(default=datetime.datetime.now)
meta = { meta = {
'collection': 'popularity_query', "collection": "popularity_query",
'allow_inheritance': False, "allow_inheritance": False,
} }
def __str__(self):
    """Human-readable label: the requesting email plus the quoted query."""
    return f'{self.email} - "{self.query}"'
def queue_email(self):
    """Hand the spreadsheet job to the task queue instead of sending inline."""
    EmailPopularityQuery.delay(pk=str(self.pk))
@classmethod
def ensure_all_sent(cls, queue=True):
    """Walk every stored query oldest-first and (re)send any unsent ones."""
    for pending in cls.objects.all().order_by("creation_date"):
        pending.ensure_sent(queue=queue)
def ensure_sent(self, queue=True):
    """Send this query's email at most once; a no-op when already emailed."""
    if self.is_emailed:
        logging.debug(" ---> Already sent %s" % self)
        return

    if queue:
        self.queue_email()
    else:
        self.send_email()
def send_email(self, limit=5000):
    """Build the popularity spreadsheet, email it to self.email with both
    text and HTML bodies, and mark the query as emailed.

    Fixes: the xlsx attachment is binary, so it is now opened in "rb"
    (text-mode "r" could fail to decode or corrupt it), and the file
    handle is closed via a context manager instead of being leaked.
    """
    filename = Feed.xls_query_popularity(self.query, limit=limit)
    with open(filename, "rb") as xlsx:
        attachment = xlsx.read()

    params = {"query": self.query}
    text = render_to_string("mail/email_popularity_query.txt", params)
    html = render_to_string("mail/email_popularity_query.xhtml", params)
    subject = 'Keyword popularity spreadsheet: "%s"' % self.query
    msg = EmailMultiAlternatives(
        subject, text, from_email="NewsBlur <%s>" % settings.HELLO_EMAIL, to=["<%s>" % (self.email)]
    )
    msg.attach_alternative(html, "text/html")
    msg.attach(filename, attachment, "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet")
    msg.send()

    self.is_emailed = True
    self.save()

    logging.debug(" -> ~BB~FM~SBSent email for popularity query: %s" % self)
class MClassifierTitle(mongo.Document): class MClassifierTitle(mongo.Document):
user_id = mongo.IntField() user_id = mongo.IntField()
@ -92,68 +92,69 @@ class MClassifierTitle(mongo.Document):
title = mongo.StringField(max_length=255) title = mongo.StringField(max_length=255)
score = mongo.IntField() score = mongo.IntField()
creation_date = mongo.DateTimeField() creation_date = mongo.DateTimeField()
meta = { meta = {
'collection': 'classifier_title', "collection": "classifier_title",
'indexes': [('user_id', 'feed_id'), 'feed_id', ('user_id', 'social_user_id'), 'social_user_id'], "indexes": [("user_id", "feed_id"), "feed_id", ("user_id", "social_user_id"), "social_user_id"],
'allow_inheritance': False, "allow_inheritance": False,
} }
def __str__(self):
    """Label with the owning user, feed/social ids, score, and a title prefix."""
    owner = User.objects.get(pk=self.user_id)
    return "%s - %s/%s: (%s) %s" % (owner, self.feed_id, self.social_user_id, self.score, self.title[:30])
class MClassifierAuthor(mongo.Document):
    """Per-user like/dislike score attached to a story author."""

    user_id = mongo.IntField(unique_with=("feed_id", "social_user_id", "author"))
    feed_id = mongo.IntField()
    social_user_id = mongo.IntField()
    author = mongo.StringField(max_length=255)
    score = mongo.IntField()
    creation_date = mongo.DateTimeField()

    meta = {
        "collection": "classifier_author",
        "indexes": [("user_id", "feed_id"), "feed_id", ("user_id", "social_user_id"), "social_user_id"],
        "allow_inheritance": False,
    }

    def __str__(self):
        owner = User.objects.get(pk=self.user_id)
        return "%s - %s/%s: (%s) %s" % (owner, self.feed_id, self.social_user_id, self.score, self.author[:30])
class MClassifierTag(mongo.Document):
    """Per-user like/dislike score attached to a story tag."""

    user_id = mongo.IntField(unique_with=("feed_id", "social_user_id", "tag"))
    feed_id = mongo.IntField()
    social_user_id = mongo.IntField()
    tag = mongo.StringField(max_length=255)
    score = mongo.IntField()
    creation_date = mongo.DateTimeField()

    meta = {
        "collection": "classifier_tag",
        "indexes": [("user_id", "feed_id"), "feed_id", ("user_id", "social_user_id"), "social_user_id"],
        "allow_inheritance": False,
    }

    def __str__(self):
        owner = User.objects.get(pk=self.user_id)
        return "%s - %s/%s: (%s) %s" % (owner, self.feed_id, self.social_user_id, self.score, self.tag[:30])
class MClassifierFeed(mongo.Document): class MClassifierFeed(mongo.Document):
user_id = mongo.IntField(unique_with=('feed_id', 'social_user_id')) user_id = mongo.IntField(unique_with=("feed_id", "social_user_id"))
feed_id = mongo.IntField() feed_id = mongo.IntField()
social_user_id = mongo.IntField() social_user_id = mongo.IntField()
score = mongo.IntField() score = mongo.IntField()
creation_date = mongo.DateTimeField() creation_date = mongo.DateTimeField()
meta = { meta = {
'collection': 'classifier_feed', "collection": "classifier_feed",
'indexes': [('user_id', 'feed_id'), 'feed_id', ('user_id', 'social_user_id'), 'social_user_id'], "indexes": [("user_id", "feed_id"), "feed_id", ("user_id", "social_user_id"), "social_user_id"],
'allow_inheritance': False, "allow_inheritance": False,
} }
def __str__(self): def __str__(self):
user = User.objects.get(pk=self.user_id) user = User.objects.get(pk=self.user_id)
if self.feed_id: if self.feed_id:
@ -161,94 +162,105 @@ class MClassifierFeed(mongo.Document):
else: else:
feed = User.objects.get(pk=self.social_user_id) feed = User.objects.get(pk=self.social_user_id)
return "%s - %s/%s: (%s) %s" % (user, self.feed_id, self.social_user_id, self.score, feed) return "%s - %s/%s: (%s) %s" % (user, self.feed_id, self.social_user_id, self.score, feed)
def compute_story_score(story, classifier_titles, classifier_authors, classifier_tags, classifier_feeds):
    """Combine all classifier signals for one story into a single score.

    Title/author/tag signals dominate: the most positive wins, otherwise
    the most negative; only when those are all neutral does the feed-level
    classifier decide.
    """
    intelligence = {
        "feed": apply_classifier_feeds(classifier_feeds, story["story_feed_id"]),
        "author": apply_classifier_authors(classifier_authors, story),
        "tags": apply_classifier_tags(classifier_tags, story),
        "title": apply_classifier_titles(classifier_titles, story),
    }
    components = (intelligence["title"], intelligence["author"], intelligence["tags"])
    score_max = max(components)
    score_min = min(components)

    if score_max > 0:
        score = score_max
    elif score_min < 0:
        score = score_min
    else:
        score = 0

    if score == 0:
        score = intelligence["feed"]

    return score
def apply_classifier_titles(classifiers, story):
    """Score a story against title classifiers for its feed.

    Returns the first positive match immediately; otherwise the last
    (possibly negative) match, or 0 when nothing matched.
    """
    score = 0
    feed_id = story["story_feed_id"]
    title_lower = story["story_title"].lower()
    for clf in classifiers:
        if clf.feed_id == feed_id and clf.title.lower() in title_lower:
            score = clf.score
            if score > 0:
                break
    return score
def apply_classifier_authors(classifiers, story):
    """Score a story against author classifiers for its feed.

    A positive match returns immediately; otherwise the last matching
    (possibly negative) score is returned, defaulting to 0.
    """
    score = 0
    authors = story.get("story_authors")
    for clf in classifiers:
        if clf.feed_id != story["story_feed_id"]:
            continue
        if authors and clf.author == authors:
            score = clf.score
            if score > 0:
                return score
    return score
def apply_classifier_tags(classifiers, story):
    """Score a story against tag classifiers for its feed.

    A positive match returns immediately; otherwise the last matching
    (possibly negative) score is returned, defaulting to 0.
    """
    score = 0
    for clf in classifiers:
        if clf.feed_id != story["story_feed_id"]:
            continue
        if story["story_tags"] and clf.tag in story["story_tags"]:
            score = clf.score
            if score > 0:
                return score
    return score
def apply_classifier_feeds(classifiers, feed, social_user_ids=None):
    """Return the first matching feed-level (or social-feed) classifier score.

    *feed* may be a Feed object or a bare feed id; *social_user_ids* may be
    a single id or a list. Returns 0 when nothing matches.
    """
    if not feed and not social_user_ids:
        return 0

    feed_id = None
    if feed:
        feed_id = feed if isinstance(feed, int) else feed.pk

    if social_user_ids and not isinstance(social_user_ids, list):
        social_user_ids = [social_user_ids]

    for clf in classifiers:
        if clf.feed_id == feed_id:
            return clf.score
        if social_user_ids and not clf.feed_id and clf.social_user_id in social_user_ids:
            return clf.score
    return 0
def get_classifiers_for_user(user, feed_id=None, social_user_id=None, classifier_feeds=None, classifier_authors=None,
classifier_titles=None, classifier_tags=None): def get_classifiers_for_user(
user,
feed_id=None,
social_user_id=None,
classifier_feeds=None,
classifier_authors=None,
classifier_titles=None,
classifier_tags=None,
):
params = dict(user_id=user.pk) params = dict(user_id=user.pk)
if isinstance(feed_id, list): if isinstance(feed_id, list):
params['feed_id__in'] = feed_id params["feed_id__in"] = feed_id
elif feed_id: elif feed_id:
params['feed_id'] = feed_id params["feed_id"] = feed_id
if social_user_id: if social_user_id:
if isinstance(social_user_id, str): if isinstance(social_user_id, str):
social_user_id = int(social_user_id.replace('social:', '')) social_user_id = int(social_user_id.replace("social:", ""))
params['social_user_id'] = social_user_id params["social_user_id"] = social_user_id
if classifier_authors is None: if classifier_authors is None:
classifier_authors = list(MClassifierAuthor.objects(**params)) classifier_authors = list(MClassifierAuthor.objects(**params))
@ -258,49 +270,56 @@ def get_classifiers_for_user(user, feed_id=None, social_user_id=None, classifier
classifier_tags = list(MClassifierTag.objects(**params)) classifier_tags = list(MClassifierTag.objects(**params))
if classifier_feeds is None: if classifier_feeds is None:
if not social_user_id and feed_id: if not social_user_id and feed_id:
params['social_user_id'] = 0 params["social_user_id"] = 0
classifier_feeds = list(MClassifierFeed.objects(**params)) classifier_feeds = list(MClassifierFeed.objects(**params))
feeds = [] feeds = []
for f in classifier_feeds: for f in classifier_feeds:
if f.social_user_id and not f.feed_id: if f.social_user_id and not f.feed_id:
feeds.append(('social:%s' % f.social_user_id, f.score)) feeds.append(("social:%s" % f.social_user_id, f.score))
else: else:
feeds.append((f.feed_id, f.score)) feeds.append((f.feed_id, f.score))
payload = { payload = {
'feeds': dict(feeds), "feeds": dict(feeds),
'authors': dict([(a.author, a.score) for a in classifier_authors]), "authors": dict([(a.author, a.score) for a in classifier_authors]),
'titles': dict([(t.title, t.score) for t in classifier_titles]), "titles": dict([(t.title, t.score) for t in classifier_titles]),
'tags': dict([(t.tag, t.score) for t in classifier_tags]), "tags": dict([(t.tag, t.score) for t in classifier_tags]),
} }
return payload return payload
def sort_classifiers_by_feed(
    user,
    feed_ids=None,
    classifier_feeds=None,
    classifier_authors=None,
    classifier_titles=None,
    classifier_tags=None,
):
    """Group pre-fetched classifiers per feed and build each feed's payload.

    Returns a dict mapping feed_id -> get_classifiers_for_user() payload for
    every id in ``feed_ids``; returns an empty dict when ``feed_ids`` is falsy.
    """

    def sort_by_feed(classifiers):
        # Bucket classifiers by their feed_id; missing ids yield empty lists.
        feed_classifiers = defaultdict(list)
        for classifier in classifiers:
            feed_classifiers[classifier.feed_id].append(classifier)
        return feed_classifiers

    classifiers = {}
    if feed_ids:
        classifier_feeds = sort_by_feed(classifier_feeds)
        classifier_authors = sort_by_feed(classifier_authors)
        classifier_titles = sort_by_feed(classifier_titles)
        classifier_tags = sort_by_feed(classifier_tags)

        for feed_id in feed_ids:
            classifiers[feed_id] = get_classifiers_for_user(
                user,
                feed_id=feed_id,
                classifier_feeds=classifier_feeds[feed_id],
                classifier_authors=classifier_authors[feed_id],
                classifier_titles=classifier_titles[feed_id],
                classifier_tags=classifier_tags[feed_id],
            )
    return classifiers

View file

@ -1,39 +1,39 @@
import re import re
from pprint import pprint from pprint import pprint
class PhraseFilter: class PhraseFilter:
def __init__(self): def __init__(self):
self.phrases = {} self.phrases = {}
def run(self, text, storyid): def run(self, text, storyid):
chunks = self.chunk(text) chunks = self.chunk(text)
self.count_phrases(chunks, storyid) self.count_phrases(chunks, storyid)
def print_phrases(self): def print_phrases(self):
pprint(self.phrases) pprint(self.phrases)
def get_phrases(self): def get_phrases(self):
return self.phrases.keys() return self.phrases.keys()
# =========== # ===========
# = Chunker = # = Chunker =
# =========== # ===========
def chunk(self, text): def chunk(self, text):
chunks = [t.strip() for t in re.split('[^a-zA-Z-]+', text) if t] chunks = [t.strip() for t in re.split("[^a-zA-Z-]+", text) if t]
# chunks = self._lowercase(chunks) # chunks = self._lowercase(chunks)
return chunks return chunks
def _lowercase(self, chunks): def _lowercase(self, chunks):
return [c.lower() for c in chunks] return [c.lower() for c in chunks]
# ================== # ==================
# = Phrase Counter = # = Phrase Counter =
# ================== # ==================
def count_phrases(self, chunks, storyid): def count_phrases(self, chunks, storyid):
for l in range(1, len(chunks)+1): for l in range(1, len(chunks) + 1):
combinations = self._get_combinations(chunks, l) combinations = self._get_combinations(chunks, l)
# print "Combinations: %s" % combinations # print "Combinations: %s" % combinations
for phrase in combinations: for phrase in combinations:
@ -41,23 +41,23 @@ class PhraseFilter:
self.phrases[phrase] = [] self.phrases[phrase] = []
if storyid not in self.phrases[phrase]: if storyid not in self.phrases[phrase]:
self.phrases[phrase].append(storyid) self.phrases[phrase].append(storyid)
def _get_combinations(self, chunks, length): def _get_combinations(self, chunks, length):
combinations = [] combinations = []
for i, chunk in enumerate(chunks): for i, chunk in enumerate(chunks):
# 0,1,2,3,4,5,6 = 01 12 23 34 45 56 # 0,1,2,3,4,5,6 = 01 12 23 34 45 56
combination = [] combination = []
for l in range(length): for l in range(length):
if i+l < len(chunks): if i + l < len(chunks):
# print i, l, chunks[i+l], len(chunks) # print i, l, chunks[i+l], len(chunks)
combination.append(chunks[i+l]) combination.append(chunks[i + l])
combinations.append(' '.join(combination)) combinations.append(" ".join(combination))
return combinations return combinations
# ================= # =================
# = Phrase Paring = # = Phrase Paring =
# ================= # =================
def pare_phrases(self): def pare_phrases(self):
# Kill singles # Kill singles
for phrase, counts in self.phrases.items(): for phrase, counts in self.phrases.items():
@ -67,27 +67,32 @@ class PhraseFilter:
if len(phrase) < 4: if len(phrase) < 4:
del self.phrases[phrase] del self.phrases[phrase]
continue continue
# Kill repeats # Kill repeats
for phrase in self.phrases.keys(): for phrase in self.phrases.keys():
for phrase2 in self.phrases.keys(): for phrase2 in self.phrases.keys():
if phrase in self.phrases and len(phrase2) > len(phrase) and phrase in phrase2 and phrase != phrase2: if (
phrase in self.phrases
and len(phrase2) > len(phrase)
and phrase in phrase2
and phrase != phrase2
):
del self.phrases[phrase] del self.phrases[phrase]
if __name__ == "__main__":
    # Smoke test: feed a handful of headlines through the filter and show
    # which phrases survive paring.
    phrasefilter = PhraseFilter()
    phrasefilter.run("House of the Day: 123 Atlantic Ave. #3", 1)
    phrasefilter.run("House of the Day: 456 Plankton St. #3", 4)
    phrasefilter.run("Coop of the Day: 321 Pacific St.", 2)
    phrasefilter.run("Streetlevel: 393 Pacific St.", 11)
    phrasefilter.run("Coop of the Day: 456 Jefferson Ave.", 3)
    phrasefilter.run("Extra, Extra", 5)
    phrasefilter.run("Extra, Extra", 6)
    phrasefilter.run("Early Addition", 7)
    phrasefilter.run("Early Addition", 8)
    phrasefilter.run("Development Watch", 9)
    phrasefilter.run("Streetlevel", 10)

    phrasefilter.pare_phrases()
    phrasefilter.print_phrases()

View file

@ -1,12 +1,12 @@
from newsblur_web.celeryapp import app from newsblur_web.celeryapp import app
from utils import log as logging from utils import log as logging
@app.task()
def EmailPopularityQuery(pk):
    """Celery task: run a saved popularity query and email its results."""
    # Imported inside the task to avoid importing Django models at worker start.
    from apps.analyzer.models import MPopularityQuery

    query = MPopularityQuery.objects.get(pk=pk)
    logging.debug(" -> ~BB~FCRunning popularity query: ~SB%s" % query)

    query.send_email()

View file

@ -2,6 +2,7 @@ from django.test.client import Client
from apps.rss_feeds.models import MStory from apps.rss_feeds.models import MStory
from django.test import TestCase from django.test import TestCase
from django.core import management from django.core import management
# from apps.analyzer.classifier import FisherClassifier # from apps.analyzer.classifier import FisherClassifier
import nltk import nltk
from itertools import groupby from itertools import groupby
@ -11,16 +12,17 @@ from apps.analyzer.phrase_filter import PhraseFilter
class QuadgramCollocationFinder(nltk.collocations.AbstractCollocationFinder): class QuadgramCollocationFinder(nltk.collocations.AbstractCollocationFinder):
"""A tool for the finding and ranking of quadgram collocations or other association measures. """A tool for the finding and ranking of quadgram collocations or other association measures.
It is often useful to use from_words() rather thanconstructing an instance directly. It is often useful to use from_words() rather thanconstructing an instance directly.
""" """
def __init__(self, word_fd, quadgram_fd, trigram_fd, bigram_fd, wildcard_fd): def __init__(self, word_fd, quadgram_fd, trigram_fd, bigram_fd, wildcard_fd):
"""Construct a TrigramCollocationFinder, given FreqDists for appearances of words, bigrams, two words with any word between them,and trigrams.""" """Construct a TrigramCollocationFinder, given FreqDists for appearances of words, bigrams, two words with any word between them,and trigrams."""
nltk.collocations.AbstractCollocationFinder.__init__(self, word_fd, quadgram_fd) nltk.collocations.AbstractCollocationFinder.__init__(self, word_fd, quadgram_fd)
self.trigram_fd = trigram_fd self.trigram_fd = trigram_fd
self.bigram_fd = bigram_fd self.bigram_fd = bigram_fd
self.wildcard_fd = wildcard_fd self.wildcard_fd = wildcard_fd
@classmethod @classmethod
def from_words(cls, words): def from_words(cls, words):
wfd = nltk.probability.FreqDist() wfd = nltk.probability.FreqDist()
@ -28,20 +30,20 @@ class QuadgramCollocationFinder(nltk.collocations.AbstractCollocationFinder):
tfd = nltk.probability.FreqDist() tfd = nltk.probability.FreqDist()
bfd = nltk.probability.FreqDist() bfd = nltk.probability.FreqDist()
wildfd = nltk.probability.FreqDist() wildfd = nltk.probability.FreqDist()
for w1, w2, w3 ,w4 in nltk.util.ingrams(words, 4, pad_right=True): for w1, w2, w3, w4 in nltk.util.ingrams(words, 4, pad_right=True):
wfd.inc(w1) wfd.inc(w1)
if w4 is None: if w4 is None:
continue continue
else: else:
qfd.inc((w1,w2,w3,w4)) qfd.inc((w1, w2, w3, w4))
bfd.inc((w1,w2)) bfd.inc((w1, w2))
tfd.inc((w1,w2,w3)) tfd.inc((w1, w2, w3))
wildfd.inc((w1,w3,w4)) wildfd.inc((w1, w3, w4))
wildfd.inc((w1,w2,w4)) wildfd.inc((w1, w2, w4))
return cls(wfd, qfd, tfd, bfd, wildfd) return cls(wfd, qfd, tfd, bfd, wildfd)
def score_ngram(self, score_fn, w1, w2, w3, w4): def score_ngram(self, score_fn, w1, w2, w3, w4):
n_all = self.word_fd.N() n_all = self.word_fd.N()
n_iiii = self.ngram_fd[(w1, w2, w3, w4)] n_iiii = self.ngram_fd[(w1, w2, w3, w4)]
@ -59,63 +61,78 @@ class QuadgramCollocationFinder(nltk.collocations.AbstractCollocationFinder):
n_xixi = self.trigram_fd[(w2, w3)] n_xixi = self.trigram_fd[(w2, w3)]
n_xxii = self.trigram_fd[(w3, w4)] n_xxii = self.trigram_fd[(w3, w4)]
n_xxxi = self.trigram_fd[(w3, w4)] n_xxxi = self.trigram_fd[(w3, w4)]
return score_fn(n_iiii, return score_fn(
(n_iiix, n_iixi, n_ixii, n_xiii), n_iiii,
(n_iixx, n_ixix, n_ixxi, n_ixxx), (n_iiix, n_iixi, n_ixii, n_xiii),
(n_xiix, n_xixi, n_xxii, n_xxxi), (n_iixx, n_ixix, n_ixxi, n_ixxx),
n_all) (n_xiix, n_xixi, n_xxii, n_xxxi),
n_all,
)
class CollocationTest(TestCase):
    """Exercises n-gram collocation finding over sample post titles."""

    fixtures = ["brownstoner.json"]

    def setUp(self):
        self.client = Client()

    def test_bigrams(self):
        # bigram_measures = nltk.collocations.BigramAssocMeasures()
        trigram_measures = nltk.collocations.TrigramAssocMeasures()

        tokens = [
            "Co-op",
            "of",
            "the",
            "day",
            "House",
            "of",
            "the",
            "day",
            "Condo",
            "of",
            "the",
            "day",
            "Development",
            "Watch",
            "Co-op",
            "of",
            "the",
            "day",
        ]

        finder = nltk.collocations.TrigramCollocationFinder.from_words(tokens)
        finder.apply_freq_filter(2)

        # return the 10 n-grams with the highest PMI
        print(finder.nbest(trigram_measures.pmi, 10))

        titles = [
            "Co-op of the day",
            "Condo of the day",
            "Co-op of the day",
            "House of the day",
            "Development Watch",
            "Streetlevel",
        ]

        # FIX: nltk.tokenize has no callable named `word`; use nltk.word_tokenize,
        # which is the documented tokenizer entry point.
        tokens = nltk.word_tokenize(" ".join(titles))
        ngrams = nltk.ngrams(tokens, 4)
        # Keep only 4-grams that appear at least twice across the titles.
        d = [key for key, group in groupby(sorted(ngrams)) if len(list(group)) >= 2]
        print(d)
class ClassifierTest(TestCase): class ClassifierTest(TestCase):
fixtures = ["classifiers.json", "brownstoner.json"]
fixtures = ['classifiers.json', 'brownstoner.json']
def setUp(self): def setUp(self):
self.client = Client() self.client = Client()
#
#
# def test_filter(self): # def test_filter(self):
# user = User.objects.all() # user = User.objects.all()
# feed = Feed.objects.all() # feed = Feed.objects.all()
# #
# management.call_command('loaddata', 'brownstoner.json', verbosity=0) # management.call_command('loaddata', 'brownstoner.json', verbosity=0)
# response = self.client.get('/reader/refresh_feed', { "feed_id": 1, "force": True }) # response = self.client.get('/reader/refresh_feed', { "feed_id": 1, "force": True })
# management.call_command('loaddata', 'brownstoner2.json', verbosity=0) # management.call_command('loaddata', 'brownstoner2.json', verbosity=0)
@ -124,28 +141,32 @@ class ClassifierTest(TestCase):
# response = self.client.get('/reader/refresh_feed', { "feed_id": 4, "force": True }) # response = self.client.get('/reader/refresh_feed', { "feed_id": 4, "force": True })
# management.call_command('loaddata', 'gothamist2.json', verbosity=0) # management.call_command('loaddata', 'gothamist2.json', verbosity=0)
# response = self.client.get('/reader/refresh_feed', { "feed_id": 4, "force": True }) # response = self.client.get('/reader/refresh_feed', { "feed_id": 4, "force": True })
# #
# stories = Story.objects.filter(story_feed=feed[1]).order_by('-story_date')[:100] # stories = Story.objects.filter(story_feed=feed[1]).order_by('-story_date')[:100]
# #
# phrasefilter = PhraseFilter() # phrasefilter = PhraseFilter()
# for story in stories: # for story in stories:
# # print story.story_title, story.id # # print story.story_title, story.id
# phrasefilter.run(story.story_title, story.id) # phrasefilter.run(story.story_title, story.id)
# #
# phrasefilter.pare_phrases() # phrasefilter.pare_phrases()
# phrasefilter.print_phrases() # phrasefilter.print_phrases()
# #
def test_train(self): def test_train(self):
# user = User.objects.all() # user = User.objects.all()
# feed = Feed.objects.all() # feed = Feed.objects.all()
management.call_command('loaddata', 'brownstoner.json', verbosity=0, commit=False, skip_checks=False) management.call_command("loaddata", "brownstoner.json", verbosity=0, commit=False, skip_checks=False)
management.call_command('refresh_feed', force=1, feed=1, single_threaded=True, daemonize=False, skip_checks=False) management.call_command(
management.call_command('loaddata', 'brownstoner2.json', verbosity=0, commit=False, skip_checks=False) "refresh_feed", force=1, feed=1, single_threaded=True, daemonize=False, skip_checks=False
management.call_command('refresh_feed', force=1, feed=1, single_threaded=True, daemonize=False, skip_checks=False) )
management.call_command("loaddata", "brownstoner2.json", verbosity=0, commit=False, skip_checks=False)
management.call_command(
"refresh_feed", force=1, feed=1, single_threaded=True, daemonize=False, skip_checks=False
)
stories = MStory.objects(story_feed_id=1)[:53] stories = MStory.objects(story_feed_id=1)[:53]
phrasefilter = PhraseFilter() phrasefilter = PhraseFilter()
for story in stories: for story in stories:
# print story.story_title, story.id # print story.story_title, story.id
@ -154,46 +175,45 @@ class ClassifierTest(TestCase):
phrasefilter.pare_phrases() phrasefilter.pare_phrases()
phrases = phrasefilter.get_phrases() phrases = phrasefilter.get_phrases()
print(phrases) print(phrases)
tokenizer = Tokenizer(phrases) tokenizer = Tokenizer(phrases)
classifier = Bayes(tokenizer) # FisherClassifier(user[0], feed[0], phrases) classifier = Bayes(tokenizer) # FisherClassifier(user[0], feed[0], phrases)
classifier.train('good', 'House of the Day: 393 Pacific St.')
classifier.train('good', 'House of the Day: 393 Pacific St.')
classifier.train('good', 'Condo of the Day: 393 Pacific St.')
classifier.train('good', 'Co-op of the Day: 393 Pacific St. #3')
classifier.train('good', 'Co-op of the Day: 393 Pacific St. #3')
classifier.train('good', 'Development Watch: 393 Pacific St. #3')
classifier.train('bad', 'Development Watch: 393 Pacific St. #3')
classifier.train('bad', 'Development Watch: 393 Pacific St. #3')
classifier.train('bad', 'Development Watch: 393 Pacific St. #3')
classifier.train('bad', 'Streetlevel: 393 Pacific St. #3')
guess = dict(classifier.guess('Co-op of the Day: 413 Atlantic'))
self.assertTrue(guess['good'] > .99)
self.assertTrue('bad' not in guess)
guess = dict(classifier.guess('House of the Day: 413 Atlantic'))
self.assertTrue(guess['good'] > .99)
self.assertTrue('bad' not in guess)
guess = dict(classifier.guess('Development Watch: Yatta'))
self.assertTrue(guess['bad'] > .7)
self.assertTrue(guess['good'] < .3)
guess = dict(classifier.guess('Development Watch: 393 Pacific St.')) classifier.train("good", "House of the Day: 393 Pacific St.")
self.assertTrue(guess['bad'] > .7) classifier.train("good", "House of the Day: 393 Pacific St.")
self.assertTrue(guess['good'] < .3) classifier.train("good", "Condo of the Day: 393 Pacific St.")
classifier.train("good", "Co-op of the Day: 393 Pacific St. #3")
guess = dict(classifier.guess('Streetlevel: 123 Carlton St.')) classifier.train("good", "Co-op of the Day: 393 Pacific St. #3")
self.assertTrue(guess['bad'] > .99) classifier.train("good", "Development Watch: 393 Pacific St. #3")
self.assertTrue('good' not in guess) classifier.train("bad", "Development Watch: 393 Pacific St. #3")
classifier.train("bad", "Development Watch: 393 Pacific St. #3")
classifier.train("bad", "Development Watch: 393 Pacific St. #3")
classifier.train("bad", "Streetlevel: 393 Pacific St. #3")
guess = classifier.guess('Extra, Extra') guess = dict(classifier.guess("Co-op of the Day: 413 Atlantic"))
self.assertTrue('bad' not in guess) self.assertTrue(guess["good"] > 0.99)
self.assertTrue('good' not in guess) self.assertTrue("bad" not in guess)
guess = classifier.guess('Nothing doing: 393 Pacific St.') guess = dict(classifier.guess("House of the Day: 413 Atlantic"))
self.assertTrue('bad' not in guess) self.assertTrue(guess["good"] > 0.99)
self.assertTrue('good' not in guess) self.assertTrue("bad" not in guess)
guess = dict(classifier.guess("Development Watch: Yatta"))
self.assertTrue(guess["bad"] > 0.7)
self.assertTrue(guess["good"] < 0.3)
guess = dict(classifier.guess("Development Watch: 393 Pacific St."))
self.assertTrue(guess["bad"] > 0.7)
self.assertTrue(guess["good"] < 0.3)
guess = dict(classifier.guess("Streetlevel: 123 Carlton St."))
self.assertTrue(guess["bad"] > 0.99)
self.assertTrue("good" not in guess)
guess = classifier.guess("Extra, Extra")
self.assertTrue("bad" not in guess)
self.assertTrue("good" not in guess)
guess = classifier.guess("Nothing doing: 393 Pacific St.")
self.assertTrue("bad" not in guess)
self.assertTrue("good" not in guess)

View file

@ -9,6 +9,7 @@ See the README for a usage example.
import sys import sys
import os import os
class tfidf: class tfidf:
def __init__(self): def __init__(self):
self.weighted = False self.weighted = False
@ -19,7 +20,7 @@ class tfidf:
# building a dictionary # building a dictionary
doc_dict = {} doc_dict = {}
for w in list_of_words: for w in list_of_words:
doc_dict[w] = doc_dict.get(w, 0.) + 1.0 doc_dict[w] = doc_dict.get(w, 0.0) + 1.0
self.corpus_dict[w] = self.corpus_dict.get(w, 0.0) + 1.0 self.corpus_dict[w] = self.corpus_dict.get(w, 0.0) + 1.0
# normalizing the dictionary # normalizing the dictionary
@ -53,4 +54,4 @@ class tfidf:
score += (query_dict[k] / self.corpus_dict[k]) + (doc_dict[k] / self.corpus_dict[k]) score += (query_dict[k] / self.corpus_dict[k]) + (doc_dict[k] / self.corpus_dict[k])
sims.append([doc[0], score]) sims.append([doc[0], score])
return sims return sims

View file

@ -1,28 +1,30 @@
import re import re
class Tokenizer:
    """A simple regex-based whitespace tokenizer.

    It expects a string and can return all tokens lower-cased
    or in their existing case.
    """

    # Any run of characters that is not a letter or hyphen separates words.
    WORD_RE = re.compile("[^a-zA-Z-]+")

    def __init__(self, phrases, lower=False):
        self.phrases = phrases
        # NOTE(review): `lower` is stored but never consulted by tokenize() — confirm intent.
        self.lower = lower

    def tokenize(self, doc):
        """Yield each known phrase that occurs in ``doc`` after word normalization."""
        print(doc)  # debugging output, preserved
        formatted_doc = " ".join(self.WORD_RE.split(doc))
        print(formatted_doc)
        for phrase in self.phrases:
            if phrase in formatted_doc:
                yield phrase
if __name__ == "__main__":
    phrases = ["Extra Extra", "Streetlevel", "House of the Day"]
    tokenizer = Tokenizer(phrases)
    doc = "Extra, Extra"
    # FIX: tokenize() is a generator, so calling it without consuming it did
    # nothing; materialize and print the matches so the demo has output.
    print(list(tokenizer.tokenize(doc)))

View file

@ -2,8 +2,8 @@ from django.conf.urls import url
from apps.analyzer import views from apps.analyzer import views
# Analyzer URL routes; the catch-all feed_id pattern must stay last.
urlpatterns = [
    url(r"^$", views.index),
    url(r"^save/?", views.save_classifier),
    url(r"^popularity/?", views.popularity_query),
    url(r"^(?P<feed_id>\d+)", views.get_classifiers_feed),
]

View file

@ -15,34 +15,38 @@ from utils import json_functions as json
from utils.user_functions import get_user from utils.user_functions import get_user
from utils.user_functions import ajax_login_required from utils.user_functions import ajax_login_required
def index(request):
    """Placeholder view; not implemented. (Fixed parameter typo: requst -> request.)"""
    pass
@require_POST @require_POST
@ajax_login_required @ajax_login_required
@json.json_view @json.json_view
def save_classifier(request): def save_classifier(request):
post = request.POST post = request.POST
feed_id = post['feed_id'] feed_id = post["feed_id"]
feed = None feed = None
social_user_id = None social_user_id = None
if feed_id.startswith('social:'): if feed_id.startswith("social:"):
social_user_id = int(feed_id.replace('social:', '')) social_user_id = int(feed_id.replace("social:", ""))
feed_id = None feed_id = None
else: else:
feed_id = int(feed_id) feed_id = int(feed_id)
feed = get_object_or_404(Feed, pk=feed_id) feed = get_object_or_404(Feed, pk=feed_id)
code = 0 code = 0
message = 'OK' message = "OK"
payload = {} payload = {}
logging.user(request, "~FGSaving classifier: ~SB%s~SN ~FW%s" % (feed, post)) logging.user(request, "~FGSaving classifier: ~SB%s~SN ~FW%s" % (feed, post))
# Mark subscription as dirty, so unread counts can be recalculated # Mark subscription as dirty, so unread counts can be recalculated
usersub = None usersub = None
socialsub = None socialsub = None
if social_user_id: if social_user_id:
socialsub = MSocialSubscription.objects.get(user_id=request.user.pk, subscription_user_id=social_user_id) socialsub = MSocialSubscription.objects.get(
user_id=request.user.pk, subscription_user_id=social_user_id
)
if not socialsub.needs_unread_recalc: if not socialsub.needs_unread_recalc:
socialsub.needs_unread_recalc = True socialsub.needs_unread_recalc = True
socialsub.save() socialsub.save()
@ -55,31 +59,31 @@ def save_classifier(request):
usersub.needs_unread_recalc = True usersub.needs_unread_recalc = True
usersub.is_trained = True usersub.is_trained = True
usersub.save() usersub.save()
def _save_classifier(ClassifierCls, content_type): def _save_classifier(ClassifierCls, content_type):
classifiers = { classifiers = {
'like_'+content_type: 1, "like_" + content_type: 1,
'dislike_'+content_type: -1, "dislike_" + content_type: -1,
'remove_like_'+content_type: 0, "remove_like_" + content_type: 0,
'remove_dislike_'+content_type: 0, "remove_dislike_" + content_type: 0,
} }
for opinion, score in classifiers.items(): for opinion, score in classifiers.items():
if opinion in post: if opinion in post:
post_contents = post.getlist(opinion) post_contents = post.getlist(opinion)
for post_content in post_contents: for post_content in post_contents:
if not post_content: continue if not post_content:
continue
classifier_dict = { classifier_dict = {
'user_id': request.user.pk, "user_id": request.user.pk,
'feed_id': feed_id or 0, "feed_id": feed_id or 0,
'social_user_id': social_user_id or 0, "social_user_id": social_user_id or 0,
} }
if content_type in ('author', 'tag', 'title'): if content_type in ("author", "tag", "title"):
max_length = ClassifierCls._fields[content_type].max_length max_length = ClassifierCls._fields[content_type].max_length
classifier_dict.update({content_type: post_content[:max_length]}) classifier_dict.update({content_type: post_content[:max_length]})
if content_type == 'feed': if content_type == "feed":
if not post_content.startswith('social:'): if not post_content.startswith("social:"):
classifier_dict['feed_id'] = post_content classifier_dict["feed_id"] = post_content
try: try:
classifier = ClassifierCls.objects.get(**classifier_dict) classifier = ClassifierCls.objects.get(**classifier_dict)
except ClassifierCls.DoesNotExist: except ClassifierCls.DoesNotExist:
@ -94,59 +98,77 @@ def save_classifier(request):
classifier.delete() classifier.delete()
elif classifier.score != score: elif classifier.score != score:
if score == 0: if score == 0:
if ((classifier.score == 1 and opinion.startswith('remove_like')) if (classifier.score == 1 and opinion.startswith("remove_like")) or (
or (classifier.score == -1 and opinion.startswith('remove_dislike'))): classifier.score == -1 and opinion.startswith("remove_dislike")
):
classifier.delete() classifier.delete()
else: else:
classifier.score = score classifier.score = score
classifier.save() classifier.save()
_save_classifier(MClassifierAuthor, 'author') _save_classifier(MClassifierAuthor, "author")
_save_classifier(MClassifierTag, 'tag') _save_classifier(MClassifierTag, "tag")
_save_classifier(MClassifierTitle, 'title') _save_classifier(MClassifierTitle, "title")
_save_classifier(MClassifierFeed, 'feed') _save_classifier(MClassifierFeed, "feed")
r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL) r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL)
r.publish(request.user.username, 'feed:%s' % feed_id) r.publish(request.user.username, "feed:%s" % feed_id)
response = dict(code=code, message=message, payload=payload) response = dict(code=code, message=message, payload=payload)
return response return response
@json.json_view
def get_classifiers_feed(request, feed_id):
    """Return the requesting user's trained classifiers for one feed as JSON."""
    user = get_user(request)
    code = 0

    payload = get_classifiers_for_user(user, feed_id=feed_id)

    response = dict(code=code, payload=payload)

    return response
def popularity_query(request):
    """Render the popularity-query form; on a valid POST, queue the query email.

    A valid POST creates an MPopularityQuery, queues its email, remembers the
    query in a cookie, and re-renders with success. An invalid POST falls
    through to re-render the bound form with its errors.
    """
    if request.method == "POST":
        form = PopularityQueryForm(request.POST)
        if form.is_valid():
            logging.user(
                request.user,
                '~BC~FRPopularity query: ~SB%s~SN requests "~SB~FM%s~SN~FR"'
                % (request.POST["email"], request.POST["query"]),
            )
            query = MPopularityQuery.objects.create(email=request.POST["email"], query=request.POST["query"])
            query.queue_email()

            response = render(
                request,
                "analyzer/popularity_query.xhtml",
                {
                    "success": True,
                    "popularity_query_form": form,
                },
            )
            response.set_cookie("newsblur_popularity_query", request.POST["query"])

            return response
        else:
            logging.user(
                request.user,
                '~BC~FRFailed popularity query: ~SB%s~SN requests "~SB~FM%s~SN~FR"'
                % (request.POST["email"], request.POST["query"]),
            )
    else:
        logging.user(request.user, "~BC~FRPopularity query form loading")
        # Pre-fill from the last query the browser submitted, if any.
        form = PopularityQueryForm(initial={"query": request.COOKIES.get("newsblur_popularity_query", "")})

    response = render(
        request,
        "analyzer/popularity_query.xhtml",
        {
            "popularity_query_form": form,
        },
    )

    return response

View file

@ -7,6 +7,7 @@ Replace these with more appropriate tests for your application.
from django.test import TestCase from django.test import TestCase
class SimpleTest(TestCase): class SimpleTest(TestCase):
def test_basic_addition(self): def test_basic_addition(self):
""" """
@ -14,10 +15,12 @@ class SimpleTest(TestCase):
""" """
self.assertEqual(1 + 1, 2) self.assertEqual(1 + 1, 2)
__test__ = {"doctest": """
__test__ = {
"doctest": """
Another way to test that 1 + 1 is equal to 2. Another way to test that 1 + 1 is equal to 2.
>>> 1 + 1 == 2 >>> 1 + 1 == 2
True True
"""} """
}

View file

@ -2,18 +2,18 @@ from django.conf.urls import url
from apps.api import views from apps.api import views
urlpatterns = [ urlpatterns = [
url(r'^logout', views.logout, name='api-logout'), url(r"^logout", views.logout, name="api-logout"),
url(r'^login', views.login, name='api-login'), url(r"^login", views.login, name="api-login"),
url(r'^signup', views.signup, name='api-signup'), url(r"^signup", views.signup, name="api-signup"),
url(r'^add_site_load_script/(?P<token>\w+)', views.add_site_load_script, name='api-add-site-load-script'), url(r"^add_site_load_script/(?P<token>\w+)", views.add_site_load_script, name="api-add-site-load-script"),
url(r'^add_site/(?P<token>\w+)', views.add_site, name='api-add-site'), url(r"^add_site/(?P<token>\w+)", views.add_site, name="api-add-site"),
url(r'^add_url/(?P<token>\w+)', views.add_site, name='api-add-site'), url(r"^add_url/(?P<token>\w+)", views.add_site, name="api-add-site"),
url(r'^add_site/?$', views.add_site_authed, name='api-add-site-authed'), url(r"^add_site/?$", views.add_site_authed, name="api-add-site-authed"),
url(r'^add_url/?$', views.add_site_authed, name='api-add-site-authed'), url(r"^add_url/?$", views.add_site_authed, name="api-add-site-authed"),
url(r'^check_share_on_site/(?P<token>\w+)', views.check_share_on_site, name='api-check-share-on-site'), url(r"^check_share_on_site/(?P<token>\w+)", views.check_share_on_site, name="api-check-share-on-site"),
url(r'^share_story/(?P<token>\w+)', views.share_story, name='api-share-story'), url(r"^share_story/(?P<token>\w+)", views.share_story, name="api-share-story"),
url(r'^save_story/(?P<token>\w+)', views.save_story, name='api-save-story'), url(r"^save_story/(?P<token>\w+)", views.save_story, name="api-save-story"),
url(r'^share_story/?$', views.share_story), url(r"^share_story/?$", views.share_story),
url(r'^save_story/?$', views.save_story), url(r"^save_story/?$", views.save_story),
url(r'^ip_addresses/?$', views.ip_addresses), url(r"^ip_addresses/?$", views.ip_addresses),
] ]

View file

@ -29,10 +29,10 @@ from utils.view_functions import required_params
def login(request): def login(request):
code = -1 code = -1
errors = None errors = None
user_agent = request.environ.get('HTTP_USER_AGENT', '') user_agent = request.environ.get("HTTP_USER_AGENT", "")
ip = request.META.get('HTTP_X_FORWARDED_FOR', None) or request.META['REMOTE_ADDR'] ip = request.META.get("HTTP_X_FORWARDED_FOR", None) or request.META["REMOTE_ADDR"]
if not user_agent or user_agent.lower() in ['nativehost']: if not user_agent or user_agent.lower() in ["nativehost"]:
errors = dict(user_agent="You must set a user agent to login.") errors = dict(user_agent="You must set a user agent to login.")
logging.user(request, "~FG~BB~SK~FRBlocked ~FGAPI Login~SN~FW: %s / %s" % (user_agent, ip)) logging.user(request, "~FG~BB~SK~FRBlocked ~FGAPI Login~SN~FW: %s / %s" % (user_agent, ip))
elif request.method == "POST": elif request.method == "POST":
@ -40,19 +40,20 @@ def login(request):
if form.errors: if form.errors:
errors = form.errors errors = form.errors
if form.is_valid(): if form.is_valid():
login_user(request, form.get_user(), backend='django.contrib.auth.backends.ModelBackend') login_user(request, form.get_user(), backend="django.contrib.auth.backends.ModelBackend")
logging.user(request, "~FG~BB~SKAPI Login~SN~FW: %s / %s" % (user_agent, ip)) logging.user(request, "~FG~BB~SKAPI Login~SN~FW: %s / %s" % (user_agent, ip))
code = 1 code = 1
else: else:
errors = dict(method="Invalid method. Use POST. You used %s" % request.method) errors = dict(method="Invalid method. Use POST. You used %s" % request.method)
return dict(code=code, errors=errors) return dict(code=code, errors=errors)
@json.json_view @json.json_view
def signup(request): def signup(request):
code = -1 code = -1
errors = None errors = None
ip = request.META.get('HTTP_X_FORWARDED_FOR', None) or request.META['REMOTE_ADDR'] ip = request.META.get("HTTP_X_FORWARDED_FOR", None) or request.META["REMOTE_ADDR"]
if request.method == "POST": if request.method == "POST":
form = SignupForm(data=request.POST) form = SignupForm(data=request.POST)
@ -61,48 +62,47 @@ def signup(request):
if form.is_valid(): if form.is_valid():
try: try:
new_user = form.save() new_user = form.save()
login_user(request, new_user, backend='django.contrib.auth.backends.ModelBackend') login_user(request, new_user, backend="django.contrib.auth.backends.ModelBackend")
logging.user(request, "~FG~SB~BBAPI NEW SIGNUP: ~FW%s / %s" % (new_user.email, ip)) logging.user(request, "~FG~SB~BBAPI NEW SIGNUP: ~FW%s / %s" % (new_user.email, ip))
code = 1 code = 1
except forms.ValidationError as e: except forms.ValidationError as e:
errors = [e.args[0]] errors = [e.args[0]]
else: else:
errors = dict(method="Invalid method. Use POST. You used %s" % request.method) errors = dict(method="Invalid method. Use POST. You used %s" % request.method)
return dict(code=code, errors=errors) return dict(code=code, errors=errors)
@json.json_view @json.json_view
def logout(request): def logout(request):
code = 1 code = 1
logging.user(request, "~FG~BBAPI Logout~FW") logging.user(request, "~FG~BBAPI Logout~FW")
logout_user(request) logout_user(request)
return dict(code=code) return dict(code=code)
def add_site_load_script(request, token): def add_site_load_script(request, token):
code = 0 code = 0
usf = None usf = None
profile = None profile = None
user_profile = None user_profile = None
starred_counts = {} starred_counts = {}
def image_base64(image_name, path='icons/circular/'): def image_base64(image_name, path="icons/circular/"):
image_file = open(os.path.join(settings.MEDIA_ROOT, 'img/%s%s' % (path, image_name)), 'rb') image_file = open(os.path.join(settings.MEDIA_ROOT, "img/%s%s" % (path, image_name)), "rb")
return base64.b64encode(image_file.read()).decode('utf-8') return base64.b64encode(image_file.read()).decode("utf-8")
accept_image = image_base64('newuser_icn_setup.png') accept_image = image_base64("newuser_icn_setup.png")
error_image = image_base64('newuser_icn_sharewith_active.png') error_image = image_base64("newuser_icn_sharewith_active.png")
new_folder_image = image_base64('g_icn_arrow_right.png') new_folder_image = image_base64("g_icn_arrow_right.png")
add_image = image_base64('g_icn_expand_hover.png') add_image = image_base64("g_icn_expand_hover.png")
try: try:
profiles = Profile.objects.filter(secret_token=token) profiles = Profile.objects.filter(secret_token=token)
if profiles: if profiles:
profile = profiles[0] profile = profiles[0]
usf = UserSubscriptionFolders.objects.get( usf = UserSubscriptionFolders.objects.get(user=profile.user)
user=profile.user
)
user_profile = MSocialProfile.get_user(user_id=profile.user.pk) user_profile = MSocialProfile.get_user(user_id=profile.user.pk)
starred_counts = MStarredStoryCounts.user_counts(profile.user.pk) starred_counts = MStarredStoryCounts.user_counts(profile.user.pk)
else: else:
@ -111,29 +111,34 @@ def add_site_load_script(request, token):
code = -1 code = -1
except UserSubscriptionFolders.DoesNotExist: except UserSubscriptionFolders.DoesNotExist:
code = -1 code = -1
return render(request, 'api/share_bookmarklet.js', { return render(
'code': code, request,
'token': token, "api/share_bookmarklet.js",
'folders': (usf and usf.folders) or [], {
'user': profile and profile.user or {}, "code": code,
'user_profile': user_profile and json.encode(user_profile.canonical()) or {}, "token": token,
'starred_counts': json.encode(starred_counts), "folders": (usf and usf.folders) or [],
'accept_image': accept_image, "user": profile and profile.user or {},
'error_image': error_image, "user_profile": user_profile and json.encode(user_profile.canonical()) or {},
'add_image': add_image, "starred_counts": json.encode(starred_counts),
'new_folder_image': new_folder_image, "accept_image": accept_image,
}, "error_image": error_image,
content_type='application/javascript') "add_image": add_image,
"new_folder_image": new_folder_image,
},
content_type="application/javascript",
)
def add_site(request, token): def add_site(request, token):
code = 0 code = 0
get_post = getattr(request, request.method) get_post = getattr(request, request.method)
url = get_post.get('url') url = get_post.get("url")
folder = get_post.get('folder') folder = get_post.get("folder")
new_folder = get_post.get('new_folder') new_folder = get_post.get("new_folder")
callback = get_post.get('callback', '') callback = get_post.get("callback", "")
if not url: if not url:
code = -1 code = -1
else: else:
@ -144,35 +149,40 @@ def add_site(request, token):
usf.add_folder(folder, new_folder) usf.add_folder(folder, new_folder)
folder = new_folder folder = new_folder
code, message, us = UserSubscription.add_subscription( code, message, us = UserSubscription.add_subscription(
user=profile.user, user=profile.user, feed_address=url, folder=folder, bookmarklet=True
feed_address=url,
folder=folder,
bookmarklet=True
) )
except Profile.DoesNotExist: except Profile.DoesNotExist:
code = -1 code = -1
if code > 0: if code > 0:
message = 'OK' message = "OK"
logging.user(profile.user, "~FRAdding URL from site: ~SB%s (in %s)" % (url, folder), logging.user(profile.user, "~FRAdding URL from site: ~SB%s (in %s)" % (url, folder), request=request)
request=request)
return HttpResponse(
return HttpResponse(callback + '(' + json.encode({ callback
'code': code, + "("
'message': message, + json.encode(
'usersub': us and us.feed_id, {
}) + ')', content_type='text/plain') "code": code,
"message": message,
"usersub": us and us.feed_id,
}
)
+ ")",
content_type="text/plain",
)
@ajax_login_required @ajax_login_required
def add_site_authed(request): def add_site_authed(request):
code = 0 code = 0
url = request.GET['url'] url = request.GET["url"]
folder = request.GET['folder'] folder = request.GET["folder"]
new_folder = request.GET.get('new_folder') new_folder = request.GET.get("new_folder")
callback = request.GET['callback'] callback = request.GET["callback"]
user = get_user(request) user = get_user(request)
if not url: if not url:
code = -1 code = -1
else: else:
@ -181,40 +191,45 @@ def add_site_authed(request):
usf.add_folder(folder, new_folder) usf.add_folder(folder, new_folder)
folder = new_folder folder = new_folder
code, message, us = UserSubscription.add_subscription( code, message, us = UserSubscription.add_subscription(
user=user, user=user, feed_address=url, folder=folder, bookmarklet=True
feed_address=url,
folder=folder,
bookmarklet=True
) )
if code > 0: if code > 0:
message = 'OK' message = "OK"
logging.user(user, "~FRAdding authed URL from site: ~SB%s (in %s)" % (url, folder), logging.user(user, "~FRAdding authed URL from site: ~SB%s (in %s)" % (url, folder), request=request)
request=request)
return HttpResponse(
return HttpResponse(callback + '(' + json.encode({ callback
'code': code, + "("
'message': message, + json.encode(
'usersub': us and us.feed_id, {
}) + ')', content_type='text/plain') "code": code,
"message": message,
"usersub": us and us.feed_id,
}
)
+ ")",
content_type="text/plain",
)
def check_share_on_site(request, token): def check_share_on_site(request, token):
code = 0 code = 0
story_url = request.GET['story_url'] story_url = request.GET["story_url"]
rss_url = request.GET.get('rss_url') rss_url = request.GET.get("rss_url")
callback = request.GET['callback'] callback = request.GET["callback"]
other_stories = None other_stories = None
same_stories = None same_stories = None
usersub = None usersub = None
message = None message = None
user = None user = None
users = {} users = {}
your_story = None your_story = None
same_stories = None same_stories = None
other_stories = None other_stories = None
previous_stories = None previous_stories = None
if not story_url: if not story_url:
code = -1 code = -1
else: else:
@ -223,7 +238,7 @@ def check_share_on_site(request, token):
user = user_profile.user user = user_profile.user
except Profile.DoesNotExist: except Profile.DoesNotExist:
code = -1 code = -1
logging.user(request.user, "~FBFinding feed (check_share_on_site): %s" % rss_url) logging.user(request.user, "~FBFinding feed (check_share_on_site): %s" % rss_url)
feed = Feed.get_feed_from_url(rss_url, create=False, fetch=False) feed = Feed.get_feed_from_url(rss_url, create=False, fetch=False)
if not feed: if not feed:
@ -239,9 +254,9 @@ def check_share_on_site(request, token):
logging.user(request.user, "~FBFinding feed (check_share_on_site): %s" % base_url) logging.user(request.user, "~FBFinding feed (check_share_on_site): %s" % base_url)
feed = Feed.get_feed_from_url(base_url, create=False, fetch=False) feed = Feed.get_feed_from_url(base_url, create=False, fetch=False)
if not feed: if not feed:
logging.user(request.user, "~FBFinding feed (check_share_on_site): %s" % (base_url + '/')) logging.user(request.user, "~FBFinding feed (check_share_on_site): %s" % (base_url + "/"))
feed = Feed.get_feed_from_url(base_url+'/', create=False, fetch=False) feed = Feed.get_feed_from_url(base_url + "/", create=False, fetch=False)
if feed and user: if feed and user:
try: try:
usersub = UserSubscription.objects.filter(user=user, feed=feed) usersub = UserSubscription.objects.filter(user=user, feed=feed)
@ -249,23 +264,27 @@ def check_share_on_site(request, token):
usersub = None usersub = None
if user: if user:
feed_id = feed and feed.pk feed_id = feed and feed.pk
your_story, same_stories, other_stories = MSharedStory.get_shared_stories_from_site(feed_id, your_story, same_stories, other_stories = MSharedStory.get_shared_stories_from_site(
user_id=user.pk, story_url=story_url) feed_id, user_id=user.pk, story_url=story_url
previous_stories = MSharedStory.objects.filter(user_id=user.pk).order_by('-shared_date').limit(3) )
previous_stories = [{ previous_stories = MSharedStory.objects.filter(user_id=user.pk).order_by("-shared_date").limit(3)
"user_id": story.user_id, previous_stories = [
"story_title": story.story_title, {
"comments": story.comments, "user_id": story.user_id,
"shared_date": story.shared_date, "story_title": story.story_title,
"relative_date": relative_timesince(story.shared_date), "comments": story.comments,
"blurblog_permalink": story.blurblog_permalink(), "shared_date": story.shared_date,
} for story in previous_stories] "relative_date": relative_timesince(story.shared_date),
"blurblog_permalink": story.blurblog_permalink(),
}
for story in previous_stories
]
user_ids = set([user_profile.user.pk]) user_ids = set([user_profile.user.pk])
for story in same_stories: for story in same_stories:
user_ids.add(story['user_id']) user_ids.add(story["user_id"])
for story in other_stories: for story in other_stories:
user_ids.add(story['user_id']) user_ids.add(story["user_id"])
profiles = MSocialProfile.profiles(user_ids) profiles = MSocialProfile.profiles(user_ids)
for profile in profiles: for profile in profiles:
@ -273,39 +292,47 @@ def check_share_on_site(request, token):
"username": profile.username, "username": profile.username,
"photo_url": profile.photo_url, "photo_url": profile.photo_url,
} }
logging.user(user, "~BM~FCChecking share from site: ~SB%s" % (story_url), logging.user(user, "~BM~FCChecking share from site: ~SB%s" % (story_url), request=request)
request=request)
response = HttpResponse(
response = HttpResponse(callback + '(' + json.encode({ callback
'code' : code, + "("
'message' : message, + json.encode(
'feed' : feed, {
'subscribed' : bool(usersub), "code": code,
'your_story' : your_story, "message": message,
'same_stories' : same_stories, "feed": feed,
'other_stories' : other_stories, "subscribed": bool(usersub),
'previous_stories' : previous_stories, "your_story": your_story,
'users' : users, "same_stories": same_stories,
}) + ')', content_type='text/plain') "other_stories": other_stories,
response['Access-Control-Allow-Origin'] = '*' "previous_stories": previous_stories,
response['Access-Control-Allow-Methods'] = 'GET' "users": users,
}
)
+ ")",
content_type="text/plain",
)
response["Access-Control-Allow-Origin"] = "*"
response["Access-Control-Allow-Methods"] = "GET"
return response return response
@required_params('story_url')
@required_params("story_url")
def share_story(request, token=None): def share_story(request, token=None):
code = 0 code = 0
story_url = request.POST['story_url'] story_url = request.POST["story_url"]
comments = request.POST.get('comments', "") comments = request.POST.get("comments", "")
title = request.POST.get('title', None) title = request.POST.get("title", None)
content = request.POST.get('content', None) content = request.POST.get("content", None)
rss_url = request.POST.get('rss_url', None) rss_url = request.POST.get("rss_url", None)
feed_id = request.POST.get('feed_id', None) or 0 feed_id = request.POST.get("feed_id", None) or 0
feed = None feed = None
message = None message = None
profile = None profile = None
if request.user.is_authenticated: if request.user.is_authenticated:
profile = request.user.profile profile = request.user.profile
else: else:
@ -317,14 +344,19 @@ def share_story(request, token=None):
message = "Not authenticated, couldn't find user by token." message = "Not authenticated, couldn't find user by token."
else: else:
message = "Not authenticated, no token supplied and not authenticated." message = "Not authenticated, no token supplied and not authenticated."
if not profile: if not profile:
return HttpResponse(json.encode({ return HttpResponse(
'code': code, json.encode(
'message': message, {
'story': None, "code": code,
}), content_type='text/plain') "message": message,
"story": None,
}
),
content_type="text/plain",
)
if feed_id: if feed_id:
feed = Feed.get_by_id(feed_id) feed = Feed.get_by_id(feed_id)
else: else:
@ -336,7 +368,7 @@ def share_story(request, token=None):
feed = Feed.get_feed_from_url(story_url, create=True, fetch=True) feed = Feed.get_feed_from_url(story_url, create=True, fetch=True)
if feed: if feed:
feed_id = feed.pk feed_id = feed.pk
if content: if content:
content = lxml.html.fromstring(content) content = lxml.html.fromstring(content)
content.make_links_absolute(story_url) content.make_links_absolute(story_url)
@ -346,13 +378,15 @@ def share_story(request, token=None):
importer = TextImporter(story=None, story_url=story_url, request=request, debug=settings.DEBUG) importer = TextImporter(story=None, story_url=story_url, request=request, debug=settings.DEBUG)
document = importer.fetch(skip_save=True, return_document=True) document = importer.fetch(skip_save=True, return_document=True)
if not content: if not content:
content = document['content'] content = document["content"]
if not title: if not title:
title = document['title'] title = document["title"]
shared_story = MSharedStory.objects.filter(user_id=profile.user.pk, shared_story = (
story_feed_id=feed_id, MSharedStory.objects.filter(user_id=profile.user.pk, story_feed_id=feed_id, story_guid=story_url)
story_guid=story_url).limit(1).first() .limit(1)
.first()
)
if not shared_story: if not shared_story:
story_db = { story_db = {
"story_guid": story_url, "story_guid": story_url,
@ -361,7 +395,6 @@ def share_story(request, token=None):
"story_feed_id": feed_id, "story_feed_id": feed_id,
"story_content": content, "story_content": content,
"story_date": datetime.datetime.now(), "story_date": datetime.datetime.now(),
"user_id": profile.user.pk, "user_id": profile.user.pk,
"comments": comments, "comments": comments,
"has_comments": bool(comments), "has_comments": bool(comments),
@ -382,49 +415,57 @@ def share_story(request, token=None):
shared_story.has_comments = bool(comments) shared_story.has_comments = bool(comments)
shared_story.story_feed_id = feed_id shared_story.story_feed_id = feed_id
shared_story.save() shared_story.save()
logging.user(profile.user, "~BM~FY~SBUpdating~SN shared story from site: ~SB%s: %s" % (story_url, comments)) logging.user(
profile.user, "~BM~FY~SBUpdating~SN shared story from site: ~SB%s: %s" % (story_url, comments)
)
message = "Updating shared story from site: %s: %s" % (story_url, comments) message = "Updating shared story from site: %s: %s" % (story_url, comments)
try: try:
socialsub = MSocialSubscription.objects.get(user_id=profile.user.pk, socialsub = MSocialSubscription.objects.get(
subscription_user_id=profile.user.pk) user_id=profile.user.pk, subscription_user_id=profile.user.pk
)
except MSocialSubscription.DoesNotExist: except MSocialSubscription.DoesNotExist:
socialsub = None socialsub = None
if socialsub: if socialsub:
socialsub.mark_story_ids_as_read([shared_story.story_hash], socialsub.mark_story_ids_as_read(
shared_story.story_feed_id, [shared_story.story_hash], shared_story.story_feed_id, request=request
request=request) )
else: else:
RUserStory.mark_read(profile.user.pk, shared_story.story_feed_id, shared_story.story_hash) RUserStory.mark_read(profile.user.pk, shared_story.story_feed_id, shared_story.story_hash)
shared_story.publish_update_to_subscribers() shared_story.publish_update_to_subscribers()
response = HttpResponse(json.encode({ response = HttpResponse(
'code': code, json.encode(
'message': message, {
'story': shared_story, "code": code,
}), content_type='text/plain') "message": message,
response['Access-Control-Allow-Origin'] = '*' "story": shared_story,
response['Access-Control-Allow-Methods'] = 'POST' }
),
content_type="text/plain",
)
response["Access-Control-Allow-Origin"] = "*"
response["Access-Control-Allow-Methods"] = "POST"
return response return response
@required_params('story_url', 'title')
@required_params("story_url", "title")
def save_story(request, token=None): def save_story(request, token=None):
code = 0 code = 0
story_url = request.POST['story_url'] story_url = request.POST["story_url"]
user_tags = request.POST.getlist('user_tags') or request.POST.getlist('user_tags[]') or [] user_tags = request.POST.getlist("user_tags") or request.POST.getlist("user_tags[]") or []
add_user_tag = request.POST.get('add_user_tag', None) add_user_tag = request.POST.get("add_user_tag", None)
title = request.POST['title'] title = request.POST["title"]
content = request.POST.get('content', None) content = request.POST.get("content", None)
rss_url = request.POST.get('rss_url', None) rss_url = request.POST.get("rss_url", None)
user_notes = request.POST.get('user_notes', None) user_notes = request.POST.get("user_notes", None)
feed_id = request.POST.get('feed_id', None) or 0 feed_id = request.POST.get("feed_id", None) or 0
feed = None feed = None
message = None message = None
profile = None profile = None
if request.user.is_authenticated: if request.user.is_authenticated:
profile = request.user.profile profile = request.user.profile
else: else:
@ -436,14 +477,19 @@ def save_story(request, token=None):
message = "Not authenticated, couldn't find user by token." message = "Not authenticated, couldn't find user by token."
else: else:
message = "Not authenticated, no token supplied and not authenticated." message = "Not authenticated, no token supplied and not authenticated."
if not profile: if not profile:
return HttpResponse(json.encode({ return HttpResponse(
'code': code, json.encode(
'message': message, {
'story': None, "code": code,
}), content_type='text/plain') "message": message,
"story": None,
}
),
content_type="text/plain",
)
if feed_id: if feed_id:
feed = Feed.get_by_id(feed_id) feed = Feed.get_by_id(feed_id)
else: else:
@ -455,7 +501,7 @@ def save_story(request, token=None):
feed = Feed.get_feed_from_url(story_url, create=True, fetch=True) feed = Feed.get_feed_from_url(story_url, create=True, fetch=True)
if feed: if feed:
feed_id = feed.pk feed_id = feed.pk
if content: if content:
content = lxml.html.fromstring(content) content = lxml.html.fromstring(content)
content.make_links_absolute(story_url) content.make_links_absolute(story_url)
@ -463,16 +509,18 @@ def save_story(request, token=None):
else: else:
importer = TextImporter(story=None, story_url=story_url, request=request, debug=settings.DEBUG) importer = TextImporter(story=None, story_url=story_url, request=request, debug=settings.DEBUG)
document = importer.fetch(skip_save=True, return_document=True) document = importer.fetch(skip_save=True, return_document=True)
content = document['content'] content = document["content"]
if not title: if not title:
title = document['title'] title = document["title"]
if add_user_tag: if add_user_tag:
user_tags = user_tags + [tag for tag in add_user_tag.split(',')] user_tags = user_tags + [tag for tag in add_user_tag.split(",")]
starred_story = MStarredStory.objects.filter(user_id=profile.user.pk, starred_story = (
story_feed_id=feed_id, MStarredStory.objects.filter(user_id=profile.user.pk, story_feed_id=feed_id, story_guid=story_url)
story_guid=story_url).limit(1).first() .limit(1)
.first()
)
if not starred_story: if not starred_story:
story_db = { story_db = {
"story_guid": story_url, "story_guid": story_url,
@ -498,26 +546,34 @@ def save_story(request, token=None):
starred_story.story_feed_id = feed_id starred_story.story_feed_id = feed_id
starred_story.user_notes = user_notes starred_story.user_notes = user_notes
starred_story.save() starred_story.save()
logging.user(profile.user, "~BM~FC~SBUpdating~SN starred story from site: ~SB%s: %s" % (story_url, user_tags)) logging.user(
profile.user, "~BM~FC~SBUpdating~SN starred story from site: ~SB%s: %s" % (story_url, user_tags)
)
message = "Updating saved story from site: %s: %s" % (story_url, user_tags) message = "Updating saved story from site: %s: %s" % (story_url, user_tags)
MStarredStoryCounts.schedule_count_tags_for_user(request.user.pk) MStarredStoryCounts.schedule_count_tags_for_user(request.user.pk)
response = HttpResponse(json.encode({ response = HttpResponse(
'code': code, json.encode(
'message': message, {
'story': starred_story, "code": code,
}), content_type='text/plain') "message": message,
response['Access-Control-Allow-Origin'] = '*' "story": starred_story,
response['Access-Control-Allow-Methods'] = 'POST' }
),
content_type="text/plain",
)
response["Access-Control-Allow-Origin"] = "*"
response["Access-Control-Allow-Methods"] = "POST"
return response return response
def ip_addresses(request): def ip_addresses(request):
# Read local file /srv/newsblur/apps/api/ip_addresses.txt and return that # Read local file /srv/newsblur/apps/api/ip_addresses.txt and return that
with open('/srv/newsblur/apps/api/ip_addresses.txt', 'r') as f: with open("/srv/newsblur/apps/api/ip_addresses.txt", "r") as f:
addresses = f.read() addresses = f.read()
mail_admins(f"IP Addresses accessed from {request.META['REMOTE_ADDR']} by {request.user}", addresses) mail_admins(f"IP Addresses accessed from {request.META['REMOTE_ADDR']} by {request.user}", addresses)
return HttpResponse(addresses, content_type='text/plain') return HttpResponse(addresses, content_type="text/plain")

View file

@ -6,20 +6,21 @@ from utils import json_functions as json
from utils.feed_functions import add_object_to_folder from utils.feed_functions import add_object_to_folder
from utils import log as logging from utils import log as logging
class MCategory(mongo.Document): class MCategory(mongo.Document):
title = mongo.StringField() title = mongo.StringField()
description = mongo.StringField() description = mongo.StringField()
feed_ids = mongo.ListField(mongo.IntField()) feed_ids = mongo.ListField(mongo.IntField())
meta = { meta = {
'collection': 'category', "collection": "category",
'indexes': ['title'], "indexes": ["title"],
'allow_inheritance': False, "allow_inheritance": False,
} }
def __str__(self): def __str__(self):
return "%s: %s sites" % (self.title, len(self.feed_ids)) return "%s: %s sites" % (self.title, len(self.feed_ids))
@classmethod @classmethod
def audit(cls): def audit(cls):
categories = cls.objects.all() categories = cls.objects.all()
@ -39,28 +40,28 @@ class MCategory(mongo.Document):
@classmethod @classmethod
def add(cls, title, description): def add(cls, title, description):
return cls.objects.create(title=title, description=description) return cls.objects.create(title=title, description=description)
@classmethod @classmethod
def serialize(cls, category=None): def serialize(cls, category=None):
categories = cls.objects.all() categories = cls.objects.all()
if category: if category:
categories = categories.filter(title=category) categories = categories.filter(title=category)
data = dict(categories=[], feeds={}) data = dict(categories=[], feeds={})
feed_ids = set() feed_ids = set()
for category in categories: for category in categories:
category_output = { category_output = {
'title': category.title, "title": category.title,
'description': category.description, "description": category.description,
'feed_ids': category.feed_ids, "feed_ids": category.feed_ids,
} }
data['categories'].append(category_output) data["categories"].append(category_output)
feed_ids.update(list(category.feed_ids)) feed_ids.update(list(category.feed_ids))
feeds = Feed.objects.filter(pk__in=feed_ids) feeds = Feed.objects.filter(pk__in=feed_ids)
for feed in feeds: for feed in feeds:
data['feeds'][feed.pk] = feed.canonical() data["feeds"][feed.pk] = feed.canonical()
return data return data
@classmethod @classmethod
@ -68,8 +69,10 @@ class MCategory(mongo.Document):
category_sites = MCategorySite.objects.all() category_sites = MCategorySite.objects.all()
if category_title: if category_title:
category_sites = category_sites.filter(category_title=category_title) category_sites = category_sites.filter(category_title=category_title)
category_groups = groupby(sorted(category_sites, key=lambda c: c.category_title), key=lambda c: c.category_title) category_groups = groupby(
sorted(category_sites, key=lambda c: c.category_title), key=lambda c: c.category_title
)
for category_title, sites in category_groups: for category_title, sites in category_groups:
try: try:
category = cls.objects.get(title=category_title) category = cls.objects.get(title=category_title)
@ -79,27 +82,26 @@ class MCategory(mongo.Document):
category.feed_ids = [site.feed_id for site in sites] category.feed_ids = [site.feed_id for site in sites]
category.save() category.save()
print(" ---> Reloaded category: %s" % category) print(" ---> Reloaded category: %s" % category)
@classmethod @classmethod
def subscribe(cls, user_id, category_title): def subscribe(cls, user_id, category_title):
category = cls.objects.get(title=category_title) category = cls.objects.get(title=category_title)
for feed_id in category.feed_ids: for feed_id in category.feed_ids:
us, _ = UserSubscription.objects.get_or_create( us, _ = UserSubscription.objects.get_or_create(
feed_id=feed_id, feed_id=feed_id,
user_id=user_id, user_id=user_id,
defaults={ defaults={
'needs_unread_recalc': True, "needs_unread_recalc": True,
'active': True, "active": True,
} },
) )
usf, created = UserSubscriptionFolders.objects.get_or_create( usf, created = UserSubscriptionFolders.objects.get_or_create(
user_id=user_id, user_id=user_id, defaults={"folders": "[]"}
defaults={'folders': '[]'}
) )
usf.add_folder('', category.title) usf.add_folder("", category.title)
folders = json.decode(usf.folders) folders = json.decode(usf.folders)
for feed_id in category.feed_ids: for feed_id in category.feed_ids:
feed = Feed.get_by_id(feed_id) feed = Feed.get_by_id(feed_id)
@ -108,27 +110,26 @@ class MCategory(mongo.Document):
folders = add_object_to_folder(feed.pk, category.title, folders) folders = add_object_to_folder(feed.pk, category.title, folders)
usf.folders = json.encode(folders) usf.folders = json.encode(folders)
usf.save() usf.save()
class MCategorySite(mongo.Document): class MCategorySite(mongo.Document):
feed_id = mongo.IntField() feed_id = mongo.IntField()
category_title = mongo.StringField() category_title = mongo.StringField()
meta = { meta = {
'collection': 'category_site', "collection": "category_site",
'indexes': ['feed_id', 'category_title'], "indexes": ["feed_id", "category_title"],
'allow_inheritance': False, "allow_inheritance": False,
} }
def __str__(self):
    """Human-readable label: '<category title>: <feed>'.

    NOTE(review): resolves the feed via Feed.get_by_id on every call — fine for
    admin/debug printing, but avoid in tight loops.
    """
    feed = Feed.get_by_id(self.feed_id)
    return "%s: %s" % (self.category_title, feed)
@classmethod @classmethod
def add(cls, category_title, feed_id): def add(cls, category_title, feed_id):
category_site, created = cls.objects.get_or_create(category_title=category_title, category_site, created = cls.objects.get_or_create(category_title=category_title, feed_id=feed_id)
feed_id=feed_id)
if not created: if not created:
print(" ---> Site is already in category: %s" % category_site) print(" ---> Site is already in category: %s" % category_site)
else: else:

View file

@ -2,6 +2,6 @@ from django.conf.urls import url
from apps.categories import views from apps.categories import views
# URL routes for the categories app.
urlpatterns = [
    url(r"^$", views.all_categories, name="all-categories"),
    url(r"^subscribe/?$", views.subscribe, name="categories-subscribe"),
]

View file

@ -3,35 +3,42 @@ from apps.reader.models import UserSubscriptionFolders
from utils import json_functions as json from utils import json_functions as json
from utils.user_functions import ajax_login_required from utils.user_functions import ajax_login_required
@json.json_view
def all_categories(request):
    """Return every category, serialized for the API (no login required)."""
    return MCategory.serialize()
@ajax_login_required
@json.json_view
def subscribe(request):
    """Subscribe the logged-in user to one or more named categories.

    Reads ``category`` (or ``category[]``) values from POST. Rejects the
    request (code=-1) when no titles are given or any title is unknown;
    otherwise subscribes to each category and returns the user's updated
    folder structure.
    """
    user = request.user
    categories = MCategory.serialize()
    category_titles = [c["title"] for c in categories["categories"]]
    requested_titles = request.POST.getlist("category") or request.POST.getlist("category[]")

    # Every requested title must match a known category.
    invalid_category_title = any(title not in category_titles for title in requested_titles)
    if not requested_titles or invalid_category_title:
        message = "Choose one or more of these categories: %s" % ", ".join(category_titles)
        return dict(code=-1, message=message)

    for category_title in requested_titles:
        MCategory.subscribe(user.pk, category_title)

    usf = UserSubscriptionFolders.objects.get(user=user.pk)
    return dict(
        code=1,
        message="Subscribed to %s %s"
        % (
            len(requested_titles),
            "category" if len(requested_titles) == 1 else "categories",
        ),
        folders=json.decode(usf.folders),
    )

View file

@ -7,7 +7,6 @@ import django.db.models.deletion
class Migration(migrations.Migration): class Migration(migrations.Migration):
initial = True initial = True
dependencies = [ dependencies = [
@ -16,19 +15,30 @@ class Migration(migrations.Migration):
operations = [ operations = [
migrations.CreateModel( migrations.CreateModel(
name='OAuthToken', name="OAuthToken",
fields=[ fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), (
('session_id', models.CharField(blank=True, max_length=50, null=True)), "id",
('uuid', models.CharField(blank=True, max_length=50, null=True)), models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID"),
('remote_ip', models.CharField(blank=True, max_length=50, null=True)), ),
('request_token', models.CharField(max_length=50)), ("session_id", models.CharField(blank=True, max_length=50, null=True)),
('request_token_secret', models.CharField(max_length=50)), ("uuid", models.CharField(blank=True, max_length=50, null=True)),
('access_token', models.CharField(max_length=50)), ("remote_ip", models.CharField(blank=True, max_length=50, null=True)),
('access_token_secret', models.CharField(max_length=50)), ("request_token", models.CharField(max_length=50)),
('credential', models.TextField(blank=True, null=True)), ("request_token_secret", models.CharField(max_length=50)),
('created_date', models.DateTimeField(default=datetime.datetime.now)), ("access_token", models.CharField(max_length=50)),
('user', models.OneToOneField(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)), ("access_token_secret", models.CharField(max_length=50)),
("credential", models.TextField(blank=True, null=True)),
("created_date", models.DateTimeField(default=datetime.datetime.now)),
(
"user",
models.OneToOneField(
blank=True,
null=True,
on_delete=django.db.models.deletion.CASCADE,
to=settings.AUTH_USER_MODEL,
),
),
], ],
), ),
] ]

View file

@ -28,75 +28,73 @@ class OAuthToken(models.Model):
access_token_secret = models.CharField(max_length=50) access_token_secret = models.CharField(max_length=50)
credential = models.TextField(null=True, blank=True) credential = models.TextField(null=True, blank=True)
created_date = models.DateTimeField(default=datetime.datetime.now) created_date = models.DateTimeField(default=datetime.datetime.now)
class Importer:
    """Shared helpers for OPML import/export.

    Subclasses must set ``self.user`` before calling these methods.
    """

    def clear_feeds(self):
        # Delete every subscription for this user.
        UserSubscription.objects.filter(user=self.user).delete()

    def clear_folders(self):
        # Delete the user's folder container.
        UserSubscriptionFolders.objects.filter(user=self.user).delete()

    def get_folders(self):
        """Fetch (or lazily create) the user's folder structure as a Python list."""
        self.usf, _ = UserSubscriptionFolders.objects.get_or_create(
            user=self.user, defaults={"folders": "[]"}
        )
        return json.decode(self.usf.folders)
class OPMLExporter(Importer): class OPMLExporter(Importer):
def __init__(self, user):
    """Capture the exporting user and preload their subscriptions via fetch_feeds()."""
    self.user = user
    self.fetch_feeds()
def process(self, verbose=False):
    """Build the user's subscriptions as an OPML document.

    :param verbose: when True, print each feed/folder as it is added.
    :return: the serialized OPML document as XML bytes.
    """
    now = str(datetime.datetime.now())

    root = Element("opml")
    root.set("version", "1.1")
    root.append(Comment("Generated by NewsBlur - newsblur.com"))

    head = SubElement(root, "head")
    SubElement(head, "title").text = "NewsBlur Feeds"
    SubElement(head, "dateCreated").text = now
    SubElement(head, "dateModified").text = now

    folders = self.get_folders()
    body = SubElement(root, "body")
    self.process_outline(body, folders, verbose=verbose)
    return tostring(root, encoding="utf8", method="xml")
def process_outline(self, body, folders, verbose=False):
    """Recursively append <outline> elements for feeds and folders to *body*.

    ``folders`` mixes feed ids (ints) with folder dicts mapping a title to a
    nested list of the same shape. Unknown feed ids are silently skipped.
    Returns *body* so recursive calls can be appended directly.
    """
    for obj in folders:
        if isinstance(obj, int) and obj in self.feeds:
            feed = self.feeds[obj]
            if verbose:
                print(" ---> Adding feed: %s - %s" % (feed["id"], feed["feed_title"][:30]))
            body.append(Element("outline", self.make_feed_row(feed)))
        elif isinstance(obj, dict):
            for folder_title, folder_objs in list(obj.items()):
                if verbose:
                    print(" ---> Adding folder: %s" % folder_title)
                folder_element = Element("outline", {"text": folder_title, "title": folder_title})
                body.append(self.process_outline(folder_element, folder_objs, verbose=verbose))
    return body
def make_feed_row(self, feed):
    """Map a feed dict to the attribute dict for an OPML <outline> element.

    Missing link/address values are normalized to empty strings so the XML
    attributes are always present.
    """
    return {
        "text": feed["feed_title"],
        "title": feed["feed_title"],
        "type": "rss",
        "version": "RSS",
        "htmlUrl": feed["feed_link"] or "",
        "xmlUrl": feed["feed_address"] or "",
    }
def fetch_feeds(self): def fetch_feeds(self):
subs = UserSubscription.objects.filter(user=self.user) subs = UserSubscription.objects.filter(user=self.user)
self.feeds = [] self.feeds = []
@ -113,16 +111,15 @@ class OPMLExporter(Importer):
class OPMLImporter(Importer): class OPMLImporter(Importer):
def __init__(self, opml_xml, user):
    """Keep the raw OPML payload and the importing user for later processing."""
    self.user = user
    self.opml_xml = opml_xml
@timelimit(10)
def try_processing(self):
    """Run process() under a 10-second time limit; the decorator raises on timeout."""
    folders = self.process()
    return folders
def process(self): def process(self):
# self.clear_feeds() # self.clear_feeds()
@ -136,38 +133,37 @@ class OPMLImporter(Importer):
# self.clear_folders() # self.clear_folders()
self.usf.folders = json.encode(folders) self.usf.folders = json.encode(folders)
self.usf.save() self.usf.save()
return folders return folders
def process_outline(self, outline, folders, in_folder=''): def process_outline(self, outline, folders, in_folder=""):
for item in outline: for item in outline:
if (not hasattr(item, 'xmlUrl') and if not hasattr(item, "xmlUrl") and (hasattr(item, "text") or hasattr(item, "title")):
(hasattr(item, 'text') or hasattr(item, 'title'))):
folder = item folder = item
title = getattr(item, 'text', None) or getattr(item, 'title', None) title = getattr(item, "text", None) or getattr(item, "title", None)
# if hasattr(folder, 'text'): # if hasattr(folder, 'text'):
# logging.info(' ---> [%s] ~FRNew Folder: %s' % (self.user, folder.text)) # logging.info(' ---> [%s] ~FRNew Folder: %s' % (self.user, folder.text))
obj = {title: []} obj = {title: []}
folders = add_object_to_folder(obj, in_folder, folders) folders = add_object_to_folder(obj, in_folder, folders)
folders = self.process_outline(folder, folders, title) folders = self.process_outline(folder, folders, title)
elif hasattr(item, 'xmlUrl'): elif hasattr(item, "xmlUrl"):
feed = item feed = item
if not hasattr(feed, 'htmlUrl'): if not hasattr(feed, "htmlUrl"):
setattr(feed, 'htmlUrl', None) setattr(feed, "htmlUrl", None)
# If feed title matches what's in the DB, don't override it on subscription. # If feed title matches what's in the DB, don't override it on subscription.
feed_title = getattr(feed, 'title', None) or getattr(feed, 'text', None) feed_title = getattr(feed, "title", None) or getattr(feed, "text", None)
if not feed_title: if not feed_title:
setattr(feed, 'title', feed.htmlUrl or feed.xmlUrl) setattr(feed, "title", feed.htmlUrl or feed.xmlUrl)
user_feed_title = None user_feed_title = None
else: else:
setattr(feed, 'title', feed_title) setattr(feed, "title", feed_title)
user_feed_title = feed.title user_feed_title = feed.title
feed_address = urlnorm.normalize(feed.xmlUrl) feed_address = urlnorm.normalize(feed.xmlUrl)
feed_link = urlnorm.normalize(feed.htmlUrl) feed_link = urlnorm.normalize(feed.htmlUrl)
if len(feed_address) > Feed._meta.get_field('feed_address').max_length: if len(feed_address) > Feed._meta.get_field("feed_address").max_length:
continue continue
if feed_link and len(feed_link) > Feed._meta.get_field('feed_link').max_length: if feed_link and len(feed_link) > Feed._meta.get_field("feed_link").max_length:
continue continue
# logging.info(' ---> \t~FR%s - %s - %s' % (feed.title, feed_link, feed_address,)) # logging.info(' ---> \t~FR%s - %s - %s' % (feed.title, feed_link, feed_address,))
feed_data = dict(feed_address=feed_address, feed_link=feed_link, feed_title=feed.title) feed_data = dict(feed_address=feed_address, feed_link=feed_link, feed_title=feed.title)
@ -178,32 +174,31 @@ class OPMLImporter(Importer):
if duplicate_feed: if duplicate_feed:
feed_db = duplicate_feed[0].feed feed_db = duplicate_feed[0].feed
else: else:
feed_data['active_subscribers'] = 1 feed_data["active_subscribers"] = 1
feed_data['num_subscribers'] = 1 feed_data["num_subscribers"] = 1
feed_db, _ = Feed.find_or_create(feed_address=feed_address, feed_db, _ = Feed.find_or_create(
feed_link=feed_link, feed_address=feed_address, feed_link=feed_link, defaults=dict(**feed_data)
defaults=dict(**feed_data)) )
if user_feed_title == feed_db.feed_title: if user_feed_title == feed_db.feed_title:
user_feed_title = None user_feed_title = None
try: try:
us = UserSubscription.objects.get( us = UserSubscription.objects.get(feed=feed_db, user=self.user)
feed=feed_db,
user=self.user)
except UserSubscription.DoesNotExist: except UserSubscription.DoesNotExist:
us = None us = None
if not us: if not us:
us = UserSubscription( us = UserSubscription(
feed=feed_db, feed=feed_db,
user=self.user, user=self.user,
needs_unread_recalc=True, needs_unread_recalc=True,
mark_read_date=datetime.datetime.utcnow() - datetime.timedelta(days=1), mark_read_date=datetime.datetime.utcnow() - datetime.timedelta(days=1),
active=self.user.profile.is_premium, active=self.user.profile.is_premium,
user_title=user_feed_title) user_title=user_feed_title,
)
us.save() us.save()
if self.user.profile.is_premium and not us.active: if self.user.profile.is_premium and not us.active:
us.active = True us.active = True
us.save() us.save()
@ -214,25 +209,25 @@ class OPMLImporter(Importer):
folders = add_object_to_folder(feed_db.pk, in_folder, folders) folders = add_object_to_folder(feed_db.pk, in_folder, folders)
return folders return folders
def count_feeds_in_opml(self):
    """Best-effort feed count: the larger of the OPML entry count and the
    user's existing subscription count."""
    opml_count = len(opml.from_string(self.opml_xml))
    sub_count = UserSubscription.objects.filter(user=self.user).count()
    return max(sub_count, opml_count)
class UploadedOPML(mongo.Document):
    """Raw OPML file text as uploaded by a user, retained for async import."""

    user_id = mongo.IntField()  # Django User pk (not a reference field)
    opml_file = mongo.StringField()  # the full OPML document text
    upload_date = mongo.DateTimeField(default=datetime.datetime.now)

    def __str__(self):
        user = User.objects.get(pk=self.user_id)
        return "%s: %s characters" % (user.username, len(self.opml_file))

    meta = {
        "collection": "uploaded_opml",
        "allow_inheritance": False,
        "order": "-upload_date",
        "indexes": ["user_id", "-upload_date"],
    }

View file

@ -12,14 +12,14 @@ def ProcessOPML(user_id):
logging.user(user, "~FR~SBOPML upload (task) starting...") logging.user(user, "~FR~SBOPML upload (task) starting...")
opml = UploadedOPML.objects.filter(user_id=user_id).first() opml = UploadedOPML.objects.filter(user_id=user_id).first()
opml_importer = OPMLImporter(opml.opml_file.encode('utf-8'), user) opml_importer = OPMLImporter(opml.opml_file.encode("utf-8"), user)
opml_importer.process() opml_importer.process()
feed_count = UserSubscription.objects.filter(user=user).count() feed_count = UserSubscription.objects.filter(user=user).count()
user.profile.send_upload_opml_finished_email(feed_count) user.profile.send_upload_opml_finished_email(feed_count)
logging.user(user, "~FR~SBOPML upload (task): ~SK%s~SN~SB~FR feeds" % (feed_count)) logging.user(user, "~FR~SBOPML upload (task): ~SK%s~SN~SB~FR feeds" % (feed_count))
MActivity.new_opml_import(user_id=user.pk, count=feed_count) MActivity.new_opml_import(user_id=user.pk, count=feed_count)
UserSubscription.queue_new_feeds(user) UserSubscription.queue_new_feeds(user)
UserSubscription.refresh_stale_feeds(user, exclude_new=True) UserSubscription.refresh_stale_feeds(user, exclude_new=True)

View file

@ -8,62 +8,95 @@ from apps.rss_feeds.models import merge_feeds, DuplicateFeed, Feed
from utils import json_functions as json_functions from utils import json_functions as json_functions
import json import json
from django.core.management import call_command from django.core.management import call_command
class Test_Import(TestCase):
    """End-to-end tests for OPML upload via the web client."""

    fixtures = ["apps/rss_feeds/fixtures/initial_data.json", "opml_import.json"]

    def setUp(self):
        self.client = Client()

    def test_opml_import(self):
        self.client.login(username="conesus", password="test")
        user = User.objects.get(username="conesus")

        # Verify user has no feeds
        subs = UserSubscription.objects.filter(user=user)
        self.assertEqual(subs.count(), 0)

        # Use a context manager so the fixture file handle is always closed.
        with open(os.path.join(os.path.dirname(__file__), "fixtures/opml.xml")) as f:
            response = self.client.post(reverse("opml-upload"), {"file": f})
        self.assertEqual(response.status_code, 200)

        # Verify user now has feeds
        subs = UserSubscription.objects.filter(user=user)
        self.assertEqual(subs.count(), 54)

        usf = UserSubscriptionFolders.objects.get(user=user)
        print(json_functions.decode(usf.folders))
        self.assertEqual(
            json_functions.decode(usf.folders),
            [
                {"Tech": [4, 5, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28]},
                1,
                2,
                3,
                6,
                {"New York": [1, 2, 3, 4, 5, 6, 7, 8, 9]},
                {"tech": []},
                {
                    "Blogs": [
                        29,
                        30,
                        31,
                        32,
                        33,
                        34,
                        35,
                        36,
                        37,
                        38,
                        39,
                        40,
                        41,
                        42,
                        43,
                        44,
                        {"The Bloglets": [45, 46, 47, 48, 49]},
                    ]
                },
                {"Cooking": [50, 51, 52, 53]},
                54,
            ],
        )

    def test_opml_import__empty(self):
        self.client.login(username="conesus", password="test")
        user = User.objects.get(username="conesus")

        # Verify user has default feeds
        subs = UserSubscription.objects.filter(user=user)
        self.assertEqual(subs.count(), 0)

        # Posting with no file attached must not create any subscriptions.
        response = self.client.post(reverse("opml-upload"))
        self.assertEqual(response.status_code, 200)

        # Verify user still has no feeds (assertEquals is a deprecated alias).
        subs = UserSubscription.objects.filter(user=user)
        self.assertEqual(subs.count(), 0)
class Test_Duplicate_Feeds(TestCase): class Test_Duplicate_Feeds(TestCase):
fixtures = [ fixtures = [
'apps/rss_feeds/fixtures/initial_data.json', "apps/rss_feeds/fixtures/initial_data.json",
] ]
def test_duplicate_feeds(self): def test_duplicate_feeds(self):
# had to load the feed data this way to hit the save() override. # had to load the feed data this way to hit the save() override.
# it wouldn't work with loaddata or fixures # it wouldn't work with loaddata or fixures
with open('apps/feed_import/fixtures/duplicate_feeds.json') as json_file: with open("apps/feed_import/fixtures/duplicate_feeds.json") as json_file:
feed_data = json.loads(json_file.read()) feed_data = json.loads(json_file.read())
feed_data_1 = feed_data[0] feed_data_1 = feed_data[0]
feed_data_2 = feed_data[1] feed_data_2 = feed_data[1]
@ -72,15 +105,15 @@ class Test_Duplicate_Feeds(TestCase):
feed_1.save() feed_1.save()
feed_2.save() feed_2.save()
call_command('loaddata', 'apps/feed_import/fixtures/subscriptions.json') call_command("loaddata", "apps/feed_import/fixtures/subscriptions.json")
user_1_feed_subscription = UserSubscription.objects.filter(user__id=1)[0].feed_id user_1_feed_subscription = UserSubscription.objects.filter(user__id=1)[0].feed_id
user_2_feed_subscription = UserSubscription.objects.filter(user__id=2)[0].feed_id user_2_feed_subscription = UserSubscription.objects.filter(user__id=2)[0].feed_id
self.assertNotEqual(user_1_feed_subscription, user_2_feed_subscription) self.assertNotEqual(user_1_feed_subscription, user_2_feed_subscription)
original_feed_id = merge_feeds(user_1_feed_subscription, user_2_feed_subscription) original_feed_id = merge_feeds(user_1_feed_subscription, user_2_feed_subscription)
user_1_feed_subscription = UserSubscription.objects.filter(user__id=1)[0].feed_id user_1_feed_subscription = UserSubscription.objects.filter(user__id=1)[0].feed_id
user_2_feed_subscription = UserSubscription.objects.filter(user__id=2)[0].feed_id user_2_feed_subscription = UserSubscription.objects.filter(user__id=2)[0].feed_id
self.assertEqual(user_1_feed_subscription, user_2_feed_subscription) self.assertEqual(user_1_feed_subscription, user_2_feed_subscription)

View file

@ -2,6 +2,6 @@ from django.conf.urls import url
from apps.feed_import import views from apps.feed_import import views
# URL routes for the feed_import app.
urlpatterns = [
    url(r"^opml_upload/?$", views.opml_upload, name="opml-upload"),
    url(r"^opml_export/?$", views.opml_export, name="opml-export"),
]

View file

@ -7,6 +7,7 @@ from bson.errors import InvalidStringData
import uuid import uuid
from django.contrib.sites.models import Site from django.contrib.sites.models import Site
from django.contrib.auth.models import User from django.contrib.auth.models import User
# from django.db import IntegrityError # from django.db import IntegrityError
from django.http import HttpResponse, HttpResponseRedirect from django.http import HttpResponse, HttpResponseRedirect
from django.conf import settings from django.conf import settings
@ -29,11 +30,11 @@ def opml_upload(request):
message = "OK" message = "OK"
code = 1 code = 1
payload = {} payload = {}
if request.method == 'POST': if request.method == "POST":
if 'file' in request.FILES: if "file" in request.FILES:
logging.user(request, "~FR~SBOPML upload starting...") logging.user(request, "~FR~SBOPML upload starting...")
file = request.FILES['file'] file = request.FILES["file"]
xml_opml = file.read() xml_opml = file.read()
try: try:
UploadedOPML.objects.create(user_id=request.user.pk, opml_file=xml_opml) UploadedOPML.objects.create(user_id=request.user.pk, opml_file=xml_opml)
@ -41,7 +42,7 @@ def opml_upload(request):
folders = None folders = None
code = -1 code = -1
message = "There was a Unicode decode error when reading your OPML file. Ensure it's a text file with a .opml or .xml extension. Is it a zip file?" message = "There was a Unicode decode error when reading your OPML file. Ensure it's a text file with a .opml or .xml extension. Is it a zip file?"
opml_importer = OPMLImporter(xml_opml, request.user) opml_importer = OPMLImporter(xml_opml, request.user)
try: try:
folders = opml_importer.try_processing() folders = opml_importer.try_processing()
@ -49,7 +50,9 @@ def opml_upload(request):
folders = None folders = None
ProcessOPML.delay(request.user.pk) ProcessOPML.delay(request.user.pk)
feed_count = opml_importer.count_feeds_in_opml() feed_count = opml_importer.count_feeds_in_opml()
logging.user(request, "~FR~SBOPML upload took too long, found %s feeds. Tasking..." % feed_count) logging.user(
request, "~FR~SBOPML upload took too long, found %s feeds. Tasking..." % feed_count
)
payload = dict(folders=folders, delayed=True, feed_count=feed_count) payload = dict(folders=folders, delayed=True, feed_count=feed_count)
code = 2 code = 2
message = "" message = ""
@ -64,32 +67,35 @@ def opml_upload(request):
payload = dict(folders=folders, feeds=feeds) payload = dict(folders=folders, feeds=feeds)
logging.user(request, "~FR~SBOPML Upload: ~SK%s~SN~SB~FR feeds" % (len(feeds))) logging.user(request, "~FR~SBOPML Upload: ~SK%s~SN~SB~FR feeds" % (len(feeds)))
from apps.social.models import MActivity from apps.social.models import MActivity
MActivity.new_opml_import(user_id=request.user.pk, count=len(feeds)) MActivity.new_opml_import(user_id=request.user.pk, count=len(feeds))
UserSubscription.queue_new_feeds(request.user) UserSubscription.queue_new_feeds(request.user)
UserSubscription.refresh_stale_feeds(request.user, exclude_new=True) UserSubscription.refresh_stale_feeds(request.user, exclude_new=True)
else: else:
message = "Attach an .opml file." message = "Attach an .opml file."
code = -1 code = -1
return HttpResponse(json.encode(dict(message=message, code=code, payload=payload)), return HttpResponse(
content_type='text/html') json.encode(dict(message=message, code=code, payload=payload)), content_type="text/html"
)
def opml_export(request):
    """Stream the user's subscriptions as an OPML file attachment.

    Staff members may export another account by passing ``?user_id=``.
    Records an export activity for the user's feed.
    """
    user = get_user(request)
    now = datetime.datetime.now()
    if request.GET.get("user_id") and user.is_staff:
        user = User.objects.get(pk=request.GET["user_id"])
    exporter = OPMLExporter(user)
    opml = exporter.process()

    # Imported locally to avoid a circular import at module load time.
    from apps.social.models import MActivity

    MActivity.new_opml_export(user_id=user.pk, count=exporter.feed_count)

    response = HttpResponse(opml, content_type="text/xml; charset=utf-8")
    response["Content-Disposition"] = "attachment; filename=NewsBlur-%s-%s.opml" % (
        user.username,
        now.strftime("%Y-%m-%d"),
    )
    return response

View file

@ -7,6 +7,7 @@ Replace these with more appropriate tests for your application.
from django.test import TestCase from django.test import TestCase
class SimpleTest(TestCase): class SimpleTest(TestCase):
def test_basic_addition(self): def test_basic_addition(self):
""" """
@ -14,10 +15,12 @@ class SimpleTest(TestCase):
""" """
self.failUnlessEqual(1 + 1, 2) self.failUnlessEqual(1 + 1, 2)
__test__ = {"doctest": """
__test__ = {
"doctest": """
Another way to test that 1 + 1 is equal to 2. Another way to test that 1 + 1 is equal to 2.
>>> 1 + 1 == 2 >>> 1 + 1 == 2
True True
"""} """
}

View file

@ -2,5 +2,5 @@ from django.conf.urls import url
from apps.mobile import views from apps.mobile import views
# URL routes for the mobile app.
urlpatterns = [
    url(r"^$", views.index, name="mobile-index"),
]

View file

@ -8,5 +8,6 @@ from apps.reader.models import UserSubscription, UserSubscriptionFolders
from utils import json_functions as json from utils import json_functions as json
from utils import log as logging from utils import log as logging
def index(request):
    """Render the mobile workspace shell page."""
    return render(request, "mobile/mobile_workspace.xhtml", {})

View file

@ -1,24 +1,38 @@
from django.conf.urls import url from django.conf.urls import url
from apps.monitor.views import ( AppServers, AppTimes, from apps.monitor.views import (
Classifiers, DbTimes, Errors, FeedCounts, Feeds, LoadTimes, AppServers,
Stories, TasksCodes, TasksPipeline, TasksServers, TasksTimes, AppTimes,
Updates, Users, FeedSizes Classifiers,
DbTimes,
Errors,
FeedCounts,
Feeds,
LoadTimes,
Stories,
TasksCodes,
TasksPipeline,
TasksServers,
TasksTimes,
Updates,
Users,
FeedSizes,
) )
# URL routes for the monitor app — each endpoint serves one metrics chart.
# The trailing '?' makes the final character of each path segment optional
# (e.g. both /feed and /feeds match).
urlpatterns = [
    url(r"^app-servers?$", AppServers.as_view(), name="app_servers"),
    url(r"^app-times?$", AppTimes.as_view(), name="app_times"),
    url(r"^classifiers?$", Classifiers.as_view(), name="classifiers"),
    url(r"^db-times?$", DbTimes.as_view(), name="db_times"),
    url(r"^errors?$", Errors.as_view(), name="errors"),
    url(r"^feed-counts?$", FeedCounts.as_view(), name="feed_counts"),
    url(r"^feed-sizes?$", FeedSizes.as_view(), name="feed_sizes"),
    url(r"^feeds?$", Feeds.as_view(), name="feeds"),
    url(r"^load-times?$", LoadTimes.as_view(), name="load_times"),
    url(r"^stories?$", Stories.as_view(), name="stories"),
    url(r"^task-codes?$", TasksCodes.as_view(), name="task_codes"),
    url(r"^task-pipeline?$", TasksPipeline.as_view(), name="task_pipeline"),
    url(r"^task-servers?$", TasksServers.as_view(), name="task_servers"),
    url(r"^task-times?$", TasksTimes.as_view(), name="task_times"),
    url(r"^updates?$", Updates.as_view(), name="updates"),
    url(r"^users?$", Users.as_view(), name="users"),
]

View file

@ -3,11 +3,11 @@ from django.conf import settings
from django.views import View from django.views import View
from django.shortcuts import render from django.shortcuts import render
class AppServers(View):
class AppServers(View):
def get(self, request): def get(self, request):
data = dict((("%s" % s['_id'].replace('-', ''), s['feeds']) for s in self.stats)) data = dict((("%s" % s["_id"].replace("-", ""), s["feeds"]) for s in self.stats))
#total = self.total: # total = self.total:
# if total: # if total:
# data['total'] = total[0]['feeds'] # data['total'] = total[0]['feeds']
chart_name = "app_servers" chart_name = "app_servers"
@ -21,38 +21,48 @@ class AppServers(View):
"chart_name": chart_name, "chart_name": chart_name,
"chart_type": chart_type, "chart_type": chart_type,
} }
return render(request, 'monitor/prometheus_data.html', context, content_type="text/plain") return render(request, "monitor/prometheus_data.html", context, content_type="text/plain")
@property @property
def stats(self): def stats(self):
stats = settings.MONGOANALYTICSDB.nbanalytics.page_loads.aggregate([{ stats = settings.MONGOANALYTICSDB.nbanalytics.page_loads.aggregate(
"$match": { [
"date": { {
"$gte": datetime.datetime.now() - datetime.timedelta(minutes=5), "$match": {
"date": {
"$gte": datetime.datetime.now() - datetime.timedelta(minutes=5),
},
},
}, },
}, {
}, { "$group": {
"$group": { "_id": "$server",
"_id" : "$server", "feeds": {"$sum": 1},
"feeds" : {"$sum": 1}, },
}, },
}]) ]
)
return list(stats) return list(stats)
@property @property
def total(self): def total(self):
stats = settings.MONGOANALYTICSDB.nbanalytics.page_loads.aggregate([{ stats = settings.MONGOANALYTICSDB.nbanalytics.page_loads.aggregate(
"$match": { [
"date": { {
"$gt": datetime.datetime.now() - datetime.timedelta(minutes=5), "$match": {
"date": {
"$gt": datetime.datetime.now() - datetime.timedelta(minutes=5),
},
},
}, },
}, {
}, { "$group": {
"$group": { "_id": 1,
"_id" : 1, "feeds": {"$sum": 1},
"feeds" : {"$sum": 1}, },
}, },
}]) ]
)
return list(stats) return list(stats)

View file

@ -3,10 +3,10 @@ from django.shortcuts import render
import datetime import datetime
from django.conf import settings from django.conf import settings
class AppTimes(View):
class AppTimes(View):
def get(self, request): def get(self, request):
servers = dict((("%s" % s['_id'], s['page_load']) for s in self.stats)) servers = dict((("%s" % s["_id"], s["page_load"]) for s in self.stats))
data = servers data = servers
chart_name = "app_times" chart_name = "app_times"
chart_type = "counter" chart_type = "counter"
@ -20,21 +20,26 @@ class AppTimes(View):
"chart_name": chart_name, "chart_name": chart_name,
"chart_type": chart_type, "chart_type": chart_type,
} }
return render(request, 'monitor/prometheus_data.html', context, content_type="text/plain") return render(request, "monitor/prometheus_data.html", context, content_type="text/plain")
@property @property
def stats(self): def stats(self):
stats = settings.MONGOANALYTICSDB.nbanalytics.page_loads.aggregate([{ stats = settings.MONGOANALYTICSDB.nbanalytics.page_loads.aggregate(
"$match": { [
"date": { {
"$gt": datetime.datetime.now() - datetime.timedelta(minutes=5), "$match": {
"date": {
"$gt": datetime.datetime.now() - datetime.timedelta(minutes=5),
},
},
}, },
}, {
}, { "$group": {
"$group": { "_id": "$server",
"_id" : "$server", "page_load": {"$avg": "$page_load"},
"page_load" : {"$avg": "$page_load"}, },
}, },
}]) ]
)
return list(stats) return list(stats)

View file

@ -4,13 +4,12 @@ from apps.analyzer.models import MClassifierFeed, MClassifierAuthor, MClassifier
class Classifiers(View): class Classifiers(View):
def get(self, request): def get(self, request):
data = { data = {
'feeds': MClassifierFeed.objects._collection.count(), "feeds": MClassifierFeed.objects._collection.count(),
'authors': MClassifierAuthor.objects._collection.count(), "authors": MClassifierAuthor.objects._collection.count(),
'tags': MClassifierTag.objects._collection.count(), "tags": MClassifierTag.objects._collection.count(),
'titles': MClassifierTitle.objects._collection.count(), "titles": MClassifierTitle.objects._collection.count(),
} }
chart_name = "classifiers" chart_name = "classifiers"
@ -24,5 +23,4 @@ class Classifiers(View):
"chart_name": chart_name, "chart_name": chart_name,
"chart_type": chart_type, "chart_type": chart_type,
} }
return render(request, 'monitor/prometheus_data.html', context, content_type="text/plain") return render(request, "monitor/prometheus_data.html", context, content_type="text/plain")

View file

@ -3,24 +3,22 @@ from django.views import View
from apps.statistics.models import MStatistics from apps.statistics.models import MStatistics
class DbTimes(View): class DbTimes(View):
def get(self, request): def get(self, request):
data = { data = {
'sql_avg': MStatistics.get('latest_sql_avg'), "sql_avg": MStatistics.get("latest_sql_avg"),
'mongo_avg': MStatistics.get('latest_mongo_avg'), "mongo_avg": MStatistics.get("latest_mongo_avg"),
'redis_user_avg': MStatistics.get('latest_redis_user_avg'), "redis_user_avg": MStatistics.get("latest_redis_user_avg"),
'redis_story_avg': MStatistics.get('latest_redis_story_avg'), "redis_story_avg": MStatistics.get("latest_redis_story_avg"),
'redis_session_avg': MStatistics.get('latest_redis_session_avg'), "redis_session_avg": MStatistics.get("latest_redis_session_avg"),
'redis_pubsub_avg': MStatistics.get('latest_redis_pubsub_avg'), "redis_pubsub_avg": MStatistics.get("latest_redis_pubsub_avg"),
'task_sql_avg': MStatistics.get('latest_task_sql_avg'), "task_sql_avg": MStatistics.get("latest_task_sql_avg"),
'task_mongo_avg': MStatistics.get('latest_task_mongo_avg'), "task_mongo_avg": MStatistics.get("latest_task_mongo_avg"),
'task_redis_user_avg': MStatistics.get('latest_task_redis_user_avg'), "task_redis_user_avg": MStatistics.get("latest_task_redis_user_avg"),
'task_redis_story_avg': MStatistics.get('latest_task_redis_story_avg'), "task_redis_story_avg": MStatistics.get("latest_task_redis_story_avg"),
'task_redis_session_avg': MStatistics.get('latest_task_redis_session_avg'), "task_redis_session_avg": MStatistics.get("latest_task_redis_session_avg"),
'task_redis_pubsub_avg': MStatistics.get('latest_task_redis_pubsub_avg'), "task_redis_pubsub_avg": MStatistics.get("latest_task_redis_pubsub_avg"),
} }
chart_name = "db_times" chart_name = "db_times"
chart_type = "counter" chart_type = "counter"
@ -32,4 +30,4 @@ class DbTimes(View):
"chart_name": chart_name, "chart_name": chart_name,
"chart_type": chart_type, "chart_type": chart_type,
} }
return render(request, 'monitor/prometheus_data.html', context, content_type="text/plain") return render(request, "monitor/prometheus_data.html", context, content_type="text/plain")

View file

@ -3,23 +3,22 @@ from django.views import View
from apps.statistics.models import MStatistics from apps.statistics.models import MStatistics
class Errors(View):
class Errors(View):
def get(self, request): def get(self, request):
statistics = MStatistics.all() statistics = MStatistics.all()
data = { data = {
'feed_success': statistics['feeds_fetched'], "feed_success": statistics["feeds_fetched"],
} }
chart_name = "errors" chart_name = "errors"
chart_type = "counter" chart_type = "counter"
formatted_data = {} formatted_data = {}
for k, v in data.items(): for k, v in data.items():
formatted_data[k] = f'feed_success {v}' formatted_data[k] = f"feed_success {v}"
context = { context = {
"data": formatted_data, "data": formatted_data,
"chart_name": chart_name, "chart_name": chart_name,
"chart_type": chart_type, "chart_type": chart_type,
} }
return render(request, 'monitor/prometheus_data.html', context, content_type="text/plain") return render(request, "monitor/prometheus_data.html", context, content_type="text/plain")

View file

@ -6,44 +6,43 @@ from apps.rss_feeds.models import Feed, DuplicateFeed
from apps.push.models import PushSubscription from apps.push.models import PushSubscription
from apps.statistics.models import MStatistics from apps.statistics.models import MStatistics
class FeedCounts(View):
class FeedCounts(View):
def get(self, request): def get(self, request):
exception_feeds = MStatistics.get("munin:exception_feeds")
exception_feeds = MStatistics.get('munin:exception_feeds')
if not exception_feeds: if not exception_feeds:
exception_feeds = Feed.objects.filter(has_feed_exception=True).count() exception_feeds = Feed.objects.filter(has_feed_exception=True).count()
MStatistics.set('munin:exception_feeds', exception_feeds, 60*60*12) MStatistics.set("munin:exception_feeds", exception_feeds, 60 * 60 * 12)
exception_pages = MStatistics.get('munin:exception_pages') exception_pages = MStatistics.get("munin:exception_pages")
if not exception_pages: if not exception_pages:
exception_pages = Feed.objects.filter(has_page_exception=True).count() exception_pages = Feed.objects.filter(has_page_exception=True).count()
MStatistics.set('munin:exception_pages', exception_pages, 60*60*12) MStatistics.set("munin:exception_pages", exception_pages, 60 * 60 * 12)
duplicate_feeds = MStatistics.get('munin:duplicate_feeds') duplicate_feeds = MStatistics.get("munin:duplicate_feeds")
if not duplicate_feeds: if not duplicate_feeds:
duplicate_feeds = DuplicateFeed.objects.count() duplicate_feeds = DuplicateFeed.objects.count()
MStatistics.set('munin:duplicate_feeds', duplicate_feeds, 60*60*12) MStatistics.set("munin:duplicate_feeds", duplicate_feeds, 60 * 60 * 12)
active_feeds = MStatistics.get('munin:active_feeds') active_feeds = MStatistics.get("munin:active_feeds")
if not active_feeds: if not active_feeds:
active_feeds = Feed.objects.filter(active_subscribers__gt=0).count() active_feeds = Feed.objects.filter(active_subscribers__gt=0).count()
MStatistics.set('munin:active_feeds', active_feeds, 60*60*12) MStatistics.set("munin:active_feeds", active_feeds, 60 * 60 * 12)
push_feeds = MStatistics.get('munin:push_feeds') push_feeds = MStatistics.get("munin:push_feeds")
if not push_feeds: if not push_feeds:
push_feeds = PushSubscription.objects.filter(verified=True).count() push_feeds = PushSubscription.objects.filter(verified=True).count()
MStatistics.set('munin:push_feeds', push_feeds, 60*60*12) MStatistics.set("munin:push_feeds", push_feeds, 60 * 60 * 12)
r = redis.Redis(connection_pool=settings.REDIS_FEED_UPDATE_POOL) r = redis.Redis(connection_pool=settings.REDIS_FEED_UPDATE_POOL)
data = { data = {
'scheduled_feeds': r.zcard('scheduled_updates'), "scheduled_feeds": r.zcard("scheduled_updates"),
'exception_feeds': exception_feeds, "exception_feeds": exception_feeds,
'exception_pages': exception_pages, "exception_pages": exception_pages,
'duplicate_feeds': duplicate_feeds, "duplicate_feeds": duplicate_feeds,
'active_feeds': active_feeds, "active_feeds": active_feeds,
'push_feeds': push_feeds, "push_feeds": push_feeds,
} }
chart_name = "feed_counts" chart_name = "feed_counts"
chart_type = "counter" chart_type = "counter"
@ -57,6 +56,4 @@ class FeedCounts(View):
"chart_name": chart_name, "chart_name": chart_name,
"chart_type": chart_type, "chart_type": chart_type,
} }
return render(request, 'monitor/prometheus_data.html', context, content_type="text/plain") return render(request, "monitor/prometheus_data.html", context, content_type="text/plain")

View file

@ -7,23 +7,24 @@ from apps.rss_feeds.models import Feed, DuplicateFeed
from apps.push.models import PushSubscription from apps.push.models import PushSubscription
from apps.statistics.models import MStatistics from apps.statistics.models import MStatistics
class FeedSizes(View): class FeedSizes(View):
def get(self, request): def get(self, request):
fs_size_bytes = MStatistics.get("munin:fs_size_bytes")
fs_size_bytes = MStatistics.get('munin:fs_size_bytes')
if not fs_size_bytes: if not fs_size_bytes:
fs_size_bytes = Feed.objects.aggregate(Sum('fs_size_bytes'))['fs_size_bytes__sum'] fs_size_bytes = Feed.objects.aggregate(Sum("fs_size_bytes"))["fs_size_bytes__sum"]
MStatistics.set('munin:fs_size_bytes', fs_size_bytes, 60*60*12) MStatistics.set("munin:fs_size_bytes", fs_size_bytes, 60 * 60 * 12)
archive_users_size_bytes = MStatistics.get('munin:archive_users_size_bytes') archive_users_size_bytes = MStatistics.get("munin:archive_users_size_bytes")
if not archive_users_size_bytes: if not archive_users_size_bytes:
archive_users_size_bytes = Feed.objects.filter(archive_subscribers__gte=1).aggregate(Sum('fs_size_bytes'))['fs_size_bytes__sum'] archive_users_size_bytes = Feed.objects.filter(archive_subscribers__gte=1).aggregate(
MStatistics.set('munin:archive_users_size_bytes', archive_users_size_bytes, 60*60*12) Sum("fs_size_bytes")
)["fs_size_bytes__sum"]
MStatistics.set("munin:archive_users_size_bytes", archive_users_size_bytes, 60 * 60 * 12)
data = { data = {
'fs_size_bytes': fs_size_bytes, "fs_size_bytes": fs_size_bytes,
'archive_users_size_bytes': archive_users_size_bytes, "archive_users_size_bytes": archive_users_size_bytes,
} }
chart_name = "feed_sizes" chart_name = "feed_sizes"
chart_type = "counter" chart_type = "counter"
@ -37,6 +38,4 @@ class FeedSizes(View):
"chart_name": chart_name, "chart_name": chart_name,
"chart_type": chart_type, "chart_type": chart_type,
} }
return render(request, 'monitor/prometheus_data.html', context, content_type="text/plain") return render(request, "monitor/prometheus_data.html", context, content_type="text/plain")

View file

@ -6,37 +6,35 @@ from apps.reader.models import UserSubscription
from apps.social.models import MSocialProfile, MSocialSubscription from apps.social.models import MSocialProfile, MSocialSubscription
from apps.statistics.models import MStatistics from apps.statistics.models import MStatistics
class Feeds(View): class Feeds(View):
def get(self, request): def get(self, request):
feeds_count = MStatistics.get("munin:feeds_count")
feeds_count = MStatistics.get('munin:feeds_count')
if not feeds_count: if not feeds_count:
feeds_count = Feed.objects.all().count() feeds_count = Feed.objects.all().count()
MStatistics.set('munin:feeds_count', feeds_count, 60*60*12) MStatistics.set("munin:feeds_count", feeds_count, 60 * 60 * 12)
subscriptions_count = MStatistics.get('munin:subscriptions_count') subscriptions_count = MStatistics.get("munin:subscriptions_count")
if not subscriptions_count: if not subscriptions_count:
subscriptions_count = UserSubscription.objects.all().count() subscriptions_count = UserSubscription.objects.all().count()
MStatistics.set('munin:subscriptions_count', subscriptions_count, 60*60*12) MStatistics.set("munin:subscriptions_count", subscriptions_count, 60 * 60 * 12)
data = { data = {
'feeds': feeds_count, "feeds": feeds_count,
'subscriptions': subscriptions_count, "subscriptions": subscriptions_count,
'profiles': MSocialProfile.objects._collection.count(), "profiles": MSocialProfile.objects._collection.count(),
'social_subscriptions': MSocialSubscription.objects._collection.count(), "social_subscriptions": MSocialSubscription.objects._collection.count(),
} }
chart_name = "feeds" chart_name = "feeds"
chart_type = "counter" chart_type = "counter"
formatted_data = {} formatted_data = {}
for k, v in data.items(): for k, v in data.items():
formatted_data[k] = f'{chart_name}{{category="{k}"}} {v}' formatted_data[k] = f'{chart_name}{{category="{k}"}} {v}'
context = { context = {
"data": formatted_data, "data": formatted_data,
"chart_name": chart_name, "chart_name": chart_name,
"chart_type": chart_type, "chart_type": chart_type,
} }
return render(request, 'monitor/prometheus_data.html', context, content_type="text/plain") return render(request, "monitor/prometheus_data.html", context, content_type="text/plain")

View file

@ -1,15 +1,15 @@
from django.shortcuts import render from django.shortcuts import render
from django.views import View from django.views import View
class LoadTimes(View):
class LoadTimes(View):
def get(self, request): def get(self, request):
from apps.statistics.models import MStatistics from apps.statistics.models import MStatistics
data = { data = {
'feed_loadtimes_1min': MStatistics.get('last_1_min_time_taken'), "feed_loadtimes_1min": MStatistics.get("last_1_min_time_taken"),
'feed_loadtimes_avg_hour': MStatistics.get('latest_avg_time_taken'), "feed_loadtimes_avg_hour": MStatistics.get("latest_avg_time_taken"),
'feeds_loaded_hour': MStatistics.get('latest_sites_loaded'), "feeds_loaded_hour": MStatistics.get("latest_sites_loaded"),
} }
chart_name = "load_times" chart_name = "load_times"
chart_type = "counter" chart_type = "counter"
@ -23,5 +23,4 @@ class LoadTimes(View):
"chart_name": chart_name, "chart_name": chart_name,
"chart_type": chart_type, "chart_type": chart_type,
} }
return render(request, 'monitor/prometheus_data.html', context, content_type="text/plain") return render(request, "monitor/prometheus_data.html", context, content_type="text/plain")

View file

@ -2,13 +2,13 @@ from django.views import View
from django.shortcuts import render from django.shortcuts import render
from apps.rss_feeds.models import MStory, MStarredStory from apps.rss_feeds.models import MStory, MStarredStory
from apps.rss_feeds.models import MStory, MStarredStory from apps.rss_feeds.models import MStory, MStarredStory
class Stories(View):
class Stories(View):
def get(self, request): def get(self, request):
data = { data = {
'stories': MStory.objects._collection.count(), "stories": MStory.objects._collection.count(),
'starred_stories': MStarredStory.objects._collection.count(), "starred_stories": MStarredStory.objects._collection.count(),
} }
chart_name = "stories" chart_name = "stories"
chart_type = "counter" chart_type = "counter"
@ -21,5 +21,4 @@ class Stories(View):
"chart_name": chart_name, "chart_name": chart_name,
"chart_type": chart_type, "chart_type": chart_type,
} }
return render(request, 'monitor/prometheus_data.html', context, content_type="text/plain") return render(request, "monitor/prometheus_data.html", context, content_type="text/plain")

View file

@ -3,10 +3,10 @@ from django.conf import settings
from django.shortcuts import render from django.shortcuts import render
from django.views import View from django.views import View
class TasksCodes(View):
class TasksCodes(View):
def get(self, request): def get(self, request):
data = dict((("_%s" % s['_id'], s['feeds']) for s in self.stats)) data = dict((("_%s" % s["_id"], s["feeds"]) for s in self.stats))
chart_name = "task_codes" chart_name = "task_codes"
chart_type = "counter" chart_type = "counter"
formatted_data = {} formatted_data = {}
@ -18,22 +18,26 @@ class TasksCodes(View):
"chart_name": chart_name, "chart_name": chart_name,
"chart_type": chart_type, "chart_type": chart_type,
} }
return render(request, 'monitor/prometheus_data.html', context, content_type="text/plain") return render(request, "monitor/prometheus_data.html", context, content_type="text/plain")
@property @property
def stats(self): def stats(self):
stats = settings.MONGOANALYTICSDB.nbanalytics.feed_fetches.aggregate([{ stats = settings.MONGOANALYTICSDB.nbanalytics.feed_fetches.aggregate(
"$match": { [
"date": { {
"$gt": datetime.datetime.now() - datetime.timedelta(minutes=5), "$match": {
"date": {
"$gt": datetime.datetime.now() - datetime.timedelta(minutes=5),
},
},
}, },
}, {
}, { "$group": {
"$group": { "_id": "$feed_code",
"_id" : "$feed_code", "feeds": {"$sum": 1},
"feeds" : {"$sum": 1}, },
}, },
}]) ]
)
return list(stats) return list(stats)

View file

@ -4,10 +4,10 @@ from django.conf import settings
from django.shortcuts import render from django.shortcuts import render
from django.views import View from django.views import View
class TasksPipeline(View):
class TasksPipeline(View):
def get(self, request): def get(self, request):
data =self.stats data = self.stats
chart_name = "task_pipeline" chart_name = "task_pipeline"
chart_type = "counter" chart_type = "counter"
@ -19,27 +19,31 @@ class TasksPipeline(View):
"chart_name": chart_name, "chart_name": chart_name,
"chart_type": chart_type, "chart_type": chart_type,
} }
return render(request, 'monitor/prometheus_data.html', context, content_type="text/plain") return render(request, "monitor/prometheus_data.html", context, content_type="text/plain")
@property @property
def stats(self): def stats(self):
stats = settings.MONGOANALYTICSDB.nbanalytics.feed_fetches.aggregate(
stats = settings.MONGOANALYTICSDB.nbanalytics.feed_fetches.aggregate([{ [
"$match": { {
"date": { "$match": {
"$gt": datetime.datetime.now() - datetime.timedelta(minutes=5), "date": {
"$gt": datetime.datetime.now() - datetime.timedelta(minutes=5),
},
},
}, },
}, {
}, { "$group": {
"$group": { "_id": 1,
"_id": 1, "feed_fetch": {"$avg": "$feed_fetch"},
"feed_fetch": {"$avg": "$feed_fetch"}, "feed_process": {"$avg": "$feed_process"},
"feed_process": {"$avg": "$feed_process"}, "page": {"$avg": "$page"},
"page": {"$avg": "$page"}, "icon": {"$avg": "$icon"},
"icon": {"$avg": "$icon"}, "total": {"$avg": "$total"},
"total": {"$avg": "$total"}, },
}, },
}]) ]
)
stats = list(stats) stats = list(stats)
if stats: if stats:
print(stats) print(stats)

View file

@ -4,10 +4,10 @@ from django.conf import settings
from django.shortcuts import render from django.shortcuts import render
from django.views import View from django.views import View
class TasksServers(View):
class TasksServers(View):
def get(self, request): def get(self, request):
data = dict((("%s" % s['_id'].replace('-', ''), s['feeds']) for s in self.stats)) data = dict((("%s" % s["_id"].replace("-", ""), s["feeds"]) for s in self.stats))
chart_name = "task_servers" chart_name = "task_servers"
chart_type = "counter" chart_type = "counter"
@ -19,39 +19,48 @@ class TasksServers(View):
"chart_name": chart_name, "chart_name": chart_name,
"chart_type": chart_type, "chart_type": chart_type,
} }
return render(request, 'monitor/prometheus_data.html', context, content_type="text/plain") return render(request, "monitor/prometheus_data.html", context, content_type="text/plain")
@property @property
def stats(self): def stats(self):
stats = settings.MONGOANALYTICSDB.nbanalytics.feed_fetches.aggregate([{ stats = settings.MONGOANALYTICSDB.nbanalytics.feed_fetches.aggregate(
"$match": { [
"date": { {
"$gte": datetime.datetime.now() - datetime.timedelta(minutes=5), "$match": {
"date": {
"$gte": datetime.datetime.now() - datetime.timedelta(minutes=5),
},
},
}, },
}, {
}, { "$group": {
"$group": { "_id": "$server",
"_id" : "$server", "feeds": {"$sum": 1},
"feeds" : {"$sum": 1}, },
}, },
}]) ]
)
return list(stats) return list(stats)
@property @property
def total(self): def total(self):
stats = settings.MONGOANALYTICSDB.nbanalytics.feed_fetches.aggregate([{ stats = settings.MONGOANALYTICSDB.nbanalytics.feed_fetches.aggregate(
"$match": { [
"date": { {
"$gt": datetime.datetime.now() - datetime.timedelta(minutes=5), "$match": {
"date": {
"$gt": datetime.datetime.now() - datetime.timedelta(minutes=5),
},
},
}, },
}, {
}, { "$group": {
"$group": { "_id": 1,
"_id" : 1, "feeds": {"$sum": 1},
"feeds" : {"$sum": 1}, },
}, },
}]) ]
)
return list(stats) return list(stats)

View file

@ -4,10 +4,10 @@ from django.conf import settings
from django.shortcuts import render from django.shortcuts import render
from django.views import View from django.views import View
class TasksTimes(View):
class TasksTimes(View):
def get(self, request): def get(self, request):
data = dict((("%s" % s['_id'], s['total']) for s in self.stats)) data = dict((("%s" % s["_id"], s["total"]) for s in self.stats))
chart_name = "task_times" chart_name = "task_times"
chart_type = "counter" chart_type = "counter"
@ -19,22 +19,26 @@ class TasksTimes(View):
"chart_name": chart_name, "chart_name": chart_name,
"chart_type": chart_type, "chart_type": chart_type,
} }
return render(request, 'monitor/prometheus_data.html', context, content_type="text/plain") return render(request, "monitor/prometheus_data.html", context, content_type="text/plain")
@property @property
def stats(self): def stats(self):
stats = settings.MONGOANALYTICSDB.nbanalytics.feed_fetches.aggregate([{ stats = settings.MONGOANALYTICSDB.nbanalytics.feed_fetches.aggregate(
"$match": { [
"date": { {
"$gt": datetime.datetime.now() - datetime.timedelta(minutes=5), "$match": {
"date": {
"$gt": datetime.datetime.now() - datetime.timedelta(minutes=5),
},
},
}, },
}, {
}, { "$group": {
"$group": { "_id": "$server",
"_id" : "$server", "total": {"$avg": "$total"},
"total" : {"$avg": "$total"}, },
}, },
}]) ]
)
return list(stats) return list(stats)

View file

@ -4,26 +4,26 @@ from django.conf import settings
from django.shortcuts import render from django.shortcuts import render
from django.views import View from django.views import View
class Updates(View):
def get(self, request): class Updates(View):
def get(self, request):
r = redis.Redis(connection_pool=settings.REDIS_FEED_UPDATE_POOL) r = redis.Redis(connection_pool=settings.REDIS_FEED_UPDATE_POOL)
data = { data = {
'update_queue': r.scard("queued_feeds"), "update_queue": r.scard("queued_feeds"),
'feeds_fetched': r.zcard("fetched_feeds_last_hour"), "feeds_fetched": r.zcard("fetched_feeds_last_hour"),
'tasked_feeds': r.zcard("tasked_feeds"), "tasked_feeds": r.zcard("tasked_feeds"),
'error_feeds': r.zcard("error_feeds"), "error_feeds": r.zcard("error_feeds"),
'celery_update_feeds': r.llen("update_feeds"), "celery_update_feeds": r.llen("update_feeds"),
'celery_new_feeds': r.llen("new_feeds"), "celery_new_feeds": r.llen("new_feeds"),
'celery_push_feeds': r.llen("push_feeds"), "celery_push_feeds": r.llen("push_feeds"),
'celery_work_queue': r.llen("work_queue"), "celery_work_queue": r.llen("work_queue"),
'celery_search_queue': r.llen("search_indexer"), "celery_search_queue": r.llen("search_indexer"),
} }
chart_name = "updates" chart_name = "updates"
chart_type = "counter" chart_type = "counter"
formatted_data = {} formatted_data = {}
for k, v in data.items(): for k, v in data.items():
formatted_data[k] = f'{chart_name}{{category="{k}"}} {v}' formatted_data[k] = f'{chart_name}{{category="{k}"}} {v}'
context = { context = {
@ -31,5 +31,4 @@ class Updates(View):
"chart_name": chart_name, "chart_name": chart_name,
"chart_type": chart_type, "chart_type": chart_type,
} }
return render(request, 'monitor/prometheus_data.html', context, content_type="text/plain") return render(request, "monitor/prometheus_data.html", context, content_type="text/plain")

View file

@ -7,39 +7,63 @@ from django.views import View
from apps.profile.models import Profile, RNewUserQueue from apps.profile.models import Profile, RNewUserQueue
from apps.statistics.models import MStatistics from apps.statistics.models import MStatistics
class Users(View):
class Users(View):
def get(self, request): def get(self, request):
last_year = datetime.datetime.utcnow() - datetime.timedelta(days=365) last_year = datetime.datetime.utcnow() - datetime.timedelta(days=365)
last_month = datetime.datetime.utcnow() - datetime.timedelta(days=30) last_month = datetime.datetime.utcnow() - datetime.timedelta(days=30)
last_day = datetime.datetime.utcnow() - datetime.timedelta(minutes=60*24) last_day = datetime.datetime.utcnow() - datetime.timedelta(minutes=60 * 24)
expiration_sec = 60*60 # 1 hour expiration_sec = 60 * 60 # 1 hour
data = { data = {
'all': MStatistics.get('munin:users_count', "all": MStatistics.get(
lambda: User.objects.count(), "munin:users_count",
set_default=True, expiration_sec=expiration_sec), lambda: User.objects.count(),
'yearly': MStatistics.get('munin:users_yearly', set_default=True,
lambda: Profile.objects.filter(last_seen_on__gte=last_year).count(), expiration_sec=expiration_sec,
set_default=True, expiration_sec=expiration_sec), ),
'monthly': MStatistics.get('munin:users_monthly', "yearly": MStatistics.get(
lambda: Profile.objects.filter(last_seen_on__gte=last_month).count(), "munin:users_yearly",
set_default=True, expiration_sec=expiration_sec), lambda: Profile.objects.filter(last_seen_on__gte=last_year).count(),
'daily': MStatistics.get('munin:users_daily', set_default=True,
lambda: Profile.objects.filter(last_seen_on__gte=last_day).count(), expiration_sec=expiration_sec,
set_default=True, expiration_sec=expiration_sec), ),
'premium': MStatistics.get('munin:users_premium', "monthly": MStatistics.get(
lambda: Profile.objects.filter(is_premium=True).count(), "munin:users_monthly",
set_default=True, expiration_sec=expiration_sec), lambda: Profile.objects.filter(last_seen_on__gte=last_month).count(),
'archive': MStatistics.get('munin:users_archive', set_default=True,
lambda: Profile.objects.filter(is_archive=True).count(), expiration_sec=expiration_sec,
set_default=True, expiration_sec=expiration_sec), ),
'pro': MStatistics.get('munin:users_pro', "daily": MStatistics.get(
lambda: Profile.objects.filter(is_pro=True).count(), "munin:users_daily",
set_default=True, expiration_sec=expiration_sec), lambda: Profile.objects.filter(last_seen_on__gte=last_day).count(),
'queued': MStatistics.get('munin:users_queued', set_default=True,
lambda: RNewUserQueue.user_count(), expiration_sec=expiration_sec,
set_default=True, expiration_sec=expiration_sec), ),
"premium": MStatistics.get(
"munin:users_premium",
lambda: Profile.objects.filter(is_premium=True).count(),
set_default=True,
expiration_sec=expiration_sec,
),
"archive": MStatistics.get(
"munin:users_archive",
lambda: Profile.objects.filter(is_archive=True).count(),
set_default=True,
expiration_sec=expiration_sec,
),
"pro": MStatistics.get(
"munin:users_pro",
lambda: Profile.objects.filter(is_pro=True).count(),
set_default=True,
expiration_sec=expiration_sec,
),
"queued": MStatistics.get(
"munin:users_queued",
lambda: RNewUserQueue.user_count(),
set_default=True,
expiration_sec=expiration_sec,
),
} }
chart_name = "users" chart_name = "users"
chart_type = "counter" chart_type = "counter"
@ -52,5 +76,4 @@ class Users(View):
"chart_name": chart_name, "chart_name": chart_name,
"chart_type": chart_type, "chart_type": chart_type,
} }
return render(request, 'monitor/prometheus_data.html', context, content_type="text/plain") return render(request, "monitor/prometheus_data.html", context, content_type="text/plain")

View file

@ -12,6 +12,7 @@ RedisUsedMemory
RedisSize RedisSize
""" """
class RedisGrafanaMetric(View): class RedisGrafanaMetric(View):
category = "Redis" category = "Redis"
@ -23,9 +24,9 @@ class RedisGrafanaMetric(View):
return True return True
def get_info(self): def get_info(self):
host = os.environ.get('REDIS_HOST') or '127.0.0.1' host = os.environ.get("REDIS_HOST") or "127.0.0.1"
port = int(os.environ.get('REDIS_PORT') or '6379') port = int(os.environ.get("REDIS_PORT") or "6379")
if host.startswith('/'): if host.startswith("/"):
s = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) s = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
s.connect(host) s.connect(host)
else: else:
@ -33,9 +34,9 @@ class RedisGrafanaMetric(View):
s.connect((host, port)) s.connect((host, port))
s.send("*1\r\n$4\r\ninfo\r\n") s.send("*1\r\n$4\r\ninfo\r\n")
buf = "" buf = ""
while '\r\n' not in buf: while "\r\n" not in buf:
buf += s.recv(1024) buf += s.recv(1024)
l, buf = buf.split('\r\n', 1) l, buf = buf.split("\r\n", 1)
if l[0] != "$": if l[0] != "$":
s.close() s.close()
raise Exception("Protocol error") raise Exception("Protocol error")
@ -43,7 +44,7 @@ class RedisGrafanaMetric(View):
if remaining > 0: if remaining > 0:
buf += s.recv(remaining) buf += s.recv(remaining)
s.close() s.close()
return dict(x.split(':', 1) for x in buf.split('\r\n') if ':' in x) return dict(x.split(":", 1) for x in buf.split("\r\n") if ":" in x)
def execute(self): def execute(self):
stats = self.get_info() stats = self.get_info()
@ -57,25 +58,28 @@ class RedisGrafanaMetric(View):
return values return values
def get_fields(self): def get_fields(self):
raise NotImplementedError('You must implement the get_fields function') raise NotImplementedError("You must implement the get_fields function")
def get_context(self): def get_context(self):
raise NotImplementedError('You must implement the get_context function') raise NotImplementedError("You must implement the get_context function")
def get(self, request): def get(self, request):
context = self.get_context() context = self.get_context()
return render(request, 'monitor/prometheus_data.html', context, content_type="text/plain") return render(request, "monitor/prometheus_data.html", context, content_type="text/plain")
class RedisActiveConnection(RedisGrafanaMetric): class RedisActiveConnection(RedisGrafanaMetric):
def get_fields(self): def get_fields(self):
return ( return (
('connected_clients', dict( (
label = "connections", "connected_clients",
info = "connections", dict(
type = "GAUGE", label="connections",
)), info="connections",
type="GAUGE",
),
),
) )
def get_context(self): def get_context(self):
raise NotImplementedError('You must implement the get_context function') raise NotImplementedError("You must implement the get_context function")

View file

@ -17,23 +17,23 @@ from utils import log as logging
from utils.story_functions import linkify from utils.story_functions import linkify
from utils.scrubber import Scrubber from utils.scrubber import Scrubber
class EmailNewsletter: class EmailNewsletter:
def receive_newsletter(self, params): def receive_newsletter(self, params):
user = self._user_from_email(params['recipient']) user = self._user_from_email(params["recipient"])
if not user: if not user:
return return
sender_name, sender_username, sender_domain = self._split_sender(params['from']) sender_name, sender_username, sender_domain = self._split_sender(params["from"])
feed_address = self._feed_address(user, "%s@%s" % (sender_username, sender_domain)) feed_address = self._feed_address(user, "%s@%s" % (sender_username, sender_domain))
try: try:
usf = UserSubscriptionFolders.objects.get(user=user) usf = UserSubscriptionFolders.objects.get(user=user)
except UserSubscriptionFolders.DoesNotExist: except UserSubscriptionFolders.DoesNotExist:
logging.user(user, "~FRUser does not have a USF, ignoring newsletter.") logging.user(user, "~FRUser does not have a USF, ignoring newsletter.")
return return
usf.add_folder('', 'Newsletters') usf.add_folder("", "Newsletters")
# First look for the email address # First look for the email address
try: try:
feed = Feed.objects.get(feed_address=feed_address) feed = Feed.objects.get(feed_address=feed_address)
@ -46,45 +46,47 @@ class EmailNewsletter:
# If not found, check among titles user has subscribed to # If not found, check among titles user has subscribed to
if not feed: if not feed:
newsletter_subs = UserSubscription.objects.filter(user=user, feed__feed_address__contains="newsletter:").only('feed') newsletter_subs = UserSubscription.objects.filter(
user=user, feed__feed_address__contains="newsletter:"
).only("feed")
newsletter_feed_ids = [us.feed.pk for us in newsletter_subs] newsletter_feed_ids = [us.feed.pk for us in newsletter_subs]
feeds = Feed.objects.filter(feed_title__iexact=sender_name, pk__in=newsletter_feed_ids) feeds = Feed.objects.filter(feed_title__iexact=sender_name, pk__in=newsletter_feed_ids)
if feeds.count(): if feeds.count():
feed = feeds[0] feed = feeds[0]
# Create a new feed if it doesn't exist by sender name or email # Create a new feed if it doesn't exist by sender name or email
if not feed: if not feed:
feed = Feed.objects.create(feed_address=feed_address, feed = Feed.objects.create(
feed_link='http://' + sender_domain, feed_address=feed_address,
feed_title=sender_name, feed_link="http://" + sender_domain,
fetched_once=True, feed_title=sender_name,
known_good=True) fetched_once=True,
known_good=True,
)
feed.update() feed.update()
logging.user(user, "~FCCreating newsletter feed: ~SB%s" % (feed)) logging.user(user, "~FCCreating newsletter feed: ~SB%s" % (feed))
r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL) r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL)
r.publish(user.username, 'reload:%s' % feed.pk) r.publish(user.username, "reload:%s" % feed.pk)
self._check_if_first_newsletter(user) self._check_if_first_newsletter(user)
feed.last_update = datetime.datetime.now() feed.last_update = datetime.datetime.now()
feed.last_story_date = datetime.datetime.now() feed.last_story_date = datetime.datetime.now()
feed.save() feed.save()
if feed.feed_title != sender_name: if feed.feed_title != sender_name:
feed.feed_title = sender_name feed.feed_title = sender_name
feed.save() feed.save()
try: try:
usersub = UserSubscription.objects.get(user=user, feed=feed) usersub = UserSubscription.objects.get(user=user, feed=feed)
except UserSubscription.DoesNotExist: except UserSubscription.DoesNotExist:
_, _, usersub = UserSubscription.add_subscription( _, _, usersub = UserSubscription.add_subscription(
user=user, user=user, feed_address=feed_address, folder="Newsletters"
feed_address=feed_address,
folder='Newsletters'
) )
r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL) r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL)
r.publish(user.username, 'reload:feeds') r.publish(user.username, "reload:feeds")
story_hash = MStory.ensure_story_hash(params['signature'], feed.pk) story_hash = MStory.ensure_story_hash(params["signature"], feed.pk)
story_content = self._get_content(params) story_content = self._get_content(params)
plain_story_content = self._get_content(params, force_plain=True) plain_story_content = self._get_content(params, force_plain=True)
if len(plain_story_content) > len(story_content): if len(plain_story_content) > len(story_content):
@ -92,15 +94,16 @@ class EmailNewsletter:
story_content = self._clean_content(story_content) story_content = self._clean_content(story_content)
story_params = { story_params = {
"story_feed_id": feed.pk, "story_feed_id": feed.pk,
"story_date": datetime.datetime.fromtimestamp(int(params['timestamp'])), "story_date": datetime.datetime.fromtimestamp(int(params["timestamp"])),
"story_title": params['subject'], "story_title": params["subject"],
"story_content": story_content, "story_content": story_content,
"story_author_name": params['from'], "story_author_name": params["from"],
"story_permalink": "https://%s%s" % ( "story_permalink": "https://%s%s"
Site.objects.get_current().domain, % (
reverse('newsletter-story', Site.objects.get_current().domain,
kwargs={'story_hash': story_hash})), reverse("newsletter-story", kwargs={"story_hash": story_hash}),
"story_guid": params['signature'], ),
"story_guid": params["signature"],
} }
try: try:
@ -108,17 +111,17 @@ class EmailNewsletter:
except MStory.DoesNotExist: except MStory.DoesNotExist:
story = MStory(**story_params) story = MStory(**story_params)
story.save() story.save()
usersub.needs_unread_recalc = True usersub.needs_unread_recalc = True
usersub.save() usersub.save()
self._publish_to_subscribers(feed, story.story_hash) self._publish_to_subscribers(feed, story.story_hash)
MFetchHistory.add(feed_id=feed.pk, fetch_type='push') MFetchHistory.add(feed_id=feed.pk, fetch_type="push")
logging.user(user, "~FCNewsletter feed story: ~SB%s~SN / ~SB%s" % (story.story_title, feed)) logging.user(user, "~FCNewsletter feed story: ~SB%s~SN / ~SB%s" % (story.story_title, feed))
return story return story
def _check_if_first_newsletter(self, user, force=False): def _check_if_first_newsletter(self, user, force=False):
if not user.email: if not user.email:
return return
@ -129,10 +132,10 @@ class EmailNewsletter:
if sub.feed.is_newsletter: if sub.feed.is_newsletter:
found_newsletter = True found_newsletter = True
break break
if not found_newsletter and not force: if not found_newsletter and not force:
return return
params = dict(receiver_user_id=user.pk, email_type='first_newsletter') params = dict(receiver_user_id=user.pk, email_type="first_newsletter")
try: try:
MSentEmail.objects.get(**params) MSentEmail.objects.get(**params)
if not force: if not force:
@ -140,23 +143,26 @@ class EmailNewsletter:
return return
except MSentEmail.DoesNotExist: except MSentEmail.DoesNotExist:
MSentEmail.objects.create(**params) MSentEmail.objects.create(**params)
text = render_to_string('mail/email_first_newsletter.txt', {}) text = render_to_string("mail/email_first_newsletter.txt", {})
html = render_to_string('mail/email_first_newsletter.xhtml', {}) html = render_to_string("mail/email_first_newsletter.xhtml", {})
subject = "Your email newsletters are now being sent to NewsBlur" subject = "Your email newsletters are now being sent to NewsBlur"
msg = EmailMultiAlternatives(subject, text, msg = EmailMultiAlternatives(
from_email='NewsBlur <%s>' % settings.HELLO_EMAIL, subject,
to=['%s <%s>' % (user, user.email)]) text,
from_email="NewsBlur <%s>" % settings.HELLO_EMAIL,
to=["%s <%s>" % (user, user.email)],
)
msg.attach_alternative(html, "text/html") msg.attach_alternative(html, "text/html")
msg.send() msg.send()
logging.user(user, "~BB~FM~SBSending first newsletter email to: %s" % user.email) logging.user(user, "~BB~FM~SBSending first newsletter email to: %s" % user.email)
def _user_from_email(self, email): def _user_from_email(self, email):
tokens = re.search('(\w+)[\+\-\.](\w+)@newsletters.newsblur.com', email) tokens = re.search("(\w+)[\+\-\.](\w+)@newsletters.newsblur.com", email)
if not tokens: if not tokens:
return return
username, secret_token = tokens.groups() username, secret_token = tokens.groups()
try: try:
profiles = Profile.objects.filter(secret_token=secret_token) profiles = Profile.objects.filter(secret_token=secret_token)
@ -165,55 +171,56 @@ class EmailNewsletter:
profile = profiles[0] profile = profiles[0]
except Profile.DoesNotExist: except Profile.DoesNotExist:
return return
return profile.user return profile.user
def _feed_address(self, user, sender_email): def _feed_address(self, user, sender_email):
return 'newsletter:%s:%s' % (user.pk, sender_email) return "newsletter:%s:%s" % (user.pk, sender_email)
def _split_sender(self, sender): def _split_sender(self, sender):
tokens = re.search('(.*?) <(.*?)@(.*?)>', sender) tokens = re.search("(.*?) <(.*?)@(.*?)>", sender)
if not tokens: if not tokens:
name, domain = sender.split('@') name, domain = sender.split("@")
return name, sender, domain return name, sender, domain
sender_name, sender_username, sender_domain = tokens.group(1), tokens.group(2), tokens.group(3) sender_name, sender_username, sender_domain = tokens.group(1), tokens.group(2), tokens.group(3)
sender_name = sender_name.replace('"', '') sender_name = sender_name.replace('"', "")
return sender_name, sender_username, sender_domain return sender_name, sender_username, sender_domain
def _get_content(self, params, force_plain=False): def _get_content(self, params, force_plain=False):
if 'body-enriched' in params and not force_plain: if "body-enriched" in params and not force_plain:
return params['body-enriched'] return params["body-enriched"]
if 'body-html' in params and not force_plain: if "body-html" in params and not force_plain:
return params['body-html'] return params["body-html"]
if 'stripped-html' in params and not force_plain: if "stripped-html" in params and not force_plain:
return params['stripped-html'] return params["stripped-html"]
if 'body-plain' in params: if "body-plain" in params:
return linkify(linebreaks(params['body-plain'])) return linkify(linebreaks(params["body-plain"]))
if force_plain: if force_plain:
return self._get_content(params, force_plain=False) return self._get_content(params, force_plain=False)
def _clean_content(self, content): def _clean_content(self, content):
original = content original = content
scrubber = Scrubber() scrubber = Scrubber()
content = scrubber.scrub(content) content = scrubber.scrub(content)
if len(content) < len(original)*0.01: if len(content) < len(original) * 0.01:
content = original content = original
content = content.replace('!important', '') content = content.replace("!important", "")
return content return content
def _publish_to_subscribers(self, feed, story_hash): def _publish_to_subscribers(self, feed, story_hash):
try: try:
r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL) r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL)
listeners_count = r.publish("%s:story" % feed.pk, 'story:new:%s' % story_hash) listeners_count = r.publish("%s:story" % feed.pk, "story:new:%s" % story_hash)
if listeners_count: if listeners_count:
logging.debug(" ---> [%-30s] ~FMPublished to %s subscribers" % (feed.log_title[:30], listeners_count)) logging.debug(
" ---> [%-30s] ~FMPublished to %s subscribers" % (feed.log_title[:30], listeners_count)
)
except redis.ConnectionError: except redis.ConnectionError:
logging.debug(" ***> [%-30s] ~BMRedis is unavailable for real-time." % (feed.log_title[:30],)) logging.debug(" ***> [%-30s] ~BMRedis is unavailable for real-time." % (feed.log_title[:30],))
if MUserFeedNotification.feed_has_users(feed.pk) > 0: if MUserFeedNotification.feed_has_users(feed.pk) > 0:
QueueNotifications.delay(feed.pk, 1) QueueNotifications.delay(feed.pk, 1)

View file

@ -2,6 +2,6 @@ from django.conf.urls import url
from apps.newsletters import views from apps.newsletters import views
urlpatterns = [ urlpatterns = [
url(r'^receive/?$', views.newsletter_receive, name='newsletter-receive'), url(r"^receive/?$", views.newsletter_receive, name="newsletter-receive"),
url(r'^story/(?P<story_hash>[\w:]+)/?$', views.newsletter_story, name='newsletter-story'), url(r"^story/(?P<story_hash>[\w:]+)/?$", views.newsletter_story, name="newsletter-story"),
] ]

View file

@ -5,9 +5,10 @@ from utils import log as logging
from apps.newsletters.models import EmailNewsletter from apps.newsletters.models import EmailNewsletter
from apps.rss_feeds.models import Feed, MStory from apps.rss_feeds.models import Feed, MStory
def newsletter_receive(request): def newsletter_receive(request):
""" """
This function is called by mailgun's receive email feature. This is a This function is called by mailgun's receive email feature. This is a
private API used for the newsletter app. private API used for the newsletter app.
""" """
# params = { # params = {
@ -42,24 +43,25 @@ def newsletter_receive(request):
# 'Subject':'Test Newsletter theskimm' # 'Subject':'Test Newsletter theskimm'
# } # }
params = request.POST params = request.POST
response = HttpResponse('OK') response = HttpResponse("OK")
if settings.DEBUG or 'samuel' in params.get('To', ''): if settings.DEBUG or "samuel" in params.get("To", ""):
logging.debug(" ---> Email newsletter: %s" % params) logging.debug(" ---> Email newsletter: %s" % params)
if not params or not len(params.keys()): if not params or not len(params.keys()):
logging.debug(" ***> Email newsletter blank body: %s" % request.body) logging.debug(" ***> Email newsletter blank body: %s" % request.body)
raise Http404 raise Http404
email_newsletter = EmailNewsletter() email_newsletter = EmailNewsletter()
story = email_newsletter.receive_newsletter(params) story = email_newsletter.receive_newsletter(params)
if not story: if not story:
raise Http404 raise Http404
return response return response
def newsletter_story(request, story_hash): def newsletter_story(request, story_hash):
try: try:
story = MStory.objects.get(story_hash=story_hash) story = MStory.objects.get(story_hash=story_hash)
@ -67,4 +69,4 @@ def newsletter_story(request, story_hash):
raise Http404 raise Http404
story = Feed.format_story(story) story = Feed.format_story(story)
return HttpResponse(story['story_content']) return HttpResponse(story["story_content"])

View file

@ -40,21 +40,21 @@ class NotificationFrequency(enum.Enum):
class MUserNotificationTokens(mongo.Document): class MUserNotificationTokens(mongo.Document):
'''A user's push notification tokens''' """A user's push notification tokens"""
user_id = mongo.IntField() user_id = mongo.IntField()
ios_tokens = mongo.ListField(mongo.StringField(max_length=1024)) ios_tokens = mongo.ListField(mongo.StringField(max_length=1024))
use_sandbox = mongo.BooleanField(default=False) use_sandbox = mongo.BooleanField(default=False)
meta = { meta = {
'collection': 'notification_tokens', "collection": "notification_tokens",
'indexes': [ "indexes": [
{ {
'fields': ['user_id'], "fields": ["user_id"],
'unique': True, "unique": True,
} }
], ],
'allow_inheritance': False, "allow_inheritance": False,
} }
@classmethod @classmethod
@ -68,7 +68,7 @@ class MUserNotificationTokens(mongo.Document):
class MUserFeedNotification(mongo.Document): class MUserFeedNotification(mongo.Document):
'''A user's notifications of a single feed.''' """A user's notifications of a single feed."""
user_id = mongo.IntField() user_id = mongo.IntField()
feed_id = mongo.IntField() feed_id = mongo.IntField()
@ -82,32 +82,32 @@ class MUserFeedNotification(mongo.Document):
ios_tokens = mongo.ListField(mongo.StringField(max_length=1024)) ios_tokens = mongo.ListField(mongo.StringField(max_length=1024))
meta = { meta = {
'collection': 'notifications', "collection": "notifications",
'indexes': [ "indexes": [
'feed_id', "feed_id",
{ {
'fields': ['user_id', 'feed_id'], "fields": ["user_id", "feed_id"],
'unique': True, "unique": True,
}, },
], ],
'allow_inheritance': False, "allow_inheritance": False,
} }
def __str__(self): def __str__(self):
notification_types = [] notification_types = []
if self.is_email: if self.is_email:
notification_types.append('email') notification_types.append("email")
if self.is_web: if self.is_web:
notification_types.append('web') notification_types.append("web")
if self.is_ios: if self.is_ios:
notification_types.append('ios') notification_types.append("ios")
if self.is_android: if self.is_android:
notification_types.append('android') notification_types.append("android")
return "%s/%s: %s -> %s" % ( return "%s/%s: %s -> %s" % (
User.objects.get(pk=self.user_id).username, User.objects.get(pk=self.user_id).username,
Feed.get_by_id(self.feed_id), Feed.get_by_id(self.feed_id),
','.join(notification_types), ",".join(notification_types),
self.last_notification_date, self.last_notification_date,
) )
@ -128,17 +128,17 @@ class MUserFeedNotification(mongo.Document):
for feed in notifications: for feed in notifications:
notifications_by_feed[feed.feed_id] = { notifications_by_feed[feed.feed_id] = {
'notification_types': [], "notification_types": [],
'notification_filter': "focus" if feed.is_focus else "unread", "notification_filter": "focus" if feed.is_focus else "unread",
} }
if feed.is_email: if feed.is_email:
notifications_by_feed[feed.feed_id]['notification_types'].append('email') notifications_by_feed[feed.feed_id]["notification_types"].append("email")
if feed.is_web: if feed.is_web:
notifications_by_feed[feed.feed_id]['notification_types'].append('web') notifications_by_feed[feed.feed_id]["notification_types"].append("web")
if feed.is_ios: if feed.is_ios:
notifications_by_feed[feed.feed_id]['notification_types'].append('ios') notifications_by_feed[feed.feed_id]["notification_types"].append("ios")
if feed.is_android: if feed.is_android:
notifications_by_feed[feed.feed_id]['notification_types'].append('android') notifications_by_feed[feed.feed_id]["notification_types"].append("android")
return notifications_by_feed return notifications_by_feed
@ -153,7 +153,7 @@ class MUserFeedNotification(mongo.Document):
r = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL) r = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL)
latest_story_hashes = r.zrange("zF:%s" % feed.pk, -1 * new_stories, -1) latest_story_hashes = r.zrange("zF:%s" % feed.pk, -1 * new_stories, -1)
mstories = MStory.objects.filter(story_hash__in=latest_story_hashes).order_by('-story_date') mstories = MStory.objects.filter(story_hash__in=latest_story_hashes).order_by("-story_date")
stories = Feed.format_stories(mstories) stories = Feed.format_stories(mstories)
total_sent_count = 0 total_sent_count = 0
@ -186,19 +186,19 @@ class MUserFeedNotification(mongo.Document):
if settings.DEBUG: if settings.DEBUG:
logging.debug("Sent too many, ignoring...") logging.debug("Sent too many, ignoring...")
continue continue
if story['story_date'] <= last_notification_date and not force: if story["story_date"] <= last_notification_date and not force:
if settings.DEBUG: if settings.DEBUG:
logging.debug( logging.debug(
"Story date older than last notification date: %s <= %s" "Story date older than last notification date: %s <= %s"
% (story['story_date'], last_notification_date) % (story["story_date"], last_notification_date)
) )
continue continue
if story['story_date'] > user_feed_notification.last_notification_date: if story["story_date"] > user_feed_notification.last_notification_date:
user_feed_notification.last_notification_date = story['story_date'] user_feed_notification.last_notification_date = story["story_date"]
user_feed_notification.save() user_feed_notification.save()
story['story_content'] = html.unescape(story['story_content']) story["story_content"] = html.unescape(story["story_content"])
sent = user_feed_notification.push_story_notification(story, classifiers, usersub) sent = user_feed_notification.push_story_notification(story, classifiers, usersub)
if sent: if sent:
@ -209,49 +209,40 @@ class MUserFeedNotification(mongo.Document):
def classifiers(self, usersub): def classifiers(self, usersub):
classifiers = {} classifiers = {}
if usersub.is_trained: if usersub.is_trained:
classifiers['feeds'] = list( classifiers["feeds"] = list(
MClassifierFeed.objects( MClassifierFeed.objects(user_id=self.user_id, feed_id=self.feed_id, social_user_id=0)
user_id=self.user_id, feed_id=self.feed_id, social_user_id=0
)
) )
classifiers['authors'] = list( classifiers["authors"] = list(
MClassifierAuthor.objects(user_id=self.user_id, feed_id=self.feed_id) MClassifierAuthor.objects(user_id=self.user_id, feed_id=self.feed_id)
) )
classifiers['titles'] = list( classifiers["titles"] = list(MClassifierTitle.objects(user_id=self.user_id, feed_id=self.feed_id))
MClassifierTitle.objects(user_id=self.user_id, feed_id=self.feed_id) classifiers["tags"] = list(MClassifierTag.objects(user_id=self.user_id, feed_id=self.feed_id))
)
classifiers['tags'] = list(
MClassifierTag.objects(user_id=self.user_id, feed_id=self.feed_id)
)
return classifiers return classifiers
def title_and_body(self, story, usersub, notification_title_only=False): def title_and_body(self, story, usersub, notification_title_only=False):
def replace_with_newlines(element): def replace_with_newlines(element):
text = '' text = ""
for elem in element.recursiveChildGenerator(): for elem in element.recursiveChildGenerator():
if isinstance(elem, (str,)): if isinstance(elem, (str,)):
text += elem text += elem
elif elem.name == 'br': elif elem.name == "br":
text += '\n' text += "\n"
elif elem.name == 'p': elif elem.name == "p":
text += '\n\n' text += "\n\n"
text = re.sub(r' +', ' ', text).strip() text = re.sub(r" +", " ", text).strip()
return text return text
feed_title = usersub.user_title or usersub.feed.feed_title feed_title = usersub.user_title or usersub.feed.feed_title
# title = "%s: %s" % (feed_title, story['story_title']) # title = "%s: %s" % (feed_title, story['story_title'])
title = feed_title title = feed_title
soup = BeautifulSoup(story['story_content'].strip(), features="lxml") soup = BeautifulSoup(story["story_content"].strip(), features="lxml")
# if notification_title_only: # if notification_title_only:
subtitle = None subtitle = None
body_title = html.unescape(story['story_title']).strip() body_title = html.unescape(story["story_title"]).strip()
body_content = replace_with_newlines(soup) body_content = replace_with_newlines(soup)
if body_content: if body_content:
if ( if body_title == body_content[: len(body_title)] or body_content[:100] == body_title[:100]:
body_title == body_content[: len(body_title)]
or body_content[:100] == body_title[:100]
):
body_content = "" body_content = ""
else: else:
body_content = f"\n{body_content}" body_content = f"\n{body_content}"
@ -283,7 +274,7 @@ class MUserFeedNotification(mongo.Document):
logging.user( logging.user(
user, user,
"~FCSending push notification: %s/%s (score: %s)" "~FCSending push notification: %s/%s (score: %s)"
% (story['story_title'][:40], story['story_hash'], story_score), % (story["story_title"][:40], story["story_hash"], story_score),
) )
self.send_web(story, user) self.send_web(story, user)
@ -298,7 +289,7 @@ class MUserFeedNotification(mongo.Document):
return return
r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL) r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL)
r.publish(user.username, 'notification:%s,%s' % (story['story_hash'], story['story_title'])) r.publish(user.username, "notification:%s,%s" % (story["story_hash"], story["story_title"]))
def send_ios(self, story, user, usersub): def send_ios(self, story, user, usersub):
if not self.is_ios: if not self.is_ios:
@ -319,45 +310,42 @@ class MUserFeedNotification(mongo.Document):
# 7. cat aps.pem aps_key.noenc.pem > aps.p12.pem # 7. cat aps.pem aps_key.noenc.pem > aps.p12.pem
# 8. Verify: openssl s_client -connect gateway.push.apple.com:2195 -cert aps.p12.pem # 8. Verify: openssl s_client -connect gateway.push.apple.com:2195 -cert aps.p12.pem
# 9. Deploy: aps -l work -t apns,repo,celery # 9. Deploy: aps -l work -t apns,repo,celery
apns = APNsClient( apns = APNsClient("/srv/newsblur/config/certificates/aps.p12.pem", use_sandbox=tokens.use_sandbox)
'/srv/newsblur/config/certificates/aps.p12.pem', use_sandbox=tokens.use_sandbox
)
notification_title_only = is_true(user.profile.preference_value('notification_title_only')) notification_title_only = is_true(user.profile.preference_value("notification_title_only"))
title, subtitle, body = self.title_and_body(story, usersub, notification_title_only) title, subtitle, body = self.title_and_body(story, usersub, notification_title_only)
image_url = None image_url = None
if len(story['image_urls']): if len(story["image_urls"]):
image_url = story['image_urls'][0] image_url = story["image_urls"][0]
# print image_url # print image_url
confirmed_ios_tokens = [] confirmed_ios_tokens = []
for token in tokens.ios_tokens: for token in tokens.ios_tokens:
logging.user( logging.user(
user, user,
'~BMStory notification by iOS: ~FY~SB%s~SN~BM~FY/~SB%s' "~BMStory notification by iOS: ~FY~SB%s~SN~BM~FY/~SB%s"
% (story['story_title'][:50], usersub.feed.feed_title[:50]), % (story["story_title"][:50], usersub.feed.feed_title[:50]),
) )
payload = Payload( payload = Payload(
alert={'title': title, 'subtitle': subtitle, 'body': body}, alert={"title": title, "subtitle": subtitle, "body": body},
category="STORY_CATEGORY", category="STORY_CATEGORY",
mutable_content=True, mutable_content=True,
custom={ custom={
'story_hash': story['story_hash'], "story_hash": story["story_hash"],
'story_feed_id': story['story_feed_id'], "story_feed_id": story["story_feed_id"],
'image_url': image_url, "image_url": image_url,
}, },
) )
try: try:
apns.send_notification(token, payload, topic="com.newsblur.NewsBlur") apns.send_notification(token, payload, topic="com.newsblur.NewsBlur")
except (BadDeviceToken, Unregistered, DeviceTokenNotForTopic): except (BadDeviceToken, Unregistered, DeviceTokenNotForTopic):
logging.user(user, '~BMiOS token expired: ~FR~SB%s' % (token[:50])) logging.user(user, "~BMiOS token expired: ~FR~SB%s" % (token[:50]))
else: else:
confirmed_ios_tokens.append(token) confirmed_ios_tokens.append(token)
if settings.DEBUG: if settings.DEBUG:
logging.user( logging.user(
user, user,
'~BMiOS token good: ~FB~SB%s / %s' "~BMiOS token good: ~FB~SB%s / %s" % (token[:50], len(confirmed_ios_tokens)),
% (token[:50], len(confirmed_ios_tokens)),
) )
if len(confirmed_ios_tokens) < len(tokens.ios_tokens): if len(confirmed_ios_tokens) < len(tokens.ios_tokens):
@ -379,11 +367,14 @@ class MUserFeedNotification(mongo.Document):
r.expire(emails_sent_date_key, 60 * 60 * 24) # Keep for a day r.expire(emails_sent_date_key, 60 * 60 * 24) # Keep for a day
count = int(r.hget(emails_sent_date_key, usersub.user_id) or 0) count = int(r.hget(emails_sent_date_key, usersub.user_id) or 0)
if count > settings.MAX_EMAILS_SENT_PER_DAY_PER_USER: if count > settings.MAX_EMAILS_SENT_PER_DAY_PER_USER:
logging.user(usersub.user, "~BMSent too many email Story notifications by email: ~FR~SB%s~SN~FR emails" % (count)) logging.user(
usersub.user,
"~BMSent too many email Story notifications by email: ~FR~SB%s~SN~FR emails" % (count),
)
return return
feed = usersub.feed feed = usersub.feed
story_content = self.sanitize_story(story['story_content']) story_content = self.sanitize_story(story["story_content"])
params = { params = {
"story": story, "story": story,
@ -392,14 +383,14 @@ class MUserFeedNotification(mongo.Document):
"feed_title": usersub.user_title or feed.feed_title, "feed_title": usersub.user_title or feed.feed_title,
"favicon_border": feed.favicon_color, "favicon_border": feed.favicon_color,
} }
from_address = 'notifications@newsblur.com' from_address = "notifications@newsblur.com"
to_address = '%s <%s>' % (usersub.user.username, usersub.user.email) to_address = "%s <%s>" % (usersub.user.username, usersub.user.email)
text = render_to_string('mail/email_story_notification.txt', params) text = render_to_string("mail/email_story_notification.txt", params)
html = render_to_string('mail/email_story_notification.xhtml', params) html = render_to_string("mail/email_story_notification.xhtml", params)
subject = '%s: %s' % (usersub.user_title or usersub.feed.feed_title, story['story_title']) subject = "%s: %s" % (usersub.user_title or usersub.feed.feed_title, story["story_title"])
subject = subject.replace('\n', ' ') subject = subject.replace("\n", " ")
msg = EmailMultiAlternatives( msg = EmailMultiAlternatives(
subject, text, from_email='NewsBlur <%s>' % from_address, to=[to_address] subject, text, from_email="NewsBlur <%s>" % from_address, to=[to_address]
) )
msg.attach_alternative(html, "text/html") msg.attach_alternative(html, "text/html")
# try: # try:
@ -409,8 +400,8 @@ class MUserFeedNotification(mongo.Document):
# return # return
logging.user( logging.user(
usersub.user, usersub.user,
'~BMStory notification by email: ~FY~SB%s~SN~BM~FY/~SB%s' "~BMStory notification by email: ~FY~SB%s~SN~BM~FY/~SB%s"
% (story['story_title'][:50], usersub.feed.feed_title[:50]), % (story["story_title"][:50], usersub.feed.feed_title[:50]),
) )
def sanitize_story(self, story_content): def sanitize_story(self, story_content):
@ -419,15 +410,15 @@ class MUserFeedNotification(mongo.Document):
# Convert videos in newsletters to images # Convert videos in newsletters to images
for iframe in soup("iframe"): for iframe in soup("iframe"):
url = dict(iframe.attrs).get('src', "") url = dict(iframe.attrs).get("src", "")
youtube_id = self.extract_youtube_id(url) youtube_id = self.extract_youtube_id(url)
if youtube_id: if youtube_id:
a = soup.new_tag('a', href=url) a = soup.new_tag("a", href=url)
img = soup.new_tag( img = soup.new_tag(
'img', "img",
style="display: block; 'background-image': \"url(https://%s/img/reader/youtube_play.png), url(http://img.youtube.com/vi/%s/0.jpg)\"" style="display: block; 'background-image': \"url(https://%s/img/reader/youtube_play.png), url(http://img.youtube.com/vi/%s/0.jpg)\""
% (fqdn, youtube_id), % (fqdn, youtube_id),
src='http://img.youtube.com/vi/%s/0.jpg' % youtube_id, src="http://img.youtube.com/vi/%s/0.jpg" % youtube_id,
) )
a.insert(0, img) a.insert(0, img)
iframe.replaceWith(a) iframe.replaceWith(a)
@ -439,20 +430,20 @@ class MUserFeedNotification(mongo.Document):
def extract_youtube_id(self, url): def extract_youtube_id(self, url):
youtube_id = None youtube_id = None
if 'youtube.com' in url: if "youtube.com" in url:
youtube_parts = urllib.parse.urlparse(url) youtube_parts = urllib.parse.urlparse(url)
if '/embed/' in youtube_parts.path: if "/embed/" in youtube_parts.path:
youtube_id = youtube_parts.path.replace('/embed/', '') youtube_id = youtube_parts.path.replace("/embed/", "")
return youtube_id return youtube_id
def story_score(self, story, classifiers): def story_score(self, story, classifiers):
score = compute_story_score( score = compute_story_score(
story, story,
classifier_titles=classifiers.get('titles', []), classifier_titles=classifiers.get("titles", []),
classifier_authors=classifiers.get('authors', []), classifier_authors=classifiers.get("authors", []),
classifier_tags=classifiers.get('tags', []), classifier_tags=classifiers.get("tags", []),
classifier_feeds=classifiers.get('feeds', []), classifier_feeds=classifiers.get("feeds", []),
) )
return score return score

View file

@ -3,9 +3,9 @@ from apps.notifications import views
from oauth2_provider import views as op_views from oauth2_provider import views as op_views
urlpatterns = [ urlpatterns = [
url(r'^$', views.notifications_by_feed, name='notifications-by-feed'), url(r"^$", views.notifications_by_feed, name="notifications-by-feed"),
url(r'^feed/?$', views.set_notifications_for_feed, name='set-notifications-for-feed'), url(r"^feed/?$", views.set_notifications_for_feed, name="set-notifications-for-feed"),
url(r'^apns_token/?$', views.set_apns_token, name='set-apns-token'), url(r"^apns_token/?$", views.set_apns_token, name="set-apns-token"),
url(r'^android_token/?$', views.set_android_token, name='set-android-token'), url(r"^android_token/?$", views.set_android_token, name="set-android-token"),
url(r'^force_push/?$', views.force_push, name='force-push-notification'), url(r"^force_push/?$", views.force_push, name="force-push-notification"),
] ]

View file

@ -17,82 +17,90 @@ def notifications_by_feed(request):
return notifications_by_feed return notifications_by_feed
@ajax_login_required @ajax_login_required
@json.json_view @json.json_view
def set_notifications_for_feed(request): def set_notifications_for_feed(request):
user = get_user(request) user = get_user(request)
feed_id = request.POST['feed_id'] feed_id = request.POST["feed_id"]
notification_types = request.POST.getlist('notification_types') or request.POST.getlist('notification_types[]') notification_types = request.POST.getlist("notification_types") or request.POST.getlist(
notification_filter = request.POST.get('notification_filter') "notification_types[]"
)
notification_filter = request.POST.get("notification_filter")
try: try:
notification = MUserFeedNotification.objects.get(user_id=user.pk, feed_id=feed_id) notification = MUserFeedNotification.objects.get(user_id=user.pk, feed_id=feed_id)
except MUserFeedNotification.DoesNotExist: except MUserFeedNotification.DoesNotExist:
params = { params = {
"user_id": user.pk, "user_id": user.pk,
"feed_id": feed_id, "feed_id": feed_id,
} }
notification = MUserFeedNotification.objects.create(**params) notification = MUserFeedNotification.objects.create(**params)
web_was_off = not notification.is_web web_was_off = not notification.is_web
notification.is_focus = bool(notification_filter == "focus") notification.is_focus = bool(notification_filter == "focus")
notification.is_email = bool('email' in notification_types) notification.is_email = bool("email" in notification_types)
notification.is_ios = bool('ios' in notification_types) notification.is_ios = bool("ios" in notification_types)
notification.is_android = bool('android' in notification_types) notification.is_android = bool("android" in notification_types)
notification.is_web = bool('web' in notification_types) notification.is_web = bool("web" in notification_types)
notification.save() notification.save()
if (not notification.is_email and if (
not notification.is_ios and not notification.is_email
not notification.is_android and and not notification.is_ios
not notification.is_web): and not notification.is_android
and not notification.is_web
):
notification.delete() notification.delete()
r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL) r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL)
if web_was_off and notification.is_web: if web_was_off and notification.is_web:
r.publish(user.username, 'notification:setup:%s' % feed_id) r.publish(user.username, "notification:setup:%s" % feed_id)
notifications_by_feed = MUserFeedNotification.feeds_for_user(user.pk) notifications_by_feed = MUserFeedNotification.feeds_for_user(user.pk)
return {"notifications_by_feed": notifications_by_feed} return {"notifications_by_feed": notifications_by_feed}
@ajax_login_required @ajax_login_required
@json.json_view @json.json_view
def set_apns_token(request): def set_apns_token(request):
""" """
Apple Push Notification Service, token is sent by the iOS app. Used to send Apple Push Notification Service, token is sent by the iOS app. Used to send
push notifications to iOS. push notifications to iOS.
""" """
user = get_user(request) user = get_user(request)
tokens = MUserNotificationTokens.get_tokens_for_user(user.pk) tokens = MUserNotificationTokens.get_tokens_for_user(user.pk)
apns_token = request.POST['apns_token'] apns_token = request.POST["apns_token"]
logging.user(user, "~FCUpdating APNS push token") logging.user(user, "~FCUpdating APNS push token")
if apns_token not in tokens.ios_tokens: if apns_token not in tokens.ios_tokens:
tokens.ios_tokens.append(apns_token) tokens.ios_tokens.append(apns_token)
tokens.save() tokens.save()
return {'message': 'Token saved.'} return {"message": "Token saved."}
return {'message': 'Token already saved.'} return {"message": "Token already saved."}
@ajax_login_required @ajax_login_required
@json.json_view @json.json_view
def set_android_token(request): def set_android_token(request):
""" """
Android's push notification tokens. Not sure why I can't find this function in Android's push notification tokens. Not sure why I can't find this function in
the Android code. the Android code.
""" """
user = get_user(request) user = get_user(request)
tokens = MUserNotificationTokens.get_tokens_for_user(user.pk) tokens = MUserNotificationTokens.get_tokens_for_user(user.pk)
token = request.POST['token'] token = request.POST["token"]
logging.user(user, "~FCUpdating Android push token") logging.user(user, "~FCUpdating Android push token")
if token not in tokens.android_tokens: if token not in tokens.android_tokens:
tokens.android_tokens.append(token) tokens.android_tokens.append(token)
tokens.save() tokens.save()
return {'message': 'Token saved.'} return {"message": "Token saved."}
return {'message': 'Token already saved.'} return {"message": "Token already saved."}
@required_params(feed_id=int) @required_params(feed_id=int)
@staff_member_required @staff_member_required
@ -102,10 +110,12 @@ def force_push(request):
Intended to force a push notification for a feed for testing. Handier than the console. Intended to force a push notification for a feed for testing. Handier than the console.
""" """
user = get_user(request) user = get_user(request)
feed_id = request.GET['feed_id'] feed_id = request.GET["feed_id"]
count = int(request.GET.get('count', 1)) count = int(request.GET.get("count", 1))
logging.user(user, "~BM~FWForce pushing %s stories: ~SB%s" % (count, Feed.get_by_id(feed_id))) logging.user(user, "~BM~FWForce pushing %s stories: ~SB%s" % (count, Feed.get_by_id(feed_id)))
sent_count, user_count = MUserFeedNotification.push_feed_notifications(feed_id, new_stories=count, force=True) sent_count, user_count = MUserFeedNotification.push_feed_notifications(
feed_id, new_stories=count, force=True
return {"message": "Pushed %s notifications to %s users" % (sent_count, user_count)} )
return {"message": "Pushed %s notifications to %s users" % (sent_count, user_count)}

View file

@ -1 +1 @@
# No models for OAuth. Use MSocialServices model in social. # No models for OAuth. Use MSocialServices model in social.

View file

@ -3,33 +3,43 @@ from apps.oauth import views
from oauth2_provider import views as op_views from oauth2_provider import views as op_views
urlpatterns = [ urlpatterns = [
url(r'^twitter_connect/?$', views.twitter_connect, name='twitter-connect'), url(r"^twitter_connect/?$", views.twitter_connect, name="twitter-connect"),
url(r'^facebook_connect/?$', views.facebook_connect, name='facebook-connect'), url(r"^facebook_connect/?$", views.facebook_connect, name="facebook-connect"),
url(r'^twitter_disconnect/?$', views.twitter_disconnect, name='twitter-disconnect'), url(r"^twitter_disconnect/?$", views.twitter_disconnect, name="twitter-disconnect"),
url(r'^facebook_disconnect/?$', views.facebook_disconnect, name='facebook-disconnect'), url(r"^facebook_disconnect/?$", views.facebook_disconnect, name="facebook-disconnect"),
url(r'^follow_twitter_account/?$', views.follow_twitter_account, name='social-follow-twitter'), url(r"^follow_twitter_account/?$", views.follow_twitter_account, name="social-follow-twitter"),
url(r'^unfollow_twitter_account/?$', views.unfollow_twitter_account, name='social-unfollow-twitter'), url(r"^unfollow_twitter_account/?$", views.unfollow_twitter_account, name="social-unfollow-twitter"),
# Django OAuth Toolkit # Django OAuth Toolkit
url(r'^status/?$', views.ifttt_status, name="ifttt-status"), url(r"^status/?$", views.ifttt_status, name="ifttt-status"),
url(r'^authorize/?$', op_views.AuthorizationView.as_view(), name="oauth-authorize"), url(r"^authorize/?$", op_views.AuthorizationView.as_view(), name="oauth-authorize"),
url(r'^token/?$', op_views.TokenView.as_view(), name="oauth-token"), url(r"^token/?$", op_views.TokenView.as_view(), name="oauth-token"),
url(r'^oauth2/authorize/?$', op_views.AuthorizationView.as_view(), name="ifttt-authorize"), url(r"^oauth2/authorize/?$", op_views.AuthorizationView.as_view(), name="ifttt-authorize"),
url(r'^oauth2/token/?$', op_views.TokenView.as_view(), name="ifttt-token"), url(r"^oauth2/token/?$", op_views.TokenView.as_view(), name="ifttt-token"),
url(r'^user/info/?$', views.api_user_info, name="ifttt-user-info"), url(r"^user/info/?$", views.api_user_info, name="ifttt-user-info"),
url(r'^triggers/(?P<trigger_slug>new-unread-(focus-)?story)/fields/feed_or_folder/options/?$', url(
views.api_feed_list, name="ifttt-trigger-feedlist"), r"^triggers/(?P<trigger_slug>new-unread-(focus-)?story)/fields/feed_or_folder/options/?$",
url(r'^triggers/(?P<trigger_slug>new-unread-(focus-)?story)/?$', views.api_feed_list,
views.api_unread_story, name="ifttt-trigger-unreadstory"), name="ifttt-trigger-feedlist",
url(r'^triggers/new-saved-story/fields/story_tag/options/?$', ),
views.api_saved_tag_list, name="ifttt-trigger-taglist"), url(
url(r'^triggers/new-saved-story/?$', views.api_saved_story, name="ifttt-trigger-saved"), r"^triggers/(?P<trigger_slug>new-unread-(focus-)?story)/?$",
url(r'^triggers/new-shared-story/fields/blurblog_user/options/?$', views.api_unread_story,
views.api_shared_usernames, name="ifttt-trigger-blurbloglist"), name="ifttt-trigger-unreadstory",
url(r'^triggers/new-shared-story/?$', views.api_shared_story, name="ifttt-trigger-shared"), ),
url(r'^actions/share-story/?$', views.api_share_new_story, name="ifttt-action-share"), url(
url(r'^actions/save-story/?$', views.api_save_new_story, name="ifttt-action-saved"), r"^triggers/new-saved-story/fields/story_tag/options/?$",
url(r'^actions/add-site/?$', views.api_save_new_subscription, name="ifttt-action-subscription"), views.api_saved_tag_list,
url(r'^actions/add-site/fields/folder/options/?$', name="ifttt-trigger-taglist",
views.api_folder_list, name="ifttt-action-folderlist"), ),
url(r"^triggers/new-saved-story/?$", views.api_saved_story, name="ifttt-trigger-saved"),
url(
r"^triggers/new-shared-story/fields/blurblog_user/options/?$",
views.api_shared_usernames,
name="ifttt-trigger-blurbloglist",
),
url(r"^triggers/new-shared-story/?$", views.api_shared_story, name="ifttt-trigger-shared"),
url(r"^actions/share-story/?$", views.api_share_new_story, name="ifttt-action-share"),
url(r"^actions/save-story/?$", views.api_save_new_story, name="ifttt-action-saved"),
url(r"^actions/add-site/?$", views.api_save_new_subscription, name="ifttt-action-subscription"),
url(r"^actions/add-site/fields/folder/options/?$", views.api_folder_list, name="ifttt-action-folderlist"),
] ]

File diff suppressed because it is too large Load diff

View file

@ -3,18 +3,19 @@ from factory.django import DjangoModelFactory
from django.contrib.auth.models import User from django.contrib.auth.models import User
from apps.profile.models import Profile from apps.profile.models import Profile
class UserFactory(DjangoModelFactory): class UserFactory(DjangoModelFactory):
first_name = factory.Faker('first_name') first_name = factory.Faker("first_name")
last_name = factory.Faker('last_name') last_name = factory.Faker("last_name")
username = factory.Faker('email') username = factory.Faker("email")
date_joined = factory.Faker('date_time') date_joined = factory.Faker("date_time")
class Meta: class Meta:
model = User model = User
class ProfileFactory(DjangoModelFactory): class ProfileFactory(DjangoModelFactory):
user = factory.SubFactory(UserFactory) user = factory.SubFactory(UserFactory)
class Meta: class Meta:
model = Profile model = Profile

View file

@ -14,135 +14,133 @@ PLANS = [
("newsblur-premium-pro", mark_safe("$299 / year <span class='NB-small'>(~$25/month)</span>")), ("newsblur-premium-pro", mark_safe("$299 / year <span class='NB-small'>(~$25/month)</span>")),
] ]
class HorizRadioRenderer(forms.RadioSelect): class HorizRadioRenderer(forms.RadioSelect):
""" this overrides widget method to put radio buttons horizontally """this overrides widget method to put radio buttons horizontally
instead of vertically. instead of vertically.
""" """
def render(self, name, value, attrs=None, renderer=None): def render(self, name, value, attrs=None, renderer=None):
"""Outputs radios""" """Outputs radios"""
choices = '\n'.join(['%s\n' % w for w in self]) choices = "\n".join(["%s\n" % w for w in self])
return mark_safe('<div class="NB-stripe-plan-choice">%s</div>' % choices) return mark_safe('<div class="NB-stripe-plan-choice">%s</div>' % choices)
class StripePlusPaymentForm(StripePaymentForm): class StripePlusPaymentForm(StripePaymentForm):
def __init__(self, *args, **kwargs): def __init__(self, *args, **kwargs):
email = kwargs.pop('email') email = kwargs.pop("email")
plan = kwargs.pop('plan', '') plan = kwargs.pop("plan", "")
super(StripePlusPaymentForm, self).__init__(*args, **kwargs) super(StripePlusPaymentForm, self).__init__(*args, **kwargs)
self.fields['email'].initial = email self.fields["email"].initial = email
if plan: if plan:
self.fields['plan'].initial = plan self.fields["plan"].initial = plan
email = forms.EmailField(widget=forms.TextInput(attrs=dict(maxlength=75)), email = forms.EmailField(
label='Email address', widget=forms.TextInput(attrs=dict(maxlength=75)), label="Email address", required=False
required=False) )
plan = forms.ChoiceField(required=False, widget=forms.RadioSelect, plan = forms.ChoiceField(required=False, widget=forms.RadioSelect, choices=PLANS, label="Plan")
choices=PLANS, label='Plan')
class DeleteAccountForm(forms.Form): class DeleteAccountForm(forms.Form):
password = forms.CharField(widget=forms.PasswordInput(), password = forms.CharField(widget=forms.PasswordInput(), label="Confirm your password", required=False)
label="Confirm your password", confirm = forms.CharField(label='Type "Delete" to confirm', widget=forms.TextInput(), required=False)
required=False)
confirm = forms.CharField(label="Type \"Delete\" to confirm",
widget=forms.TextInput(),
required=False)
def __init__(self, *args, **kwargs): def __init__(self, *args, **kwargs):
self.user = kwargs.pop('user') self.user = kwargs.pop("user")
super(DeleteAccountForm, self).__init__(*args, **kwargs) super(DeleteAccountForm, self).__init__(*args, **kwargs)
def clean_password(self): def clean_password(self):
user_auth = authenticate(username=self.user.username, user_auth = authenticate(username=self.user.username, password=self.cleaned_data["password"])
password=self.cleaned_data['password'])
if not user_auth: if not user_auth:
user_auth = blank_authenticate(username=self.user.username) user_auth = blank_authenticate(username=self.user.username)
if not user_auth:
raise forms.ValidationError('Your password doesn\'t match.')
return self.cleaned_data['password'] if not user_auth:
raise forms.ValidationError("Your password doesn't match.")
return self.cleaned_data["password"]
def clean_confirm(self): def clean_confirm(self):
if self.cleaned_data.get('confirm', "").lower() != "delete": if self.cleaned_data.get("confirm", "").lower() != "delete":
raise forms.ValidationError('Please type "DELETE" to confirm deletion.') raise forms.ValidationError('Please type "DELETE" to confirm deletion.')
return self.cleaned_data['confirm'] return self.cleaned_data["confirm"]
class ForgotPasswordForm(forms.Form): class ForgotPasswordForm(forms.Form):
email = forms.CharField(widget=forms.TextInput(), email = forms.CharField(widget=forms.TextInput(), label="Your email address", required=False)
label="Your email address",
required=False)
def __init__(self, *args, **kwargs): def __init__(self, *args, **kwargs):
super(ForgotPasswordForm, self).__init__(*args, **kwargs) super(ForgotPasswordForm, self).__init__(*args, **kwargs)
def clean_email(self): def clean_email(self):
if not self.cleaned_data['email']: if not self.cleaned_data["email"]:
raise forms.ValidationError('Please enter in an email address.') raise forms.ValidationError("Please enter in an email address.")
try: try:
User.objects.get(email__iexact=self.cleaned_data['email']) User.objects.get(email__iexact=self.cleaned_data["email"])
except User.MultipleObjectsReturned: except User.MultipleObjectsReturned:
pass pass
except User.DoesNotExist: except User.DoesNotExist:
raise forms.ValidationError('No user has that email address.') raise forms.ValidationError("No user has that email address.")
return self.cleaned_data["email"]
return self.cleaned_data['email']
class ForgotPasswordReturnForm(forms.Form): class ForgotPasswordReturnForm(forms.Form):
password = forms.CharField(widget=forms.PasswordInput(), password = forms.CharField(widget=forms.PasswordInput(), label="Your new password", required=False)
label="Your new password",
required=False)
class AccountSettingsForm(forms.Form): class AccountSettingsForm(forms.Form):
use_required_attribute = False use_required_attribute = False
username = forms.RegexField(regex=r'^\w+$', username = forms.RegexField(
max_length=30, regex=r"^\w+$",
widget=forms.TextInput(attrs={'class': 'NB-input'}), max_length=30,
label='username', widget=forms.TextInput(attrs={"class": "NB-input"}),
required=False, label="username",
error_messages={ required=False,
'invalid': "Your username may only contain letters and numbers." error_messages={"invalid": "Your username may only contain letters and numbers."},
}) )
email = forms.EmailField(widget=forms.TextInput(attrs={'maxlength': 75, 'class': 'NB-input'}), email = forms.EmailField(
label='email address', widget=forms.TextInput(attrs={"maxlength": 75, "class": "NB-input"}),
required=True, label="email address",
error_messages={'required': 'Please enter an email.'}) required=True,
new_password = forms.CharField(widget=forms.PasswordInput(attrs={'class': 'NB-input'}), error_messages={"required": "Please enter an email."},
label='password', )
required=False) new_password = forms.CharField(
# error_messages={'required': 'Please enter a password.'}) widget=forms.PasswordInput(attrs={"class": "NB-input"}), label="password", required=False
old_password = forms.CharField(widget=forms.PasswordInput(attrs={'class': 'NB-input'}), )
label='password', # error_messages={'required': 'Please enter a password.'})
required=False) old_password = forms.CharField(
custom_js = forms.CharField(widget=forms.TextInput(attrs={'class': 'NB-input'}), widget=forms.PasswordInput(attrs={"class": "NB-input"}), label="password", required=False
label='custom_js', )
required=False) custom_js = forms.CharField(
custom_css = forms.CharField(widget=forms.TextInput(attrs={'class': 'NB-input'}), widget=forms.TextInput(attrs={"class": "NB-input"}), label="custom_js", required=False
label='custom_css', )
required=False) custom_css = forms.CharField(
widget=forms.TextInput(attrs={"class": "NB-input"}), label="custom_css", required=False
)
def __init__(self, user, *args, **kwargs): def __init__(self, user, *args, **kwargs):
self.user = user self.user = user
super(AccountSettingsForm, self).__init__(*args, **kwargs) super(AccountSettingsForm, self).__init__(*args, **kwargs)
def clean_username(self): def clean_username(self):
username = self.cleaned_data['username'] username = self.cleaned_data["username"]
return username return username
def clean_password(self): def clean_password(self):
if not self.cleaned_data['password']: if not self.cleaned_data["password"]:
return "" return ""
return self.cleaned_data['password'] return self.cleaned_data["password"]
def clean_email(self): def clean_email(self):
return self.cleaned_data['email'] return self.cleaned_data["email"]
def clean(self): def clean(self):
username = self.cleaned_data.get('username', '') username = self.cleaned_data.get("username", "")
new_password = self.cleaned_data.get('new_password', '') new_password = self.cleaned_data.get("new_password", "")
old_password = self.cleaned_data.get('old_password', '') old_password = self.cleaned_data.get("old_password", "")
email = self.cleaned_data.get('email', None) email = self.cleaned_data.get("email", None)
if username and self.user.username != username: if username and self.user.username != username:
try: try:
User.objects.get(username__iexact=username) User.objects.get(username__iexact=username)
@ -150,26 +148,28 @@ class AccountSettingsForm(forms.Form):
pass pass
else: else:
raise forms.ValidationError("This username is already taken. Try something different.") raise forms.ValidationError("This username is already taken. Try something different.")
if self.user.email != email: if self.user.email != email:
if email and User.objects.filter(email__iexact=email).count(): if email and User.objects.filter(email__iexact=email).count():
raise forms.ValidationError("This email is already being used by another account. Try something different.") raise forms.ValidationError(
"This email is already being used by another account. Try something different."
)
if old_password or new_password: if old_password or new_password:
code = change_password(self.user, old_password, new_password, only_check=True) code = change_password(self.user, old_password, new_password, only_check=True)
if code <= 0: if code <= 0:
raise forms.ValidationError("Your old password is incorrect.") raise forms.ValidationError("Your old password is incorrect.")
return self.cleaned_data return self.cleaned_data
def save(self, profile_callback=None): def save(self, profile_callback=None):
username = self.cleaned_data['username'] username = self.cleaned_data["username"]
new_password = self.cleaned_data.get('new_password', None) new_password = self.cleaned_data.get("new_password", None)
old_password = self.cleaned_data.get('old_password', None) old_password = self.cleaned_data.get("old_password", None)
email = self.cleaned_data.get('email', None) email = self.cleaned_data.get("email", None)
custom_css = self.cleaned_data.get('custom_css', None) custom_css = self.cleaned_data.get("custom_css", None)
custom_js = self.cleaned_data.get('custom_js', None) custom_js = self.cleaned_data.get("custom_js", None)
if username and self.user.username != username: if username and self.user.username != username:
change_password(self.user, self.user.username, username) change_password(self.user, self.user.username, username)
self.user.username = username self.user.username = username
@ -178,28 +178,26 @@ class AccountSettingsForm(forms.Form):
social_profile.username = username social_profile.username = username
social_profile.save() social_profile.save()
self.user.profile.update_email(email) self.user.profile.update_email(email)
if old_password or new_password: if old_password or new_password:
change_password(self.user, old_password, new_password) change_password(self.user, old_password, new_password)
MCustomStyling.save_user(self.user.pk, custom_css, custom_js) MCustomStyling.save_user(self.user.pk, custom_css, custom_js)
class RedeemCodeForm(forms.Form): class RedeemCodeForm(forms.Form):
use_required_attribute = False use_required_attribute = False
gift_code = forms.CharField(widget=forms.TextInput(), gift_code = forms.CharField(widget=forms.TextInput(), label="Gift code", required=True)
label="Gift code",
required=True)
def clean_gift_code(self): def clean_gift_code(self):
gift_code = self.cleaned_data['gift_code'] gift_code = self.cleaned_data["gift_code"]
gift_code = re.sub(r'[^a-zA-Z0-9]', '', gift_code).lower() gift_code = re.sub(r"[^a-zA-Z0-9]", "", gift_code).lower()
if len(gift_code) != 12: if len(gift_code) != 12:
raise forms.ValidationError('Your gift code should be 12 characters long.') raise forms.ValidationError("Your gift code should be 12 characters long.")
newsblur_gift_code = MGiftCode.objects.filter(gift_code__iexact=gift_code) newsblur_gift_code = MGiftCode.objects.filter(gift_code__iexact=gift_code)
if newsblur_gift_code: if newsblur_gift_code:
@ -208,15 +206,17 @@ class RedeemCodeForm(forms.Form):
return newsblur_gift_code.gift_code return newsblur_gift_code.gift_code
else: else:
# Thinkup / Good Web Bundle # Thinkup / Good Web Bundle
req = requests.get('https://www.thinkup.com/join/api/bundle/', params={'code': gift_code}) req = requests.get("https://www.thinkup.com/join/api/bundle/", params={"code": gift_code})
response = req.json() response = req.json()
is_valid = response.get('is_valid', None) is_valid = response.get("is_valid", None)
if is_valid: if is_valid:
return gift_code return gift_code
elif is_valid == False: elif is_valid == False:
raise forms.ValidationError('Your gift code is invalid. Check it for errors.') raise forms.ValidationError("Your gift code is invalid. Check it for errors.")
elif response.get('error', None): elif response.get("error", None):
raise forms.ValidationError('Your gift code is invalid, says the server: %s' % response['error']) raise forms.ValidationError(
"Your gift code is invalid, says the server: %s" % response["error"]
)
return gift_code return gift_code

View file

@ -3,10 +3,10 @@ from django.core.management.base import BaseCommand
from django.db import connections from django.db import connections
from django.db.utils import OperationalError from django.db.utils import OperationalError
class Command(BaseCommand):
class Command(BaseCommand):
def handle(self, *args, **options): def handle(self, *args, **options):
db_conn = connections['default'] db_conn = connections["default"]
connected = False connected = False
while not connected: while not connected:
try: try:

View file

@ -1,15 +1,15 @@
from django.core.management.base import BaseCommand from django.core.management.base import BaseCommand
from django.contrib.auth.models import User from django.contrib.auth.models import User
class Command(BaseCommand):
class Command(BaseCommand):
def add_arguments(self, parser): def add_arguments(self, parser):
parser.add_argument("-u", "--username", dest="username", nargs=1, help="Specify user id or username") parser.add_argument("-u", "--username", dest="username", nargs=1, help="Specify user id or username")
parser.add_argument("-e", "--email", dest="email", nargs=1, help="Specify email if it doesn't exist") parser.add_argument("-e", "--email", dest="email", nargs=1, help="Specify email if it doesn't exist")
def handle(self, *args, **options): def handle(self, *args, **options):
username = options.get('username') username = options.get("username")
email = options.get('email') email = options.get("email")
user = None user = None
if username: if username:
try: try:
@ -30,11 +30,9 @@ class Command(BaseCommand):
user = users[0] user = users[0]
except User.DoesNotExist: except User.DoesNotExist:
print(" ---> No email found at: %s" % email) print(" ---> No email found at: %s" % email)
if user: if user:
email = options.get("email") or user.email email = options.get("email") or user.email
user.profile.send_forgot_password_email(email) user.profile.send_forgot_password_email(email)
else: else:
print(" ---> No user/email found at: %s/%s" % (username, email)) print(" ---> No user/email found at: %s/%s" % (username, email))

View file

@ -7,25 +7,46 @@ from django.contrib.auth.models import User
from utils import log as logging from utils import log as logging
from apps.profile.models import Profile, PaymentHistory from apps.profile.models import Profile, PaymentHistory
class Command(BaseCommand):
class Command(BaseCommand):
def add_arguments(self, parser): def add_arguments(self, parser):
parser.add_argument("-d", "--days", dest="days", nargs=1, type=int, default=365, help="Number of days to go back") parser.add_argument(
parser.add_argument("-o", "--offset", dest="offset", nargs=1, type=int, default=0, help="Offset customer (in date DESC)") "-d", "--days", dest="days", nargs=1, type=int, default=365, help="Number of days to go back"
parser.add_argument("-f", "--force", dest="force", nargs=1, type=bool, default=False, help="Force reimport for every user") )
parser.add_argument(
"-o",
"--offset",
dest="offset",
nargs=1,
type=int,
default=0,
help="Offset customer (in date DESC)",
)
parser.add_argument(
"-f",
"--force",
dest="force",
nargs=1,
type=bool,
default=False,
help="Force reimport for every user",
)
def handle(self, *args, **options): def handle(self, *args, **options):
stripe.api_key = settings.STRIPE_SECRET stripe.api_key = settings.STRIPE_SECRET
week = datetime.datetime.now() - datetime.timedelta(days=int(options.get('days'))) week = datetime.datetime.now() - datetime.timedelta(days=int(options.get("days")))
failed = [] failed = []
limit = 100 limit = 100
offset = options.get('offset') offset = options.get("offset")
while True: while True:
logging.debug(" ---> At %s" % offset) logging.debug(" ---> At %s" % offset)
user_ids = PaymentHistory.objects.filter(payment_provider='paypal', user_ids = (
payment_date__gte=week).values('user_id').distinct()[offset:offset+limit] PaymentHistory.objects.filter(payment_provider="paypal", payment_date__gte=week)
user_ids = [u['user_id'] for u in user_ids] .values("user_id")
.distinct()[offset : offset + limit]
)
user_ids = [u["user_id"] for u in user_ids]
if not len(user_ids): if not len(user_ids):
logging.debug("At %s, finished" % offset) logging.debug("At %s, finished" % offset)
break break
@ -36,7 +57,7 @@ class Command(BaseCommand):
except User.DoesNotExist: except User.DoesNotExist:
logging.debug(" ***> Couldn't find paypal user_id=%s" % user_id) logging.debug(" ***> Couldn't find paypal user_id=%s" % user_id)
failed.append(user_id) failed.append(user_id)
if not user.profile.is_premium: if not user.profile.is_premium:
user.profile.activate_premium() user.profile.activate_premium()
elif user.payments.all().count() != 1: elif user.payments.all().count() != 1:
@ -45,10 +66,9 @@ class Command(BaseCommand):
user.profile.setup_premium_history() user.profile.setup_premium_history()
elif user.profile.premium_expire > datetime.datetime.now() + datetime.timedelta(days=365): elif user.profile.premium_expire > datetime.datetime.now() + datetime.timedelta(days=365):
user.profile.setup_premium_history() user.profile.setup_premium_history()
elif options.get('force'): elif options.get("force"):
user.profile.setup_premium_history() user.profile.setup_premium_history()
else: else:
logging.debug(" ---> %s is fine" % user.username) logging.debug(" ---> %s is fine" % user.username)
return failed return failed

View file

@ -6,16 +6,29 @@ from django.core.management.base import BaseCommand
from utils import log as logging from utils import log as logging
from apps.profile.models import Profile from apps.profile.models import Profile
class Command(BaseCommand): class Command(BaseCommand):
def add_arguments(self, parser) def add_arguments(self, parser):
parser.add_argument("-d", "--days", dest="days", nargs=1, type='int', default=365, help="Number of days to go back") parser.add_argument(
parser.add_argument("-l", "--limit", dest="limit", nargs=1, type='int', default=100, help="Charges per batch") "-d", "--days", dest="days", nargs=1, type="int", default=365, help="Number of days to go back"
parser.add_argument("-s", "--start", dest="start", nargs=1, type='string', default=None, help="Offset customer_id (starting_after)") )
parser.add_argument(
"-l", "--limit", dest="limit", nargs=1, type="int", default=100, help="Charges per batch"
)
parser.add_argument(
"-s",
"--start",
dest="start",
nargs=1,
type="string",
default=None,
help="Offset customer_id (starting_after)",
)
def handle(self, *args, **options): def handle(self, *args, **options):
limit = options.get('limit') limit = options.get("limit")
days = int(options.get('days')) days = int(options.get("days"))
starting_after = options.get('start') starting_after = options.get("start")
Profile.reimport_stripe_history(limit, days, starting_after) Profile.reimport_stripe_history(limit, days, starting_after)

View file

@ -5,11 +5,12 @@ from django.contrib.auth.models import User
from django.core.management.base import BaseCommand from django.core.management.base import BaseCommand
from apps.profile.models import Profile from apps.profile.models import Profile
class Command(BaseCommand):
class Command(BaseCommand):
def handle(self, *args, **options): def handle(self, *args, **options):
user = User.objects.last() user = User.objects.last()
profile = Profile.objects.get(user=user) profile = Profile.objects.get(user=user)
profile.delete() profile.delete()
user.delete() user.delete()
print("User and profile for user {0} deleted".format(user)) print("User and profile for user {0} deleted".format(user))

View file

@ -19,16 +19,16 @@ class LastSeenMiddleware(object):
def process_response(self, request, response): def process_response(self, request, response):
if ( if (
( (
request.path == '/' request.path == "/"
or request.path.startswith('/reader/refresh_feeds') or request.path.startswith("/reader/refresh_feeds")
or request.path.startswith('/reader/load_feeds') or request.path.startswith("/reader/load_feeds")
or request.path.startswith('/reader/feeds') or request.path.startswith("/reader/feeds")
) )
and hasattr(request, 'user') and hasattr(request, "user")
and request.user.is_authenticated and request.user.is_authenticated
): ):
hour_ago = datetime.datetime.utcnow() - datetime.timedelta(minutes=60) hour_ago = datetime.datetime.utcnow() - datetime.timedelta(minutes=60)
ip = request.META.get('HTTP_X_FORWARDED_FOR', None) or request.META['REMOTE_ADDR'] ip = request.META.get("HTTP_X_FORWARDED_FOR", None) or request.META["REMOTE_ADDR"]
if request.user.profile.last_seen_on < hour_ago: if request.user.profile.last_seen_on < hour_ago:
logging.user( logging.user(
request, "~FG~BBRepeat visitor: ~SB%s (%s)" % (request.user.profile.last_seen_on, ip) request, "~FG~BBRepeat visitor: ~SB%s (%s)" % (request.user.profile.last_seen_on, ip)
@ -50,11 +50,11 @@ class LastSeenMiddleware(object):
def __call__(self, request): def __call__(self, request):
response = None response = None
if hasattr(self, 'process_request'): if hasattr(self, "process_request"):
response = self.process_request(request) response = self.process_request(request)
if not response: if not response:
response = self.get_response(request) response = self.get_response(request)
if hasattr(self, 'process_response'): if hasattr(self, "process_response"):
response = self.process_response(request, response) response = self.process_response(request, response)
return response return response
@ -65,31 +65,31 @@ class DBProfilerMiddleware:
self.get_response = get_response self.get_response = get_response
def process_request(self, request): def process_request(self, request):
setattr(request, 'activated_segments', []) setattr(request, "activated_segments", [])
if ( if (
# request.path.startswith('/reader/feed') or # request.path.startswith('/reader/feed') or
request.path.startswith('/reader/feed/') request.path.startswith("/reader/feed/")
) and random.random() < 0.05: ) and random.random() < 0.05:
request.activated_segments.append('db_profiler') request.activated_segments.append("db_profiler")
connection.use_debug_cursor = True connection.use_debug_cursor = True
setattr(settings, 'ORIGINAL_DEBUG', settings.DEBUG) setattr(settings, "ORIGINAL_DEBUG", settings.DEBUG)
settings.DEBUG = True settings.DEBUG = True
def process_celery(self): def process_celery(self):
setattr(self, 'activated_segments', []) setattr(self, "activated_segments", [])
if random.random() < 0.01 or settings.DEBUG_QUERIES: if random.random() < 0.01 or settings.DEBUG_QUERIES:
self.activated_segments.append('db_profiler') self.activated_segments.append("db_profiler")
connection.use_debug_cursor = True connection.use_debug_cursor = True
setattr(settings, 'ORIGINAL_DEBUG', settings.DEBUG) setattr(settings, "ORIGINAL_DEBUG", settings.DEBUG)
settings.DEBUG = True settings.DEBUG = True
return self return self
def process_exception(self, request, exception): def process_exception(self, request, exception):
if hasattr(request, 'sql_times_elapsed'): if hasattr(request, "sql_times_elapsed"):
self._save_times(request.sql_times_elapsed) self._save_times(request.sql_times_elapsed)
def process_response(self, request, response): def process_response(self, request, response):
if hasattr(request, 'sql_times_elapsed'): if hasattr(request, "sql_times_elapsed"):
# middleware = SQLLogToConsoleMiddleware() # middleware = SQLLogToConsoleMiddleware()
# middleware.process_celery(self) # middleware.process_celery(self)
# logging.debug(" ---> ~FGProfiling~FB app: %s" % request.sql_times_elapsed) # logging.debug(" ---> ~FGProfiling~FB app: %s" % request.sql_times_elapsed)
@ -99,16 +99,16 @@ class DBProfilerMiddleware:
def process_celery_finished(self): def process_celery_finished(self):
middleware = SQLLogToConsoleMiddleware() middleware = SQLLogToConsoleMiddleware()
middleware.process_celery(self) middleware.process_celery(self)
if hasattr(self, 'sql_times_elapsed'): if hasattr(self, "sql_times_elapsed"):
logging.debug(" ---> ~FGProfiling~FB task: %s" % self.sql_times_elapsed) logging.debug(" ---> ~FGProfiling~FB task: %s" % self.sql_times_elapsed)
self._save_times(self.sql_times_elapsed, 'task_') self._save_times(self.sql_times_elapsed, "task_")
def process_request_finished(self): def process_request_finished(self):
middleware = SQLLogToConsoleMiddleware() middleware = SQLLogToConsoleMiddleware()
middleware.process_celery(self) middleware.process_celery(self)
if hasattr(self, 'sql_times_elapsed'): if hasattr(self, "sql_times_elapsed"):
logging.debug(" ---> ~FGProfiling~FB app: %s" % self.sql_times_elapsed) logging.debug(" ---> ~FGProfiling~FB app: %s" % self.sql_times_elapsed)
self._save_times(self.sql_times_elapsed, 'app_') self._save_times(self.sql_times_elapsed, "app_")
def _save_times(self, db_times, prefix=""): def _save_times(self, db_times, prefix=""):
if not db_times: if not db_times:
@ -118,7 +118,7 @@ class DBProfilerMiddleware:
pipe = r.pipeline() pipe = r.pipeline()
minute = round_time(round_to=60) minute = round_time(round_to=60)
for db, duration in list(db_times.items()): for db, duration in list(db_times.items()):
key = "DB:%s%s:%s" % (prefix, db, minute.strftime('%s')) key = "DB:%s%s:%s" % (prefix, db, minute.strftime("%s"))
pipe.incr("%s:c" % key) pipe.incr("%s:c" % key)
pipe.expireat("%s:c" % key, (minute + datetime.timedelta(days=2)).strftime("%s")) pipe.expireat("%s:c" % key, (minute + datetime.timedelta(days=2)).strftime("%s"))
if duration: if duration:
@ -128,11 +128,11 @@ class DBProfilerMiddleware:
def __call__(self, request): def __call__(self, request):
response = None response = None
if hasattr(self, 'process_request'): if hasattr(self, "process_request"):
response = self.process_request(request) response = self.process_request(request)
if not response: if not response:
response = self.get_response(request) response = self.get_response(request)
if hasattr(self, 'process_response'): if hasattr(self, "process_response"):
response = self.process_response(request, response) response = self.process_response(request, response)
return response return response
@ -144,7 +144,7 @@ class SQLLogToConsoleMiddleware:
def activated(self, request): def activated(self, request):
return settings.DEBUG_QUERIES or ( return settings.DEBUG_QUERIES or (
hasattr(request, 'activated_segments') and 'db_profiler' in request.activated_segments hasattr(request, "activated_segments") and "db_profiler" in request.activated_segments
) )
def process_response(self, request, response): def process_response(self, request, response):
@ -152,38 +152,39 @@ class SQLLogToConsoleMiddleware:
return response return response
if connection.queries: if connection.queries:
queries = connection.queries queries = connection.queries
if getattr(connection, 'queriesx', False): if getattr(connection, "queriesx", False):
queries.extend(connection.queriesx) queries.extend(connection.queriesx)
connection.queriesx = [] connection.queriesx = []
time_elapsed = sum([float(q['time']) for q in connection.queries]) time_elapsed = sum([float(q["time"]) for q in connection.queries])
for query in queries: for query in queries:
sql_time = float(query['time']) sql_time = float(query["time"])
query['color'] = '~FC' if sql_time < 0.015 else '~FK~SB' if sql_time < 0.05 else '~FR~SB' query["color"] = "~FC" if sql_time < 0.015 else "~FK~SB" if sql_time < 0.05 else "~FR~SB"
if query.get('mongo'): if query.get("mongo"):
query['sql'] = "~FM%s %s: %s" % (query['mongo']['op'], query['mongo']['collection'], query['mongo']['query']) query["sql"] = "~FM%s %s: %s" % (
elif query.get('redis_user'): query["mongo"]["op"],
query['sql'] = "~FC%s" % (query['redis_user']['query']) query["mongo"]["collection"],
elif query.get('redis_story'): query["mongo"]["query"],
query['sql'] = "~FC%s" % (query['redis_story']['query']) )
elif query.get('redis_session'): elif query.get("redis_user"):
query['sql'] = "~FC%s" % (query['redis_session']['query']) query["sql"] = "~FC%s" % (query["redis_user"]["query"])
elif query.get('redis_pubsub'): elif query.get("redis_story"):
query['sql'] = "~FC%s" % (query['redis_pubsub']['query']) query["sql"] = "~FC%s" % (query["redis_story"]["query"])
elif query.get('db_redis'): elif query.get("redis_session"):
query['sql'] = "~FC%s" % (query['db_redis']['query']) query["sql"] = "~FC%s" % (query["redis_session"]["query"])
elif 'sql' not in query: elif query.get("redis_pubsub"):
query["sql"] = "~FC%s" % (query["redis_pubsub"]["query"])
elif query.get("db_redis"):
query["sql"] = "~FC%s" % (query["db_redis"]["query"])
elif "sql" not in query:
logging.debug(" ***> Query log missing: %s" % query) logging.debug(" ***> Query log missing: %s" % query)
else: else:
query['sql'] = re.sub(r'SELECT (.*?) FROM', 'SELECT * FROM', query['sql']) query["sql"] = re.sub(r"SELECT (.*?) FROM", "SELECT * FROM", query["sql"])
query['sql'] = re.sub(r'SELECT', '~FYSELECT', query['sql']) query["sql"] = re.sub(r"SELECT", "~FYSELECT", query["sql"])
query['sql'] = re.sub(r'INSERT', '~FGINSERT', query['sql']) query["sql"] = re.sub(r"INSERT", "~FGINSERT", query["sql"])
query['sql'] = re.sub(r'UPDATE', '~FY~SBUPDATE', query['sql']) query["sql"] = re.sub(r"UPDATE", "~FY~SBUPDATE", query["sql"])
query['sql'] = re.sub(r'DELETE', '~FR~SBDELETE', query['sql']) query["sql"] = re.sub(r"DELETE", "~FR~SBDELETE", query["sql"])
if ( if settings.DEBUG_QUERIES and not getattr(settings, "DEBUG_QUERIES_SUMMARY_ONLY", False):
settings.DEBUG_QUERIES
and not getattr(settings, 'DEBUG_QUERIES_SUMMARY_ONLY', False)
):
t = Template( t = Template(
"{% for sql in sqllog %}{% if not forloop.first %} {% endif %}[{{forloop.counter}}] {{sql.color}}{{sql.time}}~SN~FW: {{sql.sql|safe}}{% if not forloop.last %}\n{% endif %}{% endfor %}" "{% for sql in sqllog %}{% if not forloop.first %} {% endif %}[{{forloop.counter}}] {{sql.color}}{{sql.time}}~SN~FW: {{sql.sql|safe}}{% if not forloop.last %}\n{% endif %}{% endfor %}"
) )
@ -191,51 +192,51 @@ class SQLLogToConsoleMiddleware:
t.render( t.render(
Context( Context(
{ {
'sqllog': queries, "sqllog": queries,
'count': len(queries), "count": len(queries),
'time': time_elapsed, "time": time_elapsed,
} }
) )
) )
) )
times_elapsed = { times_elapsed = {
'sql': sum( "sql": sum(
[ [
float(q['time']) float(q["time"])
for q in queries for q in queries
if not q.get('mongo') if not q.get("mongo")
and not q.get('redis_user') and not q.get("redis_user")
and not q.get('redis_story') and not q.get("redis_story")
and not q.get('redis_session') and not q.get("redis_session")
and not q.get('redis_pubsub') and not q.get("redis_pubsub")
] ]
), ),
'mongo': sum([float(q['time']) for q in queries if q.get('mongo')]), "mongo": sum([float(q["time"]) for q in queries if q.get("mongo")]),
'redis_user': sum([float(q['time']) for q in queries if q.get('redis_user')]), "redis_user": sum([float(q["time"]) for q in queries if q.get("redis_user")]),
'redis_story': sum([float(q['time']) for q in queries if q.get('redis_story')]), "redis_story": sum([float(q["time"]) for q in queries if q.get("redis_story")]),
'redis_session': sum([float(q['time']) for q in queries if q.get('redis_session')]), "redis_session": sum([float(q["time"]) for q in queries if q.get("redis_session")]),
'redis_pubsub': sum([float(q['time']) for q in queries if q.get('redis_pubsub')]), "redis_pubsub": sum([float(q["time"]) for q in queries if q.get("redis_pubsub")]),
} }
setattr(request, 'sql_times_elapsed', times_elapsed) setattr(request, "sql_times_elapsed", times_elapsed)
else: else:
print(" ***> No queries") print(" ***> No queries")
if not getattr(settings, 'ORIGINAL_DEBUG', settings.DEBUG): if not getattr(settings, "ORIGINAL_DEBUG", settings.DEBUG):
settings.DEBUG = False settings.DEBUG = False
return response return response
def process_celery(self, profiler): def process_celery(self, profiler):
self.process_response(profiler, None) self.process_response(profiler, None)
if not getattr(settings, 'ORIGINAL_DEBUG', settings.DEBUG): if not getattr(settings, "ORIGINAL_DEBUG", settings.DEBUG):
settings.DEBUG = False settings.DEBUG = False
def __call__(self, request): def __call__(self, request):
response = None response = None
if hasattr(self, 'process_request'): if hasattr(self, "process_request"):
response = self.process_request(request) response = self.process_request(request)
if not response: if not response:
response = self.get_response(request) response = self.get_response(request)
if hasattr(self, 'process_response'): if hasattr(self, "process_response"):
response = self.process_response(request, response) response = self.process_response(request, response)
return response return response
@ -246,7 +247,7 @@ SIMPSONS_QUOTES = [
("Ralph", "Me fail English? That's unpossible."), ("Ralph", "Me fail English? That's unpossible."),
( (
"Lionel Hutz", "Lionel Hutz",
"This is the greatest case of false advertising I've seen since I sued the movie \"The Never Ending Story.\"", 'This is the greatest case of false advertising I\'ve seen since I sued the movie "The Never Ending Story."',
), ),
("Sideshow Bob", "No children have ever meddled with the Republican Party and lived to tell about it."), ("Sideshow Bob", "No children have ever meddled with the Republican Party and lived to tell about it."),
( (
@ -261,7 +262,7 @@ SIMPSONS_QUOTES = [
), ),
( (
"Comic Book Guy", "Comic Book Guy",
"Your questions have become more redundant and annoying than the last three \"Highlander\" movies.", 'Your questions have become more redundant and annoying than the last three "Highlander" movies.',
), ),
("Chief Wiggum", "Uh, no, you got the wrong number. This is 9-1...2."), ("Chief Wiggum", "Uh, no, you got the wrong number. This is 9-1...2."),
( (
@ -282,11 +283,11 @@ SIMPSONS_QUOTES = [
), ),
( (
"Lionel Hutz", "Lionel Hutz",
"Well, he's kind of had it in for me ever since I accidentally ran over his dog. Actually, replace \"accidentally\" with \"repeatedly\" and replace \"dog\" with \"son.\"", 'Well, he\'s kind of had it in for me ever since I accidentally ran over his dog. Actually, replace "accidentally" with "repeatedly" and replace "dog" with "son."',
), ),
( (
"Comic Book Guy", "Comic Book Guy",
"Last night's \"Itchy and Scratchy Show\" was, without a doubt, the worst episode *ever.* Rest assured, I was on the Internet within minutes, registering my disgust throughout the world.", 'Last night\'s "Itchy and Scratchy Show" was, without a doubt, the worst episode *ever.* Rest assured, I was on the Internet within minutes, registering my disgust throughout the world.',
), ),
("Homer", "I'm normally not a praying man, but if you're up there, please save me, Superman."), ("Homer", "I'm normally not a praying man, but if you're up there, please save me, Superman."),
("Homer", "Save me, Jeebus."), ("Homer", "Save me, Jeebus."),
@ -307,7 +308,7 @@ SIMPSONS_QUOTES = [
("Homer", "Fame was like a drug. But what was even more like a drug were the drugs."), ("Homer", "Fame was like a drug. But what was even more like a drug were the drugs."),
( (
"Homer", "Homer",
"Books are useless! I only ever read one book, \"To Kill A Mockingbird,\" and it gave me absolutely no insight on how to kill mockingbirds! Sure it taught me not to judge a man by the color of his skin...but what good does *that* do me?", 'Books are useless! I only ever read one book, "To Kill A Mockingbird," and it gave me absolutely no insight on how to kill mockingbirds! Sure it taught me not to judge a man by the color of his skin...but what good does *that* do me?',
), ),
( (
"Chief Wiggum", "Chief Wiggum",
@ -325,8 +326,8 @@ SIMPSONS_QUOTES = [
"Homer", "Homer",
"You know, the one with all the well meaning rules that don't work out in real life, uh, Christianity.", "You know, the one with all the well meaning rules that don't work out in real life, uh, Christianity.",
), ),
("Smithers", "Uh, no, they're saying \"Boo-urns, Boo-urns.\""), ("Smithers", 'Uh, no, they\'re saying "Boo-urns, Boo-urns."'),
("Hans Moleman", "I was saying \"Boo-urns.\""), ("Hans Moleman", 'I was saying "Boo-urns."'),
("Homer", "Kids, you tried your best and you failed miserably. The lesson is, never try."), ("Homer", "Kids, you tried your best and you failed miserably. The lesson is, never try."),
("Homer", "Here's to alcohol, the cause of - and solution to - all life's problems."), ("Homer", "Here's to alcohol, the cause of - and solution to - all life's problems."),
( (
@ -350,7 +351,7 @@ SIMPSONS_QUOTES = [
), ),
( (
"Troy McClure", "Troy McClure",
"Hi. I'm Troy McClure. You may remember me from such self-help tapes as \"Smoke Yourself Thin\" and \"Get Some Confidence, Stupid!\"", 'Hi. I\'m Troy McClure. You may remember me from such self-help tapes as "Smoke Yourself Thin" and "Get Some Confidence, Stupid!"',
), ),
("Homer", "A woman is a lot like a refrigerator. Six feet tall, 300 pounds...it makes ice."), ("Homer", "A woman is a lot like a refrigerator. Six feet tall, 300 pounds...it makes ice."),
( (
@ -425,7 +426,7 @@ SIMPSONS_QUOTES = [
("Barney", "Jesus must be spinning in his grave!"), ("Barney", "Jesus must be spinning in his grave!"),
( (
"Superintendent Chalmers", "Superintendent Chalmers",
"\"Thank the Lord\"? That sounded like a prayer. A prayer in a public school. God has no place within these walls, just like facts don't have a place within an organized religion.", '"Thank the Lord"? That sounded like a prayer. A prayer in a public school. God has no place within these walls, just like facts don\'t have a place within an organized religion.',
), ),
("Mr Burns", "[answering the phone] Ahoy hoy?"), ("Mr Burns", "[answering the phone] Ahoy hoy?"),
("Comic Book Guy", "Oh, a *sarcasm* detector. Oh, that's a *really* useful invention!"), ("Comic Book Guy", "Oh, a *sarcasm* detector. Oh, that's a *really* useful invention!"),
@ -487,18 +488,18 @@ class SimpsonsMiddleware:
def process_response(self, request, response): def process_response(self, request, response):
quote = random.choice(SIMPSONS_QUOTES) quote = random.choice(SIMPSONS_QUOTES)
source = quote[0].replace(' ', '-') source = quote[0].replace(" ", "-")
response["X-%s" % source] = quote[1] response["X-%s" % source] = quote[1]
return response return response
def __call__(self, request): def __call__(self, request):
response = None response = None
if hasattr(self, 'process_request'): if hasattr(self, "process_request"):
response = self.process_request(request) response = self.process_request(request)
if not response: if not response:
response = self.get_response(request) response = self.get_response(request)
if hasattr(self, 'process_response'): if hasattr(self, "process_response"):
response = self.process_response(request, response) response = self.process_response(request, response)
return response return response
@ -515,11 +516,11 @@ class ServerHostnameMiddleware:
def __call__(self, request): def __call__(self, request):
response = None response = None
if hasattr(self, 'process_request'): if hasattr(self, "process_request"):
response = self.process_request(request) response = self.process_request(request)
if not response: if not response:
response = self.get_response(request) response = self.get_response(request)
if hasattr(self, 'process_response'): if hasattr(self, "process_response"):
response = self.process_response(request, response) response = self.process_response(request, response)
return response return response
@ -530,7 +531,7 @@ class TimingMiddleware:
self.get_response = get_response self.get_response = get_response
def process_request(self, request): def process_request(self, request):
setattr(request, 'start_time', time.time()) setattr(request, "start_time", time.time())
def __call__(self, request): def __call__(self, request):
response = self.process_request(request) response = self.process_request(request)
@ -541,8 +542,8 @@ class TimingMiddleware:
BANNED_USER_AGENTS = ( BANNED_USER_AGENTS = (
'feed reader-background', "feed reader-background",
'missing', "missing",
) )
BANNED_USERNAMES = () BANNED_USERNAMES = ()
@ -553,46 +554,46 @@ class UserAgentBanMiddleware:
self.get_response = get_response self.get_response = get_response
def process_request(self, request): def process_request(self, request):
user_agent = request.environ.get('HTTP_USER_AGENT', 'missing').lower() user_agent = request.environ.get("HTTP_USER_AGENT", "missing").lower()
if 'profile' in request.path: if "profile" in request.path:
return return
if 'haproxy' in request.path: if "haproxy" in request.path:
return return
if 'dbcheck' in request.path: if "dbcheck" in request.path:
return return
if 'account' in request.path: if "account" in request.path:
return return
if 'push' in request.path: if "push" in request.path:
return return
if getattr(settings, 'TEST_DEBUG'): if getattr(settings, "TEST_DEBUG"):
return return
if any(ua in user_agent for ua in BANNED_USER_AGENTS): if any(ua in user_agent for ua in BANNED_USER_AGENTS):
data = {'error': 'User agent banned: %s' % user_agent, 'code': -1} data = {"error": "User agent banned: %s" % user_agent, "code": -1}
logging.user( logging.user(
request, "~FB~SN~BBBanned UA: ~SB%s / %s (%s)" % (user_agent, request.path, request.META) request, "~FB~SN~BBBanned UA: ~SB%s / %s (%s)" % (user_agent, request.path, request.META)
) )
return HttpResponse(json.encode(data), status=403, content_type='text/json') return HttpResponse(json.encode(data), status=403, content_type="text/json")
if request.user.is_authenticated and any( if request.user.is_authenticated and any(
username == request.user.username for username in BANNED_USERNAMES username == request.user.username for username in BANNED_USERNAMES
): ):
data = {'error': 'User banned: %s' % request.user.username, 'code': -1} data = {"error": "User banned: %s" % request.user.username, "code": -1}
logging.user( logging.user(
request, request,
"~FB~SN~BBBanned Username: ~SB%s / %s (%s)" % (request.user, request.path, request.META), "~FB~SN~BBBanned Username: ~SB%s / %s (%s)" % (request.user, request.path, request.META),
) )
return HttpResponse(json.encode(data), status=403, content_type='text/json') return HttpResponse(json.encode(data), status=403, content_type="text/json")
def __call__(self, request): def __call__(self, request):
response = None response = None
if hasattr(self, 'process_request'): if hasattr(self, "process_request"):
response = self.process_request(request) response = self.process_request(request)
if not response: if not response:
response = self.get_response(request) response = self.get_response(request)
if hasattr(self, 'process_response'): if hasattr(self, "process_response"):
response = self.process_response(request, response) response = self.process_response(request, response)
return response return response

File diff suppressed because one or more lines are too long

View file

@ -6,15 +6,19 @@ import django.db.models.deletion
class Migration(migrations.Migration): class Migration(migrations.Migration):
dependencies = [ dependencies = [
('profile', '0001_initial'), ("profile", "0001_initial"),
] ]
operations = [ operations = [
migrations.AlterField( migrations.AlterField(
model_name='stripeids', model_name="stripeids",
name='user', name="user",
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='stripe_ids', to=settings.AUTH_USER_MODEL), field=models.ForeignKey(
null=True,
on_delete=django.db.models.deletion.CASCADE,
related_name="stripe_ids",
to=settings.AUTH_USER_MODEL,
),
), ),
] ]

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View file

@ -4,15 +4,14 @@ from django.db import migrations, models
class Migration(migrations.Migration): class Migration(migrations.Migration):
dependencies = [ dependencies = [
('profile', '0004_auto_20220110_2106'), ("profile", "0004_auto_20220110_2106"),
] ]
operations = [ operations = [
migrations.AddField( migrations.AddField(
model_name='profile', model_name="profile",
name='is_archive', name="is_archive",
field=models.BooleanField(blank=True, default=False, null=True), field=models.BooleanField(blank=True, default=False, null=True),
), ),
] ]

View file

@ -4,15 +4,14 @@ from django.db import migrations, models
class Migration(migrations.Migration): class Migration(migrations.Migration):
dependencies = [ dependencies = [
('profile', '0005_profile_is_archive'), ("profile", "0005_profile_is_archive"),
] ]
operations = [ operations = [
migrations.AddField( migrations.AddField(
model_name='profile', model_name="profile",
name='days_of_unread', name="days_of_unread",
field=models.IntegerField(default=30, blank=True, null=True), field=models.IntegerField(default=30, blank=True, null=True),
), ),
] ]

File diff suppressed because one or more lines are too long

View file

@ -4,15 +4,14 @@ from django.db import migrations, models
class Migration(migrations.Migration): class Migration(migrations.Migration):
dependencies = [ dependencies = [
('profile', '0007_auto_20220125_2108'), ("profile", "0007_auto_20220125_2108"),
] ]
operations = [ operations = [
migrations.AddField( migrations.AddField(
model_name='profile', model_name="profile",
name='paypal_sub_id', name="paypal_sub_id",
field=models.CharField(blank=True, max_length=24, null=True), field=models.CharField(blank=True, max_length=24, null=True),
), ),
] ]

View file

@ -6,19 +6,29 @@ import django.db.models.deletion
class Migration(migrations.Migration): class Migration(migrations.Migration):
dependencies = [ dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL), migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('profile', '0008_profile_paypal_sub_id'), ("profile", "0008_profile_paypal_sub_id"),
] ]
operations = [ operations = [
migrations.CreateModel( migrations.CreateModel(
name='PaypalIds', name="PaypalIds",
fields=[ fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), (
('paypal_sub_id', models.CharField(blank=True, max_length=24, null=True)), "id",
('user', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='paypal_ids', to=settings.AUTH_USER_MODEL)), models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID"),
),
("paypal_sub_id", models.CharField(blank=True, max_length=24, null=True)),
(
"user",
models.ForeignKey(
null=True,
on_delete=django.db.models.deletion.CASCADE,
related_name="paypal_ids",
to=settings.AUTH_USER_MODEL,
),
),
], ],
), ),
] ]

View file

@ -4,15 +4,14 @@ from django.db import migrations, models
class Migration(migrations.Migration): class Migration(migrations.Migration):
dependencies = [ dependencies = [
('profile', '0009_paypalids'), ("profile", "0009_paypalids"),
] ]
operations = [ operations = [
migrations.AddField( migrations.AddField(
model_name='profile', model_name="profile",
name='active_provider', name="active_provider",
field=models.CharField(blank=True, max_length=24, null=True), field=models.CharField(blank=True, max_length=24, null=True),
), ),
] ]

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because it is too large Load diff

View file

@ -5,16 +5,19 @@ from utils import log as logging
from apps.reader.models import UserSubscription, UserSubscriptionFolders from apps.reader.models import UserSubscription, UserSubscriptionFolders
from apps.social.models import MSocialServices, MActivity, MInteraction from apps.social.models import MSocialServices, MActivity, MInteraction
@app.task(name="email-new-user") @app.task(name="email-new-user")
def EmailNewUser(user_id): def EmailNewUser(user_id):
user_profile = Profile.objects.get(user__pk=user_id) user_profile = Profile.objects.get(user__pk=user_id)
user_profile.send_new_user_email() user_profile.send_new_user_email()
@app.task(name="email-new-premium") @app.task(name="email-new-premium")
def EmailNewPremium(user_id): def EmailNewPremium(user_id):
user_profile = Profile.objects.get(user__pk=user_id) user_profile = Profile.objects.get(user__pk=user_id)
user_profile.send_new_premium_email() user_profile.send_new_premium_email()
@app.task() @app.task()
def FetchArchiveFeedsForUser(user_id): def FetchArchiveFeedsForUser(user_id):
# subs = UserSubscription.objects.filter(user=user_id) # subs = UserSubscription.objects.filter(user=user_id)
@ -23,33 +26,39 @@ def FetchArchiveFeedsForUser(user_id):
UserSubscription.fetch_archive_feeds_for_user(user_id) UserSubscription.fetch_archive_feeds_for_user(user_id)
@app.task() @app.task()
def FetchArchiveFeedsChunk(user_id, feed_ids): def FetchArchiveFeedsChunk(user_id, feed_ids):
# logging.debug(" ---> Fetching archive stories: %s for %s" % (feed_ids, user_id)) # logging.debug(" ---> Fetching archive stories: %s for %s" % (feed_ids, user_id))
UserSubscription.fetch_archive_feeds_chunk(user_id, feed_ids) UserSubscription.fetch_archive_feeds_chunk(user_id, feed_ids)
@app.task() @app.task()
def FinishFetchArchiveFeeds(results, user_id, start_time, starting_story_count): def FinishFetchArchiveFeeds(results, user_id, start_time, starting_story_count):
# logging.debug(" ---> Fetching archive stories finished for %s" % (user_id)) # logging.debug(" ---> Fetching archive stories finished for %s" % (user_id))
ending_story_count, pre_archive_count = UserSubscription.finish_fetch_archive_feeds(user_id, start_time, starting_story_count) ending_story_count, pre_archive_count = UserSubscription.finish_fetch_archive_feeds(
user_id, start_time, starting_story_count
)
user_profile = Profile.objects.get(user__pk=user_id) user_profile = Profile.objects.get(user__pk=user_id)
user_profile.send_new_premium_archive_email(ending_story_count, pre_archive_count) user_profile.send_new_premium_archive_email(ending_story_count, pre_archive_count)
@app.task(name="email-new-premium-pro") @app.task(name="email-new-premium-pro")
def EmailNewPremiumPro(user_id): def EmailNewPremiumPro(user_id):
user_profile = Profile.objects.get(user__pk=user_id) user_profile = Profile.objects.get(user__pk=user_id)
user_profile.send_new_premium_pro_email() user_profile.send_new_premium_pro_email()
@app.task(name="premium-expire") @app.task(name="premium-expire")
def PremiumExpire(**kwargs): def PremiumExpire(**kwargs):
# Get expired but grace period users # Get expired but grace period users
two_days_ago = datetime.datetime.now() - datetime.timedelta(days=2) two_days_ago = datetime.datetime.now() - datetime.timedelta(days=2)
thirty_days_ago = datetime.datetime.now() - datetime.timedelta(days=30) thirty_days_ago = datetime.datetime.now() - datetime.timedelta(days=30)
expired_profiles = Profile.objects.filter(is_premium=True, expired_profiles = Profile.objects.filter(
premium_expire__lte=two_days_ago, is_premium=True, premium_expire__lte=two_days_ago, premium_expire__gt=thirty_days_ago
premium_expire__gt=thirty_days_ago) )
logging.debug(" ---> %s users have expired premiums, emailing grace..." % expired_profiles.count()) logging.debug(" ---> %s users have expired premiums, emailing grace..." % expired_profiles.count())
for profile in expired_profiles: for profile in expired_profiles:
if profile.grace_period_email_sent(): if profile.grace_period_email_sent():
@ -57,21 +66,24 @@ def PremiumExpire(**kwargs):
profile.setup_premium_history() profile.setup_premium_history()
if profile.premium_expire < two_days_ago: if profile.premium_expire < two_days_ago:
profile.send_premium_expire_grace_period_email() profile.send_premium_expire_grace_period_email()
# Get fully expired users # Get fully expired users
expired_profiles = Profile.objects.filter(is_premium=True, expired_profiles = Profile.objects.filter(is_premium=True, premium_expire__lte=thirty_days_ago)
premium_expire__lte=thirty_days_ago) logging.debug(
logging.debug(" ---> %s users have expired premiums, deactivating and emailing..." % expired_profiles.count()) " ---> %s users have expired premiums, deactivating and emailing..." % expired_profiles.count()
)
for profile in expired_profiles: for profile in expired_profiles:
profile.setup_premium_history() profile.setup_premium_history()
if profile.premium_expire < thirty_days_ago: if profile.premium_expire < thirty_days_ago:
profile.send_premium_expire_email() profile.send_premium_expire_email()
profile.deactivate_premium() profile.deactivate_premium()
@app.task(name="activate-next-new-user") @app.task(name="activate-next-new-user")
def ActivateNextNewUser(): def ActivateNextNewUser():
RNewUserQueue.activate_next() RNewUserQueue.activate_next()
@app.task(name="cleanup-user") @app.task(name="cleanup-user")
def CleanupUser(user_id): def CleanupUser(user_id):
UserSubscription.trim_user_read_stories(user_id) UserSubscription.trim_user_read_stories(user_id)
@ -82,7 +94,7 @@ def CleanupUser(user_id):
UserSubscriptionFolders.add_missing_feeds_for_user(user_id) UserSubscriptionFolders.add_missing_feeds_for_user(user_id)
UserSubscriptionFolders.compact_for_user(user_id) UserSubscriptionFolders.compact_for_user(user_id)
UserSubscription.refresh_stale_feeds(user_id) UserSubscription.refresh_stale_feeds(user_id)
try: try:
ss = MSocialServices.objects.get(user_id=user_id) ss = MSocialServices.objects.get(user_id=user_id)
except MSocialServices.DoesNotExist: except MSocialServices.DoesNotExist:
@ -90,14 +102,14 @@ def CleanupUser(user_id):
return return
ss.sync_twitter_photo() ss.sync_twitter_photo()
@app.task(name="clean-spam") @app.task(name="clean-spam")
def CleanSpam(): def CleanSpam():
logging.debug(" ---> Finding spammers...") logging.debug(" ---> Finding spammers...")
Profile.clear_dead_spammers(confirm=True) Profile.clear_dead_spammers(confirm=True)
@app.task(name="reimport-stripe-history") @app.task(name="reimport-stripe-history")
def ReimportStripeHistory(): def ReimportStripeHistory():
logging.debug(" ---> Reimporting Stripe history...") logging.debug(" ---> Reimporting Stripe history...")
Profile.reimport_stripe_history(limit=10, days=1) Profile.reimport_stripe_history(limit=10, days=1)

View file

@ -5,33 +5,36 @@ from django.urls import reverse
from django.conf import settings from django.conf import settings
from mongoengine.connection import connect, disconnect from mongoengine.connection import connect, disconnect
class Test_Profile(TestCase): class Test_Profile(TestCase):
fixtures = [ fixtures = [
'subscriptions.json', "subscriptions.json",
'rss_feeds.json', "rss_feeds.json",
] ]
def setUp(self): def setUp(self):
disconnect() disconnect()
settings.MONGODB = connect('test_newsblur') settings.MONGODB = connect("test_newsblur")
self.client = Client(HTTP_USER_AGENT='Mozilla/5.0') self.client = Client(HTTP_USER_AGENT="Mozilla/5.0")
def tearDown(self): def tearDown(self):
settings.MONGODB.drop_database('test_newsblur') settings.MONGODB.drop_database("test_newsblur")
def test_create_account(self):
resp = self.client.get(reverse('load-feeds'))
response = json.decode(resp.content)
self.assertEquals(response['authenticated'], False)
response = self.client.post(reverse('welcome-signup'), { def test_create_account(self):
'signup-username': 'test', resp = self.client.get(reverse("load-feeds"))
'signup-password': 'password', response = json.decode(resp.content)
'signup-email': 'test@newsblur.com', self.assertEquals(response["authenticated"], False)
})
response = self.client.post(
reverse("welcome-signup"),
{
"signup-username": "test",
"signup-password": "password",
"signup-email": "test@newsblur.com",
},
)
self.assertEquals(response.status_code, 302) self.assertEquals(response.status_code, 302)
resp = self.client.get(reverse('load-feeds')) resp = self.client.get(reverse("load-feeds"))
response = json.decode(resp.content) response = json.decode(resp.content)
self.assertEquals(response['authenticated'], True) self.assertEquals(response["authenticated"], True)

View file

@ -2,41 +2,45 @@ from django.conf.urls import *
from apps.profile import views from apps.profile import views
urlpatterns = [ urlpatterns = [
url(r'^get_preferences?/?', views.get_preference), url(r"^get_preferences?/?", views.get_preference),
url(r'^set_preference/?', views.set_preference), url(r"^set_preference/?", views.set_preference),
url(r'^set_account_settings/?', views.set_account_settings), url(r"^set_account_settings/?", views.set_account_settings),
url(r'^get_view_setting/?', views.get_view_setting), url(r"^get_view_setting/?", views.get_view_setting),
url(r'^set_view_setting/?', views.set_view_setting), url(r"^set_view_setting/?", views.set_view_setting),
url(r'^clear_view_setting/?', views.clear_view_setting), url(r"^clear_view_setting/?", views.clear_view_setting),
url(r'^set_collapsed_folders/?', views.set_collapsed_folders), url(r"^set_collapsed_folders/?", views.set_collapsed_folders),
url(r'^paypal_form/?', views.paypal_form), url(r"^paypal_form/?", views.paypal_form),
url(r'^paypal_return/?', views.paypal_return, name='paypal-return'), url(r"^paypal_return/?", views.paypal_return, name="paypal-return"),
url(r'^paypal_archive_return/?', views.paypal_archive_return, name='paypal-archive-return'), url(r"^paypal_archive_return/?", views.paypal_archive_return, name="paypal-archive-return"),
url(r'^stripe_return/?', views.paypal_return, name='stripe-return'), url(r"^stripe_return/?", views.paypal_return, name="stripe-return"),
url(r'^switch_stripe_subscription/?', views.switch_stripe_subscription, name='switch-stripe-subscription'), url(
url(r'^switch_paypal_subscription/?', views.switch_paypal_subscription, name='switch-paypal-subscription'), r"^switch_stripe_subscription/?", views.switch_stripe_subscription, name="switch-stripe-subscription"
url(r'^is_premium/?', views.profile_is_premium, name='profile-is-premium'), ),
url(r'^is_premium_archive/?', views.profile_is_premium_archive, name='profile-is-premium-archive'), url(
r"^switch_paypal_subscription/?", views.switch_paypal_subscription, name="switch-paypal-subscription"
),
url(r"^is_premium/?", views.profile_is_premium, name="profile-is-premium"),
url(r"^is_premium_archive/?", views.profile_is_premium_archive, name="profile-is-premium-archive"),
# url(r'^paypal_ipn/?', include('paypal.standard.ipn.urls'), name='paypal-ipn'), # url(r'^paypal_ipn/?', include('paypal.standard.ipn.urls'), name='paypal-ipn'),
url(r'^paypal_ipn/?', views.paypal_ipn, name='paypal-ipn'), url(r"^paypal_ipn/?", views.paypal_ipn, name="paypal-ipn"),
url(r'^paypal_webhooks/?', views.paypal_webhooks, name='paypal-webhooks'), url(r"^paypal_webhooks/?", views.paypal_webhooks, name="paypal-webhooks"),
url(r'^stripe_form/?', views.stripe_form, name='stripe-form'), url(r"^stripe_form/?", views.stripe_form, name="stripe-form"),
url(r'^stripe_checkout/?', views.stripe_checkout, name='stripe-checkout'), url(r"^stripe_checkout/?", views.stripe_checkout, name="stripe-checkout"),
url(r'^activities/?', views.load_activities, name='profile-activities'), url(r"^activities/?", views.load_activities, name="profile-activities"),
url(r'^payment_history/?', views.payment_history, name='profile-payment-history'), url(r"^payment_history/?", views.payment_history, name="profile-payment-history"),
url(r'^cancel_premium/?', views.cancel_premium, name='profile-cancel-premium'), url(r"^cancel_premium/?", views.cancel_premium, name="profile-cancel-premium"),
url(r'^refund_premium/?', views.refund_premium, name='profile-refund-premium'), url(r"^refund_premium/?", views.refund_premium, name="profile-refund-premium"),
url(r'^never_expire_premium/?', views.never_expire_premium, name='profile-never-expire-premium'), url(r"^never_expire_premium/?", views.never_expire_premium, name="profile-never-expire-premium"),
url(r'^upgrade_premium/?', views.upgrade_premium, name='profile-upgrade-premium'), url(r"^upgrade_premium/?", views.upgrade_premium, name="profile-upgrade-premium"),
url(r'^save_ios_receipt/?', views.save_ios_receipt, name='save-ios-receipt'), url(r"^save_ios_receipt/?", views.save_ios_receipt, name="save-ios-receipt"),
url(r'^save_android_receipt/?', views.save_android_receipt, name='save-android-receipt'), url(r"^save_android_receipt/?", views.save_android_receipt, name="save-android-receipt"),
url(r'^update_payment_history/?', views.update_payment_history, name='profile-update-payment-history'), url(r"^update_payment_history/?", views.update_payment_history, name="profile-update-payment-history"),
url(r'^delete_account/?', views.delete_account, name='profile-delete-account'), url(r"^delete_account/?", views.delete_account, name="profile-delete-account"),
url(r'^forgot_password_return/?', views.forgot_password_return, name='profile-forgot-password-return'), url(r"^forgot_password_return/?", views.forgot_password_return, name="profile-forgot-password-return"),
url(r'^forgot_password/?', views.forgot_password, name='profile-forgot-password'), url(r"^forgot_password/?", views.forgot_password, name="profile-forgot-password"),
url(r'^delete_starred_stories/?', views.delete_starred_stories, name='profile-delete-starred-stories'), url(r"^delete_starred_stories/?", views.delete_starred_stories, name="profile-delete-starred-stories"),
url(r'^delete_all_sites/?', views.delete_all_sites, name='profile-delete-all-sites'), url(r"^delete_all_sites/?", views.delete_all_sites, name="profile-delete-all-sites"),
url(r'^email_optout/?', views.email_optout, name='profile-email-optout'), url(r"^email_optout/?", views.email_optout, name="profile-email-optout"),
url(r'^ios_subscription_status/?', views.ios_subscription_status, name='profile-ios-subscription-status'), url(r"^ios_subscription_status/?", views.ios_subscription_status, name="profile-ios-subscription-status"),
url(r'debug/?', views.trigger_error, name='trigger-error'), url(r"debug/?", views.trigger_error, name="trigger-error"),
] ]

File diff suppressed because it is too large Load diff

View file

@ -6,24 +6,31 @@ import django.db.models.deletion
class Migration(migrations.Migration): class Migration(migrations.Migration):
initial = True initial = True
dependencies = [ dependencies = [
('rss_feeds', '0001_initial'), ("rss_feeds", "0001_initial"),
] ]
operations = [ operations = [
migrations.CreateModel( migrations.CreateModel(
name='PushSubscription', name="PushSubscription",
fields=[ fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), (
('hub', models.URLField(db_index=True)), "id",
('topic', models.URLField(db_index=True)), models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID"),
('verified', models.BooleanField(default=False)), ),
('verify_token', models.CharField(max_length=60)), ("hub", models.URLField(db_index=True)),
('lease_expires', models.DateTimeField(default=datetime.datetime.now)), ("topic", models.URLField(db_index=True)),
('feed', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='push', to='rss_feeds.Feed')), ("verified", models.BooleanField(default=False)),
("verify_token", models.CharField(max_length=60)),
("lease_expires", models.DateTimeField(default=datetime.datetime.now)),
(
"feed",
models.OneToOneField(
on_delete=django.db.models.deletion.CASCADE, related_name="push", to="rss_feeds.Feed"
),
),
], ],
), ),
] ]

View file

@ -15,22 +15,20 @@ from apps.rss_feeds.models import Feed
from utils import log as logging from utils import log as logging
from utils.feed_functions import timelimit, TimeoutError from utils.feed_functions import timelimit, TimeoutError
DEFAULT_LEASE_SECONDS = (10 * 24 * 60 * 60) # 10 days DEFAULT_LEASE_SECONDS = 10 * 24 * 60 * 60 # 10 days
class PushSubscriptionManager(models.Manager): class PushSubscriptionManager(models.Manager):
@timelimit(5) @timelimit(5)
def subscribe(self, topic, feed, hub=None, callback=None, def subscribe(self, topic, feed, hub=None, callback=None, lease_seconds=None, force_retry=False):
lease_seconds=None, force_retry=False):
if hub is None: if hub is None:
hub = self._get_hub(topic) hub = self._get_hub(topic)
if hub is None: if hub is None:
raise TypeError('hub cannot be None if the feed does not provide it') raise TypeError("hub cannot be None if the feed does not provide it")
if lease_seconds is None: if lease_seconds is None:
lease_seconds = getattr(settings, 'PUBSUBHUBBUB_LEASE_SECONDS', lease_seconds = getattr(settings, "PUBSUBHUBBUB_LEASE_SECONDS", DEFAULT_LEASE_SECONDS)
DEFAULT_LEASE_SECONDS)
feed = Feed.get_by_id(feed.id) feed = Feed.get_by_id(feed.id)
subscription, created = self.get_or_create(feed=feed) subscription, created = self.get_or_create(feed=feed)
signals.pre_subscribe.send(sender=subscription, created=created) signals.pre_subscribe.send(sender=subscription, created=created)
@ -41,38 +39,44 @@ class PushSubscriptionManager(models.Manager):
subscription.topic = feed.feed_link[:200] subscription.topic = feed.feed_link[:200]
subscription.hub = hub subscription.hub = hub
subscription.save() subscription.save()
if callback is None: if callback is None:
callback_path = reverse('push-callback', args=(subscription.pk,)) callback_path = reverse("push-callback", args=(subscription.pk,))
callback = 'https://' + settings.PUSH_DOMAIN + callback_path callback = "https://" + settings.PUSH_DOMAIN + callback_path
# callback = "https://push.newsblur.com/push/%s" % subscription.pk # + callback_path # callback = "https://push.newsblur.com/push/%s" % subscription.pk # + callback_path
try: try:
response = self._send_request(hub, { response = self._send_request(
'hub.mode' : 'subscribe', hub,
'hub.callback' : callback, {
'hub.topic' : topic, "hub.mode": "subscribe",
'hub.verify' : ['async', 'sync'], "hub.callback": callback,
'hub.verify_token' : subscription.generate_token('subscribe'), "hub.topic": topic,
'hub.lease_seconds' : lease_seconds, "hub.verify": ["async", "sync"],
}) "hub.verify_token": subscription.generate_token("subscribe"),
"hub.lease_seconds": lease_seconds,
},
)
except (requests.ConnectionError, requests.exceptions.MissingSchema): except (requests.ConnectionError, requests.exceptions.MissingSchema):
response = None response = None
if response and response.status_code == 204: if response and response.status_code == 204:
subscription.verified = True subscription.verified = True
elif response and response.status_code == 202: # async verification elif response and response.status_code == 202: # async verification
subscription.verified = False subscription.verified = False
else: else:
error = response and response.text or "" error = response and response.text or ""
if not force_retry and 'You may only subscribe to' in error: if not force_retry and "You may only subscribe to" in error:
extracted_topic = re.search("You may only subscribe to (.*?) ", error) extracted_topic = re.search("You may only subscribe to (.*?) ", error)
if extracted_topic: if extracted_topic:
subscription = self.subscribe(extracted_topic.group(1), subscription = self.subscribe(
feed=feed, hub=hub, force_retry=True) extracted_topic.group(1), feed=feed, hub=hub, force_retry=True
)
else: else:
logging.debug(u' ---> [%-30s] ~FR~BKFeed failed to subscribe to push: %s (code: %s)' % ( logging.debug(
subscription.feed.log_title[:30], error[:100], response and response.status_code)) " ---> [%-30s] ~FR~BKFeed failed to subscribe to push: %s (code: %s)"
% (subscription.feed.log_title[:30], error[:100], response and response.status_code)
)
subscription.save() subscription.save()
feed.setup_push() feed.setup_push()
@ -80,18 +84,18 @@ class PushSubscriptionManager(models.Manager):
signals.verified.send(sender=subscription) signals.verified.send(sender=subscription)
return subscription return subscription
def _get_hub(self, topic): def _get_hub(self, topic):
parsed = feedparser.parse(topic) parsed = feedparser.parse(topic)
for link in parsed.feed.links: for link in parsed.feed.links:
if link['rel'] == 'hub': if link["rel"] == "hub":
return link['href'] return link["href"]
def _send_request(self, url, data): def _send_request(self, url, data):
return requests.post(url, data=data) return requests.post(url, data=data)
class PushSubscription(models.Model): class PushSubscription(models.Model):
feed = models.OneToOneField(Feed, db_index=True, related_name='push', on_delete=models.CASCADE) feed = models.OneToOneField(Feed, db_index=True, related_name="push", on_delete=models.CASCADE)
hub = models.URLField(db_index=True) hub = models.URLField(db_index=True)
topic = models.URLField(db_index=True) topic = models.URLField(db_index=True)
verified = models.BooleanField(default=False) verified = models.BooleanField(default=False)
@ -104,43 +108,45 @@ class PushSubscription(models.Model):
# unique_together = [ # unique_together = [
# ('hub', 'topic') # ('hub', 'topic')
# ] # ]
def unsubscribe(self): def unsubscribe(self):
feed = self.feed feed = self.feed
self.delete() self.delete()
feed.setup_push() feed.setup_push()
def set_expiration(self, lease_seconds): def set_expiration(self, lease_seconds):
self.lease_expires = datetime.now() + timedelta( self.lease_expires = datetime.now() + timedelta(seconds=lease_seconds)
seconds=lease_seconds)
self.save() self.save()
def generate_token(self, mode): def generate_token(self, mode):
assert self.pk is not None, \ assert self.pk is not None, "Subscription must be saved before generating token"
'Subscription must be saved before generating token' token = (
token = mode[:20] + hashlib.sha1(('%s%i%s' % ( mode[:20]
settings.SECRET_KEY, self.pk, mode)).encode(encoding='utf-8')).hexdigest() + hashlib.sha1(
("%s%i%s" % (settings.SECRET_KEY, self.pk, mode)).encode(encoding="utf-8")
).hexdigest()
)
self.verify_token = token self.verify_token = token
self.save() self.save()
return token return token
def check_urls_against_pushed_data(self, parsed): def check_urls_against_pushed_data(self, parsed):
if hasattr(parsed.feed, 'links'): # single notification if hasattr(parsed.feed, "links"): # single notification
hub_url = self.hub hub_url = self.hub
self_url = self.topic self_url = self.topic
for link in parsed.feed.links: for link in parsed.feed.links:
href = link.get('href', '') href = link.get("href", "")
if any(w in href for w in ['wp-admin', 'wp-cron']): if any(w in href for w in ["wp-admin", "wp-cron"]):
continue continue
if link['rel'] == 'hub': if link["rel"] == "hub":
hub_url = link['href'] hub_url = link["href"]
elif link['rel'] == 'self': elif link["rel"] == "self":
self_url = link['href'] self_url = link["href"]
if hub_url and hub_url.startswith('//'): if hub_url and hub_url.startswith("//"):
hub_url = "http:%s" % hub_url hub_url = "http:%s" % hub_url
needs_update = False needs_update = False
if hub_url and self.hub != hub_url: if hub_url and self.hub != hub_url:
# hub URL has changed; let's update our subscription # hub URL has changed; let's update our subscription
@ -150,23 +156,24 @@ class PushSubscription(models.Model):
needs_update = True needs_update = True
if needs_update: if needs_update:
logging.debug(u' ---> [%-30s] ~FR~BKUpdating PuSH hub/topic: %s / %s' % ( logging.debug(
self.feed, hub_url, self_url)) " ---> [%-30s] ~FR~BKUpdating PuSH hub/topic: %s / %s" % (self.feed, hub_url, self_url)
)
expiration_time = self.lease_expires - datetime.now() expiration_time = self.lease_expires - datetime.now()
seconds = expiration_time.days*86400 + expiration_time.seconds seconds = expiration_time.days * 86400 + expiration_time.seconds
try: try:
PushSubscription.objects.subscribe( PushSubscription.objects.subscribe(
self_url, feed=self.feed, hub=hub_url, self_url, feed=self.feed, hub=hub_url, lease_seconds=seconds
lease_seconds=seconds) )
except TimeoutError: except TimeoutError:
logging.debug(u' ---> [%-30s] ~FR~BKTimed out updating PuSH hub/topic: %s / %s' % ( logging.debug(
self.feed, hub_url, self_url)) " ---> [%-30s] ~FR~BKTimed out updating PuSH hub/topic: %s / %s"
% (self.feed, hub_url, self_url)
)
def __str__(self): def __str__(self):
if self.verified: if self.verified:
verified = u'verified' verified = "verified"
else: else:
verified = u'unverified' verified = "unverified"
return u'to %s on %s: %s' % ( return "to %s on %s: %s" % (self.topic, self.hub, verified)
self.topic, self.hub, verified)

View file

@ -2,6 +2,6 @@
from django.dispatch import Signal from django.dispatch import Signal
pre_subscribe = Signal(providing_args=['created']) pre_subscribe = Signal(providing_args=["created"])
verified = Signal() verified = Signal()
updated = Signal(providing_args=['update']) updated = Signal(providing_args=["update"])

View file

@ -1,17 +1,17 @@
# Copyright 2009 - Participatory Culture Foundation # Copyright 2009 - Participatory Culture Foundation
# #
# This file is part of djpubsubhubbub. # This file is part of djpubsubhubbub.
# #
# Redistribution and use in source and binary forms, with or without # Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions # modification, are permitted provided that the following conditions
# are met: # are met:
# #
# 1. Redistributions of source code must retain the above copyright # 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer. # notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright # 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the # notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution. # documentation and/or other materials provided with the distribution.
# #
# THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR # THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES # IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
# OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. # OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
@ -32,6 +32,7 @@ from django.test import TestCase
from apps.push.models import PushSubscription, PushSubscriptionManager from apps.push.models import PushSubscription, PushSubscriptionManager
from apps.push.signals import pre_subscribe, verified, updated from apps.push.signals import pre_subscribe, verified, updated
class MockResponse(object): class MockResponse(object):
def __init__(self, status, data=None): def __init__(self, status, data=None):
self.status = status self.status = status
@ -42,13 +43,13 @@ class MockResponse(object):
def read(self): def read(self):
if self.data is None: if self.data is None:
return '' return ""
data, self.data = self.data, None data, self.data = self.data, None
return data return data
class PSHBTestBase:
urls = 'apps.push.urls' class PSHBTestBase:
urls = "apps.push.urls"
def setUp(self): def setUp(self):
self._old_send_request = PushSubscriptionManager._send_request self._old_send_request = PushSubscriptionManager._send_request
@ -57,8 +58,10 @@ class PSHBTestBase:
self.requests = [] self.requests = []
self.signals = [] self.signals = []
for connecter in pre_subscribe, verified, updated: for connecter in pre_subscribe, verified, updated:
def callback(signal=None, **kwargs): def callback(signal=None, **kwargs):
self.signals.append((signal, kwargs)) self.signals.append((signal, kwargs))
connecter.connect(callback, dispatch_uid=connecter, weak=False) connecter.connect(callback, dispatch_uid=connecter, weak=False)
def tearDown(self): def tearDown(self):
@ -71,34 +74,32 @@ class PSHBTestBase:
self.requests.append((url, data)) self.requests.append((url, data))
return self.responses.pop() return self.responses.pop()
class Test_PSHBSubscriptionManagerTest(PSHBTestBase, TestCase):
class Test_PSHBSubscriptionManagerTest(PSHBTestBase, TestCase):
def test_sync_verify(self): def test_sync_verify(self):
""" """
If the hub returns a 204 response, the subscription is verified and If the hub returns a 204 response, the subscription is verified and
active. active.
""" """
self.responses.append(MockResponse(204)) self.responses.append(MockResponse(204))
sub = PushSubscription.objects.subscribe('topic', 'hub', 'callback', 2000) sub = PushSubscription.objects.subscribe("topic", "hub", "callback", 2000)
self.assertEquals(len(self.signals), 2) self.assertEquals(len(self.signals), 2)
self.assertEquals(self.signals[0], (pre_subscribe, {'sender': sub, self.assertEquals(self.signals[0], (pre_subscribe, {"sender": sub, "created": True}))
'created': True})) self.assertEquals(self.signals[1], (verified, {"sender": sub}))
self.assertEquals(self.signals[1], (verified, {'sender': sub})) self.assertEquals(sub.hub, "hub")
self.assertEquals(sub.hub, 'hub') self.assertEquals(sub.topic, "topic")
self.assertEquals(sub.topic, 'topic')
self.assertEquals(sub.verified, True) self.assertEquals(sub.verified, True)
rough_expires = datetime.now() + timedelta(seconds=2000) rough_expires = datetime.now() + timedelta(seconds=2000)
self.assert_(abs(sub.lease_expires - rough_expires).seconds < 5, self.assert_(abs(sub.lease_expires - rough_expires).seconds < 5, "lease more than 5 seconds off")
'lease more than 5 seconds off')
self.assertEquals(len(self.requests), 1) self.assertEquals(len(self.requests), 1)
request = self.requests[0] request = self.requests[0]
self.assertEquals(request[0], 'hub') self.assertEquals(request[0], "hub")
self.assertEquals(request[1]['mode'], 'subscribe') self.assertEquals(request[1]["mode"], "subscribe")
self.assertEquals(request[1]['topic'], 'topic') self.assertEquals(request[1]["topic"], "topic")
self.assertEquals(request[1]['callback'], 'callback') self.assertEquals(request[1]["callback"], "callback")
self.assertEquals(request[1]['verify'], ('async', 'sync')) self.assertEquals(request[1]["verify"], ("async", "sync"))
self.assertEquals(request[1]['verify_token'], sub.verify_token) self.assertEquals(request[1]["verify_token"], sub.verify_token)
self.assertEquals(request[1]['lease_seconds'], 2000) self.assertEquals(request[1]["lease_seconds"], 2000)
def test_async_verify(self): def test_async_verify(self):
""" """
@ -106,25 +107,23 @@ class Test_PSHBSubscriptionManagerTest(PSHBTestBase, TestCase):
subscription is verified. subscription is verified.
""" """
self.responses.append(MockResponse(202)) self.responses.append(MockResponse(202))
sub = PushSubscription.objects.subscribe('topic', 'hub', 'callback', 2000) sub = PushSubscription.objects.subscribe("topic", "hub", "callback", 2000)
self.assertEquals(len(self.signals), 1) self.assertEquals(len(self.signals), 1)
self.assertEquals(self.signals[0], (pre_subscribe, {'sender': sub, self.assertEquals(self.signals[0], (pre_subscribe, {"sender": sub, "created": True}))
'created': True})) self.assertEquals(sub.hub, "hub")
self.assertEquals(sub.hub, 'hub') self.assertEquals(sub.topic, "topic")
self.assertEquals(sub.topic, 'topic')
self.assertEquals(sub.verified, False) self.assertEquals(sub.verified, False)
rough_expires = datetime.now() + timedelta(seconds=2000) rough_expires = datetime.now() + timedelta(seconds=2000)
self.assert_(abs(sub.lease_expires - rough_expires).seconds < 5, self.assert_(abs(sub.lease_expires - rough_expires).seconds < 5, "lease more than 5 seconds off")
'lease more than 5 seconds off')
self.assertEquals(len(self.requests), 1) self.assertEquals(len(self.requests), 1)
request = self.requests[0] request = self.requests[0]
self.assertEquals(request[0], 'hub') self.assertEquals(request[0], "hub")
self.assertEquals(request[1]['mode'], 'subscribe') self.assertEquals(request[1]["mode"], "subscribe")
self.assertEquals(request[1]['topic'], 'topic') self.assertEquals(request[1]["topic"], "topic")
self.assertEquals(request[1]['callback'], 'callback') self.assertEquals(request[1]["callback"], "callback")
self.assertEquals(request[1]['verify'], ('async', 'sync')) self.assertEquals(request[1]["verify"], ("async", "sync"))
self.assertEquals(request[1]['verify_token'], sub.verify_token) self.assertEquals(request[1]["verify_token"], sub.verify_token)
self.assertEquals(request[1]['lease_seconds'], 2000) self.assertEquals(request[1]["lease_seconds"], 2000)
def test_least_seconds_default(self): def test_least_seconds_default(self):
""" """
@ -132,53 +131,51 @@ class Test_PSHBSubscriptionManagerTest(PSHBTestBase, TestCase):
should default to 2592000 (30 days). should default to 2592000 (30 days).
""" """
self.responses.append(MockResponse(202)) self.responses.append(MockResponse(202))
sub = PushSubscription.objects.subscribe('topic', 'hub', 'callback') sub = PushSubscription.objects.subscribe("topic", "hub", "callback")
rough_expires = datetime.now() + timedelta(seconds=2592000) rough_expires = datetime.now() + timedelta(seconds=2592000)
self.assert_(abs(sub.lease_expires - rough_expires).seconds < 5, self.assert_(abs(sub.lease_expires - rough_expires).seconds < 5, "lease more than 5 seconds off")
'lease more than 5 seconds off')
self.assertEquals(len(self.requests), 1) self.assertEquals(len(self.requests), 1)
request = self.requests[0] request = self.requests[0]
self.assertEquals(request[1]['lease_seconds'], 2592000) self.assertEquals(request[1]["lease_seconds"], 2592000)
def test_error_on_subscribe_raises_URLError(self): def test_error_on_subscribe_raises_URLError(self):
""" """
If a non-202/204 status is returned, raise a URLError. If a non-202/204 status is returned, raise a URLError.
""" """
self.responses.append(MockResponse(500, 'error data')) self.responses.append(MockResponse(500, "error data"))
try: try:
PushSubscription.objects.subscribe('topic', 'hub', 'callback') PushSubscription.objects.subscribe("topic", "hub", "callback")
except urllib.error.URLError as e: except urllib.error.URLError as e:
self.assertEquals(e.reason, self.assertEquals(e.reason, "error subscribing to topic on hub:\nerror data")
'error subscribing to topic on hub:\nerror data')
else: else:
self.fail('subscription did not raise URLError exception') self.fail("subscription did not raise URLError exception")
class Test_PSHBCallbackViewCase(PSHBTestBase, TestCase): class Test_PSHBCallbackViewCase(PSHBTestBase, TestCase):
def test_verify(self): def test_verify(self):
""" """
Getting the callback from the server should verify the subscription. Getting the callback from the server should verify the subscription.
""" """
sub = PushSubscription.objects.create( sub = PushSubscription.objects.create(topic="topic", hub="hub", verified=False)
topic='topic', verify_token = sub.generate_token("subscribe")
hub='hub',
verified=False)
verify_token = sub.generate_token('subscribe')
response = self.client.get(reverse('pubsubhubbub_callback', response = self.client.get(
args=(sub.pk,)), reverse("pubsubhubbub_callback", args=(sub.pk,)),
{'hub.mode': 'subscribe', {
'hub.topic': sub.topic, "hub.mode": "subscribe",
'hub.challenge': 'challenge', "hub.topic": sub.topic,
'hub.lease_seconds': 2000, "hub.challenge": "challenge",
'hub.verify_token': verify_token}) "hub.lease_seconds": 2000,
"hub.verify_token": verify_token,
},
)
self.assertEquals(response.status_code, 200) self.assertEquals(response.status_code, 200)
self.assertEquals(response.content, 'challenge') self.assertEquals(response.content, "challenge")
sub = PushSubscription.objects.get(pk=sub.pk) sub = PushSubscription.objects.get(pk=sub.pk)
self.assertEquals(sub.verified, True) self.assertEquals(sub.verified, True)
self.assertEquals(len(self.signals), 1) self.assertEquals(len(self.signals), 1)
self.assertEquals(self.signals[0], (verified, {'sender': sub})) self.assertEquals(self.signals[0], (verified, {"sender": sub}))
def test_404(self): def test_404(self):
""" """
@ -189,54 +186,63 @@ class Test_PSHBCallbackViewCase(PSHBTestBase, TestCase):
* subscription doesn't exist * subscription doesn't exist
* token doesn't match the subscription * token doesn't match the subscription
""" """
sub = PushSubscription.objects.create( sub = PushSubscription.objects.create(topic="topic", hub="hub", verified=False)
topic='topic', verify_token = sub.generate_token("subscribe")
hub='hub',
verified=False)
verify_token = sub.generate_token('subscribe')
response = self.client.get(reverse('pubsubhubbub_callback', response = self.client.get(
args=(0,)), reverse("pubsubhubbub_callback", args=(0,)),
{'hub.mode': 'subscribe', {
'hub.topic': sub.topic, "hub.mode": "subscribe",
'hub.challenge': 'challenge', "hub.topic": sub.topic,
'hub.lease_seconds': 2000, "hub.challenge": "challenge",
'hub.verify_token': verify_token[1:]}) "hub.lease_seconds": 2000,
"hub.verify_token": verify_token[1:],
},
)
self.assertEquals(response.status_code, 404) self.assertEquals(response.status_code, 404)
self.assertEquals(len(self.signals), 0) self.assertEquals(len(self.signals), 0)
response = self.client.get(reverse('pubsubhubbub_callback', response = self.client.get(
args=(sub.pk,)), reverse("pubsubhubbub_callback", args=(sub.pk,)),
{'hub.mode': 'subscribe', {
'hub.topic': sub.topic, "hub.mode": "subscribe",
'hub.challenge': 'challenge', "hub.topic": sub.topic,
'hub.lease_seconds': 2000, "hub.challenge": "challenge",
'hub.verify_token': verify_token[1:]}) "hub.lease_seconds": 2000,
"hub.verify_token": verify_token[1:],
},
)
self.assertEquals(response.status_code, 404) self.assertEquals(response.status_code, 404)
self.assertEquals(len(self.signals), 0) self.assertEquals(len(self.signals), 0)
response = self.client.get(reverse('pubsubhubbub_callback', response = self.client.get(
args=(sub.pk,)), reverse("pubsubhubbub_callback", args=(sub.pk,)),
{'hub.mode': 'subscribe', {
'hub.topic': sub.topic + 'extra', "hub.mode": "subscribe",
'hub.challenge': 'challenge', "hub.topic": sub.topic + "extra",
'hub.lease_seconds': 2000, "hub.challenge": "challenge",
'hub.verify_token': verify_token}) "hub.lease_seconds": 2000,
"hub.verify_token": verify_token,
},
)
self.assertEquals(response.status_code, 404) self.assertEquals(response.status_code, 404)
self.assertEquals(len(self.signals), 0) self.assertEquals(len(self.signals), 0)
response = self.client.get(reverse('pubsubhubbub_callback', response = self.client.get(
args=(sub.pk,)), reverse("pubsubhubbub_callback", args=(sub.pk,)),
{'hub.mode': 'subscribe', {
'hub.topic': sub.topic, "hub.mode": "subscribe",
'hub.challenge': 'challenge', "hub.topic": sub.topic,
'hub.lease_seconds': 2000, "hub.challenge": "challenge",
'hub.verify_token': verify_token[:-5]}) "hub.lease_seconds": 2000,
"hub.verify_token": verify_token[:-5],
},
)
self.assertEquals(response.status_code, 404) self.assertEquals(response.status_code, 404)
self.assertEquals(len(self.signals), 0) self.assertEquals(len(self.signals), 0)
class Test_PSHBUpdateCase(PSHBTestBase, TestCase): class Test_PSHBUpdateCase(PSHBTestBase, TestCase):
def test_update(self): def test_update(self):
# this data comes from # this data comes from
# http://pubsubhubbub.googlecode.com/svn/trunk/pubsubhubbub-core-0.1.html#anchor3 # http://pubsubhubbub.googlecode.com/svn/trunk/pubsubhubbub-core-0.1.html#anchor3
@ -293,32 +299,27 @@ class Test_PSHBUpdateCase(PSHBTestBase, TestCase):
""" """
sub = PushSubscription.objects.create( sub = PushSubscription.objects.create(
hub="http://myhub.example.com/endpoint", hub="http://myhub.example.com/endpoint", topic="http://publisher.example.com/happycats.xml"
topic="http://publisher.example.com/happycats.xml") )
callback_data = [] callback_data = []
updated.connect( updated.connect(
lambda sender=None, update=None, **kwargs: callback_data.append( lambda sender=None, update=None, **kwargs: callback_data.append((sender, update)), weak=False
(sender, update)), )
weak=False)
response = self.client.post(reverse('pubsubhubbub_callback', response = self.client.post(
args=(sub.pk,)), reverse("pubsubhubbub_callback", args=(sub.pk,)), update_data, "application/atom+xml"
update_data, 'application/atom+xml') )
self.assertEquals(response.status_code, 200) self.assertEquals(response.status_code, 200)
self.assertEquals(len(callback_data), 1) self.assertEquals(len(callback_data), 1)
sender, update = callback_data[0] sender, update = callback_data[0]
self.assertEquals(sender, sub) self.assertEquals(sender, sub)
self.assertEquals(len(update.entries), 4) self.assertEquals(len(update.entries), 4)
self.assertEquals(update.entries[0].id, self.assertEquals(update.entries[0].id, "http://publisher.example.com/happycat25.xml")
'http://publisher.example.com/happycat25.xml') self.assertEquals(update.entries[1].id, "http://publisher.example.com/happycat25.xml")
self.assertEquals(update.entries[1].id, self.assertEquals(update.entries[2].id, "http://publisher.example.com/happycat25.xml")
'http://publisher.example.com/happycat25.xml') self.assertEquals(update.entries[3].id, "http://publisher.example.com/happycat25.xml")
self.assertEquals(update.entries[2].id,
'http://publisher.example.com/happycat25.xml')
self.assertEquals(update.entries[3].id,
'http://publisher.example.com/happycat25.xml')
def test_update_with_changed_hub(self): def test_update_with_changed_hub(self):
update_data = """<?xml version="1.0"?> update_data = """<?xml version="1.0"?>
@ -343,31 +344,32 @@ class Test_PSHBUpdateCase(PSHBTestBase, TestCase):
sub = PushSubscription.objects.create( sub = PushSubscription.objects.create(
hub="hub", hub="hub",
topic="http://publisher.example.com/happycats.xml", topic="http://publisher.example.com/happycats.xml",
lease_expires=datetime.now() + timedelta(days=1)) lease_expires=datetime.now() + timedelta(days=1),
)
callback_data = [] callback_data = []
updated.connect( updated.connect(
lambda sender=None, update=None, **kwargs: callback_data.append( lambda sender=None, update=None, **kwargs: callback_data.append((sender, update)), weak=False
(sender, update)), )
weak=False)
self.responses.append(MockResponse(204)) self.responses.append(MockResponse(204))
response = self.client.post(reverse('pubsubhubbub_callback', response = self.client.post(
args=(sub.pk,)), reverse("pubsubhubbub_callback", args=(sub.pk,)), update_data, "application/atom+xml"
update_data, 'application/atom+xml') )
self.assertEquals(response.status_code, 200) self.assertEquals(response.status_code, 200)
self.assertEquals( self.assertEquals(
PushSubscription.objects.filter( PushSubscription.objects.filter(
hub='http://myhub.example.com/endpoint', hub="http://myhub.example.com/endpoint",
topic='http://publisher.example.com/happycats.xml', topic="http://publisher.example.com/happycats.xml",
verified=True).count(), 1) verified=True,
).count(),
1,
)
self.assertEquals(len(self.requests), 1) self.assertEquals(len(self.requests), 1)
self.assertEquals(self.requests[0][0], self.assertEquals(self.requests[0][0], "http://myhub.example.com/endpoint")
'http://myhub.example.com/endpoint') self.assertEquals(self.requests[0][1]["callback"], "http://test.nb.local.com/1/")
self.assertEquals(self.requests[0][1]['callback'], self.assert_((self.requests[0][1]["lease_seconds"] - 86400) < 5)
'http://test.nb.local.com/1/')
self.assert_((self.requests[0][1]['lease_seconds'] - 86400) < 5)
def test_update_with_changed_self(self): def test_update_with_changed_self(self):
update_data = """<?xml version="1.0"?> update_data = """<?xml version="1.0"?>
@ -392,30 +394,32 @@ class Test_PSHBUpdateCase(PSHBTestBase, TestCase):
sub = PushSubscription.objects.create( sub = PushSubscription.objects.create(
hub="http://myhub.example.com/endpoint", hub="http://myhub.example.com/endpoint",
topic="topic", topic="topic",
lease_expires=datetime.now() + timedelta(days=1)) lease_expires=datetime.now() + timedelta(days=1),
)
callback_data = [] callback_data = []
updated.connect( updated.connect(
lambda sender=None, update=None, **kwargs: callback_data.append( lambda sender=None, update=None, **kwargs: callback_data.append((sender, update)), weak=False
(sender, update)), )
weak=False)
self.responses.append(MockResponse(204)) self.responses.append(MockResponse(204))
response = self.client.post(reverse('pubsubhubbub_callback', kwargs={'push_id': sub.pk}), response = self.client.post(
update_data, 'application/atom+xml') reverse("pubsubhubbub_callback", kwargs={"push_id": sub.pk}), update_data, "application/atom+xml"
)
self.assertEquals(response.status_code, 200) self.assertEquals(response.status_code, 200)
self.assertEquals( self.assertEquals(
PushSubscription.objects.filter( PushSubscription.objects.filter(
hub='http://myhub.example.com/endpoint', hub="http://myhub.example.com/endpoint",
topic='http://publisher.example.com/happycats.xml', topic="http://publisher.example.com/happycats.xml",
verified=True).count(), 1) verified=True,
).count(),
1,
)
self.assertEquals(len(self.requests), 1) self.assertEquals(len(self.requests), 1)
self.assertEquals(self.requests[0][0], self.assertEquals(self.requests[0][0], "http://myhub.example.com/endpoint")
'http://myhub.example.com/endpoint') self.assertEquals(self.requests[0][1]["callback"], "http://test.nb.local.com/1/")
self.assertEquals(self.requests[0][1]['callback'], self.assert_((self.requests[0][1]["lease_seconds"] - 86400) < 5)
'http://test.nb.local.com/1/')
self.assert_((self.requests[0][1]['lease_seconds'] - 86400) < 5)
def test_update_with_changed_hub_and_self(self): def test_update_with_changed_hub_and_self(self):
update_data = """<?xml version="1.0"?> update_data = """<?xml version="1.0"?>
@ -438,30 +442,29 @@ class Test_PSHBUpdateCase(PSHBTestBase, TestCase):
</atom:feed> </atom:feed>
""" """
sub = PushSubscription.objects.create( sub = PushSubscription.objects.create(
hub="hub", hub="hub", topic="topic", lease_expires=datetime.now() + timedelta(days=1)
topic="topic", )
lease_expires=datetime.now() + timedelta(days=1))
callback_data = [] callback_data = []
updated.connect( updated.connect(
lambda sender=None, update=None, **kwargs: callback_data.append( lambda sender=None, update=None, **kwargs: callback_data.append((sender, update)), weak=False
(sender, update)), )
weak=False)
self.responses.append(MockResponse(204)) self.responses.append(MockResponse(204))
response = self.client.post(reverse('pubsubhubbub_callback', response = self.client.post(
args=(sub.pk,)), reverse("pubsubhubbub_callback", args=(sub.pk,)), update_data, "application/atom+xml"
update_data, 'application/atom+xml') )
self.assertEquals(response.status_code, 200) self.assertEquals(response.status_code, 200)
self.assertEquals( self.assertEquals(
PushSubscription.objects.filter( PushSubscription.objects.filter(
hub='http://myhub.example.com/endpoint', hub="http://myhub.example.com/endpoint",
topic='http://publisher.example.com/happycats.xml', topic="http://publisher.example.com/happycats.xml",
verified=True).count(), 1) verified=True,
).count(),
1,
)
self.assertEquals(len(self.requests), 1) self.assertEquals(len(self.requests), 1)
self.assertEquals(self.requests[0][0], self.assertEquals(self.requests[0][0], "http://myhub.example.com/endpoint")
'http://myhub.example.com/endpoint') self.assertEquals(self.requests[0][1]["callback"], "http://test.nb.local.com/1/")
self.assertEquals(self.requests[0][1]['callback'], self.assert_((self.requests[0][1]["lease_seconds"] - 86400) < 5)
'http://test.nb.local.com/1/')
self.assert_((self.requests[0][1]['lease_seconds'] - 86400) < 5)

View file

@ -2,5 +2,5 @@ from django.conf.urls import *
from apps.push import views from apps.push import views
urlpatterns = [ urlpatterns = [
url(r'^(?P<push_id>\d+)/?$', views.push_callback, name='push-callback'), url(r"^(?P<push_id>\d+)/?$", views.push_callback, name="push-callback"),
] ]

View file

@ -13,43 +13,49 @@ from apps.push.signals import verified
from apps.rss_feeds.models import MFetchHistory from apps.rss_feeds.models import MFetchHistory
from utils import log as logging from utils import log as logging
def push_callback(request, push_id):
if request.method == 'GET':
mode = request.GET['hub.mode']
topic = request.GET['hub.topic']
challenge = request.GET.get('hub.challenge', '')
lease_seconds = request.GET.get('hub.lease_seconds')
verify_token = request.GET.get('hub.verify_token', '')
if mode == 'subscribe': def push_callback(request, push_id):
if not verify_token.startswith('subscribe'): if request.method == "GET":
mode = request.GET["hub.mode"]
topic = request.GET["hub.topic"]
challenge = request.GET.get("hub.challenge", "")
lease_seconds = request.GET.get("hub.lease_seconds")
verify_token = request.GET.get("hub.verify_token", "")
if mode == "subscribe":
if not verify_token.startswith("subscribe"):
raise Http404 raise Http404
subscription = get_object_or_404(PushSubscription, subscription = get_object_or_404(
pk=push_id, PushSubscription, pk=push_id, topic=topic, verify_token=verify_token
topic=topic, )
verify_token=verify_token)
subscription.verified = True subscription.verified = True
subscription.set_expiration(int(lease_seconds)) subscription.set_expiration(int(lease_seconds))
subscription.save() subscription.save()
subscription.feed.setup_push() subscription.feed.setup_push()
logging.debug(' ---> [%-30s] [%s] ~BBVerified PuSH' % (subscription.feed, subscription.feed_id)) logging.debug(" ---> [%-30s] [%s] ~BBVerified PuSH" % (subscription.feed, subscription.feed_id))
verified.send(sender=subscription) verified.send(sender=subscription)
return HttpResponse(challenge, content_type='text/plain') return HttpResponse(challenge, content_type="text/plain")
elif request.method == 'POST': elif request.method == "POST":
subscription = get_object_or_404(PushSubscription, pk=push_id) subscription = get_object_or_404(PushSubscription, pk=push_id)
fetch_history = MFetchHistory.feed(subscription.feed_id) fetch_history = MFetchHistory.feed(subscription.feed_id)
latest_push_date_delta = None latest_push_date_delta = None
if fetch_history and fetch_history.get('push_history'): if fetch_history and fetch_history.get("push_history"):
latest_push = fetch_history['push_history'][0]['push_date'] latest_push = fetch_history["push_history"][0]["push_date"]
latest_push_date = datetime.datetime.strptime(latest_push, '%Y-%m-%d %H:%M:%S') latest_push_date = datetime.datetime.strptime(latest_push, "%Y-%m-%d %H:%M:%S")
latest_push_date_delta = datetime.datetime.now() - latest_push_date latest_push_date_delta = datetime.datetime.now() - latest_push_date
if latest_push_date > datetime.datetime.now() - datetime.timedelta(minutes=1): if latest_push_date > datetime.datetime.now() - datetime.timedelta(minutes=1):
logging.debug(' ---> [%-30s] ~SN~FBSkipping feed fetch, pushed %s seconds ago' % (subscription.feed, latest_push_date_delta.seconds)) logging.debug(
return HttpResponse('Slow down, you just pushed %s seconds ago...' % latest_push_date_delta.seconds, status=429) " ---> [%-30s] ~SN~FBSkipping feed fetch, pushed %s seconds ago"
% (subscription.feed, latest_push_date_delta.seconds)
)
return HttpResponse(
"Slow down, you just pushed %s seconds ago..." % latest_push_date_delta.seconds,
status=429,
)
# XXX TODO: Optimize this by removing feedparser. It just needs to find out # XXX TODO: Optimize this by removing feedparser. It just needs to find out
# the hub_url or topic has changed. ElementTree could do it. # the hub_url or topic has changed. ElementTree could do it.
if random.random() < 0.1: if random.random() < 0.1:
@ -63,10 +69,12 @@ def push_callback(request, push_id):
# subscription.feed.queue_pushed_feed_xml(request.body) # subscription.feed.queue_pushed_feed_xml(request.body)
if subscription.feed.active_subscribers >= 1: if subscription.feed.active_subscribers >= 1:
subscription.feed.queue_pushed_feed_xml("Fetch me", latest_push_date_delta=latest_push_date_delta) subscription.feed.queue_pushed_feed_xml("Fetch me", latest_push_date_delta=latest_push_date_delta)
MFetchHistory.add(feed_id=subscription.feed_id, MFetchHistory.add(feed_id=subscription.feed_id, fetch_type="push")
fetch_type='push')
else: else:
logging.debug(' ---> [%-30s] ~FBSkipping feed fetch, no actives: %s' % (subscription.feed, subscription.feed)) logging.debug(
" ---> [%-30s] ~FBSkipping feed fetch, no actives: %s"
return HttpResponse('OK') % (subscription.feed, subscription.feed)
)
return HttpResponse("OK")
return Http404 return Http404

View file

@ -3,4 +3,4 @@ from django.contrib import admin
admin.site.register(UserSubscription) admin.site.register(UserSubscription)
admin.site.register(UserSubscriptionFolders) admin.site.register(UserSubscriptionFolders)
admin.site.register(Feature) admin.site.register(Feature)

View file

@ -8,6 +8,7 @@ from apps.profile.factories import UserFactory
fake = Faker() fake = Faker()
def generate_folder(): def generate_folder():
string = '{"' string = '{"'
string += " ".join(fake.words(2)) string += " ".join(fake.words(2))
@ -18,12 +19,13 @@ def generate_folder():
string += "]}," string += "]},"
return string return string
def generate_folders(): def generate_folders():
""" """
"folders": "[5299728, 644144, 1187026, {\"Brainiacs & Opinion\": [569, 38, 3581, 183139, 1186180, 15]}, {\"Science & Technology\": [731503, 140145, 1272495, 76, 161, 39, {\"Hacker\": [5985150, 3323431]}]}, {\"Humor\": [212379, 3530, 5994357]}, {\"Videos\": [3240, 5168]}]" "folders": "[5299728, 644144, 1187026, {\"Brainiacs & Opinion\": [569, 38, 3581, 183139, 1186180, 15]}, {\"Science & Technology\": [731503, 140145, 1272495, 76, 161, 39, {\"Hacker\": [5985150, 3323431]}]}, {\"Humor\": [212379, 3530, 5994357]}, {\"Videos\": [3240, 5168]}]"
""" """
string = '"folders":[' string = '"folders":['
for _ in range(3): for _ in range(3):
string += f"{fake.pyint()}, " string += f"{fake.pyint()}, "
for _ in range(3): for _ in range(3):
@ -32,6 +34,7 @@ def generate_folders():
string = string[:-1] + "]" string = string[:-1] + "]"
return string return string
class UserSubscriptionFoldersFactory(DjangoModelFactory): class UserSubscriptionFoldersFactory(DjangoModelFactory):
user = factory.SubFactory(UserFactory) user = factory.SubFactory(UserFactory)
folders = FuzzyAttribute(generate_folders) folders = FuzzyAttribute(generate_folders)
@ -39,18 +42,19 @@ class UserSubscriptionFoldersFactory(DjangoModelFactory):
class Meta: class Meta:
model = UserSubscriptionFolders model = UserSubscriptionFolders
class UserSubscriptionFactory(DjangoModelFactory): class UserSubscriptionFactory(DjangoModelFactory):
user = factory.SubFactory(UserFactory) user = factory.SubFactory(UserFactory)
feed = FuzzyAttribute(FeedFactory) feed = FuzzyAttribute(FeedFactory)
last_read_date = factory.Faker('date_time') last_read_date = factory.Faker("date_time")
class Meta: class Meta:
model = UserSubscription model = UserSubscription
class FeatureFactory(DjangoModelFactory): class FeatureFactory(DjangoModelFactory):
description = factory.Faker('text') description = factory.Faker("text")
date = factory.Faker('date_time') date = factory.Faker("date_time")
class Meta: class Meta:
model = Feature model = Feature

View file

@ -15,13 +15,18 @@ from dns.resolver import NoResolverConfiguration
class LoginForm(forms.Form): class LoginForm(forms.Form):
username = forms.CharField(label=_("Username or Email"), max_length=30, username = forms.CharField(
widget=forms.TextInput(attrs={'tabindex': 1, 'class': 'NB-input'}), label=_("Username or Email"),
error_messages={'required': 'Please enter a username.'}) max_length=30,
password = forms.CharField(label=_("Password"), widget=forms.TextInput(attrs={"tabindex": 1, "class": "NB-input"}),
widget=forms.PasswordInput(attrs={'tabindex': 2, 'class': 'NB-input'}), error_messages={"required": "Please enter a username."},
required=False) )
# error_messages={'required': 'Please enter a password.'}) password = forms.CharField(
label=_("Password"),
widget=forms.PasswordInput(attrs={"tabindex": 2, "class": "NB-input"}),
required=False,
)
# error_messages={'required': 'Please enter a password.'})
add = forms.CharField(required=False, widget=forms.HiddenInput()) add = forms.CharField(required=False, widget=forms.HiddenInput())
def __init__(self, *args, **kwargs): def __init__(self, *args, **kwargs):
@ -29,10 +34,10 @@ class LoginForm(forms.Form):
super(LoginForm, self).__init__(*args, **kwargs) super(LoginForm, self).__init__(*args, **kwargs)
def clean(self): def clean(self):
username = self.cleaned_data.get('username', '').lower() username = self.cleaned_data.get("username", "").lower()
password = self.cleaned_data.get('password', '') password = self.cleaned_data.get("password", "")
if '@' in username: if "@" in username:
user = User.objects.filter(email=username) user = User.objects.filter(email=username)
if not user: if not user:
user = User.objects.filter(email__iexact=username) user = User.objects.filter(email__iexact=username)
@ -60,13 +65,15 @@ class LoginForm(forms.Form):
if blank: if blank:
email_user.set_password(email_user.username) email_user.set_password(email_user.username)
email_user.save() email_user.save()
self.user_cache = authenticate(username=email_user.username, password=email_user.username) self.user_cache = authenticate(
username=email_user.username, password=email_user.username
)
if self.user_cache is None: if self.user_cache is None:
logging.info(" ***> [%s] Bad Login" % username) logging.info(" ***> [%s] Bad Login" % username)
raise forms.ValidationError(_("Whoopsy-daisy, wrong password. Try again.")) raise forms.ValidationError(_("Whoopsy-daisy, wrong password. Try again."))
elif username and not user: elif username and not user:
raise forms.ValidationError(_("That username is not registered. Please try again.")) raise forms.ValidationError(_("That username is not registered. Please try again."))
return self.cleaned_data return self.cleaned_data
def get_user_id(self): def get_user_id(self):
@ -81,113 +88,135 @@ class LoginForm(forms.Form):
class SignupForm(forms.Form): class SignupForm(forms.Form):
use_required_attribute = False use_required_attribute = False
username = forms.RegexField(regex=r'^\w+$', username = forms.RegexField(
max_length=30, regex=r"^\w+$",
widget=forms.TextInput(attrs={'class': 'NB-input'}), max_length=30,
label=_('Username'), widget=forms.TextInput(attrs={"class": "NB-input"}),
error_messages={ label=_("Username"),
'required': 'Please enter a username.', error_messages={
'invalid': "Your username may only contain letters and numbers." "required": "Please enter a username.",
}) "invalid": "Your username may only contain letters and numbers.",
email = forms.EmailField(widget=forms.TextInput(attrs={'maxlength': 75, 'class': 'NB-input'}), },
label=_('Email'), )
required=True, email = forms.EmailField(
error_messages={'required': 'Please enter an email.'}) widget=forms.TextInput(attrs={"maxlength": 75, "class": "NB-input"}),
password = forms.CharField(widget=forms.PasswordInput(attrs={'class': 'NB-input'}, label=_("Email"),
render_value=True,), required=True,
label=_('Password'), error_messages={"required": "Please enter an email."},
required=False) )
# error_messages={'required': 'Please enter a password.'}) password = forms.CharField(
widget=forms.PasswordInput(
attrs={"class": "NB-input"},
render_value=True,
),
label=_("Password"),
required=False,
)
# error_messages={'required': 'Please enter a password.'})
def clean_username(self): def clean_username(self):
username = self.cleaned_data['username'] username = self.cleaned_data["username"]
return username return username
def clean_password(self): def clean_password(self):
if not self.cleaned_data['password']: if not self.cleaned_data["password"]:
return "" return ""
return self.cleaned_data['password'] return self.cleaned_data["password"]
def clean_email(self): def clean_email(self):
email = self.cleaned_data.get('email', None) email = self.cleaned_data.get("email", None)
if email: if email:
email_exists = User.objects.filter(email__iexact=email).count() email_exists = User.objects.filter(email__iexact=email).count()
if email_exists: if email_exists:
raise forms.ValidationError(_('Someone is already using that email address.')) raise forms.ValidationError(_("Someone is already using that email address."))
if any([banned in email for banned in ['mailwire24', 'mailbox9', 'scintillamail', 'bluemailboxes', 'devmailing']]): if any(
logging.info(" ***> [%s] Spammer signup banned: %s/%s" % (self.cleaned_data.get('username', None), self.cleaned_data.get('password', None), email)) [
raise forms.ValidationError('Seriously, fuck off spammer.') banned in email
for banned in ["mailwire24", "mailbox9", "scintillamail", "bluemailboxes", "devmailing"]
]
):
logging.info(
" ***> [%s] Spammer signup banned: %s/%s"
% (
self.cleaned_data.get("username", None),
self.cleaned_data.get("password", None),
email,
)
)
raise forms.ValidationError("Seriously, fuck off spammer.")
try: try:
domain = email.rsplit('@', 1)[-1] domain = email.rsplit("@", 1)[-1]
if not query(domain, 'MX'): if not query(domain, "MX"):
raise forms.ValidationError('Sorry, that email is invalid.') raise forms.ValidationError("Sorry, that email is invalid.")
except (NXDOMAIN, NoNameservers, NoAnswer): except (NXDOMAIN, NoNameservers, NoAnswer):
raise forms.ValidationError('Sorry, that email is invalid.') raise forms.ValidationError("Sorry, that email is invalid.")
except NoResolverConfiguration as e: except NoResolverConfiguration as e:
logging.info(f" ***> ~FRFailed to check spamminess of domain: ~FY{domain} ~FR{e}") logging.info(f" ***> ~FRFailed to check spamminess of domain: ~FY{domain} ~FR{e}")
pass pass
return self.cleaned_data['email'] return self.cleaned_data["email"]
def clean(self): def clean(self):
username = self.cleaned_data.get('username', '') username = self.cleaned_data.get("username", "")
password = self.cleaned_data.get('password', '') password = self.cleaned_data.get("password", "")
email = self.cleaned_data.get('email', None) email = self.cleaned_data.get("email", None)
exists = User.objects.filter(username__iexact=username).count()
if exists:
user_auth = authenticate(username=username, password=password)
if not user_auth:
raise forms.ValidationError(_('Someone is already using that username.'))
return self.cleaned_data
def save(self, profile_callback=None):
username = self.cleaned_data['username']
password = self.cleaned_data['password']
email = self.cleaned_data['email']
exists = User.objects.filter(username__iexact=username).count() exists = User.objects.filter(username__iexact=username).count()
if exists: if exists:
user_auth = authenticate(username=username, password=password) user_auth = authenticate(username=username, password=password)
if not user_auth: if not user_auth:
raise forms.ValidationError(_('Someone is already using that username.')) raise forms.ValidationError(_("Someone is already using that username."))
return self.cleaned_data
def save(self, profile_callback=None):
username = self.cleaned_data["username"]
password = self.cleaned_data["password"]
email = self.cleaned_data["email"]
exists = User.objects.filter(username__iexact=username).count()
if exists:
user_auth = authenticate(username=username, password=password)
if not user_auth:
raise forms.ValidationError(_("Someone is already using that username."))
else: else:
return user_auth return user_auth
if not password: if not password:
password = username password = username
new_user = User(username=username) new_user = User(username=username)
new_user.set_password(password) new_user.set_password(password)
if not getattr(settings, 'AUTO_ENABLE_NEW_USERS', True): if not getattr(settings, "AUTO_ENABLE_NEW_USERS", True):
new_user.is_active = False new_user.is_active = False
new_user.email = email new_user.email = email
new_user.last_login = datetime.datetime.now() new_user.last_login = datetime.datetime.now()
new_user.save() new_user.save()
new_user = authenticate(username=username, new_user = authenticate(username=username, password=password)
password=password)
new_user = User.objects.get(username=username) new_user = User.objects.get(username=username)
MActivity.new_signup(user_id=new_user.pk) MActivity.new_signup(user_id=new_user.pk)
RNewUserQueue.add_user(new_user.pk) RNewUserQueue.add_user(new_user.pk)
if new_user.email: if new_user.email:
EmailNewUser.delay(user_id=new_user.pk) EmailNewUser.delay(user_id=new_user.pk)
if getattr(settings, 'AUTO_PREMIUM_NEW_USERS', False): if getattr(settings, "AUTO_PREMIUM_NEW_USERS", False):
new_user.profile.activate_premium() new_user.profile.activate_premium()
elif getattr(settings, 'AUTO_ENABLE_NEW_USERS', False): elif getattr(settings, "AUTO_ENABLE_NEW_USERS", False):
new_user.profile.activate_free() new_user.profile.activate_free()
return new_user return new_user
class FeatureForm(forms.Form): class FeatureForm(forms.Form):
use_required_attribute = False use_required_attribute = False
description = forms.CharField(required=True) description = forms.CharField(required=True)
def save(self): def save(self):
feature = Feature(description=self.cleaned_data['description'], feature = Feature(
date=datetime.datetime.utcnow() + datetime.timedelta(minutes=1)) description=self.cleaned_data["description"],
date=datetime.datetime.utcnow() + datetime.timedelta(minutes=1),
)
feature.save() feature.save()
return feature return feature

View file

@ -1,8 +1,9 @@
from django.shortcuts import render from django.shortcuts import render
def respond(request, template_name, context_dict, **kwargs): def respond(request, template_name, context_dict, **kwargs):
""" """
Use this function rather than render_to_response directly. The idea is to ensure Use this function rather than render_to_response directly. The idea is to ensure
that we're always using RequestContext. It's too easy to forget. that we're always using RequestContext. It's too easy to forget.
""" """
return render(request, template_name, context_dict, **kwargs) return render(request, template_name, context_dict, **kwargs)

View file

@ -4,32 +4,36 @@ from django.contrib.auth.models import User
from apps.rss_feeds.models import DuplicateFeed from apps.rss_feeds.models import DuplicateFeed
from utils import log as logging from utils import log as logging
class UserSubscriptionManager(models.Manager): class UserSubscriptionManager(models.Manager):
def get(self, *args, **kwargs): def get(self, *args, **kwargs):
try: try:
return super(UserSubscriptionManager, self).get(*args, **kwargs) return super(UserSubscriptionManager, self).get(*args, **kwargs)
except self.model.DoesNotExist as exception: except self.model.DoesNotExist as exception:
if isinstance(kwargs.get('feed'), int): if isinstance(kwargs.get("feed"), int):
feed_id = kwargs.get('feed') feed_id = kwargs.get("feed")
elif 'feed' in kwargs: elif "feed" in kwargs:
feed_id = kwargs['feed'].pk feed_id = kwargs["feed"].pk
elif 'feed__pk' in kwargs: elif "feed__pk" in kwargs:
feed_id = kwargs['feed__pk'] feed_id = kwargs["feed__pk"]
elif 'feed_id' in kwargs: elif "feed_id" in kwargs:
feed_id = kwargs['feed_id'] feed_id = kwargs["feed_id"]
dupe_feed = DuplicateFeed.objects.filter(duplicate_feed_id=feed_id) dupe_feed = DuplicateFeed.objects.filter(duplicate_feed_id=feed_id)
if dupe_feed: if dupe_feed:
feed = dupe_feed[0].feed feed = dupe_feed[0].feed
if 'feed' in kwargs: if "feed" in kwargs:
kwargs['feed'] = feed kwargs["feed"] = feed
elif 'feed__pk' in kwargs: elif "feed__pk" in kwargs:
kwargs['feed__pk'] = feed.pk kwargs["feed__pk"] = feed.pk
elif 'feed_id' in kwargs: elif "feed_id" in kwargs:
kwargs['feed_id'] = feed.pk kwargs["feed_id"] = feed.pk
user = kwargs.get('user') user = kwargs.get("user")
if isinstance(user, int): if isinstance(user, int):
user = User.objects.get(pk=user) user = User.objects.get(pk=user)
logging.debug(" ---> [%s] ~BRFound dupe UserSubscription: ~SB%s (%s)" % (user and user.username, feed, feed_id)) logging.debug(
" ---> [%s] ~BRFound dupe UserSubscription: ~SB%s (%s)"
% (user and user.username, feed, feed_id)
)
return super(UserSubscriptionManager, self).get(*args, **kwargs) return super(UserSubscriptionManager, self).get(*args, **kwargs)
else: else:
raise exception raise exception

View file

@ -8,60 +8,87 @@ import django.db.models.deletion
class Migration(migrations.Migration): class Migration(migrations.Migration):
initial = True initial = True
dependencies = [ dependencies = [
('rss_feeds', '0001_initial'), ("rss_feeds", "0001_initial"),
migrations.swappable_dependency(settings.AUTH_USER_MODEL), migrations.swappable_dependency(settings.AUTH_USER_MODEL),
] ]
operations = [ operations = [
migrations.CreateModel( migrations.CreateModel(
name='Feature', name="Feature",
fields=[ fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), (
('description', models.TextField(default='')), "id",
('date', models.DateTimeField(default=datetime.datetime.now)), models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID"),
),
("description", models.TextField(default="")),
("date", models.DateTimeField(default=datetime.datetime.now)),
], ],
options={ options={
'ordering': ['-date'], "ordering": ["-date"],
}, },
), ),
migrations.CreateModel( migrations.CreateModel(
name='UserSubscription', name="UserSubscription",
fields=[ fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), (
('user_title', models.CharField(blank=True, max_length=255, null=True)), "id",
('active', models.BooleanField(default=False)), models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID"),
('last_read_date', models.DateTimeField(default=apps.reader.models.unread_cutoff_default)), ),
('mark_read_date', models.DateTimeField(default=apps.reader.models.unread_cutoff_default)), ("user_title", models.CharField(blank=True, max_length=255, null=True)),
('unread_count_neutral', models.IntegerField(default=0)), ("active", models.BooleanField(default=False)),
('unread_count_positive', models.IntegerField(default=0)), ("last_read_date", models.DateTimeField(default=apps.reader.models.unread_cutoff_default)),
('unread_count_negative', models.IntegerField(default=0)), ("mark_read_date", models.DateTimeField(default=apps.reader.models.unread_cutoff_default)),
('unread_count_updated', models.DateTimeField(default=datetime.datetime.now)), ("unread_count_neutral", models.IntegerField(default=0)),
('oldest_unread_story_date', models.DateTimeField(default=datetime.datetime.now)), ("unread_count_positive", models.IntegerField(default=0)),
('needs_unread_recalc', models.BooleanField(default=False)), ("unread_count_negative", models.IntegerField(default=0)),
('feed_opens', models.IntegerField(default=0)), ("unread_count_updated", models.DateTimeField(default=datetime.datetime.now)),
('is_trained', models.BooleanField(default=False)), ("oldest_unread_story_date", models.DateTimeField(default=datetime.datetime.now)),
('feed', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='subscribers', to='rss_feeds.Feed')), ("needs_unread_recalc", models.BooleanField(default=False)),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='subscriptions', to=settings.AUTH_USER_MODEL)), ("feed_opens", models.IntegerField(default=0)),
("is_trained", models.BooleanField(default=False)),
(
"feed",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
related_name="subscribers",
to="rss_feeds.Feed",
),
),
(
"user",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
related_name="subscriptions",
to=settings.AUTH_USER_MODEL,
),
),
], ],
), ),
migrations.CreateModel( migrations.CreateModel(
name='UserSubscriptionFolders', name="UserSubscriptionFolders",
fields=[ fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), (
('folders', models.TextField(default='[]')), "id",
('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)), models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID"),
),
("folders", models.TextField(default="[]")),
(
"user",
models.OneToOneField(
on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL
),
),
], ],
options={ options={
'verbose_name': 'folder', "verbose_name": "folder",
'verbose_name_plural': 'folders', "verbose_name_plural": "folders",
}, },
), ),
migrations.AlterUniqueTogether( migrations.AlterUniqueTogether(
name='usersubscription', name="usersubscription",
unique_together={('user', 'feed')}, unique_together={("user", "feed")},
), ),
] ]

File diff suppressed because it is too large Load diff

View file

@ -6,13 +6,14 @@ from django.conf import settings
from apps.reader.models import UserSubscription from apps.reader.models import UserSubscription
from apps.social.models import MSocialSubscription from apps.social.models import MSocialSubscription
@app.task(name='freshen-homepage')
@app.task(name="freshen-homepage")
def FreshenHomepage(): def FreshenHomepage():
day_ago = datetime.datetime.utcnow() - datetime.timedelta(days=1) day_ago = datetime.datetime.utcnow() - datetime.timedelta(days=1)
user = User.objects.get(username=settings.HOMEPAGE_USERNAME) user = User.objects.get(username=settings.HOMEPAGE_USERNAME)
user.profile.last_seen_on = datetime.datetime.utcnow() user.profile.last_seen_on = datetime.datetime.utcnow()
user.profile.save() user.profile.save()
usersubs = UserSubscription.objects.filter(user=user) usersubs = UserSubscription.objects.filter(user=user)
logging.debug(" ---> %s has %s feeds, freshening..." % (user.username, usersubs.count())) logging.debug(" ---> %s has %s feeds, freshening..." % (user.username, usersubs.count()))
for sub in usersubs: for sub in usersubs:
@ -20,7 +21,7 @@ def FreshenHomepage():
sub.needs_unread_recalc = True sub.needs_unread_recalc = True
sub.save() sub.save()
sub.calculate_feed_scores(silent=True) sub.calculate_feed_scores(silent=True)
socialsubs = MSocialSubscription.objects.filter(user_id=user.pk) socialsubs = MSocialSubscription.objects.filter(user_id=user.pk)
logging.debug(" ---> %s has %s socialsubs, freshening..." % (user.username, socialsubs.count())) logging.debug(" ---> %s has %s socialsubs, freshening..." % (user.username, socialsubs.count()))
for sub in socialsubs: for sub in socialsubs:
@ -29,12 +30,16 @@ def FreshenHomepage():
sub.save() sub.save()
sub.calculate_feed_scores(silent=True) sub.calculate_feed_scores(silent=True)
@app.task(name='clean-analytics', time_limit=720*10)
@app.task(name="clean-analytics", time_limit=720 * 10)
def CleanAnalytics(): def CleanAnalytics():
logging.debug(" ---> Cleaning analytics... %s feed fetches" % ( logging.debug(
settings.MONGOANALYTICSDB.nbanalytics.feed_fetches.count(), " ---> Cleaning analytics... %s feed fetches"
)) % (settings.MONGOANALYTICSDB.nbanalytics.feed_fetches.count(),)
)
day_ago = datetime.datetime.utcnow() - datetime.timedelta(days=1) day_ago = datetime.datetime.utcnow() - datetime.timedelta(days=1)
settings.MONGOANALYTICSDB.nbanalytics.feed_fetches.delete_many({ settings.MONGOANALYTICSDB.nbanalytics.feed_fetches.delete_many(
"date": {"$lt": day_ago}, {
}) "date": {"$lt": day_ago},
}
)

File diff suppressed because one or more lines are too long

Some files were not shown because too many files have changed in this diff Show more