diff --git a/ansible/roles/consul/tasks/get_consul_manager_ip.py b/ansible/roles/consul/tasks/get_consul_manager_ip.py index e98eb6b3f..659d06a47 100755 --- a/ansible/roles/consul/tasks/get_consul_manager_ip.py +++ b/ansible/roles/consul/tasks/get_consul_manager_ip.py @@ -14,24 +14,31 @@ def get_host_ips_from_group(group_name): :param inventory_base_path: Base path to the inventory directories. Defaults to the path in ansible.cfg. :return: A list of IP addresses belonging to the specified group. """ - cmd = ['ansible-inventory', '-i', '/srv/newsblur/ansible/inventories/hetzner.ini', '-i', '/srv/newsblur/ansible/inventories/hetzner.yml', '--list'] - + cmd = [ + "ansible-inventory", + "-i", + "/srv/newsblur/ansible/inventories/hetzner.ini", + "-i", + "/srv/newsblur/ansible/inventories/hetzner.yml", + "--list", + ] + try: # Execute the ansible-inventory command result = subprocess.run(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True, check=True) - + # Parse the JSON output from ansible-inventory inventory_data = json.loads(result.stdout) - + host_ips = [] # Check if the group exists if group_name in inventory_data: # Get the list of hosts in the specified group - if 'hosts' in inventory_data[group_name]: - for host in inventory_data[group_name]['hosts']: + if "hosts" in inventory_data[group_name]: + for host in inventory_data[group_name]["hosts"]: # Fetch the host details, specifically looking for the ansible_host variable for the IP - host_vars = inventory_data['_meta']['hostvars'][host] - ip_address = host_vars.get('ansible_host', None) + host_vars = inventory_data["_meta"]["hostvars"][host] + ip_address = host_vars.get("ansible_host", None) if ip_address: host_ips.append(ip_address) else: @@ -50,16 +57,19 @@ TOKEN_FILE = "/srv/secrets-newsblur/keys/digital_ocean.token" with open(TOKEN_FILE) as f: token = f.read().strip() - os.environ['DO_API_TOKEN'] = token + os.environ["DO_API_TOKEN"] = token manager = digitalocean.Manager(token=token) my_droplets = manager.get_all_droplets() consul_manager_droplets = [d for d in my_droplets if "db-consul" in d.name] # Use ansible-inventory to get the consul-manager ip -group_name = 'hconsul' +group_name = "hconsul" hetzner_hosts = get_host_ips_from_group(group_name) -consul_manager_ip_address = ','.join([f"\"{droplet.ip_address}\"" for droplet in consul_manager_droplets] + [f"\"{host}\"" for host in hetzner_hosts]) +consul_manager_ip_address = ",".join( + [f'"{droplet.ip_address}"' for droplet in consul_manager_droplets] + + [f'"{host}"' for host in hetzner_hosts] +) print(consul_manager_ip_address) diff --git a/ansible/roles/postgres-exporter/tasks/get_credentials.py b/ansible/roles/postgres-exporter/tasks/get_credentials.py index 862c46d89..85fa38df8 100755 --- a/ansible/roles/postgres-exporter/tasks/get_credentials.py +++ b/ansible/roles/postgres-exporter/tasks/get_credentials.py @@ -1,12 +1,13 @@ #!/srv/newsblur/venv/newsblur3/bin/python import sys -sys.path.append('/srv/newsblur') + +sys.path.append("/srv/newsblur") from newsblur_web import settings -username = settings.DATABASES['default']['USER'] -password = settings.DATABASES['default']['PASSWORD'] +username = settings.DATABASES["default"]["USER"] +password = settings.DATABASES["default"]["PASSWORD"] -if sys.argv[1] =='postgres_credentials': +if sys.argv[1] == "postgres_credentials": print(f"{username}:{password}") -if sys.argv[1] =='s3_bucket': - print(settings.S3_BACKUP_BUCKET) \ No newline at end of file +if sys.argv[1] == "s3_bucket": + print(settings.S3_BACKUP_BUCKET) diff 
--git a/ansible/utils/check_droplet.py b/ansible/utils/check_droplet.py index b231777aa..1abcc0d43 100644 --- a/ansible/utils/check_droplet.py +++ b/ansible/utils/check_droplet.py @@ -3,6 +3,7 @@ import time import digitalocean import subprocess + def test_ssh(drop): droplet_ip_address = drop.ip_address result = subprocess.call(f"ssh -o StrictHostKeyChecking=no root@{droplet_ip_address} ls", shell=True) @@ -10,6 +11,7 @@ def test_ssh(drop): return True return False + TOKEN_FILE = "/srv/secrets-newsblur/keys/digital_ocean.token" droplet_name = sys.argv[1] @@ -25,7 +27,7 @@ ssh_works = False while not ssh_works: if timer > timeout: raise Exception(f"The {droplet_name} droplet was not created.") - + droplets = [drop for drop in manager.get_all_droplets() if drop.name == droplet_name] if droplets: droplet = droplets[0] @@ -33,4 +35,4 @@ while not ssh_works: ssh_works = test_ssh(droplet) time.sleep(3) timer += 3 -print("Success!") \ No newline at end of file +print("Success!") diff --git a/ansible/utils/generate_inventory.py b/ansible/utils/generate_inventory.py index d9e26f4a3..6fac02e79 100755 --- a/ansible/utils/generate_inventory.py +++ b/ansible/utils/generate_inventory.py @@ -8,7 +8,7 @@ import digitalocean OLD = False # Set env var OLD=1 to use existing servers -if os.environ.get('OLD', False): +if os.environ.get("OLD", False): OLD = True if OLD: @@ -17,7 +17,7 @@ else: TOKEN_FILE = "/srv/secrets-newsblur/keys/digital_ocean.token" try: - api_token = open(TOKEN_FILE, 'r').read().strip() + api_token = open(TOKEN_FILE, "r").read().strip() except IOError: print(f" ---> Missing Digital Ocean API token: {TOKEN_FILE}") exit() @@ -25,20 +25,20 @@ except IOError: outfile = f"/srv/newsblur/ansible/inventories/digital_ocean{'.old' if OLD else ''}.ini" # Install from https://github.com/do-community/do-ansible-inventory/releases -ansible_inventory_cmd = f'do-ansible-inventory -t {api_token} --out {outfile}' +ansible_inventory_cmd = f"do-ansible-inventory -t {api_token} --out {outfile}" subprocess.call(ansible_inventory_cmd, shell=True) -with open(outfile, 'r') as original: +with open(outfile, "r") as original: data = original.read() -with open(outfile, 'w') as modified: +with open(outfile, "w") as modified: modified.write("127.0.0.1 ansible_connection=local\n" + data) -exit() # Too many requests if we run the below code +exit() # Too many requests if we run the below code do = digitalocean.Manager(token=api_token) droplets = do.get_all_droplets() -print("\n ---> Checking droplets: %s\n" % (' '.join([d.name for d in droplets]))) +print("\n ---> Checking droplets: %s\n" % (" ".join([d.name for d in droplets]))) def check_droplets_created(): @@ -46,8 +46,8 @@ def check_droplets_created(): droplets = do.get_all_droplets() for instance in droplets: - if instance.status == 'new': - print(".", end=' ') + if instance.status == "new": + print(".", end=" ") sys.stdout.flush() i += 1 time.sleep(i) @@ -56,6 +56,7 @@ def check_droplets_created(): print(" ---> All booted!") return True + i = 0 while True: if check_droplets_created(): diff --git a/api/newsblur.py b/api/newsblur.py index acdd2e437..f417fe889 100644 --- a/api/newsblur.py +++ b/api/newsblur.py @@ -13,342 +13,318 @@ API_URL = "https://www.newsblur.com/" # API_URL = "https://nb.local.host:8000/" -class request(): - +class request: opener = urllib.request.build_opener(urllib.request.HTTPCookieProcessor(http.cookiejar.CookieJar())) - - def __init__(self, endpoint=None, method='get'): + + def __init__(self, endpoint=None, method="get"): self.endpoint = 
endpoint self.method = method def __call__(self, func): def wrapped(*args, **kwargs): params = func(*args, **kwargs) or {} - url = self.endpoint if self.endpoint else params.pop('url') + url = self.endpoint if self.endpoint else params.pop("url") params = urllib.parse.urlencode(params) - url = "%s%s" % (API_URL, url) - + url = "%s%s" % (API_URL, url) + response = self.opener.open(url, params).read() - + return json.loads(response) + return wrapped + class API: - - @request('api/login', method='post') + @request("api/login", method="post") def login(self, username, password): - ''' + """ Login as an existing user. - If a user has no password set, you cannot just send any old password. + If a user has no password set, you cannot just send any old password. Required parameters, username and password, must be of string type. - ''' - return { - 'username': username, - 'password': password - } + """ + return {"username": username, "password": password} - @request('api/logout') + @request("api/logout") def logout(self): - ''' + """ Logout the currently logged in user. - ''' + """ return - @request('api/signup') + @request("api/signup") def signup(self, username, password, email): - ''' + """ Create a new user. All three required parameters must be of type string. - ''' - return { - 'signup_username': username, - 'signup_password': password, - 'signup_email': email - } + """ + return {"signup_username": username, "signup_password": password, "signup_email": email} - @request('rss_feeds/search_feed') + @request("rss_feeds/search_feed") def search_feed(self, address, offset=0): - ''' + """ Retrieve information about a feed from its website or RSS address. Parameter address must be of type string while parameter offset must be an integer. Will return a feed. - ''' - return { - 'address': address, - 'offset': offset - } + """ + return {"address": address, "offset": offset} - @request('reader/feeds') + @request("reader/feeds") def feeds(self, include_favicons=True, flat=False): - ''' + """ Retrieve a list of feeds to which a user is actively subscribed. Includes the 3 unread counts (positive, neutral, negative), as well as optional favicons. - ''' - return { - 'include_favicons': include_favicons, - 'flat': flat - } + """ + return {"include_favicons": include_favicons, "flat": flat} - @request('reader/favicons') + @request("reader/favicons") def favicons(self, feeds=None): - ''' - Retrieve a list of favicons for a list of feeds. - Used when combined with /reader/feeds and include_favicons=false, so the feeds request contains far less data. - Useful for mobile devices, but requires a second request. - ''' + """ + Retrieve a list of favicons for a list of feeds. + Used when combined with /reader/feeds and include_favicons=false, so the feeds request contains far less data. + Useful for mobile devices, but requires a second request. + """ data = [] for feed in feeds: - data.append( ("feeds", feed) ) + data.append(("feeds", feed)) return data @request() def page(self, feed_id): - ''' + """ Retrieve the original page from a single feed. - ''' - return { - 'url': 'reader/page/%s' % feed_id - } + """ + return {"url": "reader/page/%s" % feed_id} @request() def feed(self, feed_id, page=1): - ''' + """ Retrieve the stories from a single feed. 
- ''' + """ return { - 'url': 'reader/feed/%s' % feed_id, - 'page': page, + "url": "reader/feed/%s" % feed_id, + "page": page, } - @request('reader/refresh_feeds') + @request("reader/refresh_feeds") def refresh_feeds(self): - ''' + """ Up-to-the-second unread counts for each active feed. Poll for these counts no more than once a minute. - ''' + """ return - @request('reader/feeds_trainer') + @request("reader/feeds_trainer") def feeds_trainer(self, feed_id=None): - ''' - Retrieves all popular and known intelligence classifiers. - Also includes user's own classifiers. - ''' + """ + Retrieves all popular and known intelligence classifiers. + Also includes user's own classifiers. + """ return { - 'feed_id': feed_id, + "feed_id": feed_id, } - + @request() def statistics(self, feed_id=None): - ''' + """ If you only want a user's classifiers, use /classifiers/:id. Omit the feed_id to get all classifiers for all subscriptions. - ''' - return { - 'url': 'rss_feeds/statistics/%d' % feed_id - } - - @request('rss_feeds/feed_autocomplete') + """ + return {"url": "rss_feeds/statistics/%d" % feed_id} + + @request("rss_feeds/feed_autocomplete") def feed_autocomplete(self, term): - ''' + """ Get a list of feeds that contain a search phrase. Searches by feed address, feed url, and feed title, in that order. Will only show sites with 2+ subscribers. - ''' - return { - 'term': term - } + """ + return {"term": term} - @request('reader/starred_stories') + @request("reader/starred_stories") def starred_stories(self, page=1): - ''' + """ Retrieve a user's starred stories. - ''' + """ return { - 'page': page, + "page": page, } - @request('reader/river_stories') + @request("reader/river_stories") def river_stories(self, feeds, page=1, read_stories_count=0): - ''' + """ Retrieve stories from a collection of feeds. This is known as the River of News. Stories are ordered in reverse chronological order. `read_stories_count` is the number of stories that have been read in this continuation, so NewsBlur can efficiently skip those stories when retrieving new stories. Takes an array of feed ids. - ''' - - data = [ ('page', page), ('read_stories_count', read_stories_count) ] + """ + + data = [("page", page), ("read_stories_count", read_stories_count)] for feed in feeds: - data.append( ("feeds", feed) ) + data.append(("feeds", feed)) return data - - @request('reader/mark_story_hashes_as_read') + + @request("reader/mark_story_hashes_as_read") def mark_story_hashes_as_read(self, story_hashes): - ''' - Mark stories as read using their unique story_hash. - ''' + """ + Mark stories as read using their unique story_hash. + """ data = [] for hash in story_hashes: - data.append( ("story_hash", hash) ) + data.append(("story_hash", hash)) return data - @request('reader/mark_story_as_read') + @request("reader/mark_story_as_read") def mark_story_as_read(self, feed_id, story_ids): - ''' - Mark stories as read. - Multiple story ids can be sent at once. - Each story must be from the same feed. - Takes an array of story ids. - ''' - - data = [ ('feed_id', feed_id) ] + """ + Mark stories as read. + Multiple story ids can be sent at once. + Each story must be from the same feed. + Takes an array of story ids. + """ + + data = [("feed_id", feed_id)] for story_id in story_ids: - data.append( ("story_id", story_id) ) + data.append(("story_id", story_id)) return data - @request('reader/mark_story_as_starred') + @request("reader/mark_story_as_starred") def mark_story_as_starred(self, feed_id, story_id): - ''' + """ Mark a story as starred (saved). 
- ''' + """ return { - 'feed_id': feed_id, - 'story_id': story_id, + "feed_id": feed_id, + "story_id": story_id, } - @request('reader/mark_all_as_read') + @request("reader/mark_all_as_read") def mark_all_as_read(self, days=0): - ''' + """ Mark all stories in a feed or list of feeds as read. - ''' + """ return { - 'days': days, + "days": days, } - @request('reader/add_url') - def add_url(self, url, folder=''): - ''' - Add a feed by its URL. + @request("reader/add_url") + def add_url(self, url, folder=""): + """ + Add a feed by its URL. Can be either the RSS feed or the website itself. - ''' + """ return { - 'url': url, - 'folder': folder, + "url": url, + "folder": folder, } - @request('reader/add_folder') - def add_folder(self, folder, parent_folder=''): - ''' + @request("reader/add_folder") + def add_folder(self, folder, parent_folder=""): + """ Add a new folder. - ''' + """ return { - 'folder': folder, - 'parent_folder': parent_folder, + "folder": folder, + "parent_folder": parent_folder, } - - @request('reader/rename_feed') + + @request("reader/rename_feed") def rename_feed(self, feed_id, feed_title): - ''' + """ Rename a feed title. Only the current user will see the new title. - ''' + """ return { - 'feed_id': feed_id, - 'feed_title': feed_title, + "feed_id": feed_id, + "feed_title": feed_title, } - - @request('reader/delete_feed') + + @request("reader/delete_feed") def delete_feed(self, feed_id, in_folder): - ''' + """ Unsubscribe from a feed. Removes it from the folder. - Set the in_folder parameter to remove a feed from the correct + Set the in_folder parameter to remove a feed from the correct folder, in case the user is subscribed to the feed in multiple folders. - ''' + """ return { - 'feed_id': feed_id, - 'in_folder': in_folder, + "feed_id": feed_id, + "in_folder": in_folder, } - - @request('reader/rename_folder') + + @request("reader/rename_folder") def rename_folder(self, folder_to_rename, new_folder_name, in_folder): - ''' + """ Rename a folder. - ''' + """ return { - 'folder_to_rename': folder_to_rename, - 'new_folder_name': new_folder_name, - 'in_folder': in_folder, + "folder_to_rename": folder_to_rename, + "new_folder_name": new_folder_name, + "in_folder": in_folder, } - - @request('reader/delete_folder') + + @request("reader/delete_folder") def delete_folder(self, folder_to_delete, in_folder): - ''' + """ Delete a folder and unsubscribe from all feeds inside. - ''' + """ return { - 'folder_to_delete': folder_to_delete, - 'in_folder': in_folder, + "folder_to_delete": folder_to_delete, + "in_folder": in_folder, } - - @request('reader/mark_feed_as_read') + + @request("reader/mark_feed_as_read") def mark_feed_as_read(self, feed_ids): - ''' + """ Mark a list of feeds as read. Takes an array of feeds. - ''' + """ data = [] for feed in feed_ids: - data.append( ("feed_id", feed) ) + data.append(("feed_id", feed)) return data - @request('reader/save_feed_order') + @request("reader/save_feed_order") def save_feed_order(self, folders): - ''' + """ Reorder feeds and move them around between folders. The entire folder structure needs to be serialized. - ''' + """ return { - 'folders': folders, + "folders": folders, } @request() def classifier(self, feed_id): - ''' - Get the intelligence classifiers for a user's site. - Only includes the user's own classifiers. - Use /reader/feeds_trainer for popular classifiers. - ''' + """ + Get the intelligence classifiers for a user's site. + Only includes the user's own classifiers. + Use /reader/feeds_trainer for popular classifiers. 
+ """ return { - 'url': '/classifier/%d' % feed_id, + "url": "/classifier/%d" % feed_id, } - @request('classifier/save') + @request("classifier/save") def classifier_save(self, like_type, dislike_type, remove_like_type, remove_dislike_type): - ''' + """ Save intelligence classifiers (tags, titles, authors, and the feed) for a feed. - + TODO: Make this usable. - ''' + """ raise NotImplemented - - @request('import/opml_export') + @request("import/opml_export") def opml_export(self): - ''' + """ Download a backup of feeds and folders as an OPML file. Contains folders and feeds in XML; useful for importing in another RSS reader. - ''' + """ return - - @request('import/opml_upload') - def opml_upload(self, opml_file): - ''' - Upload an OPML file. - ''' - f = open(opml_file) - return { - 'file': f - } - + @request("import/opml_upload") + def opml_upload(self, opml_file): + """ + Upload an OPML file. + """ + f = open(opml_file) + return {"file": f} diff --git a/apps/analyzer/classifier.py b/apps/analyzer/classifier.py index 686d3720b..360472029 100644 --- a/apps/analyzer/classifier.py +++ b/apps/analyzer/classifier.py @@ -2,8 +2,8 @@ from apps.analyzer.models import Category, FeatureCategory from django.db.models.aggregates import Sum import math + class Classifier: - def __init__(self, user, feed, phrases): self.user = user self.feed = feed @@ -11,7 +11,7 @@ class Classifier: def get_features(self, doc): found = {} - + for phrase in self.phrases: if phrase in doc: if phrase in found: @@ -20,36 +20,40 @@ class Classifier: found[phrase] = 1 return found - + def increment_feature(self, feature, category): - count = self.feature_count(feature,category) - if count==0: + count = self.feature_count(feature, category) + if count == 0: fc = FeatureCategory(user=self.user, feed=self.feed, feature=feature, category=category, count=1) fc.save() else: - fc = FeatureCategory.objects.get(user=self.user, feed=self.feed, feature=feature, category=category) + fc = FeatureCategory.objects.get( + user=self.user, feed=self.feed, feature=feature, category=category + ) fc.count = count + 1 fc.save() - + def feature_count(self, feature, category): if isinstance(category, Category): category = category.category - + try: - feature_count = FeatureCategory.objects.get(user=self.user, feed=self.feed, feature=feature, category=category) + feature_count = FeatureCategory.objects.get( + user=self.user, feed=self.feed, feature=feature, category=category + ) except FeatureCategory.DoesNotExist: return 0 else: return float(feature_count.count) - def increment_category(self,category): + def increment_category(self, category): count = self.category_count(category) - if count==0: + if count == 0: category = Category(user=self.user, feed=self.feed, category=category, count=1) category.save() else: category = Category.objects.get(user=self.user, feed=self.feed, category=category) - category.count = count+1 + category.count = count + 1 category.save() def category_count(self, category): @@ -68,12 +72,12 @@ class Classifier: return categories def totalcount(self): - categories = Category.objects.filter(user=self.user, feed=self.feed).aggregate(sum=Sum('count')) - return categories['sum'] + categories = Category.objects.filter(user=self.user, feed=self.feed).aggregate(sum=Sum("count")) + return categories["sum"] def train(self, item, category): features = self.get_features(item) - + # Increment the count for every feature with this category for feature in features: self.increment_feature(feature, category) @@ -84,7 +88,7 @@ class 
Classifier: def feature_probability(self, feature, category): if self.category_count(category) == 0: return 0 - # The total number of times this feature appeared in this + # The total number of times this feature appeared in this # category divided by the total number of items in this category return self.feature_count(feature, category) / self.category_count(category) @@ -96,21 +100,20 @@ class Classifier: totals = sum([self.feature_count(feature, c) for c in self.categories()]) # Calculate the weighted average - bp = ((weight*ap) + (totals*basic_prob)) / (weight+totals) + bp = ((weight * ap) + (totals * basic_prob)) / (weight + totals) print(feature, category, basic_prob, totals, bp) return bp class FisherClassifier(Classifier): - def __init__(self, user, feed, phrases): Classifier.__init__(self, user, feed, phrases) self.minimums = {} - + def category_probability(self, feature, category): - # The frequency of this feature in this category + # The frequency of this feature in this category clf = self.feature_probability(feature, category) - if clf==0: + if clf == 0: return 0 # The frequency of this feature in all the categories @@ -119,54 +122,53 @@ class FisherClassifier(Classifier): # The probability is the frequency in this category divided by # the overall frequency p = clf / freqsum - + return p - + def fisher_probability(self, item, category): # Multiply all the probabilities together - p = .5 + p = 0.5 features = self.get_features(item) if features: p = 1 - + for feature in features: - p *= (self.weighted_probability(feature, category, self.category_probability)) + p *= self.weighted_probability(feature, category, self.category_probability) # Take the natural log and multiply by -2 - fscore = -2*math.log(p) + fscore = -2 * math.log(p) # Use the inverse chi2 function to get a probability - return self.invchi2(fscore,len(features)*2) - + return self.invchi2(fscore, len(features) * 2) + def invchi2(self, chi, df): m = chi / 2.0 sum = term = math.exp(-m) - for i in range(1, df//2): + for i in range(1, df // 2): term *= m / i sum += term return min(sum, 1.0) - def setminimum(self, category, min): self.minimums[category] = min - + def getminimum(self, category): if category not in self.minimums: return 0 return self.minimums[category] - - def classify(self,item,default=None): + + def classify(self, item, default=None): # Loop through looking for the best result best = default max = 0.0 print(self.categories(), item) for category in self.categories(): - p=self.fisher_probability(item, category) + p = self.fisher_probability(item, category) # Make sure it exceeds its minimum if p > self.getminimum(category) and p > max: best = category max = p - - return best \ No newline at end of file + + return best diff --git a/apps/analyzer/feed_filter.py b/apps/analyzer/feed_filter.py index b900ec989..7b3a6bbcf 100644 --- a/apps/analyzer/feed_filter.py +++ b/apps/analyzer/feed_filter.py @@ -6,36 +6,38 @@ import datetime import re import math + def entry_features(self, entry): - splitter=re.compile('\\W*') - f={} + splitter = re.compile("\\W*") + f = {} # Extract the title words and annotate - titlewords=[s.lower() for s in splitter.split(entry['title']) - if len(s)>2 and len(s)<20] - - for w in titlewords: f['Title:'+w]=1 + titlewords = [s.lower() for s in splitter.split(entry["title"]) if len(s) > 2 and len(s) < 20] + + for w in titlewords: + f["Title:" + w] = 1 # Extract the summary words - summarywords=[s.lower() for s in splitter.split(entry['summary']) - if len(s)>2 and len(s)<20] + 
summarywords = [s.lower() for s in splitter.split(entry["summary"]) if len(s) > 2 and len(s) < 20] # Count uppercase words - uc=0 + uc = 0 for i in range(len(summarywords)): - w=summarywords[i] - f[w]=1 - if w.isupper(): uc+=1 + w = summarywords[i] + f[w] = 1 + if w.isupper(): + uc += 1 # Get word pairs in summary as features - if i0.3: f['UPPERCASE']=1 + # UPPERCASE is a virtual word flagging too much shouting + if float(uc) / len(summarywords) > 0.3: + f["UPPERCASE"] = 1 return f diff --git a/apps/analyzer/forms.py b/apps/analyzer/forms.py index 5377f3b0c..6dc21e22e 100644 --- a/apps/analyzer/forms.py +++ b/apps/analyzer/forms.py @@ -8,25 +8,22 @@ from django.contrib.auth.models import User from apps.profile.models import change_password, blank_authenticate, MGiftCode from apps.social.models import MSocialProfile + class PopularityQueryForm(forms.Form): - email = forms.CharField(widget=forms.TextInput(), - label="Your email address", - required=False) - query = forms.CharField(widget=forms.TextInput(), - label="Keywords", - required=False) + email = forms.CharField(widget=forms.TextInput(), label="Your email address", required=False) + query = forms.CharField(widget=forms.TextInput(), label="Keywords", required=False) def __init__(self, *args, **kwargs): super(PopularityQueryForm, self).__init__(*args, **kwargs) - + def clean_email(self): - if not self.cleaned_data['email']: - raise forms.ValidationError('Please enter in an email address.') + if not self.cleaned_data["email"]: + raise forms.ValidationError("Please enter in an email address.") + + return self.cleaned_data["email"] - return self.cleaned_data['email'] - def clean_query(self): - if not self.cleaned_data['query']: - raise forms.ValidationError('Please enter in a keyword search query.') + if not self.cleaned_data["query"]: + raise forms.ValidationError("Please enter in a keyword search query.") - return self.cleaned_data['query'] + return self.cleaned_data["query"] diff --git a/apps/analyzer/lda.py b/apps/analyzer/lda.py index b354d1714..2d9d94f4b 100644 --- a/apps/analyzer/lda.py +++ b/apps/analyzer/lda.py @@ -9,226 +9,234 @@ from nltk import FreqDist def lgammln(xx): - """ - Returns the gamma function of xx. - Gamma(z) = Integral(0,infinity) of t^(z-1)exp(-t) dt. - (Adapted from: Numerical Recipies in C.) - - Usage: lgammln(xx) - - Copied from stats.py by strang@nmr.mgh.harvard.edu - """ + """ + Returns the gamma function of xx. + Gamma(z) = Integral(0,infinity) of t^(z-1)exp(-t) dt. + (Adapted from: Numerical Recipies in C.) 
+ + Usage: lgammln(xx) + + Copied from stats.py by strang@nmr.mgh.harvard.edu + """ + + coeff = [76.18009173, -86.50532033, 24.01409822, -1.231739516, 0.120858003e-2, -0.536382e-5] + x = xx - 1.0 + tmp = x + 5.5 + tmp = tmp - (x + 0.5) * log(tmp) + ser = 1.0 + for j in range(len(coeff)): + x = x + 1 + ser = ser + coeff[j] / x + return -tmp + log(2.50662827465 * ser) - coeff = [76.18009173, -86.50532033, 24.01409822, -1.231739516, - 0.120858003e-2, -0.536382e-5] - x = xx - 1.0 - tmp = x + 5.5 - tmp = tmp - (x+0.5)*log(tmp) - ser = 1.0 - for j in range(len(coeff)): - x = x + 1 - ser = ser + coeff[j]/x - return -tmp + log(2.50662827465*ser) def log_sum(log_a, log_b): - if log_a < log_b: - return log_b + log(1 + exp(log_a - log_b)) - else: - return log_a + log(1 + exp(log_b - log_a)) + if log_a < log_b: + return log_b + log(1 + exp(log_a - log_b)) + else: + return log_a + log(1 + exp(log_b - log_a)) + def log_normalize(dist): - normalizer = reduce(log_sum, dist) - for ii in xrange(len(dist)): - dist[ii] -= normalizer - return dist + normalizer = reduce(log_sum, dist) + for ii in xrange(len(dist)): + dist[ii] -= normalizer + return dist + def log_sample(dist): - """ - Sample a key from a dictionary using the values as probabilities (unnormalized) - """ - cutoff = random() - dist = log_normalize(dist) - #print "Normalizer: ", normalizer + """ + Sample a key from a dictionary using the values as probabilities (unnormalized) + """ + cutoff = random() + dist = log_normalize(dist) + # print "Normalizer: ", normalizer + + current = 0 + for ii in xrange(len(dist)): + current += exp(dist[ii]) + if current >= cutoff: + # print "Chose", i + return ii + assert False, "Didn't choose anything: %f %f" % (cutoff, current) - current = 0 - for ii in xrange(len(dist)): - current += exp(dist[ii]) - if current >= cutoff: - #print "Chose", i - return ii - assert False, "Didn't choose anything: %f %f" % (cutoff, current) def create_data(stories, lang="english", doc_limit=-1, delimiter=""): - from nltk.tokenize.treebank import TreebankWordTokenizer - tokenizer = TreebankWordTokenizer() + from nltk.tokenize.treebank import TreebankWordTokenizer - from nltk.corpus import stopwords - stop = stopwords.words('english') - - from string import ascii_lowercase - - docs = {} - print("Found %i stories" % stories.count()) - for story in stories: - text = zlib.decompress(story.story_content_z) - # text = story.story_title - text = ''.join(BeautifulSoup(text, features="lxml").findAll(text=True)).lower() - if delimiter: - sections = text.split(delimiter) - else: - sections = [text] - - if doc_limit > 0 and len(docs) > doc_limit: - print("Passed doc limit %i" % len(docs)) - break - print(story.story_title, len(sections)) + tokenizer = TreebankWordTokenizer() + + from nltk.corpus import stopwords + + stop = stopwords.words("english") + + from string import ascii_lowercase + + docs = {} + print("Found %i stories" % stories.count()) + for story in stories: + text = zlib.decompress(story.story_content_z) + # text = story.story_title + text = "".join(BeautifulSoup(text, features="lxml").findAll(text=True)).lower() + if delimiter: + sections = text.split(delimiter) + else: + sections = [text] + + if doc_limit > 0 and len(docs) > doc_limit: + print("Passed doc limit %i" % len(docs)) + break + print(story.story_title, len(sections)) + + for jj in xrange(len(sections)): + docs["%s-%i" % (story.story_title, jj)] = [ + x + for x in tokenizer.tokenize(sections[jj]) + if (not x in stop) and (min(y in ascii_lowercase for y in x)) + ] + return 
docs - for jj in xrange(len(sections)): - docs["%s-%i" % (story.story_title, jj)] = [x for x in tokenizer.tokenize(sections[jj]) \ - if (not x in stop) and \ - (min(y in ascii_lowercase for y in x))] - return docs class LdaSampler: - def __init__(self, num_topics, doc_smoothing = 0.1, topic_smoothing = 0.01): - self._docs = defaultdict(FreqDist) - self._topics = defaultdict(FreqDist) - self._K = num_topics - self._state = None + def __init__(self, num_topics, doc_smoothing=0.1, topic_smoothing=0.01): + self._docs = defaultdict(FreqDist) + self._topics = defaultdict(FreqDist) + self._K = num_topics + self._state = None - self._alpha = doc_smoothing - self._lambda = topic_smoothing + self._alpha = doc_smoothing + self._lambda = topic_smoothing - def optimize_hyperparameters(self, samples=5, step = 3.0): - rawParam = [log(self._alpha), log(self._lambda)] + def optimize_hyperparameters(self, samples=5, step=3.0): + rawParam = [log(self._alpha), log(self._lambda)] - for ii in xrange(samples): - lp_old = self.lhood(self._alpha, self._lambda) - lp_new = log(random()) + lp_old - print("OLD: %f\tNEW: %f at (%f, %f)" % (lp_old, lp_new, self._alpha, self._lambda)) + for ii in xrange(samples): + lp_old = self.lhood(self._alpha, self._lambda) + lp_new = log(random()) + lp_old + print("OLD: %f\tNEW: %f at (%f, %f)" % (lp_old, lp_new, self._alpha, self._lambda)) - l = [x - random() * step for x in rawParam] - r = [x + step for x in rawParam] + l = [x - random() * step for x in rawParam] + r = [x + step for x in rawParam] - for jj in xrange(100): - rawParamNew = [l[x] + random() * (r[x] - l[x]) for x in xrange(len(rawParam))] - trial_alpha, trial_lambda = [exp(x) for x in rawParamNew] - lp_test = self.lhood(trial_alpha, trial_lambda) - #print("TRYING: %f (need %f) at (%f, %f)" % (lp_test - lp_old, lp_new - lp_old, trial_alpha, trial_lambda)) + for jj in xrange(100): + rawParamNew = [l[x] + random() * (r[x] - l[x]) for x in xrange(len(rawParam))] + trial_alpha, trial_lambda = [exp(x) for x in rawParamNew] + lp_test = self.lhood(trial_alpha, trial_lambda) + # print("TRYING: %f (need %f) at (%f, %f)" % (lp_test - lp_old, lp_new - lp_old, trial_alpha, trial_lambda)) - if lp_test > lp_new: - print(jj) - self._alpha = exp(rawParamNew[0]) - self._lambda = exp(rawParamNew[1]) - self._alpha_sum = self._alpha * self._K - self._lambda_sum = self._lambda * self._W - rawParam = [log(self._alpha), log(self._lambda)] - break - else: - for dd in xrange(len(rawParamNew)): - if rawParamNew[dd] < rawParam[dd]: - l[dd] = rawParamNew[dd] - else: - r[dd] = rawParamNew[dd] - assert l[dd] <= rawParam[dd] - assert r[dd] >= rawParam[dd] + if lp_test > lp_new: + print(jj) + self._alpha = exp(rawParamNew[0]) + self._lambda = exp(rawParamNew[1]) + self._alpha_sum = self._alpha * self._K + self._lambda_sum = self._lambda * self._W + rawParam = [log(self._alpha), log(self._lambda)] + break + else: + for dd in xrange(len(rawParamNew)): + if rawParamNew[dd] < rawParam[dd]: + l[dd] = rawParamNew[dd] + else: + r[dd] = rawParamNew[dd] + assert l[dd] <= rawParam[dd] + assert r[dd] >= rawParam[dd] - print("\nNew hyperparameters (%i): %f %f" % (jj, self._alpha, self._lambda)) + print("\nNew hyperparameters (%i): %f %f" % (jj, self._alpha, self._lambda)) - def lhood(self, doc_smoothing, voc_smoothing): - doc_sum = doc_smoothing * self._K - voc_sum = voc_smoothing * self._W + def lhood(self, doc_smoothing, voc_smoothing): + doc_sum = doc_smoothing * self._K + voc_sum = voc_smoothing * self._W - val = 0.0 - val += lgammln(doc_sum) * 
len(self._docs) - val -= lgammln(doc_smoothing) * self._K * len(self._docs) - for ii in self._docs: - for jj in xrange(self._K): - val += lgammln(doc_smoothing + self._docs[ii][jj]) - val -= lgammln(doc_sum + self._docs[ii].N()) - - val += lgammln(voc_sum) * self._K - val -= lgammln(voc_smoothing) * self._W * self._K - for ii in self._topics: - for jj in self._vocab: - val += lgammln(voc_smoothing + self._topics[ii][jj]) - val -= lgammln(voc_sum + self._topics[ii].N()) - return val + val = 0.0 + val += lgammln(doc_sum) * len(self._docs) + val -= lgammln(doc_smoothing) * self._K * len(self._docs) + for ii in self._docs: + for jj in xrange(self._K): + val += lgammln(doc_smoothing + self._docs[ii][jj]) + val -= lgammln(doc_sum + self._docs[ii].N()) - def initialize(self, data): - """ - Data should be keyed by doc-id, values should be iterable - """ + val += lgammln(voc_sum) * self._K + val -= lgammln(voc_smoothing) * self._W * self._K + for ii in self._topics: + for jj in self._vocab: + val += lgammln(voc_smoothing + self._topics[ii][jj]) + val -= lgammln(voc_sum + self._topics[ii].N()) + return val - self._alpha_sum = self._alpha * self._K - self._state = defaultdict(dict) + def initialize(self, data): + """ + Data should be keyed by doc-id, values should be iterable + """ - self._vocab = set([]) - for dd in data: - for ww in xrange(len(data[dd])): - # Learn all the words we'll see - self._vocab.add(data[dd][ww]) + self._alpha_sum = self._alpha * self._K + self._state = defaultdict(dict) - # Initialize the state to unassigned - self._state[dd][ww] = -1 + self._vocab = set([]) + for dd in data: + for ww in xrange(len(data[dd])): + # Learn all the words we'll see + self._vocab.add(data[dd][ww]) - self._W = len(self._vocab) - self._lambda_sum = float(self._W) * self._lambda + # Initialize the state to unassigned + self._state[dd][ww] = -1 - self._data = data + self._W = len(self._vocab) + self._lambda_sum = float(self._W) * self._lambda - print("Initialized vocab of size %i" % len(self._vocab)) + self._data = data - def prob(self, doc, word, topic): - val = log(self._docs[doc][topic] + self._alpha) - # This is constant across a document, so we don't need to compute this term - # val -= log(self._docs[doc].N() + self._alpha_sum) - - val += log(self._topics[topic][word] + self._lambda) - val -= log(self._topics[topic].N() + self._lambda_sum) + print("Initialized vocab of size %i" % len(self._vocab)) - # print doc, word, topic, self._docs[doc][topic], self._topics[topic][word] - - return val + def prob(self, doc, word, topic): + val = log(self._docs[doc][topic] + self._alpha) + # This is constant across a document, so we don't need to compute this term + # val -= log(self._docs[doc].N() + self._alpha_sum) - def sample_word(self, doc, position): - word = self._data[doc][position] + val += log(self._topics[topic][word] + self._lambda) + val -= log(self._topics[topic].N() + self._lambda_sum) - old_topic = self._state[doc][position] - if old_topic != -1: - self.change_count(doc, word, old_topic, -1) + # print doc, word, topic, self._docs[doc][topic], self._topics[topic][word] - probs = [self.prob(doc, self._data[doc][position], x) for x in xrange(self._K)] - new_topic = log_sample(probs) - #print doc, word, new_topic + return val - self.change_count(doc, word, new_topic, 1) - self._state[doc][position] = new_topic + def sample_word(self, doc, position): + word = self._data[doc][position] - def change_count(self, doc, word, topic, delta): - self._docs[doc].inc(topic, delta) - 
self._topics[topic].inc(word, delta) + old_topic = self._state[doc][position] + if old_topic != -1: + self.change_count(doc, word, old_topic, -1) - def sample(self, iterations = 100, hyper_delay = 10): - assert self._state - for ii in xrange(iterations): - for dd in self._data: - for ww in xrange(len(self._data[dd])): - self.sample_word(dd, ww) - print("Iteration %i %f" % (ii, self.lhood(self._alpha, self._lambda))) - if hyper_delay >= 0 and ii % hyper_delay == 0: - self.optimize_hyperparameters() + probs = [self.prob(doc, self._data[doc][position], x) for x in xrange(self._K)] + new_topic = log_sample(probs) + # print doc, word, new_topic - def print_topics(self, num_words=15): - for ii in self._topics: - print("%i:%s\n" % (ii, "\t".join(self._topics[ii].keys()[:num_words]))) + self.change_count(doc, word, new_topic, 1) + self._state[doc][position] = new_topic + + def change_count(self, doc, word, topic, delta): + self._docs[doc].inc(topic, delta) + self._topics[topic].inc(word, delta) + + def sample(self, iterations=100, hyper_delay=10): + assert self._state + for ii in xrange(iterations): + for dd in self._data: + for ww in xrange(len(self._data[dd])): + self.sample_word(dd, ww) + print("Iteration %i %f" % (ii, self.lhood(self._alpha, self._lambda))) + if hyper_delay >= 0 and ii % hyper_delay == 0: + self.optimize_hyperparameters() + + def print_topics(self, num_words=15): + for ii in self._topics: + print("%i:%s\n" % (ii, "\t".join(self._topics[ii].keys()[:num_words]))) if __name__ == "__main__": - stories = MStory.objects(story_feed_id=199) - d = create_data(stories, doc_limit=250, delimiter="") - lda = LdaSampler(5) - lda.initialize(d) + stories = MStory.objects(story_feed_id=199) + d = create_data(stories, doc_limit=250, delimiter="") + lda = LdaSampler(5) + lda.initialize(d) - lda.sample(50) - lda.print_topics() \ No newline at end of file + lda.sample(50) + lda.print_topics() diff --git a/apps/analyzer/migrations/0001_initial.py b/apps/analyzer/migrations/0001_initial.py index b83bf543f..a8b589883 100644 --- a/apps/analyzer/migrations/0001_initial.py +++ b/apps/analyzer/migrations/0001_initial.py @@ -6,34 +6,49 @@ import django.db.models.deletion class Migration(migrations.Migration): - initial = True dependencies = [ - ('rss_feeds', '0001_initial'), + ("rss_feeds", "0001_initial"), migrations.swappable_dependency(settings.AUTH_USER_MODEL), ] operations = [ migrations.CreateModel( - name='Category', + name="Category", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('category', models.CharField(max_length=255)), - ('count', models.IntegerField(default=0)), - ('feed', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='rss_feeds.Feed')), - ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)), + ( + "id", + models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID"), + ), + ("category", models.CharField(max_length=255)), + ("count", models.IntegerField(default=0)), + ("feed", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="rss_feeds.Feed")), + ( + "user", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL + ), + ), ], ), migrations.CreateModel( - name='FeatureCategory', + name="FeatureCategory", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('feature', models.CharField(max_length=255)), - 
('category', models.CharField(max_length=255)), - ('count', models.IntegerField(default=0)), - ('feed', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='rss_feeds.Feed')), - ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)), + ( + "id", + models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID"), + ), + ("feature", models.CharField(max_length=255)), + ("category", models.CharField(max_length=255)), + ("count", models.IntegerField(default=0)), + ("feed", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="rss_feeds.Feed")), + ( + "user", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL + ), + ), ], ), ] diff --git a/apps/analyzer/models.py b/apps/analyzer/models.py index dc3d01f55..b9eb99b6b 100644 --- a/apps/analyzer/models.py +++ b/apps/analyzer/models.py @@ -10,24 +10,26 @@ from apps.rss_feeds.models import Feed from apps.analyzer.tasks import EmailPopularityQuery from utils import log as logging + class FeatureCategory(models.Model): user = models.ForeignKey(User, on_delete=models.CASCADE) feed = models.ForeignKey(Feed, on_delete=models.CASCADE) feature = models.CharField(max_length=255) category = models.CharField(max_length=255) count = models.IntegerField(default=0) - + def __str__(self): - return '%s - %s (%s)' % (self.feature, self.category, self.count) + return "%s - %s (%s)" % (self.feature, self.category, self.count) + class Category(models.Model): user = models.ForeignKey(User, on_delete=models.CASCADE) feed = models.ForeignKey(Feed, on_delete=models.CASCADE) category = models.CharField(max_length=255) count = models.IntegerField(default=0) - + def __str__(self): - return '%s (%s)' % (self.category, self.count) + return "%s (%s)" % (self.category, self.count) class MPopularityQuery(mongo.Document): @@ -35,55 +37,53 @@ class MPopularityQuery(mongo.Document): query = mongo.StringField() is_emailed = mongo.BooleanField() creation_date = mongo.DateTimeField(default=datetime.datetime.now) - + meta = { - 'collection': 'popularity_query', - 'allow_inheritance': False, + "collection": "popularity_query", + "allow_inheritance": False, } - + def __str__(self): - return "%s - \"%s\"" % (self.email, self.query) + return '%s - "%s"' % (self.email, self.query) def queue_email(self): EmailPopularityQuery.delay(pk=str(self.pk)) - + @classmethod def ensure_all_sent(cls, queue=True): - for query in cls.objects.all().order_by('creation_date'): + for query in cls.objects.all().order_by("creation_date"): query.ensure_sent(queue=queue) - + def ensure_sent(self, queue=True): if self.is_emailed: logging.debug(" ---> Already sent %s" % self) return - + if queue: self.queue_email() else: self.send_email() - + def send_email(self, limit=5000): filename = Feed.xls_query_popularity(self.query, limit=limit) xlsx = open(filename, "r") - - params = { - 'query': self.query - } - text = render_to_string('mail/email_popularity_query.txt', params) - html = render_to_string('mail/email_popularity_query.xhtml', params) - subject = "Keyword popularity spreadsheet: \"%s\"" % self.query - msg = EmailMultiAlternatives(subject, text, - from_email='NewsBlur <%s>' % settings.HELLO_EMAIL, - to=['<%s>' % (self.email)]) + + params = {"query": self.query} + text = render_to_string("mail/email_popularity_query.txt", params) + html = render_to_string("mail/email_popularity_query.xhtml", params) + subject = 'Keyword popularity spreadsheet: "%s"' % self.query + msg = 
EmailMultiAlternatives( + subject, text, from_email="NewsBlur <%s>" % settings.HELLO_EMAIL, to=["<%s>" % (self.email)] + ) msg.attach_alternative(html, "text/html") - msg.attach(filename, xlsx.read(), 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet') + msg.attach(filename, xlsx.read(), "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet") msg.send() - + self.is_emailed = True self.save() - + logging.debug(" -> ~BB~FM~SBSent email for popularity query: %s" % self) - + class MClassifierTitle(mongo.Document): user_id = mongo.IntField() @@ -92,68 +92,69 @@ class MClassifierTitle(mongo.Document): title = mongo.StringField(max_length=255) score = mongo.IntField() creation_date = mongo.DateTimeField() - + meta = { - 'collection': 'classifier_title', - 'indexes': [('user_id', 'feed_id'), 'feed_id', ('user_id', 'social_user_id'), 'social_user_id'], - 'allow_inheritance': False, + "collection": "classifier_title", + "indexes": [("user_id", "feed_id"), "feed_id", ("user_id", "social_user_id"), "social_user_id"], + "allow_inheritance": False, } - + def __str__(self): user = User.objects.get(pk=self.user_id) return "%s - %s/%s: (%s) %s" % (user, self.feed_id, self.social_user_id, self.score, self.title[:30]) - - + + class MClassifierAuthor(mongo.Document): - user_id = mongo.IntField(unique_with=('feed_id', 'social_user_id', 'author')) + user_id = mongo.IntField(unique_with=("feed_id", "social_user_id", "author")) feed_id = mongo.IntField() social_user_id = mongo.IntField() author = mongo.StringField(max_length=255) score = mongo.IntField() creation_date = mongo.DateTimeField() - + meta = { - 'collection': 'classifier_author', - 'indexes': [('user_id', 'feed_id'), 'feed_id', ('user_id', 'social_user_id'), 'social_user_id'], - 'allow_inheritance': False, + "collection": "classifier_author", + "indexes": [("user_id", "feed_id"), "feed_id", ("user_id", "social_user_id"), "social_user_id"], + "allow_inheritance": False, } - + def __str__(self): user = User.objects.get(pk=self.user_id) return "%s - %s/%s: (%s) %s" % (user, self.feed_id, self.social_user_id, self.score, self.author[:30]) + class MClassifierTag(mongo.Document): - user_id = mongo.IntField(unique_with=('feed_id', 'social_user_id', 'tag')) + user_id = mongo.IntField(unique_with=("feed_id", "social_user_id", "tag")) feed_id = mongo.IntField() social_user_id = mongo.IntField() tag = mongo.StringField(max_length=255) score = mongo.IntField() creation_date = mongo.DateTimeField() - + meta = { - 'collection': 'classifier_tag', - 'indexes': [('user_id', 'feed_id'), 'feed_id', ('user_id', 'social_user_id'), 'social_user_id'], - 'allow_inheritance': False, + "collection": "classifier_tag", + "indexes": [("user_id", "feed_id"), "feed_id", ("user_id", "social_user_id"), "social_user_id"], + "allow_inheritance": False, } - + def __str__(self): user = User.objects.get(pk=self.user_id) return "%s - %s/%s: (%s) %s" % (user, self.feed_id, self.social_user_id, self.score, self.tag[:30]) - + class MClassifierFeed(mongo.Document): - user_id = mongo.IntField(unique_with=('feed_id', 'social_user_id')) + user_id = mongo.IntField(unique_with=("feed_id", "social_user_id")) feed_id = mongo.IntField() social_user_id = mongo.IntField() score = mongo.IntField() creation_date = mongo.DateTimeField() - + meta = { - 'collection': 'classifier_feed', - 'indexes': [('user_id', 'feed_id'), 'feed_id', ('user_id', 'social_user_id'), 'social_user_id'], - 'allow_inheritance': False, + "collection": "classifier_feed", + "indexes": [("user_id", 
"feed_id"), "feed_id", ("user_id", "social_user_id"), "social_user_id"], + "allow_inheritance": False, } - + def __str__(self): user = User.objects.get(pk=self.user_id) if self.feed_id: @@ -161,94 +162,105 @@ class MClassifierFeed(mongo.Document): else: feed = User.objects.get(pk=self.social_user_id) return "%s - %s/%s: (%s) %s" % (user, self.feed_id, self.social_user_id, self.score, feed) - + def compute_story_score(story, classifier_titles, classifier_authors, classifier_tags, classifier_feeds): intelligence = { - 'feed': apply_classifier_feeds(classifier_feeds, story['story_feed_id']), - 'author': apply_classifier_authors(classifier_authors, story), - 'tags': apply_classifier_tags(classifier_tags, story), - 'title': apply_classifier_titles(classifier_titles, story), + "feed": apply_classifier_feeds(classifier_feeds, story["story_feed_id"]), + "author": apply_classifier_authors(classifier_authors, story), + "tags": apply_classifier_tags(classifier_tags, story), + "title": apply_classifier_titles(classifier_titles, story), } score = 0 - score_max = max(intelligence['title'], - intelligence['author'], - intelligence['tags']) - score_min = min(intelligence['title'], - intelligence['author'], - intelligence['tags']) + score_max = max(intelligence["title"], intelligence["author"], intelligence["tags"]) + score_min = min(intelligence["title"], intelligence["author"], intelligence["tags"]) if score_max > 0: score = score_max elif score_min < 0: score = score_min if score == 0: - score = intelligence['feed'] - + score = intelligence["feed"] + return score - + + def apply_classifier_titles(classifiers, story): score = 0 for classifier in classifiers: - if classifier.feed_id != story['story_feed_id']: + if classifier.feed_id != story["story_feed_id"]: continue - if classifier.title.lower() in story['story_title'].lower(): + if classifier.title.lower() in story["story_title"].lower(): # print 'Titles: (%s) %s -- %s' % (classifier.title in story['story_title'], classifier.title, story['story_title']) score = classifier.score - if score > 0: return score + if score > 0: + return score return score - + + def apply_classifier_authors(classifiers, story): score = 0 for classifier in classifiers: - if classifier.feed_id != story['story_feed_id']: + if classifier.feed_id != story["story_feed_id"]: continue - if story.get('story_authors') and classifier.author == story.get('story_authors'): + if story.get("story_authors") and classifier.author == story.get("story_authors"): # print 'Authors: %s -- %s' % (classifier.author, story['story_authors']) score = classifier.score - if score > 0: return classifier.score + if score > 0: + return classifier.score return score - + + def apply_classifier_tags(classifiers, story): score = 0 for classifier in classifiers: - if classifier.feed_id != story['story_feed_id']: + if classifier.feed_id != story["story_feed_id"]: continue - if story['story_tags'] and classifier.tag in story['story_tags']: + if story["story_tags"] and classifier.tag in story["story_tags"]: # print 'Tags: (%s-%s) %s -- %s' % (classifier.tag in story['story_tags'], classifier.score, classifier.tag, story['story_tags']) score = classifier.score - if score > 0: return classifier.score + if score > 0: + return classifier.score return score - + + def apply_classifier_feeds(classifiers, feed, social_user_ids=None): - if not feed and not social_user_ids: return 0 + if not feed and not social_user_ids: + return 0 feed_id = None if feed: feed_id = feed if isinstance(feed, int) else feed.pk - + if 
social_user_ids and not isinstance(social_user_ids, list): social_user_ids = [social_user_ids] - + for classifier in classifiers: if classifier.feed_id == feed_id: # print 'Feeds: %s -- %s' % (classifier.feed_id, feed.pk) return classifier.score - if (social_user_ids and not classifier.feed_id and - classifier.social_user_id in social_user_ids): + if social_user_ids and not classifier.feed_id and classifier.social_user_id in social_user_ids: return classifier.score return 0 - -def get_classifiers_for_user(user, feed_id=None, social_user_id=None, classifier_feeds=None, classifier_authors=None, - classifier_titles=None, classifier_tags=None): + + +def get_classifiers_for_user( + user, + feed_id=None, + social_user_id=None, + classifier_feeds=None, + classifier_authors=None, + classifier_titles=None, + classifier_tags=None, +): params = dict(user_id=user.pk) if isinstance(feed_id, list): - params['feed_id__in'] = feed_id + params["feed_id__in"] = feed_id elif feed_id: - params['feed_id'] = feed_id + params["feed_id"] = feed_id if social_user_id: if isinstance(social_user_id, str): - social_user_id = int(social_user_id.replace('social:', '')) - params['social_user_id'] = social_user_id + social_user_id = int(social_user_id.replace("social:", "")) + params["social_user_id"] = social_user_id if classifier_authors is None: classifier_authors = list(MClassifierAuthor.objects(**params)) @@ -258,49 +270,56 @@ def get_classifiers_for_user(user, feed_id=None, social_user_id=None, classifier classifier_tags = list(MClassifierTag.objects(**params)) if classifier_feeds is None: if not social_user_id and feed_id: - params['social_user_id'] = 0 + params["social_user_id"] = 0 classifier_feeds = list(MClassifierFeed.objects(**params)) - + feeds = [] for f in classifier_feeds: if f.social_user_id and not f.feed_id: - feeds.append(('social:%s' % f.social_user_id, f.score)) + feeds.append(("social:%s" % f.social_user_id, f.score)) else: feeds.append((f.feed_id, f.score)) - + payload = { - 'feeds': dict(feeds), - 'authors': dict([(a.author, a.score) for a in classifier_authors]), - 'titles': dict([(t.title, t.score) for t in classifier_titles]), - 'tags': dict([(t.tag, t.score) for t in classifier_tags]), + "feeds": dict(feeds), + "authors": dict([(a.author, a.score) for a in classifier_authors]), + "titles": dict([(t.title, t.score) for t in classifier_titles]), + "tags": dict([(t.tag, t.score) for t in classifier_tags]), } - + return payload - -def sort_classifiers_by_feed(user, feed_ids=None, - classifier_feeds=None, - classifier_authors=None, - classifier_titles=None, - classifier_tags=None): + + +def sort_classifiers_by_feed( + user, + feed_ids=None, + classifier_feeds=None, + classifier_authors=None, + classifier_titles=None, + classifier_tags=None, +): def sort_by_feed(classifiers): feed_classifiers = defaultdict(list) for classifier in classifiers: feed_classifiers[classifier.feed_id].append(classifier) return feed_classifiers - + classifiers = {} if feed_ids: - classifier_feeds = sort_by_feed(classifier_feeds) + classifier_feeds = sort_by_feed(classifier_feeds) classifier_authors = sort_by_feed(classifier_authors) - classifier_titles = sort_by_feed(classifier_titles) - classifier_tags = sort_by_feed(classifier_tags) + classifier_titles = sort_by_feed(classifier_titles) + classifier_tags = sort_by_feed(classifier_tags) for feed_id in feed_ids: - classifiers[feed_id] = get_classifiers_for_user(user, feed_id=feed_id, - classifier_feeds=classifier_feeds[feed_id], - 
classifier_authors=classifier_authors[feed_id], - classifier_titles=classifier_titles[feed_id], - classifier_tags=classifier_tags[feed_id]) - + classifiers[feed_id] = get_classifiers_for_user( + user, + feed_id=feed_id, + classifier_feeds=classifier_feeds[feed_id], + classifier_authors=classifier_authors[feed_id], + classifier_titles=classifier_titles[feed_id], + classifier_tags=classifier_tags[feed_id], + ) + return classifiers diff --git a/apps/analyzer/phrase_filter.py b/apps/analyzer/phrase_filter.py index 70fe77c01..fe025f84b 100644 --- a/apps/analyzer/phrase_filter.py +++ b/apps/analyzer/phrase_filter.py @@ -1,39 +1,39 @@ import re from pprint import pprint + class PhraseFilter: - def __init__(self): self.phrases = {} - + def run(self, text, storyid): chunks = self.chunk(text) self.count_phrases(chunks, storyid) - + def print_phrases(self): pprint(self.phrases) - + def get_phrases(self): return self.phrases.keys() - + # =========== # = Chunker = # =========== - + def chunk(self, text): - chunks = [t.strip() for t in re.split('[^a-zA-Z-]+', text) if t] + chunks = [t.strip() for t in re.split("[^a-zA-Z-]+", text) if t] # chunks = self._lowercase(chunks) return chunks - + def _lowercase(self, chunks): return [c.lower() for c in chunks] - + # ================== # = Phrase Counter = # ================== - + def count_phrases(self, chunks, storyid): - for l in range(1, len(chunks)+1): + for l in range(1, len(chunks) + 1): combinations = self._get_combinations(chunks, l) # print "Combinations: %s" % combinations for phrase in combinations: @@ -41,23 +41,23 @@ class PhraseFilter: self.phrases[phrase] = [] if storyid not in self.phrases[phrase]: self.phrases[phrase].append(storyid) - + def _get_combinations(self, chunks, length): combinations = [] for i, chunk in enumerate(chunks): # 0,1,2,3,4,5,6 = 01 12 23 34 45 56 combination = [] for l in range(length): - if i+l < len(chunks): + if i + l < len(chunks): # print i, l, chunks[i+l], len(chunks) - combination.append(chunks[i+l]) - combinations.append(' '.join(combination)) + combination.append(chunks[i + l]) + combinations.append(" ".join(combination)) return combinations - + # ================= # = Phrase Paring = # ================= - + def pare_phrases(self): # Kill singles for phrase, counts in self.phrases.items(): @@ -67,27 +67,32 @@ class PhraseFilter: if len(phrase) < 4: del self.phrases[phrase] continue - + # Kill repeats for phrase in self.phrases.keys(): for phrase2 in self.phrases.keys(): - if phrase in self.phrases and len(phrase2) > len(phrase) and phrase in phrase2 and phrase != phrase2: + if ( + phrase in self.phrases + and len(phrase2) > len(phrase) + and phrase in phrase2 + and phrase != phrase2 + ): del self.phrases[phrase] - -if __name__ == '__main__': + + +if __name__ == "__main__": phrasefilter = PhraseFilter() - phrasefilter.run('House of the Day: 123 Atlantic Ave. #3', 1) - phrasefilter.run('House of the Day: 456 Plankton St. #3', 4) - phrasefilter.run('Coop of the Day: 321 Pacific St.', 2) - phrasefilter.run('Streetlevel: 393 Pacific St.', 11) - phrasefilter.run('Coop of the Day: 456 Jefferson Ave.', 3) - phrasefilter.run('Extra, Extra', 5) - phrasefilter.run('Extra, Extra', 6) - phrasefilter.run('Early Addition', 7) - phrasefilter.run('Early Addition', 8) - phrasefilter.run('Development Watch', 9) - phrasefilter.run('Streetlevel', 10) - + phrasefilter.run("House of the Day: 123 Atlantic Ave. #3", 1) + phrasefilter.run("House of the Day: 456 Plankton St. 
#3", 4) + phrasefilter.run("Coop of the Day: 321 Pacific St.", 2) + phrasefilter.run("Streetlevel: 393 Pacific St.", 11) + phrasefilter.run("Coop of the Day: 456 Jefferson Ave.", 3) + phrasefilter.run("Extra, Extra", 5) + phrasefilter.run("Extra, Extra", 6) + phrasefilter.run("Early Addition", 7) + phrasefilter.run("Early Addition", 8) + phrasefilter.run("Development Watch", 9) + phrasefilter.run("Streetlevel", 10) + phrasefilter.pare_phrases() phrasefilter.print_phrases() - \ No newline at end of file diff --git a/apps/analyzer/tasks.py b/apps/analyzer/tasks.py index c41736d12..5741e15a9 100644 --- a/apps/analyzer/tasks.py +++ b/apps/analyzer/tasks.py @@ -1,12 +1,12 @@ from newsblur_web.celeryapp import app from utils import log as logging + @app.task() def EmailPopularityQuery(pk): from apps.analyzer.models import MPopularityQuery - + query = MPopularityQuery.objects.get(pk=pk) logging.debug(" -> ~BB~FCRunning popularity query: ~SB%s" % query) - + query.send_email() - diff --git a/apps/analyzer/tests.py b/apps/analyzer/tests.py index a69739247..ac1f3a1cf 100644 --- a/apps/analyzer/tests.py +++ b/apps/analyzer/tests.py @@ -2,6 +2,7 @@ from django.test.client import Client from apps.rss_feeds.models import MStory from django.test import TestCase from django.core import management + # from apps.analyzer.classifier import FisherClassifier import nltk from itertools import groupby @@ -11,16 +12,17 @@ from apps.analyzer.phrase_filter import PhraseFilter class QuadgramCollocationFinder(nltk.collocations.AbstractCollocationFinder): - """A tool for the finding and ranking of quadgram collocations or other association measures. + """A tool for the finding and ranking of quadgram collocations or other association measures. It is often useful to use from_words() rather thanconstructing an instance directly. 
""" + def __init__(self, word_fd, quadgram_fd, trigram_fd, bigram_fd, wildcard_fd): """Construct a TrigramCollocationFinder, given FreqDists for appearances of words, bigrams, two words with any word between them,and trigrams.""" nltk.collocations.AbstractCollocationFinder.__init__(self, word_fd, quadgram_fd) self.trigram_fd = trigram_fd self.bigram_fd = bigram_fd self.wildcard_fd = wildcard_fd - + @classmethod def from_words(cls, words): wfd = nltk.probability.FreqDist() @@ -28,20 +30,20 @@ class QuadgramCollocationFinder(nltk.collocations.AbstractCollocationFinder): tfd = nltk.probability.FreqDist() bfd = nltk.probability.FreqDist() wildfd = nltk.probability.FreqDist() - - for w1, w2, w3 ,w4 in nltk.util.ingrams(words, 4, pad_right=True): + + for w1, w2, w3, w4 in nltk.util.ingrams(words, 4, pad_right=True): wfd.inc(w1) if w4 is None: continue else: - qfd.inc((w1,w2,w3,w4)) - bfd.inc((w1,w2)) - tfd.inc((w1,w2,w3)) - wildfd.inc((w1,w3,w4)) - wildfd.inc((w1,w2,w4)) - + qfd.inc((w1, w2, w3, w4)) + bfd.inc((w1, w2)) + tfd.inc((w1, w2, w3)) + wildfd.inc((w1, w3, w4)) + wildfd.inc((w1, w2, w4)) + return cls(wfd, qfd, tfd, bfd, wildfd) - + def score_ngram(self, score_fn, w1, w2, w3, w4): n_all = self.word_fd.N() n_iiii = self.ngram_fd[(w1, w2, w3, w4)] @@ -59,63 +61,78 @@ class QuadgramCollocationFinder(nltk.collocations.AbstractCollocationFinder): n_xixi = self.trigram_fd[(w2, w3)] n_xxii = self.trigram_fd[(w3, w4)] n_xxxi = self.trigram_fd[(w3, w4)] - return score_fn(n_iiii, - (n_iiix, n_iixi, n_ixii, n_xiii), - (n_iixx, n_ixix, n_ixxi, n_ixxx), - (n_xiix, n_xixi, n_xxii, n_xxxi), - n_all) + return score_fn( + n_iiii, + (n_iiix, n_iixi, n_ixii, n_xiii), + (n_iixx, n_ixix, n_ixxi, n_ixxx), + (n_xiix, n_xixi, n_xxii, n_xxxi), + n_all, + ) + - class CollocationTest(TestCase): - - fixtures = ['brownstoner.json'] - + fixtures = ["brownstoner.json"] + def setUp(self): self.client = Client() - + def test_bigrams(self): # bigram_measures = nltk.collocations.BigramAssocMeasures() trigram_measures = nltk.collocations.TrigramAssocMeasures() tokens = [ - 'Co-op', 'of', 'the', 'day', - 'House', 'of', 'the', 'day', - 'Condo', 'of', 'the', 'day', - 'Development', 'Watch', - 'Co-op', 'of', 'the', 'day', + "Co-op", + "of", + "the", + "day", + "House", + "of", + "the", + "day", + "Condo", + "of", + "the", + "day", + "Development", + "Watch", + "Co-op", + "of", + "the", + "day", ] finder = nltk.collocations.TrigramCollocationFinder.from_words(tokens) - + finder.apply_freq_filter(2) - + # return the 10 n-grams with the highest PMI print(finder.nbest(trigram_measures.pmi, 10)) titles = [ - 'Co-op of the day', - 'Condo of the day', - 'Co-op of the day', - 'House of the day', - 'Development Watch', - 'Streetlevel', + "Co-op of the day", + "Condo of the day", + "Co-op of the day", + "House of the day", + "Development Watch", + "Streetlevel", ] - tokens = nltk.tokenize.word(' '.join(titles)) + tokens = nltk.tokenize.word(" ".join(titles)) ngrams = nltk.ngrams(tokens, 4) d = [key for key, group in groupby(sorted(ngrams)) if len(list(group)) >= 2] print(d) + class ClassifierTest(TestCase): - - fixtures = ['classifiers.json', 'brownstoner.json'] - + fixtures = ["classifiers.json", "brownstoner.json"] + def setUp(self): self.client = Client() - # + + # # def test_filter(self): # user = User.objects.all() # feed = Feed.objects.all() - # + # # management.call_command('loaddata', 'brownstoner.json', verbosity=0) # response = self.client.get('/reader/refresh_feed', { "feed_id": 1, "force": True }) # 
management.call_command('loaddata', 'brownstoner2.json', verbosity=0) @@ -124,28 +141,32 @@ class ClassifierTest(TestCase): # response = self.client.get('/reader/refresh_feed', { "feed_id": 4, "force": True }) # management.call_command('loaddata', 'gothamist2.json', verbosity=0) # response = self.client.get('/reader/refresh_feed', { "feed_id": 4, "force": True }) - # + # # stories = Story.objects.filter(story_feed=feed[1]).order_by('-story_date')[:100] - # + # # phrasefilter = PhraseFilter() # for story in stories: # # print story.story_title, story.id # phrasefilter.run(story.story_title, story.id) - # + # # phrasefilter.pare_phrases() # phrasefilter.print_phrases() - # + # def test_train(self): # user = User.objects.all() # feed = Feed.objects.all() - - management.call_command('loaddata', 'brownstoner.json', verbosity=0, commit=False, skip_checks=False) - management.call_command('refresh_feed', force=1, feed=1, single_threaded=True, daemonize=False, skip_checks=False) - management.call_command('loaddata', 'brownstoner2.json', verbosity=0, commit=False, skip_checks=False) - management.call_command('refresh_feed', force=1, feed=1, single_threaded=True, daemonize=False, skip_checks=False) - + + management.call_command("loaddata", "brownstoner.json", verbosity=0, commit=False, skip_checks=False) + management.call_command( + "refresh_feed", force=1, feed=1, single_threaded=True, daemonize=False, skip_checks=False + ) + management.call_command("loaddata", "brownstoner2.json", verbosity=0, commit=False, skip_checks=False) + management.call_command( + "refresh_feed", force=1, feed=1, single_threaded=True, daemonize=False, skip_checks=False + ) + stories = MStory.objects(story_feed_id=1)[:53] - + phrasefilter = PhraseFilter() for story in stories: # print story.story_title, story.id @@ -154,46 +175,45 @@ class ClassifierTest(TestCase): phrasefilter.pare_phrases() phrases = phrasefilter.get_phrases() print(phrases) - + tokenizer = Tokenizer(phrases) - classifier = Bayes(tokenizer) # FisherClassifier(user[0], feed[0], phrases) - - classifier.train('good', 'House of the Day: 393 Pacific St.') - classifier.train('good', 'House of the Day: 393 Pacific St.') - classifier.train('good', 'Condo of the Day: 393 Pacific St.') - classifier.train('good', 'Co-op of the Day: 393 Pacific St. #3') - classifier.train('good', 'Co-op of the Day: 393 Pacific St. #3') - classifier.train('good', 'Development Watch: 393 Pacific St. #3') - classifier.train('bad', 'Development Watch: 393 Pacific St. #3') - classifier.train('bad', 'Development Watch: 393 Pacific St. #3') - classifier.train('bad', 'Development Watch: 393 Pacific St. #3') - classifier.train('bad', 'Streetlevel: 393 Pacific St. 
#3') - - guess = dict(classifier.guess('Co-op of the Day: 413 Atlantic')) - self.assertTrue(guess['good'] > .99) - self.assertTrue('bad' not in guess) - - guess = dict(classifier.guess('House of the Day: 413 Atlantic')) - self.assertTrue(guess['good'] > .99) - self.assertTrue('bad' not in guess) - - guess = dict(classifier.guess('Development Watch: Yatta')) - self.assertTrue(guess['bad'] > .7) - self.assertTrue(guess['good'] < .3) + classifier = Bayes(tokenizer) # FisherClassifier(user[0], feed[0], phrases) - guess = dict(classifier.guess('Development Watch: 393 Pacific St.')) - self.assertTrue(guess['bad'] > .7) - self.assertTrue(guess['good'] < .3) - - guess = dict(classifier.guess('Streetlevel: 123 Carlton St.')) - self.assertTrue(guess['bad'] > .99) - self.assertTrue('good' not in guess) + classifier.train("good", "House of the Day: 393 Pacific St.") + classifier.train("good", "House of the Day: 393 Pacific St.") + classifier.train("good", "Condo of the Day: 393 Pacific St.") + classifier.train("good", "Co-op of the Day: 393 Pacific St. #3") + classifier.train("good", "Co-op of the Day: 393 Pacific St. #3") + classifier.train("good", "Development Watch: 393 Pacific St. #3") + classifier.train("bad", "Development Watch: 393 Pacific St. #3") + classifier.train("bad", "Development Watch: 393 Pacific St. #3") + classifier.train("bad", "Development Watch: 393 Pacific St. #3") + classifier.train("bad", "Streetlevel: 393 Pacific St. #3") - guess = classifier.guess('Extra, Extra') - self.assertTrue('bad' not in guess) - self.assertTrue('good' not in guess) - - guess = classifier.guess('Nothing doing: 393 Pacific St.') - self.assertTrue('bad' not in guess) - self.assertTrue('good' not in guess) - \ No newline at end of file + guess = dict(classifier.guess("Co-op of the Day: 413 Atlantic")) + self.assertTrue(guess["good"] > 0.99) + self.assertTrue("bad" not in guess) + + guess = dict(classifier.guess("House of the Day: 413 Atlantic")) + self.assertTrue(guess["good"] > 0.99) + self.assertTrue("bad" not in guess) + + guess = dict(classifier.guess("Development Watch: Yatta")) + self.assertTrue(guess["bad"] > 0.7) + self.assertTrue(guess["good"] < 0.3) + + guess = dict(classifier.guess("Development Watch: 393 Pacific St.")) + self.assertTrue(guess["bad"] > 0.7) + self.assertTrue(guess["good"] < 0.3) + + guess = dict(classifier.guess("Streetlevel: 123 Carlton St.")) + self.assertTrue(guess["bad"] > 0.99) + self.assertTrue("good" not in guess) + + guess = classifier.guess("Extra, Extra") + self.assertTrue("bad" not in guess) + self.assertTrue("good" not in guess) + + guess = classifier.guess("Nothing doing: 393 Pacific St.") + self.assertTrue("bad" not in guess) + self.assertTrue("good" not in guess) diff --git a/apps/analyzer/tfidf.py b/apps/analyzer/tfidf.py index 08fe0e4f0..1bb015efe 100755 --- a/apps/analyzer/tfidf.py +++ b/apps/analyzer/tfidf.py @@ -9,6 +9,7 @@ See the README for a usage example. import sys import os + class tfidf: def __init__(self): self.weighted = False @@ -19,7 +20,7 @@ class tfidf: # building a dictionary doc_dict = {} for w in list_of_words: - doc_dict[w] = doc_dict.get(w, 0.) 
+ 1.0 + doc_dict[w] = doc_dict.get(w, 0.0) + 1.0 self.corpus_dict[w] = self.corpus_dict.get(w, 0.0) + 1.0 # normalizing the dictionary @@ -53,4 +54,4 @@ class tfidf: score += (query_dict[k] / self.corpus_dict[k]) + (doc_dict[k] / self.corpus_dict[k]) sims.append([doc[0], score]) - return sims \ No newline at end of file + return sims diff --git a/apps/analyzer/tokenizer.py b/apps/analyzer/tokenizer.py index 83885398b..0ca40f7e0 100644 --- a/apps/analyzer/tokenizer.py +++ b/apps/analyzer/tokenizer.py @@ -1,28 +1,30 @@ import re + class Tokenizer: """A simple regex-based whitespace tokenizer. It expects a string and can return all tokens lower-cased or in their existing case. """ - - WORD_RE = re.compile('[^a-zA-Z-]+') + + WORD_RE = re.compile("[^a-zA-Z-]+") def __init__(self, phrases, lower=False): self.phrases = phrases self.lower = lower - + def tokenize(self, doc): print(doc) - formatted_doc = ' '.join(self.WORD_RE.split(doc)) + formatted_doc = " ".join(self.WORD_RE.split(doc)) print(formatted_doc) for phrase in self.phrases: if phrase in formatted_doc: yield phrase - -if __name__ == '__main__': - phrases = ['Extra Extra', 'Streetlevel', 'House of the Day'] + + +if __name__ == "__main__": + phrases = ["Extra Extra", "Streetlevel", "House of the Day"] tokenizer = Tokenizer(phrases) - doc = 'Extra, Extra' - tokenizer.tokenize(doc) \ No newline at end of file + doc = "Extra, Extra" + tokenizer.tokenize(doc) diff --git a/apps/analyzer/urls.py b/apps/analyzer/urls.py index 381275562..ed6580c67 100644 --- a/apps/analyzer/urls.py +++ b/apps/analyzer/urls.py @@ -2,8 +2,8 @@ from django.conf.urls import url from apps.analyzer import views urlpatterns = [ - url(r'^$', views.index), - url(r'^save/?', views.save_classifier), - url(r'^popularity/?', views.popularity_query), - url(r'^(?P\d+)', views.get_classifiers_feed), + url(r"^$", views.index), + url(r"^save/?", views.save_classifier), + url(r"^popularity/?", views.popularity_query), + url(r"^(?P\d+)", views.get_classifiers_feed), ] diff --git a/apps/analyzer/views.py b/apps/analyzer/views.py index d06b72625..8ea282565 100644 --- a/apps/analyzer/views.py +++ b/apps/analyzer/views.py @@ -15,34 +15,38 @@ from utils import json_functions as json from utils.user_functions import get_user from utils.user_functions import ajax_login_required + def index(requst): pass - + + @require_POST @ajax_login_required @json.json_view def save_classifier(request): post = request.POST - feed_id = post['feed_id'] + feed_id = post["feed_id"] feed = None social_user_id = None - if feed_id.startswith('social:'): - social_user_id = int(feed_id.replace('social:', '')) + if feed_id.startswith("social:"): + social_user_id = int(feed_id.replace("social:", "")) feed_id = None else: feed_id = int(feed_id) feed = get_object_or_404(Feed, pk=feed_id) code = 0 - message = 'OK' + message = "OK" payload = {} logging.user(request, "~FGSaving classifier: ~SB%s~SN ~FW%s" % (feed, post)) - + # Mark subscription as dirty, so unread counts can be recalculated usersub = None socialsub = None if social_user_id: - socialsub = MSocialSubscription.objects.get(user_id=request.user.pk, subscription_user_id=social_user_id) + socialsub = MSocialSubscription.objects.get( + user_id=request.user.pk, subscription_user_id=social_user_id + ) if not socialsub.needs_unread_recalc: socialsub.needs_unread_recalc = True socialsub.save() @@ -55,31 +59,31 @@ def save_classifier(request): usersub.needs_unread_recalc = True usersub.is_trained = True usersub.save() - - + def _save_classifier(ClassifierCls, 
content_type): classifiers = { - 'like_'+content_type: 1, - 'dislike_'+content_type: -1, - 'remove_like_'+content_type: 0, - 'remove_dislike_'+content_type: 0, + "like_" + content_type: 1, + "dislike_" + content_type: -1, + "remove_like_" + content_type: 0, + "remove_dislike_" + content_type: 0, } for opinion, score in classifiers.items(): if opinion in post: post_contents = post.getlist(opinion) for post_content in post_contents: - if not post_content: continue + if not post_content: + continue classifier_dict = { - 'user_id': request.user.pk, - 'feed_id': feed_id or 0, - 'social_user_id': social_user_id or 0, + "user_id": request.user.pk, + "feed_id": feed_id or 0, + "social_user_id": social_user_id or 0, } - if content_type in ('author', 'tag', 'title'): + if content_type in ("author", "tag", "title"): max_length = ClassifierCls._fields[content_type].max_length classifier_dict.update({content_type: post_content[:max_length]}) - if content_type == 'feed': - if not post_content.startswith('social:'): - classifier_dict['feed_id'] = post_content + if content_type == "feed": + if not post_content.startswith("social:"): + classifier_dict["feed_id"] = post_content try: classifier = ClassifierCls.objects.get(**classifier_dict) except ClassifierCls.DoesNotExist: @@ -94,59 +98,77 @@ def save_classifier(request): classifier.delete() elif classifier.score != score: if score == 0: - if ((classifier.score == 1 and opinion.startswith('remove_like')) - or (classifier.score == -1 and opinion.startswith('remove_dislike'))): + if (classifier.score == 1 and opinion.startswith("remove_like")) or ( + classifier.score == -1 and opinion.startswith("remove_dislike") + ): classifier.delete() else: classifier.score = score classifier.save() - - _save_classifier(MClassifierAuthor, 'author') - _save_classifier(MClassifierTag, 'tag') - _save_classifier(MClassifierTitle, 'title') - _save_classifier(MClassifierFeed, 'feed') + + _save_classifier(MClassifierAuthor, "author") + _save_classifier(MClassifierTag, "tag") + _save_classifier(MClassifierTitle, "title") + _save_classifier(MClassifierFeed, "feed") r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL) - r.publish(request.user.username, 'feed:%s' % feed_id) + r.publish(request.user.username, "feed:%s" % feed_id) response = dict(code=code, message=message, payload=payload) return response - + + @json.json_view def get_classifiers_feed(request, feed_id): user = get_user(request) code = 0 - + payload = get_classifiers_for_user(user, feed_id=feed_id) - + response = dict(code=code, payload=payload) - + return response + def popularity_query(request): - if request.method == 'POST': + if request.method == "POST": form = PopularityQueryForm(request.POST) if form.is_valid(): - logging.user(request.user, "~BC~FRPopularity query: ~SB%s~SN requests \"~SB~FM%s~SN~FR\"" % (request.POST['email'], request.POST['query'])) - query = MPopularityQuery.objects.create(email=request.POST['email'], - query=request.POST['query']) + logging.user( + request.user, + '~BC~FRPopularity query: ~SB%s~SN requests "~SB~FM%s~SN~FR"' + % (request.POST["email"], request.POST["query"]), + ) + query = MPopularityQuery.objects.create(email=request.POST["email"], query=request.POST["query"]) query.queue_email() - - response = render(request, 'analyzer/popularity_query.xhtml', { - 'success': True, - 'popularity_query_form': form, - }) - response.set_cookie('newsblur_popularity_query', request.POST['query']) - + + response = render( + request, + "analyzer/popularity_query.xhtml", + { + "success": 
True, + "popularity_query_form": form, + }, + ) + response.set_cookie("newsblur_popularity_query", request.POST["query"]) + return response else: - logging.user(request.user, "~BC~FRFailed popularity query: ~SB%s~SN requests \"~SB~FM%s~SN~FR\"" % (request.POST['email'], request.POST['query'])) + logging.user( + request.user, + '~BC~FRFailed popularity query: ~SB%s~SN requests "~SB~FM%s~SN~FR"' + % (request.POST["email"], request.POST["query"]), + ) else: logging.user(request.user, "~BC~FRPopularity query form loading") - form = PopularityQueryForm(initial={'query': request.COOKIES.get('newsblur_popularity_query', "")}) - - response = render(request, 'analyzer/popularity_query.xhtml', { - 'popularity_query_form': form, - }) + form = PopularityQueryForm(initial={"query": request.COOKIES.get("newsblur_popularity_query", "")}) + + response = render( + request, + "analyzer/popularity_query.xhtml", + { + "popularity_query_form": form, + }, + ) return response diff --git a/apps/api/tests.py b/apps/api/tests.py index c7c4668e1..f51d798ff 100644 --- a/apps/api/tests.py +++ b/apps/api/tests.py @@ -7,6 +7,7 @@ Replace these with more appropriate tests for your application. from django.test import TestCase + class SimpleTest(TestCase): def test_basic_addition(self): """ @@ -14,10 +15,12 @@ class SimpleTest(TestCase): """ self.assertEqual(1 + 1, 2) -__test__ = {"doctest": """ + +__test__ = { + "doctest": """ Another way to test that 1 + 1 is equal to 2. >>> 1 + 1 == 2 True -"""} - +""" +} diff --git a/apps/api/urls.py b/apps/api/urls.py index bd4b6d43f..3876b24e2 100644 --- a/apps/api/urls.py +++ b/apps/api/urls.py @@ -2,18 +2,18 @@ from django.conf.urls import url from apps.api import views urlpatterns = [ - url(r'^logout', views.logout, name='api-logout'), - url(r'^login', views.login, name='api-login'), - url(r'^signup', views.signup, name='api-signup'), - url(r'^add_site_load_script/(?P\w+)', views.add_site_load_script, name='api-add-site-load-script'), - url(r'^add_site/(?P\w+)', views.add_site, name='api-add-site'), - url(r'^add_url/(?P\w+)', views.add_site, name='api-add-site'), - url(r'^add_site/?$', views.add_site_authed, name='api-add-site-authed'), - url(r'^add_url/?$', views.add_site_authed, name='api-add-site-authed'), - url(r'^check_share_on_site/(?P\w+)', views.check_share_on_site, name='api-check-share-on-site'), - url(r'^share_story/(?P\w+)', views.share_story, name='api-share-story'), - url(r'^save_story/(?P\w+)', views.save_story, name='api-save-story'), - url(r'^share_story/?$', views.share_story), - url(r'^save_story/?$', views.save_story), - url(r'^ip_addresses/?$', views.ip_addresses), + url(r"^logout", views.logout, name="api-logout"), + url(r"^login", views.login, name="api-login"), + url(r"^signup", views.signup, name="api-signup"), + url(r"^add_site_load_script/(?P\w+)", views.add_site_load_script, name="api-add-site-load-script"), + url(r"^add_site/(?P\w+)", views.add_site, name="api-add-site"), + url(r"^add_url/(?P\w+)", views.add_site, name="api-add-site"), + url(r"^add_site/?$", views.add_site_authed, name="api-add-site-authed"), + url(r"^add_url/?$", views.add_site_authed, name="api-add-site-authed"), + url(r"^check_share_on_site/(?P\w+)", views.check_share_on_site, name="api-check-share-on-site"), + url(r"^share_story/(?P\w+)", views.share_story, name="api-share-story"), + url(r"^save_story/(?P\w+)", views.save_story, name="api-save-story"), + url(r"^share_story/?$", views.share_story), + url(r"^save_story/?$", views.save_story), + url(r"^ip_addresses/?$", 
views.ip_addresses), ] diff --git a/apps/api/views.py b/apps/api/views.py index e09230113..ad89693bc 100644 --- a/apps/api/views.py +++ b/apps/api/views.py @@ -29,10 +29,10 @@ from utils.view_functions import required_params def login(request): code = -1 errors = None - user_agent = request.environ.get('HTTP_USER_AGENT', '') - ip = request.META.get('HTTP_X_FORWARDED_FOR', None) or request.META['REMOTE_ADDR'] + user_agent = request.environ.get("HTTP_USER_AGENT", "") + ip = request.META.get("HTTP_X_FORWARDED_FOR", None) or request.META["REMOTE_ADDR"] - if not user_agent or user_agent.lower() in ['nativehost']: + if not user_agent or user_agent.lower() in ["nativehost"]: errors = dict(user_agent="You must set a user agent to login.") logging.user(request, "~FG~BB~SK~FRBlocked ~FGAPI Login~SN~FW: %s / %s" % (user_agent, ip)) elif request.method == "POST": @@ -40,19 +40,20 @@ def login(request): if form.errors: errors = form.errors if form.is_valid(): - login_user(request, form.get_user(), backend='django.contrib.auth.backends.ModelBackend') + login_user(request, form.get_user(), backend="django.contrib.auth.backends.ModelBackend") logging.user(request, "~FG~BB~SKAPI Login~SN~FW: %s / %s" % (user_agent, ip)) code = 1 else: errors = dict(method="Invalid method. Use POST. You used %s" % request.method) - + return dict(code=code, errors=errors) - + + @json.json_view def signup(request): code = -1 errors = None - ip = request.META.get('HTTP_X_FORWARDED_FOR', None) or request.META['REMOTE_ADDR'] + ip = request.META.get("HTTP_X_FORWARDED_FOR", None) or request.META["REMOTE_ADDR"] if request.method == "POST": form = SignupForm(data=request.POST) @@ -61,48 +62,47 @@ def signup(request): if form.is_valid(): try: new_user = form.save() - login_user(request, new_user, backend='django.contrib.auth.backends.ModelBackend') + login_user(request, new_user, backend="django.contrib.auth.backends.ModelBackend") logging.user(request, "~FG~SB~BBAPI NEW SIGNUP: ~FW%s / %s" % (new_user.email, ip)) code = 1 except forms.ValidationError as e: errors = [e.args[0]] else: errors = dict(method="Invalid method. Use POST. 
You used %s" % request.method) - return dict(code=code, errors=errors) - + + @json.json_view def logout(request): code = 1 logging.user(request, "~FG~BBAPI Logout~FW") logout_user(request) - + return dict(code=code) + def add_site_load_script(request, token): code = 0 usf = None profile = None user_profile = None starred_counts = {} - - def image_base64(image_name, path='icons/circular/'): - image_file = open(os.path.join(settings.MEDIA_ROOT, 'img/%s%s' % (path, image_name)), 'rb') - return base64.b64encode(image_file.read()).decode('utf-8') - - accept_image = image_base64('newuser_icn_setup.png') - error_image = image_base64('newuser_icn_sharewith_active.png') - new_folder_image = image_base64('g_icn_arrow_right.png') - add_image = image_base64('g_icn_expand_hover.png') + + def image_base64(image_name, path="icons/circular/"): + image_file = open(os.path.join(settings.MEDIA_ROOT, "img/%s%s" % (path, image_name)), "rb") + return base64.b64encode(image_file.read()).decode("utf-8") + + accept_image = image_base64("newuser_icn_setup.png") + error_image = image_base64("newuser_icn_sharewith_active.png") + new_folder_image = image_base64("g_icn_arrow_right.png") + add_image = image_base64("g_icn_expand_hover.png") try: profiles = Profile.objects.filter(secret_token=token) if profiles: profile = profiles[0] - usf = UserSubscriptionFolders.objects.get( - user=profile.user - ) + usf = UserSubscriptionFolders.objects.get(user=profile.user) user_profile = MSocialProfile.get_user(user_id=profile.user.pk) starred_counts = MStarredStoryCounts.user_counts(profile.user.pk) else: @@ -111,29 +111,34 @@ def add_site_load_script(request, token): code = -1 except UserSubscriptionFolders.DoesNotExist: code = -1 - - return render(request, 'api/share_bookmarklet.js', { - 'code': code, - 'token': token, - 'folders': (usf and usf.folders) or [], - 'user': profile and profile.user or {}, - 'user_profile': user_profile and json.encode(user_profile.canonical()) or {}, - 'starred_counts': json.encode(starred_counts), - 'accept_image': accept_image, - 'error_image': error_image, - 'add_image': add_image, - 'new_folder_image': new_folder_image, - }, - content_type='application/javascript') + + return render( + request, + "api/share_bookmarklet.js", + { + "code": code, + "token": token, + "folders": (usf and usf.folders) or [], + "user": profile and profile.user or {}, + "user_profile": user_profile and json.encode(user_profile.canonical()) or {}, + "starred_counts": json.encode(starred_counts), + "accept_image": accept_image, + "error_image": error_image, + "add_image": add_image, + "new_folder_image": new_folder_image, + }, + content_type="application/javascript", + ) + def add_site(request, token): - code = 0 - get_post = getattr(request, request.method) - url = get_post.get('url') - folder = get_post.get('folder') - new_folder = get_post.get('new_folder') - callback = get_post.get('callback', '') - + code = 0 + get_post = getattr(request, request.method) + url = get_post.get("url") + folder = get_post.get("folder") + new_folder = get_post.get("new_folder") + callback = get_post.get("callback", "") + if not url: code = -1 else: @@ -144,35 +149,40 @@ def add_site(request, token): usf.add_folder(folder, new_folder) folder = new_folder code, message, us = UserSubscription.add_subscription( - user=profile.user, - feed_address=url, - folder=folder, - bookmarklet=True + user=profile.user, feed_address=url, folder=folder, bookmarklet=True ) except Profile.DoesNotExist: code = -1 - + if code > 0: - message = 'OK' - - 
logging.user(profile.user, "~FRAdding URL from site: ~SB%s (in %s)" % (url, folder), - request=request) - - return HttpResponse(callback + '(' + json.encode({ - 'code': code, - 'message': message, - 'usersub': us and us.feed_id, - }) + ')', content_type='text/plain') + message = "OK" + + logging.user(profile.user, "~FRAdding URL from site: ~SB%s (in %s)" % (url, folder), request=request) + + return HttpResponse( + callback + + "(" + + json.encode( + { + "code": code, + "message": message, + "usersub": us and us.feed_id, + } + ) + + ")", + content_type="text/plain", + ) + @ajax_login_required def add_site_authed(request): - code = 0 - url = request.GET['url'] - folder = request.GET['folder'] - new_folder = request.GET.get('new_folder') - callback = request.GET['callback'] - user = get_user(request) - + code = 0 + url = request.GET["url"] + folder = request.GET["folder"] + new_folder = request.GET.get("new_folder") + callback = request.GET["callback"] + user = get_user(request) + if not url: code = -1 else: @@ -181,40 +191,45 @@ def add_site_authed(request): usf.add_folder(folder, new_folder) folder = new_folder code, message, us = UserSubscription.add_subscription( - user=user, - feed_address=url, - folder=folder, - bookmarklet=True + user=user, feed_address=url, folder=folder, bookmarklet=True ) - + if code > 0: - message = 'OK' - - logging.user(user, "~FRAdding authed URL from site: ~SB%s (in %s)" % (url, folder), - request=request) - - return HttpResponse(callback + '(' + json.encode({ - 'code': code, - 'message': message, - 'usersub': us and us.feed_id, - }) + ')', content_type='text/plain') + message = "OK" + + logging.user(user, "~FRAdding authed URL from site: ~SB%s (in %s)" % (url, folder), request=request) + + return HttpResponse( + callback + + "(" + + json.encode( + { + "code": code, + "message": message, + "usersub": us and us.feed_id, + } + ) + + ")", + content_type="text/plain", + ) + def check_share_on_site(request, token): - code = 0 - story_url = request.GET['story_url'] - rss_url = request.GET.get('rss_url') - callback = request.GET['callback'] + code = 0 + story_url = request.GET["story_url"] + rss_url = request.GET.get("rss_url") + callback = request.GET["callback"] other_stories = None same_stories = None - usersub = None - message = None - user = None + usersub = None + message = None + user = None users = {} your_story = None same_stories = None other_stories = None previous_stories = None - + if not story_url: code = -1 else: @@ -223,7 +238,7 @@ def check_share_on_site(request, token): user = user_profile.user except Profile.DoesNotExist: code = -1 - + logging.user(request.user, "~FBFinding feed (check_share_on_site): %s" % rss_url) feed = Feed.get_feed_from_url(rss_url, create=False, fetch=False) if not feed: @@ -239,9 +254,9 @@ def check_share_on_site(request, token): logging.user(request.user, "~FBFinding feed (check_share_on_site): %s" % base_url) feed = Feed.get_feed_from_url(base_url, create=False, fetch=False) if not feed: - logging.user(request.user, "~FBFinding feed (check_share_on_site): %s" % (base_url + '/')) - feed = Feed.get_feed_from_url(base_url+'/', create=False, fetch=False) - + logging.user(request.user, "~FBFinding feed (check_share_on_site): %s" % (base_url + "/")) + feed = Feed.get_feed_from_url(base_url + "/", create=False, fetch=False) + if feed and user: try: usersub = UserSubscription.objects.filter(user=user, feed=feed) @@ -249,23 +264,27 @@ def check_share_on_site(request, token): usersub = None if user: feed_id = feed and feed.pk - 
your_story, same_stories, other_stories = MSharedStory.get_shared_stories_from_site(feed_id, - user_id=user.pk, story_url=story_url) - previous_stories = MSharedStory.objects.filter(user_id=user.pk).order_by('-shared_date').limit(3) - previous_stories = [{ - "user_id": story.user_id, - "story_title": story.story_title, - "comments": story.comments, - "shared_date": story.shared_date, - "relative_date": relative_timesince(story.shared_date), - "blurblog_permalink": story.blurblog_permalink(), - } for story in previous_stories] - + your_story, same_stories, other_stories = MSharedStory.get_shared_stories_from_site( + feed_id, user_id=user.pk, story_url=story_url + ) + previous_stories = MSharedStory.objects.filter(user_id=user.pk).order_by("-shared_date").limit(3) + previous_stories = [ + { + "user_id": story.user_id, + "story_title": story.story_title, + "comments": story.comments, + "shared_date": story.shared_date, + "relative_date": relative_timesince(story.shared_date), + "blurblog_permalink": story.blurblog_permalink(), + } + for story in previous_stories + ] + user_ids = set([user_profile.user.pk]) for story in same_stories: - user_ids.add(story['user_id']) + user_ids.add(story["user_id"]) for story in other_stories: - user_ids.add(story['user_id']) + user_ids.add(story["user_id"]) profiles = MSocialProfile.profiles(user_ids) for profile in profiles: @@ -273,39 +292,47 @@ def check_share_on_site(request, token): "username": profile.username, "photo_url": profile.photo_url, } - - logging.user(user, "~BM~FCChecking share from site: ~SB%s" % (story_url), - request=request) - - response = HttpResponse(callback + '(' + json.encode({ - 'code' : code, - 'message' : message, - 'feed' : feed, - 'subscribed' : bool(usersub), - 'your_story' : your_story, - 'same_stories' : same_stories, - 'other_stories' : other_stories, - 'previous_stories' : previous_stories, - 'users' : users, - }) + ')', content_type='text/plain') - response['Access-Control-Allow-Origin'] = '*' - response['Access-Control-Allow-Methods'] = 'GET' - + + logging.user(user, "~BM~FCChecking share from site: ~SB%s" % (story_url), request=request) + + response = HttpResponse( + callback + + "(" + + json.encode( + { + "code": code, + "message": message, + "feed": feed, + "subscribed": bool(usersub), + "your_story": your_story, + "same_stories": same_stories, + "other_stories": other_stories, + "previous_stories": previous_stories, + "users": users, + } + ) + + ")", + content_type="text/plain", + ) + response["Access-Control-Allow-Origin"] = "*" + response["Access-Control-Allow-Methods"] = "GET" + return response -@required_params('story_url') + +@required_params("story_url") def share_story(request, token=None): - code = 0 - story_url = request.POST['story_url'] - comments = request.POST.get('comments', "") - title = request.POST.get('title', None) - content = request.POST.get('content', None) - rss_url = request.POST.get('rss_url', None) - feed_id = request.POST.get('feed_id', None) or 0 - feed = None - message = None - profile = None - + code = 0 + story_url = request.POST["story_url"] + comments = request.POST.get("comments", "") + title = request.POST.get("title", None) + content = request.POST.get("content", None) + rss_url = request.POST.get("rss_url", None) + feed_id = request.POST.get("feed_id", None) or 0 + feed = None + message = None + profile = None + if request.user.is_authenticated: profile = request.user.profile else: @@ -317,14 +344,19 @@ def share_story(request, token=None): message = "Not authenticated, couldn't 
find user by token." else: message = "Not authenticated, no token supplied and not authenticated." - + if not profile: - return HttpResponse(json.encode({ - 'code': code, - 'message': message, - 'story': None, - }), content_type='text/plain') - + return HttpResponse( + json.encode( + { + "code": code, + "message": message, + "story": None, + } + ), + content_type="text/plain", + ) + if feed_id: feed = Feed.get_by_id(feed_id) else: @@ -336,7 +368,7 @@ def share_story(request, token=None): feed = Feed.get_feed_from_url(story_url, create=True, fetch=True) if feed: feed_id = feed.pk - + if content: content = lxml.html.fromstring(content) content.make_links_absolute(story_url) @@ -346,13 +378,15 @@ def share_story(request, token=None): importer = TextImporter(story=None, story_url=story_url, request=request, debug=settings.DEBUG) document = importer.fetch(skip_save=True, return_document=True) if not content: - content = document['content'] + content = document["content"] if not title: - title = document['title'] - - shared_story = MSharedStory.objects.filter(user_id=profile.user.pk, - story_feed_id=feed_id, - story_guid=story_url).limit(1).first() + title = document["title"] + + shared_story = ( + MSharedStory.objects.filter(user_id=profile.user.pk, story_feed_id=feed_id, story_guid=story_url) + .limit(1) + .first() + ) if not shared_story: story_db = { "story_guid": story_url, @@ -361,7 +395,6 @@ def share_story(request, token=None): "story_feed_id": feed_id, "story_content": content, "story_date": datetime.datetime.now(), - "user_id": profile.user.pk, "comments": comments, "has_comments": bool(comments), @@ -382,49 +415,57 @@ def share_story(request, token=None): shared_story.has_comments = bool(comments) shared_story.story_feed_id = feed_id shared_story.save() - logging.user(profile.user, "~BM~FY~SBUpdating~SN shared story from site: ~SB%s: %s" % (story_url, comments)) + logging.user( + profile.user, "~BM~FY~SBUpdating~SN shared story from site: ~SB%s: %s" % (story_url, comments) + ) message = "Updating shared story from site: %s: %s" % (story_url, comments) try: - socialsub = MSocialSubscription.objects.get(user_id=profile.user.pk, - subscription_user_id=profile.user.pk) + socialsub = MSocialSubscription.objects.get( + user_id=profile.user.pk, subscription_user_id=profile.user.pk + ) except MSocialSubscription.DoesNotExist: socialsub = None - + if socialsub: - socialsub.mark_story_ids_as_read([shared_story.story_hash], - shared_story.story_feed_id, - request=request) + socialsub.mark_story_ids_as_read( + [shared_story.story_hash], shared_story.story_feed_id, request=request + ) else: RUserStory.mark_read(profile.user.pk, shared_story.story_feed_id, shared_story.story_hash) - shared_story.publish_update_to_subscribers() - - response = HttpResponse(json.encode({ - 'code': code, - 'message': message, - 'story': shared_story, - }), content_type='text/plain') - response['Access-Control-Allow-Origin'] = '*' - response['Access-Control-Allow-Methods'] = 'POST' - + + response = HttpResponse( + json.encode( + { + "code": code, + "message": message, + "story": shared_story, + } + ), + content_type="text/plain", + ) + response["Access-Control-Allow-Origin"] = "*" + response["Access-Control-Allow-Methods"] = "POST" + return response -@required_params('story_url', 'title') + +@required_params("story_url", "title") def save_story(request, token=None): - code = 0 - story_url = request.POST['story_url'] - user_tags = request.POST.getlist('user_tags') or request.POST.getlist('user_tags[]') or [] - 
add_user_tag = request.POST.get('add_user_tag', None) - title = request.POST['title'] - content = request.POST.get('content', None) - rss_url = request.POST.get('rss_url', None) - user_notes = request.POST.get('user_notes', None) - feed_id = request.POST.get('feed_id', None) or 0 - feed = None - message = None - profile = None - + code = 0 + story_url = request.POST["story_url"] + user_tags = request.POST.getlist("user_tags") or request.POST.getlist("user_tags[]") or [] + add_user_tag = request.POST.get("add_user_tag", None) + title = request.POST["title"] + content = request.POST.get("content", None) + rss_url = request.POST.get("rss_url", None) + user_notes = request.POST.get("user_notes", None) + feed_id = request.POST.get("feed_id", None) or 0 + feed = None + message = None + profile = None + if request.user.is_authenticated: profile = request.user.profile else: @@ -436,14 +477,19 @@ def save_story(request, token=None): message = "Not authenticated, couldn't find user by token." else: message = "Not authenticated, no token supplied and not authenticated." - + if not profile: - return HttpResponse(json.encode({ - 'code': code, - 'message': message, - 'story': None, - }), content_type='text/plain') - + return HttpResponse( + json.encode( + { + "code": code, + "message": message, + "story": None, + } + ), + content_type="text/plain", + ) + if feed_id: feed = Feed.get_by_id(feed_id) else: @@ -455,7 +501,7 @@ def save_story(request, token=None): feed = Feed.get_feed_from_url(story_url, create=True, fetch=True) if feed: feed_id = feed.pk - + if content: content = lxml.html.fromstring(content) content.make_links_absolute(story_url) @@ -463,16 +509,18 @@ def save_story(request, token=None): else: importer = TextImporter(story=None, story_url=story_url, request=request, debug=settings.DEBUG) document = importer.fetch(skip_save=True, return_document=True) - content = document['content'] + content = document["content"] if not title: - title = document['title'] - + title = document["title"] + if add_user_tag: - user_tags = user_tags + [tag for tag in add_user_tag.split(',')] - - starred_story = MStarredStory.objects.filter(user_id=profile.user.pk, - story_feed_id=feed_id, - story_guid=story_url).limit(1).first() + user_tags = user_tags + [tag for tag in add_user_tag.split(",")] + + starred_story = ( + MStarredStory.objects.filter(user_id=profile.user.pk, story_feed_id=feed_id, story_guid=story_url) + .limit(1) + .first() + ) if not starred_story: story_db = { "story_guid": story_url, @@ -498,26 +546,34 @@ def save_story(request, token=None): starred_story.story_feed_id = feed_id starred_story.user_notes = user_notes starred_story.save() - logging.user(profile.user, "~BM~FC~SBUpdating~SN starred story from site: ~SB%s: %s" % (story_url, user_tags)) + logging.user( + profile.user, "~BM~FC~SBUpdating~SN starred story from site: ~SB%s: %s" % (story_url, user_tags) + ) message = "Updating saved story from site: %s: %s" % (story_url, user_tags) MStarredStoryCounts.schedule_count_tags_for_user(request.user.pk) - - response = HttpResponse(json.encode({ - 'code': code, - 'message': message, - 'story': starred_story, - }), content_type='text/plain') - response['Access-Control-Allow-Origin'] = '*' - response['Access-Control-Allow-Methods'] = 'POST' - + + response = HttpResponse( + json.encode( + { + "code": code, + "message": message, + "story": starred_story, + } + ), + content_type="text/plain", + ) + response["Access-Control-Allow-Origin"] = "*" + response["Access-Control-Allow-Methods"] = "POST" + return 
response + def ip_addresses(request): # Read local file /srv/newsblur/apps/api/ip_addresses.txt and return that - with open('/srv/newsblur/apps/api/ip_addresses.txt', 'r') as f: + with open("/srv/newsblur/apps/api/ip_addresses.txt", "r") as f: addresses = f.read() mail_admins(f"IP Addresses accessed from {request.META['REMOTE_ADDR']} by {request.user}", addresses) - return HttpResponse(addresses, content_type='text/plain') + return HttpResponse(addresses, content_type="text/plain") diff --git a/apps/categories/models.py b/apps/categories/models.py index 7afa63151..7254c80b3 100644 --- a/apps/categories/models.py +++ b/apps/categories/models.py @@ -6,20 +6,21 @@ from utils import json_functions as json from utils.feed_functions import add_object_to_folder from utils import log as logging + class MCategory(mongo.Document): title = mongo.StringField() description = mongo.StringField() feed_ids = mongo.ListField(mongo.IntField()) - + meta = { - 'collection': 'category', - 'indexes': ['title'], - 'allow_inheritance': False, + "collection": "category", + "indexes": ["title"], + "allow_inheritance": False, } - + def __str__(self): return "%s: %s sites" % (self.title, len(self.feed_ids)) - + @classmethod def audit(cls): categories = cls.objects.all() @@ -39,28 +40,28 @@ class MCategory(mongo.Document): @classmethod def add(cls, title, description): return cls.objects.create(title=title, description=description) - + @classmethod def serialize(cls, category=None): categories = cls.objects.all() if category: categories = categories.filter(title=category) - + data = dict(categories=[], feeds={}) feed_ids = set() for category in categories: category_output = { - 'title': category.title, - 'description': category.description, - 'feed_ids': category.feed_ids, + "title": category.title, + "description": category.description, + "feed_ids": category.feed_ids, } - data['categories'].append(category_output) + data["categories"].append(category_output) feed_ids.update(list(category.feed_ids)) - + feeds = Feed.objects.filter(pk__in=feed_ids) for feed in feeds: - data['feeds'][feed.pk] = feed.canonical() - + data["feeds"][feed.pk] = feed.canonical() + return data @classmethod @@ -68,8 +69,10 @@ class MCategory(mongo.Document): category_sites = MCategorySite.objects.all() if category_title: category_sites = category_sites.filter(category_title=category_title) - - category_groups = groupby(sorted(category_sites, key=lambda c: c.category_title), key=lambda c: c.category_title) + + category_groups = groupby( + sorted(category_sites, key=lambda c: c.category_title), key=lambda c: c.category_title + ) for category_title, sites in category_groups: try: category = cls.objects.get(title=category_title) @@ -79,27 +82,26 @@ class MCategory(mongo.Document): category.feed_ids = [site.feed_id for site in sites] category.save() print(" ---> Reloaded category: %s" % category) - + @classmethod def subscribe(cls, user_id, category_title): category = cls.objects.get(title=category_title) for feed_id in category.feed_ids: us, _ = UserSubscription.objects.get_or_create( - feed_id=feed_id, + feed_id=feed_id, user_id=user_id, defaults={ - 'needs_unread_recalc': True, - 'active': True, - } + "needs_unread_recalc": True, + "active": True, + }, ) - + usf, created = UserSubscriptionFolders.objects.get_or_create( - user_id=user_id, - defaults={'folders': '[]'} + user_id=user_id, defaults={"folders": "[]"} ) - - usf.add_folder('', category.title) + + usf.add_folder("", category.title) folders = json.decode(usf.folders) for feed_id in 
category.feed_ids: feed = Feed.get_by_id(feed_id) @@ -108,27 +110,26 @@ class MCategory(mongo.Document): folders = add_object_to_folder(feed.pk, category.title, folders) usf.folders = json.encode(folders) usf.save() - - + + class MCategorySite(mongo.Document): feed_id = mongo.IntField() category_title = mongo.StringField() - + meta = { - 'collection': 'category_site', - 'indexes': ['feed_id', 'category_title'], - 'allow_inheritance': False, + "collection": "category_site", + "indexes": ["feed_id", "category_title"], + "allow_inheritance": False, } - + def __str__(self): feed = Feed.get_by_id(self.feed_id) return "%s: %s" % (self.category_title, feed) - + @classmethod def add(cls, category_title, feed_id): - category_site, created = cls.objects.get_or_create(category_title=category_title, - feed_id=feed_id) - + category_site, created = cls.objects.get_or_create(category_title=category_title, feed_id=feed_id) + if not created: print(" ---> Site is already in category: %s" % category_site) else: diff --git a/apps/categories/urls.py b/apps/categories/urls.py index dda7b05ec..18ac85b27 100644 --- a/apps/categories/urls.py +++ b/apps/categories/urls.py @@ -2,6 +2,6 @@ from django.conf.urls import url from apps.categories import views urlpatterns = [ - url(r'^$', views.all_categories, name='all-categories'), - url(r'^subscribe/?$', views.subscribe, name='categories-subscribe'), + url(r"^$", views.all_categories, name="all-categories"), + url(r"^subscribe/?$", views.subscribe, name="categories-subscribe"), ] diff --git a/apps/categories/views.py b/apps/categories/views.py index 3616c6214..b3d51b320 100644 --- a/apps/categories/views.py +++ b/apps/categories/views.py @@ -3,35 +3,42 @@ from apps.reader.models import UserSubscriptionFolders from utils import json_functions as json from utils.user_functions import ajax_login_required + @json.json_view def all_categories(request): categories = MCategory.serialize() - + return categories - + + @ajax_login_required @json.json_view def subscribe(request): user = request.user categories = MCategory.serialize() - category_titles = [c['title'] for c in categories['categories']] - subscribe_category_titles = request.POST.getlist('category') or request.POST.getlist('category[]') - + category_titles = [c["title"] for c in categories["categories"]] + subscribe_category_titles = request.POST.getlist("category") or request.POST.getlist("category[]") + invalid_category_title = False for category_title in subscribe_category_titles: if category_title not in category_titles: invalid_category_title = True - + if not subscribe_category_titles or invalid_category_title: - message = "Choose one or more of these categories: %s" % ', '.join(category_titles) + message = "Choose one or more of these categories: %s" % ", ".join(category_titles) return dict(code=-1, message=message) - + for category_title in subscribe_category_titles: MCategory.subscribe(user.pk, category_title) - + usf = UserSubscriptionFolders.objects.get(user=user.pk) - - return dict(code=1, message="Subscribed to %s %s" % ( - len(subscribe_category_titles), - 'category' if len(subscribe_category_titles) == 1 else 'categories', - ), folders=json.decode(usf.folders)) \ No newline at end of file + + return dict( + code=1, + message="Subscribed to %s %s" + % ( + len(subscribe_category_titles), + "category" if len(subscribe_category_titles) == 1 else "categories", + ), + folders=json.decode(usf.folders), + ) diff --git a/apps/feed_import/migrations/0001_initial.py b/apps/feed_import/migrations/0001_initial.py 
index 900e91b21..107f7a85d 100644 --- a/apps/feed_import/migrations/0001_initial.py +++ b/apps/feed_import/migrations/0001_initial.py @@ -7,7 +7,6 @@ import django.db.models.deletion class Migration(migrations.Migration): - initial = True dependencies = [ @@ -16,19 +15,30 @@ class Migration(migrations.Migration): operations = [ migrations.CreateModel( - name='OAuthToken', + name="OAuthToken", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('session_id', models.CharField(blank=True, max_length=50, null=True)), - ('uuid', models.CharField(blank=True, max_length=50, null=True)), - ('remote_ip', models.CharField(blank=True, max_length=50, null=True)), - ('request_token', models.CharField(max_length=50)), - ('request_token_secret', models.CharField(max_length=50)), - ('access_token', models.CharField(max_length=50)), - ('access_token_secret', models.CharField(max_length=50)), - ('credential', models.TextField(blank=True, null=True)), - ('created_date', models.DateTimeField(default=datetime.datetime.now)), - ('user', models.OneToOneField(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)), + ( + "id", + models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID"), + ), + ("session_id", models.CharField(blank=True, max_length=50, null=True)), + ("uuid", models.CharField(blank=True, max_length=50, null=True)), + ("remote_ip", models.CharField(blank=True, max_length=50, null=True)), + ("request_token", models.CharField(max_length=50)), + ("request_token_secret", models.CharField(max_length=50)), + ("access_token", models.CharField(max_length=50)), + ("access_token_secret", models.CharField(max_length=50)), + ("credential", models.TextField(blank=True, null=True)), + ("created_date", models.DateTimeField(default=datetime.datetime.now)), + ( + "user", + models.OneToOneField( + blank=True, + null=True, + on_delete=django.db.models.deletion.CASCADE, + to=settings.AUTH_USER_MODEL, + ), + ), ], ), ] diff --git a/apps/feed_import/models.py b/apps/feed_import/models.py index 6371a1ab8..ad1e15fca 100644 --- a/apps/feed_import/models.py +++ b/apps/feed_import/models.py @@ -28,75 +28,73 @@ class OAuthToken(models.Model): access_token_secret = models.CharField(max_length=50) credential = models.TextField(null=True, blank=True) created_date = models.DateTimeField(default=datetime.datetime.now) - + class Importer: - def clear_feeds(self): UserSubscription.objects.filter(user=self.user).delete() def clear_folders(self): UserSubscriptionFolders.objects.filter(user=self.user).delete() - + def get_folders(self): - self.usf, _ = UserSubscriptionFolders.objects.get_or_create(user=self.user, - defaults={'folders': '[]'}) + self.usf, _ = UserSubscriptionFolders.objects.get_or_create( + user=self.user, defaults={"folders": "[]"} + ) return json.decode(self.usf.folders) - + class OPMLExporter(Importer): - def __init__(self, user): self.user = user self.fetch_feeds() - + def process(self, verbose=False): now = str(datetime.datetime.now()) - root = Element('opml') - root.set('version', '1.1') - root.append(Comment('Generated by NewsBlur - newsblur.com')) + root = Element("opml") + root.set("version", "1.1") + root.append(Comment("Generated by NewsBlur - newsblur.com")) - head = SubElement(root, 'head') - title = SubElement(head, 'title') - title.text = 'NewsBlur Feeds' - dc = SubElement(head, 'dateCreated') - dc.text = now - dm = SubElement(head, 'dateModified') - dm.text = now - 
folders = self.get_folders() - body = SubElement(root, 'body') + head = SubElement(root, "head") + title = SubElement(head, "title") + title.text = "NewsBlur Feeds" + dc = SubElement(head, "dateCreated") + dc.text = now + dm = SubElement(head, "dateModified") + dm.text = now + folders = self.get_folders() + body = SubElement(root, "body") self.process_outline(body, folders, verbose=verbose) - return tostring(root, encoding='utf8', method='xml') - + return tostring(root, encoding="utf8", method="xml") + def process_outline(self, body, folders, verbose=False): for obj in folders: if isinstance(obj, int) and obj in self.feeds: feed = self.feeds[obj] if verbose: - print(" ---> Adding feed: %s - %s" % (feed['id'], - feed['feed_title'][:30])) + print(" ---> Adding feed: %s - %s" % (feed["id"], feed["feed_title"][:30])) feed_attrs = self.make_feed_row(feed) - body.append(Element('outline', feed_attrs)) + body.append(Element("outline", feed_attrs)) elif isinstance(obj, dict): for folder_title, folder_objs in list(obj.items()): if verbose: print(" ---> Adding folder: %s" % folder_title) - folder_element = Element('outline', {'text': folder_title, 'title': folder_title}) + folder_element = Element("outline", {"text": folder_title, "title": folder_title}) body.append(self.process_outline(folder_element, folder_objs, verbose=verbose)) return body - + def make_feed_row(self, feed): feed_attrs = { - 'text': feed['feed_title'], - 'title': feed['feed_title'], - 'type': 'rss', - 'version': 'RSS', - 'htmlUrl': feed['feed_link'] or "", - 'xmlUrl': feed['feed_address'] or "", + "text": feed["feed_title"], + "title": feed["feed_title"], + "type": "rss", + "version": "RSS", + "htmlUrl": feed["feed_link"] or "", + "xmlUrl": feed["feed_address"] or "", } return feed_attrs - + def fetch_feeds(self): subs = UserSubscription.objects.filter(user=self.user) self.feeds = [] @@ -113,16 +111,15 @@ class OPMLExporter(Importer): class OPMLImporter(Importer): - def __init__(self, opml_xml, user): self.user = user self.opml_xml = opml_xml - + @timelimit(10) def try_processing(self): folders = self.process() return folders - + def process(self): # self.clear_feeds() @@ -136,38 +133,37 @@ class OPMLImporter(Importer): # self.clear_folders() self.usf.folders = json.encode(folders) self.usf.save() - + return folders - - def process_outline(self, outline, folders, in_folder=''): + + def process_outline(self, outline, folders, in_folder=""): for item in outline: - if (not hasattr(item, 'xmlUrl') and - (hasattr(item, 'text') or hasattr(item, 'title'))): + if not hasattr(item, "xmlUrl") and (hasattr(item, "text") or hasattr(item, "title")): folder = item - title = getattr(item, 'text', None) or getattr(item, 'title', None) + title = getattr(item, "text", None) or getattr(item, "title", None) # if hasattr(folder, 'text'): # logging.info(' ---> [%s] ~FRNew Folder: %s' % (self.user, folder.text)) obj = {title: []} folders = add_object_to_folder(obj, in_folder, folders) folders = self.process_outline(folder, folders, title) - elif hasattr(item, 'xmlUrl'): + elif hasattr(item, "xmlUrl"): feed = item - if not hasattr(feed, 'htmlUrl'): - setattr(feed, 'htmlUrl', None) + if not hasattr(feed, "htmlUrl"): + setattr(feed, "htmlUrl", None) # If feed title matches what's in the DB, don't override it on subscription. 
- feed_title = getattr(feed, 'title', None) or getattr(feed, 'text', None) + feed_title = getattr(feed, "title", None) or getattr(feed, "text", None) if not feed_title: - setattr(feed, 'title', feed.htmlUrl or feed.xmlUrl) + setattr(feed, "title", feed.htmlUrl or feed.xmlUrl) user_feed_title = None else: - setattr(feed, 'title', feed_title) + setattr(feed, "title", feed_title) user_feed_title = feed.title feed_address = urlnorm.normalize(feed.xmlUrl) feed_link = urlnorm.normalize(feed.htmlUrl) - if len(feed_address) > Feed._meta.get_field('feed_address').max_length: + if len(feed_address) > Feed._meta.get_field("feed_address").max_length: continue - if feed_link and len(feed_link) > Feed._meta.get_field('feed_link').max_length: + if feed_link and len(feed_link) > Feed._meta.get_field("feed_link").max_length: continue # logging.info(' ---> \t~FR%s - %s - %s' % (feed.title, feed_link, feed_address,)) feed_data = dict(feed_address=feed_address, feed_link=feed_link, feed_title=feed.title) @@ -178,32 +174,31 @@ class OPMLImporter(Importer): if duplicate_feed: feed_db = duplicate_feed[0].feed else: - feed_data['active_subscribers'] = 1 - feed_data['num_subscribers'] = 1 - feed_db, _ = Feed.find_or_create(feed_address=feed_address, - feed_link=feed_link, - defaults=dict(**feed_data)) + feed_data["active_subscribers"] = 1 + feed_data["num_subscribers"] = 1 + feed_db, _ = Feed.find_or_create( + feed_address=feed_address, feed_link=feed_link, defaults=dict(**feed_data) + ) if user_feed_title == feed_db.feed_title: user_feed_title = None - + try: - us = UserSubscription.objects.get( - feed=feed_db, - user=self.user) + us = UserSubscription.objects.get(feed=feed_db, user=self.user) except UserSubscription.DoesNotExist: us = None - + if not us: us = UserSubscription( - feed=feed_db, + feed=feed_db, user=self.user, needs_unread_recalc=True, mark_read_date=datetime.datetime.utcnow() - datetime.timedelta(days=1), active=self.user.profile.is_premium, - user_title=user_feed_title) + user_title=user_feed_title, + ) us.save() - + if self.user.profile.is_premium and not us.active: us.active = True us.save() @@ -214,25 +209,25 @@ class OPMLImporter(Importer): folders = add_object_to_folder(feed_db.pk, in_folder, folders) return folders - + def count_feeds_in_opml(self): opml_count = len(opml.from_string(self.opml_xml)) sub_count = UserSubscription.objects.filter(user=self.user).count() return max(sub_count, opml_count) - + class UploadedOPML(mongo.Document): user_id = mongo.IntField() opml_file = mongo.StringField() upload_date = mongo.DateTimeField(default=datetime.datetime.now) - + def __str__(self): user = User.objects.get(pk=self.user_id) return "%s: %s characters" % (user.username, len(self.opml_file)) - + meta = { - 'collection': 'uploaded_opml', - 'allow_inheritance': False, - 'order': '-upload_date', - 'indexes': ['user_id', '-upload_date'], + "collection": "uploaded_opml", + "allow_inheritance": False, + "order": "-upload_date", + "indexes": ["user_id", "-upload_date"], } diff --git a/apps/feed_import/tasks.py b/apps/feed_import/tasks.py index 8f9d7e67f..7e70f5fd2 100644 --- a/apps/feed_import/tasks.py +++ b/apps/feed_import/tasks.py @@ -12,14 +12,14 @@ def ProcessOPML(user_id): logging.user(user, "~FR~SBOPML upload (task) starting...") opml = UploadedOPML.objects.filter(user_id=user_id).first() - opml_importer = OPMLImporter(opml.opml_file.encode('utf-8'), user) + opml_importer = OPMLImporter(opml.opml_file.encode("utf-8"), user) opml_importer.process() - + feed_count = 
UserSubscription.objects.filter(user=user).count() user.profile.send_upload_opml_finished_email(feed_count) logging.user(user, "~FR~SBOPML upload (task): ~SK%s~SN~SB~FR feeds" % (feed_count)) MActivity.new_opml_import(user_id=user.pk, count=feed_count) - + UserSubscription.queue_new_feeds(user) UserSubscription.refresh_stale_feeds(user, exclude_new=True) diff --git a/apps/feed_import/test_feed_import.py b/apps/feed_import/test_feed_import.py index b739eb157..57e429d9a 100644 --- a/apps/feed_import/test_feed_import.py +++ b/apps/feed_import/test_feed_import.py @@ -8,62 +8,95 @@ from apps.rss_feeds.models import merge_feeds, DuplicateFeed, Feed from utils import json_functions as json_functions import json from django.core.management import call_command + + class Test_Import(TestCase): - fixtures = [ - 'apps/rss_feeds/fixtures/initial_data.json', - 'opml_import.json' - ] - + fixtures = ["apps/rss_feeds/fixtures/initial_data.json", "opml_import.json"] + def setUp(self): self.client = Client() - + def test_opml_import(self): - self.client.login(username='conesus', password='test') - user = User.objects.get(username='conesus') - + self.client.login(username="conesus", password="test") + user = User.objects.get(username="conesus") + # Verify user has no feeds subs = UserSubscription.objects.filter(user=user) self.assertEqual(subs.count(), 0) - - f = open(os.path.join(os.path.dirname(__file__), 'fixtures/opml.xml')) - response = self.client.post(reverse('opml-upload'), {'file': f}) + + f = open(os.path.join(os.path.dirname(__file__), "fixtures/opml.xml")) + response = self.client.post(reverse("opml-upload"), {"file": f}) self.assertEqual(response.status_code, 200) - + # Verify user now has feeds subs = UserSubscription.objects.filter(user=user) self.assertEqual(subs.count(), 54) - + usf = UserSubscriptionFolders.objects.get(user=user) print(json_functions.decode(usf.folders)) - self.assertEqual(json_functions.decode(usf.folders), [{'Tech': [4, 5, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28]}, 1, 2, 3, 6, {'New York': [1, 2, 3, 4, 5, 6, 7, 8, 9]}, {'tech': []}, {'Blogs': [29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, {'The Bloglets': [45, 46, 47, 48, 49]}]}, {'Cooking': [50, 51, 52, 53]}, 54]) - + self.assertEqual( + json_functions.decode(usf.folders), + [ + {"Tech": [4, 5, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28]}, + 1, + 2, + 3, + 6, + {"New York": [1, 2, 3, 4, 5, 6, 7, 8, 9]}, + {"tech": []}, + { + "Blogs": [ + 29, + 30, + 31, + 32, + 33, + 34, + 35, + 36, + 37, + 38, + 39, + 40, + 41, + 42, + 43, + 44, + {"The Bloglets": [45, 46, 47, 48, 49]}, + ] + }, + {"Cooking": [50, 51, 52, 53]}, + 54, + ], + ) + def test_opml_import__empty(self): - self.client.login(username='conesus', password='test') - user = User.objects.get(username='conesus') - + self.client.login(username="conesus", password="test") + user = User.objects.get(username="conesus") + # Verify user has default feeds subs = UserSubscription.objects.filter(user=user) self.assertEqual(subs.count(), 0) - response = self.client.post(reverse('opml-upload')) + response = self.client.post(reverse("opml-upload")) self.assertEqual(response.status_code, 200) - + # Verify user now has feeds subs = UserSubscription.objects.filter(user=user) self.assertEquals(subs.count(), 0) + class Test_Duplicate_Feeds(TestCase): fixtures = [ - 'apps/rss_feeds/fixtures/initial_data.json', + "apps/rss_feeds/fixtures/initial_data.json", ] - def test_duplicate_feeds(self): # had to 
load the feed data this way to hit the save() override. # it wouldn't work with loaddata or fixures - with open('apps/feed_import/fixtures/duplicate_feeds.json') as json_file: + with open("apps/feed_import/fixtures/duplicate_feeds.json") as json_file: feed_data = json.loads(json_file.read()) feed_data_1 = feed_data[0] feed_data_2 = feed_data[1] @@ -72,15 +105,15 @@ class Test_Duplicate_Feeds(TestCase): feed_1.save() feed_2.save() - call_command('loaddata', 'apps/feed_import/fixtures/subscriptions.json') + call_command("loaddata", "apps/feed_import/fixtures/subscriptions.json") - user_1_feed_subscription = UserSubscription.objects.filter(user__id=1)[0].feed_id + user_1_feed_subscription = UserSubscription.objects.filter(user__id=1)[0].feed_id user_2_feed_subscription = UserSubscription.objects.filter(user__id=2)[0].feed_id self.assertNotEqual(user_1_feed_subscription, user_2_feed_subscription) original_feed_id = merge_feeds(user_1_feed_subscription, user_2_feed_subscription) - - user_1_feed_subscription = UserSubscription.objects.filter(user__id=1)[0].feed_id + + user_1_feed_subscription = UserSubscription.objects.filter(user__id=1)[0].feed_id user_2_feed_subscription = UserSubscription.objects.filter(user__id=2)[0].feed_id self.assertEqual(user_1_feed_subscription, user_2_feed_subscription) diff --git a/apps/feed_import/urls.py b/apps/feed_import/urls.py index 1fe34d44a..feb2ace7f 100644 --- a/apps/feed_import/urls.py +++ b/apps/feed_import/urls.py @@ -2,6 +2,6 @@ from django.conf.urls import url from apps.feed_import import views urlpatterns = [ - url(r'^opml_upload/?$', views.opml_upload, name='opml-upload'), - url(r'^opml_export/?$', views.opml_export, name='opml-export'), + url(r"^opml_upload/?$", views.opml_upload, name="opml-upload"), + url(r"^opml_export/?$", views.opml_export, name="opml-export"), ] diff --git a/apps/feed_import/views.py b/apps/feed_import/views.py index 9eaf98df5..b6d28882b 100644 --- a/apps/feed_import/views.py +++ b/apps/feed_import/views.py @@ -7,6 +7,7 @@ from bson.errors import InvalidStringData import uuid from django.contrib.sites.models import Site from django.contrib.auth.models import User + # from django.db import IntegrityError from django.http import HttpResponse, HttpResponseRedirect from django.conf import settings @@ -29,11 +30,11 @@ def opml_upload(request): message = "OK" code = 1 payload = {} - - if request.method == 'POST': - if 'file' in request.FILES: + + if request.method == "POST": + if "file" in request.FILES: logging.user(request, "~FR~SBOPML upload starting...") - file = request.FILES['file'] + file = request.FILES["file"] xml_opml = file.read() try: UploadedOPML.objects.create(user_id=request.user.pk, opml_file=xml_opml) @@ -41,7 +42,7 @@ def opml_upload(request): folders = None code = -1 message = "There was a Unicode decode error when reading your OPML file. Ensure it's a text file with a .opml or .xml extension. Is it a zip file?" - + opml_importer = OPMLImporter(xml_opml, request.user) try: folders = opml_importer.try_processing() @@ -49,7 +50,9 @@ def opml_upload(request): folders = None ProcessOPML.delay(request.user.pk) feed_count = opml_importer.count_feeds_in_opml() - logging.user(request, "~FR~SBOPML upload took too long, found %s feeds. Tasking..." % feed_count) + logging.user( + request, "~FR~SBOPML upload took too long, found %s feeds. Tasking..." 
% feed_count + ) payload = dict(folders=folders, delayed=True, feed_count=feed_count) code = 2 message = "" @@ -64,32 +67,35 @@ def opml_upload(request): payload = dict(folders=folders, feeds=feeds) logging.user(request, "~FR~SBOPML Upload: ~SK%s~SN~SB~FR feeds" % (len(feeds))) from apps.social.models import MActivity + MActivity.new_opml_import(user_id=request.user.pk, count=len(feeds)) UserSubscription.queue_new_feeds(request.user) UserSubscription.refresh_stale_feeds(request.user, exclude_new=True) else: message = "Attach an .opml file." code = -1 - - return HttpResponse(json.encode(dict(message=message, code=code, payload=payload)), - content_type='text/html') + + return HttpResponse( + json.encode(dict(message=message, code=code, payload=payload)), content_type="text/html" + ) + def opml_export(request): - user = get_user(request) - now = datetime.datetime.now() - if request.GET.get('user_id') and user.is_staff: - user = User.objects.get(pk=request.GET['user_id']) + user = get_user(request) + now = datetime.datetime.now() + if request.GET.get("user_id") and user.is_staff: + user = User.objects.get(pk=request.GET["user_id"]) exporter = OPMLExporter(user) - opml = exporter.process() + opml = exporter.process() from apps.social.models import MActivity + MActivity.new_opml_export(user_id=user.pk, count=exporter.feed_count) - response = HttpResponse(opml, content_type='text/xml; charset=utf-8') - response['Content-Disposition'] = 'attachment; filename=NewsBlur-%s-%s.opml' % ( + response = HttpResponse(opml, content_type="text/xml; charset=utf-8") + response["Content-Disposition"] = "attachment; filename=NewsBlur-%s-%s.opml" % ( user.username, - now.strftime('%Y-%m-%d') + now.strftime("%Y-%m-%d"), ) - - return response + return response diff --git a/apps/mobile/tests.py b/apps/mobile/tests.py index 2247054b3..3748f41ba 100644 --- a/apps/mobile/tests.py +++ b/apps/mobile/tests.py @@ -7,6 +7,7 @@ Replace these with more appropriate tests for your application. from django.test import TestCase + class SimpleTest(TestCase): def test_basic_addition(self): """ @@ -14,10 +15,12 @@ class SimpleTest(TestCase): """ self.failUnlessEqual(1 + 1, 2) -__test__ = {"doctest": """ + +__test__ = { + "doctest": """ Another way to test that 1 + 1 is equal to 2. 
>>> 1 + 1 == 2 True -"""} - +""" +} diff --git a/apps/mobile/urls.py b/apps/mobile/urls.py index 01e7b15e6..5d66dda53 100644 --- a/apps/mobile/urls.py +++ b/apps/mobile/urls.py @@ -2,5 +2,5 @@ from django.conf.urls import url from apps.mobile import views urlpatterns = [ - url(r'^$', views.index, name='mobile-index'), + url(r"^$", views.index, name="mobile-index"), ] diff --git a/apps/mobile/views.py b/apps/mobile/views.py index f9e765f52..95333ae27 100644 --- a/apps/mobile/views.py +++ b/apps/mobile/views.py @@ -8,5 +8,6 @@ from apps.reader.models import UserSubscription, UserSubscriptionFolders from utils import json_functions as json from utils import log as logging + def index(request): - return render(request, 'mobile/mobile_workspace.xhtml', {}) + return render(request, "mobile/mobile_workspace.xhtml", {}) diff --git a/apps/monitor/urls.py b/apps/monitor/urls.py index 2ae4c0fdc..4576c9b0a 100644 --- a/apps/monitor/urls.py +++ b/apps/monitor/urls.py @@ -1,24 +1,38 @@ from django.conf.urls import url -from apps.monitor.views import ( AppServers, AppTimes, -Classifiers, DbTimes, Errors, FeedCounts, Feeds, LoadTimes, - Stories, TasksCodes, TasksPipeline, TasksServers, TasksTimes, - Updates, Users, FeedSizes +from apps.monitor.views import ( + AppServers, + AppTimes, + Classifiers, + DbTimes, + Errors, + FeedCounts, + Feeds, + LoadTimes, + Stories, + TasksCodes, + TasksPipeline, + TasksServers, + TasksTimes, + Updates, + Users, + FeedSizes, ) + urlpatterns = [ - url(r'^app-servers?$', AppServers.as_view(), name="app_servers"), - url(r'^app-times?$', AppTimes.as_view(), name="app_times"), - url(r'^classifiers?$', Classifiers.as_view(), name="classifiers"), - url(r'^db-times?$', DbTimes.as_view(), name="db_times"), - url(r'^errors?$', Errors.as_view(), name="errors"), - url(r'^feed-counts?$', FeedCounts.as_view(), name="feed_counts"), - url(r'^feed-sizes?$', FeedSizes.as_view(), name="feed_sizes"), - url(r'^feeds?$', Feeds.as_view(), name="feeds"), - url(r'^load-times?$', LoadTimes.as_view(), name="load_times"), - url(r'^stories?$', Stories.as_view(), name="stories"), - url(r'^task-codes?$', TasksCodes.as_view(), name="task_codes"), - url(r'^task-pipeline?$', TasksPipeline.as_view(), name="task_pipeline"), - url(r'^task-servers?$', TasksServers.as_view(), name="task_servers"), - url(r'^task-times?$', TasksTimes.as_view(), name="task_times"), - url(r'^updates?$', Updates.as_view(), name="updates"), - url(r'^users?$', Users.as_view(), name="users"), + url(r"^app-servers?$", AppServers.as_view(), name="app_servers"), + url(r"^app-times?$", AppTimes.as_view(), name="app_times"), + url(r"^classifiers?$", Classifiers.as_view(), name="classifiers"), + url(r"^db-times?$", DbTimes.as_view(), name="db_times"), + url(r"^errors?$", Errors.as_view(), name="errors"), + url(r"^feed-counts?$", FeedCounts.as_view(), name="feed_counts"), + url(r"^feed-sizes?$", FeedSizes.as_view(), name="feed_sizes"), + url(r"^feeds?$", Feeds.as_view(), name="feeds"), + url(r"^load-times?$", LoadTimes.as_view(), name="load_times"), + url(r"^stories?$", Stories.as_view(), name="stories"), + url(r"^task-codes?$", TasksCodes.as_view(), name="task_codes"), + url(r"^task-pipeline?$", TasksPipeline.as_view(), name="task_pipeline"), + url(r"^task-servers?$", TasksServers.as_view(), name="task_servers"), + url(r"^task-times?$", TasksTimes.as_view(), name="task_times"), + url(r"^updates?$", Updates.as_view(), name="updates"), + url(r"^users?$", Users.as_view(), name="users"), ] diff --git a/apps/monitor/views/newsblur_app_servers.py 
b/apps/monitor/views/newsblur_app_servers.py index aeae5286d..59f3e5984 100755 --- a/apps/monitor/views/newsblur_app_servers.py +++ b/apps/monitor/views/newsblur_app_servers.py @@ -3,11 +3,11 @@ from django.conf import settings from django.views import View from django.shortcuts import render -class AppServers(View): +class AppServers(View): def get(self, request): - data = dict((("%s" % s['_id'].replace('-', ''), s['feeds']) for s in self.stats)) - #total = self.total: + data = dict((("%s" % s["_id"].replace("-", ""), s["feeds"]) for s in self.stats)) + # total = self.total: # if total: # data['total'] = total[0]['feeds'] chart_name = "app_servers" @@ -21,38 +21,48 @@ class AppServers(View): "chart_name": chart_name, "chart_type": chart_type, } - return render(request, 'monitor/prometheus_data.html', context, content_type="text/plain") - + return render(request, "monitor/prometheus_data.html", context, content_type="text/plain") + @property def stats(self): - stats = settings.MONGOANALYTICSDB.nbanalytics.page_loads.aggregate([{ - "$match": { - "date": { - "$gte": datetime.datetime.now() - datetime.timedelta(minutes=5), + stats = settings.MONGOANALYTICSDB.nbanalytics.page_loads.aggregate( + [ + { + "$match": { + "date": { + "$gte": datetime.datetime.now() - datetime.timedelta(minutes=5), + }, + }, }, - }, - }, { - "$group": { - "_id" : "$server", - "feeds" : {"$sum": 1}, - }, - }]) - + { + "$group": { + "_id": "$server", + "feeds": {"$sum": 1}, + }, + }, + ] + ) + return list(stats) - + @property - def total(self): - stats = settings.MONGOANALYTICSDB.nbanalytics.page_loads.aggregate([{ - "$match": { - "date": { - "$gt": datetime.datetime.now() - datetime.timedelta(minutes=5), + def total(self): + stats = settings.MONGOANALYTICSDB.nbanalytics.page_loads.aggregate( + [ + { + "$match": { + "date": { + "$gt": datetime.datetime.now() - datetime.timedelta(minutes=5), + }, + }, }, - }, - }, { - "$group": { - "_id" : 1, - "feeds" : {"$sum": 1}, - }, - }]) - + { + "$group": { + "_id": 1, + "feeds": {"$sum": 1}, + }, + }, + ] + ) + return list(stats) diff --git a/apps/monitor/views/newsblur_app_times.py b/apps/monitor/views/newsblur_app_times.py index d64b0bc15..884058f0f 100755 --- a/apps/monitor/views/newsblur_app_times.py +++ b/apps/monitor/views/newsblur_app_times.py @@ -3,10 +3,10 @@ from django.shortcuts import render import datetime from django.conf import settings -class AppTimes(View): +class AppTimes(View): def get(self, request): - servers = dict((("%s" % s['_id'], s['page_load']) for s in self.stats)) + servers = dict((("%s" % s["_id"], s["page_load"]) for s in self.stats)) data = servers chart_name = "app_times" chart_type = "counter" @@ -20,21 +20,26 @@ class AppTimes(View): "chart_name": chart_name, "chart_type": chart_type, } - return render(request, 'monitor/prometheus_data.html', context, content_type="text/plain") - + return render(request, "monitor/prometheus_data.html", context, content_type="text/plain") + @property def stats(self): - stats = settings.MONGOANALYTICSDB.nbanalytics.page_loads.aggregate([{ - "$match": { - "date": { - "$gt": datetime.datetime.now() - datetime.timedelta(minutes=5), + stats = settings.MONGOANALYTICSDB.nbanalytics.page_loads.aggregate( + [ + { + "$match": { + "date": { + "$gt": datetime.datetime.now() - datetime.timedelta(minutes=5), + }, + }, }, - }, - }, { - "$group": { - "_id" : "$server", - "page_load" : {"$avg": "$page_load"}, - }, - }]) - + { + "$group": { + "_id": "$server", + "page_load": {"$avg": "$page_load"}, + }, + }, + ] + ) + return 
list(stats) diff --git a/apps/monitor/views/newsblur_classifiers.py b/apps/monitor/views/newsblur_classifiers.py index bc7af32ab..752fded2d 100755 --- a/apps/monitor/views/newsblur_classifiers.py +++ b/apps/monitor/views/newsblur_classifiers.py @@ -4,13 +4,12 @@ from apps.analyzer.models import MClassifierFeed, MClassifierAuthor, MClassifier class Classifiers(View): - def get(self, request): data = { - 'feeds': MClassifierFeed.objects._collection.count(), - 'authors': MClassifierAuthor.objects._collection.count(), - 'tags': MClassifierTag.objects._collection.count(), - 'titles': MClassifierTitle.objects._collection.count(), + "feeds": MClassifierFeed.objects._collection.count(), + "authors": MClassifierAuthor.objects._collection.count(), + "tags": MClassifierTag.objects._collection.count(), + "titles": MClassifierTitle.objects._collection.count(), } chart_name = "classifiers" @@ -24,5 +23,4 @@ class Classifiers(View): "chart_name": chart_name, "chart_type": chart_type, } - return render(request, 'monitor/prometheus_data.html', context, content_type="text/plain") - + return render(request, "monitor/prometheus_data.html", context, content_type="text/plain") diff --git a/apps/monitor/views/newsblur_dbtimes.py b/apps/monitor/views/newsblur_dbtimes.py index d11daf595..d23859edd 100755 --- a/apps/monitor/views/newsblur_dbtimes.py +++ b/apps/monitor/views/newsblur_dbtimes.py @@ -3,24 +3,22 @@ from django.views import View from apps.statistics.models import MStatistics + class DbTimes(View): - - def get(self, request): - data = { - 'sql_avg': MStatistics.get('latest_sql_avg'), - 'mongo_avg': MStatistics.get('latest_mongo_avg'), - 'redis_user_avg': MStatistics.get('latest_redis_user_avg'), - 'redis_story_avg': MStatistics.get('latest_redis_story_avg'), - 'redis_session_avg': MStatistics.get('latest_redis_session_avg'), - 'redis_pubsub_avg': MStatistics.get('latest_redis_pubsub_avg'), - 'task_sql_avg': MStatistics.get('latest_task_sql_avg'), - 'task_mongo_avg': MStatistics.get('latest_task_mongo_avg'), - 'task_redis_user_avg': MStatistics.get('latest_task_redis_user_avg'), - 'task_redis_story_avg': MStatistics.get('latest_task_redis_story_avg'), - 'task_redis_session_avg': MStatistics.get('latest_task_redis_session_avg'), - 'task_redis_pubsub_avg': MStatistics.get('latest_task_redis_pubsub_avg'), + "sql_avg": MStatistics.get("latest_sql_avg"), + "mongo_avg": MStatistics.get("latest_mongo_avg"), + "redis_user_avg": MStatistics.get("latest_redis_user_avg"), + "redis_story_avg": MStatistics.get("latest_redis_story_avg"), + "redis_session_avg": MStatistics.get("latest_redis_session_avg"), + "redis_pubsub_avg": MStatistics.get("latest_redis_pubsub_avg"), + "task_sql_avg": MStatistics.get("latest_task_sql_avg"), + "task_mongo_avg": MStatistics.get("latest_task_mongo_avg"), + "task_redis_user_avg": MStatistics.get("latest_task_redis_user_avg"), + "task_redis_story_avg": MStatistics.get("latest_task_redis_story_avg"), + "task_redis_session_avg": MStatistics.get("latest_task_redis_session_avg"), + "task_redis_pubsub_avg": MStatistics.get("latest_task_redis_pubsub_avg"), } chart_name = "db_times" chart_type = "counter" @@ -32,4 +30,4 @@ class DbTimes(View): "chart_name": chart_name, "chart_type": chart_type, } - return render(request, 'monitor/prometheus_data.html', context, content_type="text/plain") + return render(request, "monitor/prometheus_data.html", context, content_type="text/plain") diff --git a/apps/monitor/views/newsblur_errors.py b/apps/monitor/views/newsblur_errors.py index ea057efe8..7497efd4f 
100755 --- a/apps/monitor/views/newsblur_errors.py +++ b/apps/monitor/views/newsblur_errors.py @@ -3,23 +3,22 @@ from django.views import View from apps.statistics.models import MStatistics -class Errors(View): +class Errors(View): def get(self, request): statistics = MStatistics.all() data = { - 'feed_success': statistics['feeds_fetched'], + "feed_success": statistics["feeds_fetched"], } chart_name = "errors" chart_type = "counter" formatted_data = {} for k, v in data.items(): - formatted_data[k] = f'feed_success {v}' - + formatted_data[k] = f"feed_success {v}" + context = { "data": formatted_data, "chart_name": chart_name, "chart_type": chart_type, } - return render(request, 'monitor/prometheus_data.html', context, content_type="text/plain") - + return render(request, "monitor/prometheus_data.html", context, content_type="text/plain") diff --git a/apps/monitor/views/newsblur_feed_counts.py b/apps/monitor/views/newsblur_feed_counts.py index 7a08c954c..dbc355851 100755 --- a/apps/monitor/views/newsblur_feed_counts.py +++ b/apps/monitor/views/newsblur_feed_counts.py @@ -6,44 +6,43 @@ from apps.rss_feeds.models import Feed, DuplicateFeed from apps.push.models import PushSubscription from apps.statistics.models import MStatistics -class FeedCounts(View): +class FeedCounts(View): def get(self, request): - - exception_feeds = MStatistics.get('munin:exception_feeds') + exception_feeds = MStatistics.get("munin:exception_feeds") if not exception_feeds: exception_feeds = Feed.objects.filter(has_feed_exception=True).count() - MStatistics.set('munin:exception_feeds', exception_feeds, 60*60*12) + MStatistics.set("munin:exception_feeds", exception_feeds, 60 * 60 * 12) - exception_pages = MStatistics.get('munin:exception_pages') + exception_pages = MStatistics.get("munin:exception_pages") if not exception_pages: exception_pages = Feed.objects.filter(has_page_exception=True).count() - MStatistics.set('munin:exception_pages', exception_pages, 60*60*12) + MStatistics.set("munin:exception_pages", exception_pages, 60 * 60 * 12) - duplicate_feeds = MStatistics.get('munin:duplicate_feeds') + duplicate_feeds = MStatistics.get("munin:duplicate_feeds") if not duplicate_feeds: duplicate_feeds = DuplicateFeed.objects.count() - MStatistics.set('munin:duplicate_feeds', duplicate_feeds, 60*60*12) + MStatistics.set("munin:duplicate_feeds", duplicate_feeds, 60 * 60 * 12) - active_feeds = MStatistics.get('munin:active_feeds') + active_feeds = MStatistics.get("munin:active_feeds") if not active_feeds: active_feeds = Feed.objects.filter(active_subscribers__gt=0).count() - MStatistics.set('munin:active_feeds', active_feeds, 60*60*12) + MStatistics.set("munin:active_feeds", active_feeds, 60 * 60 * 12) - push_feeds = MStatistics.get('munin:push_feeds') + push_feeds = MStatistics.get("munin:push_feeds") if not push_feeds: push_feeds = PushSubscription.objects.filter(verified=True).count() - MStatistics.set('munin:push_feeds', push_feeds, 60*60*12) + MStatistics.set("munin:push_feeds", push_feeds, 60 * 60 * 12) r = redis.Redis(connection_pool=settings.REDIS_FEED_UPDATE_POOL) - + data = { - 'scheduled_feeds': r.zcard('scheduled_updates'), - 'exception_feeds': exception_feeds, - 'exception_pages': exception_pages, - 'duplicate_feeds': duplicate_feeds, - 'active_feeds': active_feeds, - 'push_feeds': push_feeds, + "scheduled_feeds": r.zcard("scheduled_updates"), + "exception_feeds": exception_feeds, + "exception_pages": exception_pages, + "duplicate_feeds": duplicate_feeds, + "active_feeds": active_feeds, + "push_feeds": 
push_feeds, } chart_name = "feed_counts" chart_type = "counter" @@ -57,6 +56,4 @@ class FeedCounts(View): "chart_name": chart_name, "chart_type": chart_type, } - return render(request, 'monitor/prometheus_data.html', context, content_type="text/plain") - - + return render(request, "monitor/prometheus_data.html", context, content_type="text/plain") diff --git a/apps/monitor/views/newsblur_feed_sizes.py b/apps/monitor/views/newsblur_feed_sizes.py index 553aee324..c0da56eae 100644 --- a/apps/monitor/views/newsblur_feed_sizes.py +++ b/apps/monitor/views/newsblur_feed_sizes.py @@ -7,23 +7,24 @@ from apps.rss_feeds.models import Feed, DuplicateFeed from apps.push.models import PushSubscription from apps.statistics.models import MStatistics + class FeedSizes(View): - def get(self, request): - - fs_size_bytes = MStatistics.get('munin:fs_size_bytes') + fs_size_bytes = MStatistics.get("munin:fs_size_bytes") if not fs_size_bytes: - fs_size_bytes = Feed.objects.aggregate(Sum('fs_size_bytes'))['fs_size_bytes__sum'] - MStatistics.set('munin:fs_size_bytes', fs_size_bytes, 60*60*12) + fs_size_bytes = Feed.objects.aggregate(Sum("fs_size_bytes"))["fs_size_bytes__sum"] + MStatistics.set("munin:fs_size_bytes", fs_size_bytes, 60 * 60 * 12) - archive_users_size_bytes = MStatistics.get('munin:archive_users_size_bytes') + archive_users_size_bytes = MStatistics.get("munin:archive_users_size_bytes") if not archive_users_size_bytes: - archive_users_size_bytes = Feed.objects.filter(archive_subscribers__gte=1).aggregate(Sum('fs_size_bytes'))['fs_size_bytes__sum'] - MStatistics.set('munin:archive_users_size_bytes', archive_users_size_bytes, 60*60*12) + archive_users_size_bytes = Feed.objects.filter(archive_subscribers__gte=1).aggregate( + Sum("fs_size_bytes") + )["fs_size_bytes__sum"] + MStatistics.set("munin:archive_users_size_bytes", archive_users_size_bytes, 60 * 60 * 12) data = { - 'fs_size_bytes': fs_size_bytes, - 'archive_users_size_bytes': archive_users_size_bytes, + "fs_size_bytes": fs_size_bytes, + "archive_users_size_bytes": archive_users_size_bytes, } chart_name = "feed_sizes" chart_type = "counter" @@ -37,6 +38,4 @@ class FeedSizes(View): "chart_name": chart_name, "chart_type": chart_type, } - return render(request, 'monitor/prometheus_data.html', context, content_type="text/plain") - - + return render(request, "monitor/prometheus_data.html", context, content_type="text/plain") diff --git a/apps/monitor/views/newsblur_feeds.py b/apps/monitor/views/newsblur_feeds.py index 9c527fa82..fcacada32 100755 --- a/apps/monitor/views/newsblur_feeds.py +++ b/apps/monitor/views/newsblur_feeds.py @@ -6,37 +6,35 @@ from apps.reader.models import UserSubscription from apps.social.models import MSocialProfile, MSocialSubscription from apps.statistics.models import MStatistics + class Feeds(View): - def get(self, request): - - feeds_count = MStatistics.get('munin:feeds_count') + feeds_count = MStatistics.get("munin:feeds_count") if not feeds_count: feeds_count = Feed.objects.all().count() - MStatistics.set('munin:feeds_count', feeds_count, 60*60*12) + MStatistics.set("munin:feeds_count", feeds_count, 60 * 60 * 12) - subscriptions_count = MStatistics.get('munin:subscriptions_count') + subscriptions_count = MStatistics.get("munin:subscriptions_count") if not subscriptions_count: subscriptions_count = UserSubscription.objects.all().count() - MStatistics.set('munin:subscriptions_count', subscriptions_count, 60*60*12) + MStatistics.set("munin:subscriptions_count", subscriptions_count, 60 * 60 * 12) data = { - 'feeds': 
feeds_count, - 'subscriptions': subscriptions_count, - 'profiles': MSocialProfile.objects._collection.count(), - 'social_subscriptions': MSocialSubscription.objects._collection.count(), + "feeds": feeds_count, + "subscriptions": subscriptions_count, + "profiles": MSocialProfile.objects._collection.count(), + "social_subscriptions": MSocialSubscription.objects._collection.count(), } chart_name = "feeds" chart_type = "counter" formatted_data = {} for k, v in data.items(): formatted_data[k] = f'{chart_name}{{category="{k}"}} {v}' - + context = { "data": formatted_data, "chart_name": chart_name, "chart_type": chart_type, } - return render(request, 'monitor/prometheus_data.html', context, content_type="text/plain") - + return render(request, "monitor/prometheus_data.html", context, content_type="text/plain") diff --git a/apps/monitor/views/newsblur_loadtimes.py b/apps/monitor/views/newsblur_loadtimes.py index 64c93e1f6..40538f6df 100755 --- a/apps/monitor/views/newsblur_loadtimes.py +++ b/apps/monitor/views/newsblur_loadtimes.py @@ -1,15 +1,15 @@ from django.shortcuts import render from django.views import View -class LoadTimes(View): +class LoadTimes(View): def get(self, request): from apps.statistics.models import MStatistics - + data = { - 'feed_loadtimes_1min': MStatistics.get('last_1_min_time_taken'), - 'feed_loadtimes_avg_hour': MStatistics.get('latest_avg_time_taken'), - 'feeds_loaded_hour': MStatistics.get('latest_sites_loaded'), + "feed_loadtimes_1min": MStatistics.get("last_1_min_time_taken"), + "feed_loadtimes_avg_hour": MStatistics.get("latest_avg_time_taken"), + "feeds_loaded_hour": MStatistics.get("latest_sites_loaded"), } chart_name = "load_times" chart_type = "counter" @@ -23,5 +23,4 @@ class LoadTimes(View): "chart_name": chart_name, "chart_type": chart_type, } - return render(request, 'monitor/prometheus_data.html', context, content_type="text/plain") - + return render(request, "monitor/prometheus_data.html", context, content_type="text/plain") diff --git a/apps/monitor/views/newsblur_stories.py b/apps/monitor/views/newsblur_stories.py index 3cbe3f0b9..f88a5164c 100755 --- a/apps/monitor/views/newsblur_stories.py +++ b/apps/monitor/views/newsblur_stories.py @@ -2,13 +2,13 @@ from django.views import View from django.shortcuts import render from apps.rss_feeds.models import MStory, MStarredStory from apps.rss_feeds.models import MStory, MStarredStory - -class Stories(View): + +class Stories(View): def get(self, request): data = { - 'stories': MStory.objects._collection.count(), - 'starred_stories': MStarredStory.objects._collection.count(), + "stories": MStory.objects._collection.count(), + "starred_stories": MStarredStory.objects._collection.count(), } chart_name = "stories" chart_type = "counter" @@ -21,5 +21,4 @@ class Stories(View): "chart_name": chart_name, "chart_type": chart_type, } - return render(request, 'monitor/prometheus_data.html', context, content_type="text/plain") - + return render(request, "monitor/prometheus_data.html", context, content_type="text/plain") diff --git a/apps/monitor/views/newsblur_tasks_codes.py b/apps/monitor/views/newsblur_tasks_codes.py index 652a13654..952a9afed 100755 --- a/apps/monitor/views/newsblur_tasks_codes.py +++ b/apps/monitor/views/newsblur_tasks_codes.py @@ -3,10 +3,10 @@ from django.conf import settings from django.shortcuts import render from django.views import View -class TasksCodes(View): +class TasksCodes(View): def get(self, request): - data = dict((("_%s" % s['_id'], s['feeds']) for s in self.stats)) + data = dict((("_%s" 
% s["_id"], s["feeds"]) for s in self.stats)) chart_name = "task_codes" chart_type = "counter" formatted_data = {} @@ -18,22 +18,26 @@ class TasksCodes(View): "chart_name": chart_name, "chart_type": chart_type, } - return render(request, 'monitor/prometheus_data.html', context, content_type="text/plain") - + return render(request, "monitor/prometheus_data.html", context, content_type="text/plain") + @property - def stats(self): - stats = settings.MONGOANALYTICSDB.nbanalytics.feed_fetches.aggregate([{ - "$match": { - "date": { - "$gt": datetime.datetime.now() - datetime.timedelta(minutes=5), + def stats(self): + stats = settings.MONGOANALYTICSDB.nbanalytics.feed_fetches.aggregate( + [ + { + "$match": { + "date": { + "$gt": datetime.datetime.now() - datetime.timedelta(minutes=5), + }, + }, }, - }, - }, { - "$group": { - "_id" : "$feed_code", - "feeds" : {"$sum": 1}, - }, - }]) - + { + "$group": { + "_id": "$feed_code", + "feeds": {"$sum": 1}, + }, + }, + ] + ) + return list(stats) - \ No newline at end of file diff --git a/apps/monitor/views/newsblur_tasks_pipeline.py b/apps/monitor/views/newsblur_tasks_pipeline.py index e962fb9ef..33931f5ac 100755 --- a/apps/monitor/views/newsblur_tasks_pipeline.py +++ b/apps/monitor/views/newsblur_tasks_pipeline.py @@ -4,10 +4,10 @@ from django.conf import settings from django.shortcuts import render from django.views import View -class TasksPipeline(View): +class TasksPipeline(View): def get(self, request): - data =self.stats + data = self.stats chart_name = "task_pipeline" chart_type = "counter" @@ -19,27 +19,31 @@ class TasksPipeline(View): "chart_name": chart_name, "chart_type": chart_type, } - return render(request, 'monitor/prometheus_data.html', context, content_type="text/plain") - + return render(request, "monitor/prometheus_data.html", context, content_type="text/plain") + @property def stats(self): - - stats = settings.MONGOANALYTICSDB.nbanalytics.feed_fetches.aggregate([{ - "$match": { - "date": { - "$gt": datetime.datetime.now() - datetime.timedelta(minutes=5), + stats = settings.MONGOANALYTICSDB.nbanalytics.feed_fetches.aggregate( + [ + { + "$match": { + "date": { + "$gt": datetime.datetime.now() - datetime.timedelta(minutes=5), + }, + }, }, - }, - }, { - "$group": { - "_id": 1, - "feed_fetch": {"$avg": "$feed_fetch"}, - "feed_process": {"$avg": "$feed_process"}, - "page": {"$avg": "$page"}, - "icon": {"$avg": "$icon"}, - "total": {"$avg": "$total"}, - }, - }]) + { + "$group": { + "_id": 1, + "feed_fetch": {"$avg": "$feed_fetch"}, + "feed_process": {"$avg": "$feed_process"}, + "page": {"$avg": "$page"}, + "icon": {"$avg": "$icon"}, + "total": {"$avg": "$total"}, + }, + }, + ] + ) stats = list(stats) if stats: print(stats) diff --git a/apps/monitor/views/newsblur_tasks_servers.py b/apps/monitor/views/newsblur_tasks_servers.py index 90a26fcf3..c8bcb394f 100755 --- a/apps/monitor/views/newsblur_tasks_servers.py +++ b/apps/monitor/views/newsblur_tasks_servers.py @@ -4,10 +4,10 @@ from django.conf import settings from django.shortcuts import render from django.views import View -class TasksServers(View): +class TasksServers(View): def get(self, request): - data = dict((("%s" % s['_id'].replace('-', ''), s['feeds']) for s in self.stats)) + data = dict((("%s" % s["_id"].replace("-", ""), s["feeds"]) for s in self.stats)) chart_name = "task_servers" chart_type = "counter" @@ -19,39 +19,48 @@ class TasksServers(View): "chart_name": chart_name, "chart_type": chart_type, } - return render(request, 'monitor/prometheus_data.html', context, 
content_type="text/plain") + return render(request, "monitor/prometheus_data.html", context, content_type="text/plain") - @property def stats(self): - stats = settings.MONGOANALYTICSDB.nbanalytics.feed_fetches.aggregate([{ - "$match": { - "date": { - "$gte": datetime.datetime.now() - datetime.timedelta(minutes=5), + stats = settings.MONGOANALYTICSDB.nbanalytics.feed_fetches.aggregate( + [ + { + "$match": { + "date": { + "$gte": datetime.datetime.now() - datetime.timedelta(minutes=5), + }, + }, }, - }, - }, { - "$group": { - "_id" : "$server", - "feeds" : {"$sum": 1}, - }, - }]) - + { + "$group": { + "_id": "$server", + "feeds": {"$sum": 1}, + }, + }, + ] + ) + return list(stats) - + @property def total(self): - stats = settings.MONGOANALYTICSDB.nbanalytics.feed_fetches.aggregate([{ - "$match": { - "date": { - "$gt": datetime.datetime.now() - datetime.timedelta(minutes=5), + stats = settings.MONGOANALYTICSDB.nbanalytics.feed_fetches.aggregate( + [ + { + "$match": { + "date": { + "$gt": datetime.datetime.now() - datetime.timedelta(minutes=5), + }, + }, }, - }, - }, { - "$group": { - "_id" : 1, - "feeds" : {"$sum": 1}, - }, - }]) - + { + "$group": { + "_id": 1, + "feeds": {"$sum": 1}, + }, + }, + ] + ) + return list(stats) diff --git a/apps/monitor/views/newsblur_tasks_times.py b/apps/monitor/views/newsblur_tasks_times.py index 0d6a14f9c..2ba7aa21b 100755 --- a/apps/monitor/views/newsblur_tasks_times.py +++ b/apps/monitor/views/newsblur_tasks_times.py @@ -4,10 +4,10 @@ from django.conf import settings from django.shortcuts import render from django.views import View -class TasksTimes(View): +class TasksTimes(View): def get(self, request): - data = dict((("%s" % s['_id'], s['total']) for s in self.stats)) + data = dict((("%s" % s["_id"], s["total"]) for s in self.stats)) chart_name = "task_times" chart_type = "counter" @@ -19,22 +19,26 @@ class TasksTimes(View): "chart_name": chart_name, "chart_type": chart_type, } - return render(request, 'monitor/prometheus_data.html', context, content_type="text/plain") + return render(request, "monitor/prometheus_data.html", context, content_type="text/plain") - @property def stats(self): - stats = settings.MONGOANALYTICSDB.nbanalytics.feed_fetches.aggregate([{ - "$match": { - "date": { - "$gt": datetime.datetime.now() - datetime.timedelta(minutes=5), + stats = settings.MONGOANALYTICSDB.nbanalytics.feed_fetches.aggregate( + [ + { + "$match": { + "date": { + "$gt": datetime.datetime.now() - datetime.timedelta(minutes=5), + }, + }, }, - }, - }, { - "$group": { - "_id" : "$server", - "total" : {"$avg": "$total"}, - }, - }]) - + { + "$group": { + "_id": "$server", + "total": {"$avg": "$total"}, + }, + }, + ] + ) + return list(stats) diff --git a/apps/monitor/views/newsblur_updates.py b/apps/monitor/views/newsblur_updates.py index 38640407f..2775e72ec 100755 --- a/apps/monitor/views/newsblur_updates.py +++ b/apps/monitor/views/newsblur_updates.py @@ -4,26 +4,26 @@ from django.conf import settings from django.shortcuts import render from django.views import View -class Updates(View): - def get(self, request): +class Updates(View): + def get(self, request): r = redis.Redis(connection_pool=settings.REDIS_FEED_UPDATE_POOL) data = { - 'update_queue': r.scard("queued_feeds"), - 'feeds_fetched': r.zcard("fetched_feeds_last_hour"), - 'tasked_feeds': r.zcard("tasked_feeds"), - 'error_feeds': r.zcard("error_feeds"), - 'celery_update_feeds': r.llen("update_feeds"), - 'celery_new_feeds': r.llen("new_feeds"), - 'celery_push_feeds': r.llen("push_feeds"), - 
'celery_work_queue': r.llen("work_queue"), - 'celery_search_queue': r.llen("search_indexer"), + "update_queue": r.scard("queued_feeds"), + "feeds_fetched": r.zcard("fetched_feeds_last_hour"), + "tasked_feeds": r.zcard("tasked_feeds"), + "error_feeds": r.zcard("error_feeds"), + "celery_update_feeds": r.llen("update_feeds"), + "celery_new_feeds": r.llen("new_feeds"), + "celery_push_feeds": r.llen("push_feeds"), + "celery_work_queue": r.llen("work_queue"), + "celery_search_queue": r.llen("search_indexer"), } chart_name = "updates" chart_type = "counter" formatted_data = {} - + for k, v in data.items(): formatted_data[k] = f'{chart_name}{{category="{k}"}} {v}' context = { @@ -31,5 +31,4 @@ class Updates(View): "chart_name": chart_name, "chart_type": chart_type, } - return render(request, 'monitor/prometheus_data.html', context, content_type="text/plain") - + return render(request, "monitor/prometheus_data.html", context, content_type="text/plain") diff --git a/apps/monitor/views/newsblur_users.py b/apps/monitor/views/newsblur_users.py index 00218c8cd..76eed4d0f 100755 --- a/apps/monitor/views/newsblur_users.py +++ b/apps/monitor/views/newsblur_users.py @@ -7,39 +7,63 @@ from django.views import View from apps.profile.models import Profile, RNewUserQueue from apps.statistics.models import MStatistics -class Users(View): +class Users(View): def get(self, request): last_year = datetime.datetime.utcnow() - datetime.timedelta(days=365) last_month = datetime.datetime.utcnow() - datetime.timedelta(days=30) - last_day = datetime.datetime.utcnow() - datetime.timedelta(minutes=60*24) - expiration_sec = 60*60 # 1 hour - + last_day = datetime.datetime.utcnow() - datetime.timedelta(minutes=60 * 24) + expiration_sec = 60 * 60 # 1 hour + data = { - 'all': MStatistics.get('munin:users_count', - lambda: User.objects.count(), - set_default=True, expiration_sec=expiration_sec), - 'yearly': MStatistics.get('munin:users_yearly', - lambda: Profile.objects.filter(last_seen_on__gte=last_year).count(), - set_default=True, expiration_sec=expiration_sec), - 'monthly': MStatistics.get('munin:users_monthly', - lambda: Profile.objects.filter(last_seen_on__gte=last_month).count(), - set_default=True, expiration_sec=expiration_sec), - 'daily': MStatistics.get('munin:users_daily', - lambda: Profile.objects.filter(last_seen_on__gte=last_day).count(), - set_default=True, expiration_sec=expiration_sec), - 'premium': MStatistics.get('munin:users_premium', - lambda: Profile.objects.filter(is_premium=True).count(), - set_default=True, expiration_sec=expiration_sec), - 'archive': MStatistics.get('munin:users_archive', - lambda: Profile.objects.filter(is_archive=True).count(), - set_default=True, expiration_sec=expiration_sec), - 'pro': MStatistics.get('munin:users_pro', - lambda: Profile.objects.filter(is_pro=True).count(), - set_default=True, expiration_sec=expiration_sec), - 'queued': MStatistics.get('munin:users_queued', - lambda: RNewUserQueue.user_count(), - set_default=True, expiration_sec=expiration_sec), + "all": MStatistics.get( + "munin:users_count", + lambda: User.objects.count(), + set_default=True, + expiration_sec=expiration_sec, + ), + "yearly": MStatistics.get( + "munin:users_yearly", + lambda: Profile.objects.filter(last_seen_on__gte=last_year).count(), + set_default=True, + expiration_sec=expiration_sec, + ), + "monthly": MStatistics.get( + "munin:users_monthly", + lambda: Profile.objects.filter(last_seen_on__gte=last_month).count(), + set_default=True, + expiration_sec=expiration_sec, + ), + "daily": 
MStatistics.get( + "munin:users_daily", + lambda: Profile.objects.filter(last_seen_on__gte=last_day).count(), + set_default=True, + expiration_sec=expiration_sec, + ), + "premium": MStatistics.get( + "munin:users_premium", + lambda: Profile.objects.filter(is_premium=True).count(), + set_default=True, + expiration_sec=expiration_sec, + ), + "archive": MStatistics.get( + "munin:users_archive", + lambda: Profile.objects.filter(is_archive=True).count(), + set_default=True, + expiration_sec=expiration_sec, + ), + "pro": MStatistics.get( + "munin:users_pro", + lambda: Profile.objects.filter(is_pro=True).count(), + set_default=True, + expiration_sec=expiration_sec, + ), + "queued": MStatistics.get( + "munin:users_queued", + lambda: RNewUserQueue.user_count(), + set_default=True, + expiration_sec=expiration_sec, + ), } chart_name = "users" chart_type = "counter" @@ -52,5 +76,4 @@ class Users(View): "chart_name": chart_name, "chart_type": chart_type, } - return render(request, 'monitor/prometheus_data.html', context, content_type="text/plain") - + return render(request, "monitor/prometheus_data.html", context, content_type="text/plain") diff --git a/apps/monitor/views/prometheus_redis.py b/apps/monitor/views/prometheus_redis.py index 817628797..9c2486ce8 100644 --- a/apps/monitor/views/prometheus_redis.py +++ b/apps/monitor/views/prometheus_redis.py @@ -12,6 +12,7 @@ RedisUsedMemory RedisSize """ + class RedisGrafanaMetric(View): category = "Redis" @@ -23,9 +24,9 @@ class RedisGrafanaMetric(View): return True def get_info(self): - host = os.environ.get('REDIS_HOST') or '127.0.0.1' - port = int(os.environ.get('REDIS_PORT') or '6379') - if host.startswith('/'): + host = os.environ.get("REDIS_HOST") or "127.0.0.1" + port = int(os.environ.get("REDIS_PORT") or "6379") + if host.startswith("/"): s = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) s.connect(host) else: @@ -33,9 +34,9 @@ class RedisGrafanaMetric(View): s.connect((host, port)) s.send("*1\r\n$4\r\ninfo\r\n") buf = "" - while '\r\n' not in buf: + while "\r\n" not in buf: buf += s.recv(1024) - l, buf = buf.split('\r\n', 1) + l, buf = buf.split("\r\n", 1) if l[0] != "$": s.close() raise Exception("Protocol error") @@ -43,7 +44,7 @@ class RedisGrafanaMetric(View): if remaining > 0: buf += s.recv(remaining) s.close() - return dict(x.split(':', 1) for x in buf.split('\r\n') if ':' in x) + return dict(x.split(":", 1) for x in buf.split("\r\n") if ":" in x) def execute(self): stats = self.get_info() @@ -57,25 +58,28 @@ class RedisGrafanaMetric(View): return values def get_fields(self): - raise NotImplementedError('You must implement the get_fields function') + raise NotImplementedError("You must implement the get_fields function") def get_context(self): - raise NotImplementedError('You must implement the get_context function') - + raise NotImplementedError("You must implement the get_context function") + def get(self, request): context = self.get_context() - return render(request, 'monitor/prometheus_data.html', context, content_type="text/plain") + return render(request, "monitor/prometheus_data.html", context, content_type="text/plain") + class RedisActiveConnection(RedisGrafanaMetric): - def get_fields(self): return ( - ('connected_clients', dict( - label = "connections", - info = "connections", - type = "GAUGE", - )), + ( + "connected_clients", + dict( + label="connections", + info="connections", + type="GAUGE", + ), + ), ) def get_context(self): - raise NotImplementedError('You must implement the get_context function') + raise 
NotImplementedError("You must implement the get_context function") diff --git a/apps/newsletters/models.py b/apps/newsletters/models.py index b7bd880cd..31950ac2d 100644 --- a/apps/newsletters/models.py +++ b/apps/newsletters/models.py @@ -17,23 +17,23 @@ from utils import log as logging from utils.story_functions import linkify from utils.scrubber import Scrubber + class EmailNewsletter: - def receive_newsletter(self, params): - user = self._user_from_email(params['recipient']) + user = self._user_from_email(params["recipient"]) if not user: return - - sender_name, sender_username, sender_domain = self._split_sender(params['from']) + + sender_name, sender_username, sender_domain = self._split_sender(params["from"]) feed_address = self._feed_address(user, "%s@%s" % (sender_username, sender_domain)) - + try: usf = UserSubscriptionFolders.objects.get(user=user) except UserSubscriptionFolders.DoesNotExist: logging.user(user, "~FRUser does not have a USF, ignoring newsletter.") return - usf.add_folder('', 'Newsletters') - + usf.add_folder("", "Newsletters") + # First look for the email address try: feed = Feed.objects.get(feed_address=feed_address) @@ -46,45 +46,47 @@ class EmailNewsletter: # If not found, check among titles user has subscribed to if not feed: - newsletter_subs = UserSubscription.objects.filter(user=user, feed__feed_address__contains="newsletter:").only('feed') + newsletter_subs = UserSubscription.objects.filter( + user=user, feed__feed_address__contains="newsletter:" + ).only("feed") newsletter_feed_ids = [us.feed.pk for us in newsletter_subs] feeds = Feed.objects.filter(feed_title__iexact=sender_name, pk__in=newsletter_feed_ids) if feeds.count(): feed = feeds[0] - + # Create a new feed if it doesn't exist by sender name or email if not feed: - feed = Feed.objects.create(feed_address=feed_address, - feed_link='http://' + sender_domain, - feed_title=sender_name, - fetched_once=True, - known_good=True) + feed = Feed.objects.create( + feed_address=feed_address, + feed_link="http://" + sender_domain, + feed_title=sender_name, + fetched_once=True, + known_good=True, + ) feed.update() logging.user(user, "~FCCreating newsletter feed: ~SB%s" % (feed)) r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL) - r.publish(user.username, 'reload:%s' % feed.pk) + r.publish(user.username, "reload:%s" % feed.pk) self._check_if_first_newsletter(user) - + feed.last_update = datetime.datetime.now() feed.last_story_date = datetime.datetime.now() feed.save() - + if feed.feed_title != sender_name: feed.feed_title = sender_name feed.save() - + try: usersub = UserSubscription.objects.get(user=user, feed=feed) except UserSubscription.DoesNotExist: _, _, usersub = UserSubscription.add_subscription( - user=user, - feed_address=feed_address, - folder='Newsletters' + user=user, feed_address=feed_address, folder="Newsletters" ) r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL) - r.publish(user.username, 'reload:feeds') - - story_hash = MStory.ensure_story_hash(params['signature'], feed.pk) + r.publish(user.username, "reload:feeds") + + story_hash = MStory.ensure_story_hash(params["signature"], feed.pk) story_content = self._get_content(params) plain_story_content = self._get_content(params, force_plain=True) if len(plain_story_content) > len(story_content): @@ -92,15 +94,16 @@ class EmailNewsletter: story_content = self._clean_content(story_content) story_params = { "story_feed_id": feed.pk, - "story_date": datetime.datetime.fromtimestamp(int(params['timestamp'])), - "story_title": 
params['subject'], + "story_date": datetime.datetime.fromtimestamp(int(params["timestamp"])), + "story_title": params["subject"], "story_content": story_content, - "story_author_name": params['from'], - "story_permalink": "https://%s%s" % ( - Site.objects.get_current().domain, - reverse('newsletter-story', - kwargs={'story_hash': story_hash})), - "story_guid": params['signature'], + "story_author_name": params["from"], + "story_permalink": "https://%s%s" + % ( + Site.objects.get_current().domain, + reverse("newsletter-story", kwargs={"story_hash": story_hash}), + ), + "story_guid": params["signature"], } try: @@ -108,17 +111,17 @@ class EmailNewsletter: except MStory.DoesNotExist: story = MStory(**story_params) story.save() - + usersub.needs_unread_recalc = True usersub.save() - + self._publish_to_subscribers(feed, story.story_hash) - - MFetchHistory.add(feed_id=feed.pk, fetch_type='push') + + MFetchHistory.add(feed_id=feed.pk, fetch_type="push") logging.user(user, "~FCNewsletter feed story: ~SB%s~SN / ~SB%s" % (story.story_title, feed)) - + return story - + def _check_if_first_newsletter(self, user, force=False): if not user.email: return @@ -129,10 +132,10 @@ class EmailNewsletter: if sub.feed.is_newsletter: found_newsletter = True break - if not found_newsletter and not force: - return - - params = dict(receiver_user_id=user.pk, email_type='first_newsletter') + if not found_newsletter and not force: + return + + params = dict(receiver_user_id=user.pk, email_type="first_newsletter") try: MSentEmail.objects.get(**params) if not force: @@ -140,23 +143,26 @@ class EmailNewsletter: return except MSentEmail.DoesNotExist: MSentEmail.objects.create(**params) - - text = render_to_string('mail/email_first_newsletter.txt', {}) - html = render_to_string('mail/email_first_newsletter.xhtml', {}) + + text = render_to_string("mail/email_first_newsletter.txt", {}) + html = render_to_string("mail/email_first_newsletter.xhtml", {}) subject = "Your email newsletters are now being sent to NewsBlur" - msg = EmailMultiAlternatives(subject, text, - from_email='NewsBlur <%s>' % settings.HELLO_EMAIL, - to=['%s <%s>' % (user, user.email)]) + msg = EmailMultiAlternatives( + subject, + text, + from_email="NewsBlur <%s>" % settings.HELLO_EMAIL, + to=["%s <%s>" % (user, user.email)], + ) msg.attach_alternative(html, "text/html") msg.send() - + logging.user(user, "~BB~FM~SBSending first newsletter email to: %s" % user.email) - + def _user_from_email(self, email): - tokens = re.search('(\w+)[\+\-\.](\w+)@newsletters.newsblur.com', email) + tokens = re.search("(\w+)[\+\-\.](\w+)@newsletters.newsblur.com", email) if not tokens: return - + username, secret_token = tokens.groups() try: profiles = Profile.objects.filter(secret_token=secret_token) @@ -165,55 +171,56 @@ class EmailNewsletter: profile = profiles[0] except Profile.DoesNotExist: return - + return profile.user - + def _feed_address(self, user, sender_email): - return 'newsletter:%s:%s' % (user.pk, sender_email) - + return "newsletter:%s:%s" % (user.pk, sender_email) + def _split_sender(self, sender): - tokens = re.search('(.*?) <(.*?)@(.*?)>', sender) + tokens = re.search("(.*?) 
<(.*?)@(.*?)>", sender) if not tokens: - name, domain = sender.split('@') + name, domain = sender.split("@") return name, sender, domain - + sender_name, sender_username, sender_domain = tokens.group(1), tokens.group(2), tokens.group(3) - sender_name = sender_name.replace('"', '') - + sender_name = sender_name.replace('"', "") + return sender_name, sender_username, sender_domain - + def _get_content(self, params, force_plain=False): - if 'body-enriched' in params and not force_plain: - return params['body-enriched'] - if 'body-html' in params and not force_plain: - return params['body-html'] - if 'stripped-html' in params and not force_plain: - return params['stripped-html'] - if 'body-plain' in params: - return linkify(linebreaks(params['body-plain'])) - + if "body-enriched" in params and not force_plain: + return params["body-enriched"] + if "body-html" in params and not force_plain: + return params["body-html"] + if "stripped-html" in params and not force_plain: + return params["stripped-html"] + if "body-plain" in params: + return linkify(linebreaks(params["body-plain"])) + if force_plain: return self._get_content(params, force_plain=False) - + def _clean_content(self, content): original = content scrubber = Scrubber() content = scrubber.scrub(content) - if len(content) < len(original)*0.01: + if len(content) < len(original) * 0.01: content = original - content = content.replace('!important', '') + content = content.replace("!important", "") return content - + def _publish_to_subscribers(self, feed, story_hash): try: r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL) - listeners_count = r.publish("%s:story" % feed.pk, 'story:new:%s' % story_hash) + listeners_count = r.publish("%s:story" % feed.pk, "story:new:%s" % story_hash) if listeners_count: - logging.debug(" ---> [%-30s] ~FMPublished to %s subscribers" % (feed.log_title[:30], listeners_count)) + logging.debug( + " ---> [%-30s] ~FMPublished to %s subscribers" % (feed.log_title[:30], listeners_count) + ) except redis.ConnectionError: logging.debug(" ***> [%-30s] ~BMRedis is unavailable for real-time." % (feed.log_title[:30],)) - + if MUserFeedNotification.feed_has_users(feed.pk) > 0: QueueNotifications.delay(feed.pk, 1) - \ No newline at end of file diff --git a/apps/newsletters/urls.py b/apps/newsletters/urls.py index 224e2cc09..e6263f189 100644 --- a/apps/newsletters/urls.py +++ b/apps/newsletters/urls.py @@ -2,6 +2,6 @@ from django.conf.urls import url from apps.newsletters import views urlpatterns = [ - url(r'^receive/?$', views.newsletter_receive, name='newsletter-receive'), - url(r'^story/(?P[\w:]+)/?$', views.newsletter_story, name='newsletter-story'), + url(r"^receive/?$", views.newsletter_receive, name="newsletter-receive"), + url(r"^story/(?P[\w:]+)/?$", views.newsletter_story, name="newsletter-story"), ] diff --git a/apps/newsletters/views.py b/apps/newsletters/views.py index 448912783..f4ed35b1b 100644 --- a/apps/newsletters/views.py +++ b/apps/newsletters/views.py @@ -5,9 +5,10 @@ from utils import log as logging from apps.newsletters.models import EmailNewsletter from apps.rss_feeds.models import Feed, MStory + def newsletter_receive(request): """ - This function is called by mailgun's receive email feature. This is a + This function is called by mailgun's receive email feature. This is a private API used for the newsletter app. 
""" # params = { @@ -42,24 +43,25 @@ def newsletter_receive(request): # 'Subject':'Test Newsletter theskimm' # } params = request.POST - - response = HttpResponse('OK') - - if settings.DEBUG or 'samuel' in params.get('To', ''): + + response = HttpResponse("OK") + + if settings.DEBUG or "samuel" in params.get("To", ""): logging.debug(" ---> Email newsletter: %s" % params) - + if not params or not len(params.keys()): logging.debug(" ***> Email newsletter blank body: %s" % request.body) raise Http404 - + email_newsletter = EmailNewsletter() story = email_newsletter.receive_newsletter(params) - + if not story: raise Http404 - + return response + def newsletter_story(request, story_hash): try: story = MStory.objects.get(story_hash=story_hash) @@ -67,4 +69,4 @@ def newsletter_story(request, story_hash): raise Http404 story = Feed.format_story(story) - return HttpResponse(story['story_content']) + return HttpResponse(story["story_content"]) diff --git a/apps/notifications/models.py b/apps/notifications/models.py index 253dab95e..3542415f4 100644 --- a/apps/notifications/models.py +++ b/apps/notifications/models.py @@ -40,21 +40,21 @@ class NotificationFrequency(enum.Enum): class MUserNotificationTokens(mongo.Document): - '''A user's push notification tokens''' + """A user's push notification tokens""" user_id = mongo.IntField() ios_tokens = mongo.ListField(mongo.StringField(max_length=1024)) use_sandbox = mongo.BooleanField(default=False) meta = { - 'collection': 'notification_tokens', - 'indexes': [ + "collection": "notification_tokens", + "indexes": [ { - 'fields': ['user_id'], - 'unique': True, + "fields": ["user_id"], + "unique": True, } ], - 'allow_inheritance': False, + "allow_inheritance": False, } @classmethod @@ -68,7 +68,7 @@ class MUserNotificationTokens(mongo.Document): class MUserFeedNotification(mongo.Document): - '''A user's notifications of a single feed.''' + """A user's notifications of a single feed.""" user_id = mongo.IntField() feed_id = mongo.IntField() @@ -82,32 +82,32 @@ class MUserFeedNotification(mongo.Document): ios_tokens = mongo.ListField(mongo.StringField(max_length=1024)) meta = { - 'collection': 'notifications', - 'indexes': [ - 'feed_id', + "collection": "notifications", + "indexes": [ + "feed_id", { - 'fields': ['user_id', 'feed_id'], - 'unique': True, + "fields": ["user_id", "feed_id"], + "unique": True, }, ], - 'allow_inheritance': False, + "allow_inheritance": False, } def __str__(self): notification_types = [] if self.is_email: - notification_types.append('email') + notification_types.append("email") if self.is_web: - notification_types.append('web') + notification_types.append("web") if self.is_ios: - notification_types.append('ios') + notification_types.append("ios") if self.is_android: - notification_types.append('android') + notification_types.append("android") return "%s/%s: %s -> %s" % ( User.objects.get(pk=self.user_id).username, Feed.get_by_id(self.feed_id), - ','.join(notification_types), + ",".join(notification_types), self.last_notification_date, ) @@ -128,17 +128,17 @@ class MUserFeedNotification(mongo.Document): for feed in notifications: notifications_by_feed[feed.feed_id] = { - 'notification_types': [], - 'notification_filter': "focus" if feed.is_focus else "unread", + "notification_types": [], + "notification_filter": "focus" if feed.is_focus else "unread", } if feed.is_email: - notifications_by_feed[feed.feed_id]['notification_types'].append('email') + notifications_by_feed[feed.feed_id]["notification_types"].append("email") if feed.is_web: - 
notifications_by_feed[feed.feed_id]['notification_types'].append('web') + notifications_by_feed[feed.feed_id]["notification_types"].append("web") if feed.is_ios: - notifications_by_feed[feed.feed_id]['notification_types'].append('ios') + notifications_by_feed[feed.feed_id]["notification_types"].append("ios") if feed.is_android: - notifications_by_feed[feed.feed_id]['notification_types'].append('android') + notifications_by_feed[feed.feed_id]["notification_types"].append("android") return notifications_by_feed @@ -153,7 +153,7 @@ class MUserFeedNotification(mongo.Document): r = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL) latest_story_hashes = r.zrange("zF:%s" % feed.pk, -1 * new_stories, -1) - mstories = MStory.objects.filter(story_hash__in=latest_story_hashes).order_by('-story_date') + mstories = MStory.objects.filter(story_hash__in=latest_story_hashes).order_by("-story_date") stories = Feed.format_stories(mstories) total_sent_count = 0 @@ -186,19 +186,19 @@ class MUserFeedNotification(mongo.Document): if settings.DEBUG: logging.debug("Sent too many, ignoring...") continue - if story['story_date'] <= last_notification_date and not force: + if story["story_date"] <= last_notification_date and not force: if settings.DEBUG: logging.debug( "Story date older than last notification date: %s <= %s" - % (story['story_date'], last_notification_date) + % (story["story_date"], last_notification_date) ) continue - if story['story_date'] > user_feed_notification.last_notification_date: - user_feed_notification.last_notification_date = story['story_date'] + if story["story_date"] > user_feed_notification.last_notification_date: + user_feed_notification.last_notification_date = story["story_date"] user_feed_notification.save() - story['story_content'] = html.unescape(story['story_content']) + story["story_content"] = html.unescape(story["story_content"]) sent = user_feed_notification.push_story_notification(story, classifiers, usersub) if sent: @@ -209,49 +209,40 @@ class MUserFeedNotification(mongo.Document): def classifiers(self, usersub): classifiers = {} if usersub.is_trained: - classifiers['feeds'] = list( - MClassifierFeed.objects( - user_id=self.user_id, feed_id=self.feed_id, social_user_id=0 - ) + classifiers["feeds"] = list( + MClassifierFeed.objects(user_id=self.user_id, feed_id=self.feed_id, social_user_id=0) ) - classifiers['authors'] = list( + classifiers["authors"] = list( MClassifierAuthor.objects(user_id=self.user_id, feed_id=self.feed_id) ) - classifiers['titles'] = list( - MClassifierTitle.objects(user_id=self.user_id, feed_id=self.feed_id) - ) - classifiers['tags'] = list( - MClassifierTag.objects(user_id=self.user_id, feed_id=self.feed_id) - ) + classifiers["titles"] = list(MClassifierTitle.objects(user_id=self.user_id, feed_id=self.feed_id)) + classifiers["tags"] = list(MClassifierTag.objects(user_id=self.user_id, feed_id=self.feed_id)) return classifiers def title_and_body(self, story, usersub, notification_title_only=False): def replace_with_newlines(element): - text = '' + text = "" for elem in element.recursiveChildGenerator(): if isinstance(elem, (str,)): text += elem - elif elem.name == 'br': - text += '\n' - elif elem.name == 'p': - text += '\n\n' - text = re.sub(r' +', ' ', text).strip() + elif elem.name == "br": + text += "\n" + elif elem.name == "p": + text += "\n\n" + text = re.sub(r" +", " ", text).strip() return text feed_title = usersub.user_title or usersub.feed.feed_title # title = "%s: %s" % (feed_title, story['story_title']) title = feed_title - soup 
= BeautifulSoup(story['story_content'].strip(), features="lxml") + soup = BeautifulSoup(story["story_content"].strip(), features="lxml") # if notification_title_only: subtitle = None - body_title = html.unescape(story['story_title']).strip() + body_title = html.unescape(story["story_title"]).strip() body_content = replace_with_newlines(soup) if body_content: - if ( - body_title == body_content[: len(body_title)] - or body_content[:100] == body_title[:100] - ): + if body_title == body_content[: len(body_title)] or body_content[:100] == body_title[:100]: body_content = "" else: body_content = f"\n※ {body_content}" @@ -283,7 +274,7 @@ class MUserFeedNotification(mongo.Document): logging.user( user, "~FCSending push notification: %s/%s (score: %s)" - % (story['story_title'][:40], story['story_hash'], story_score), + % (story["story_title"][:40], story["story_hash"], story_score), ) self.send_web(story, user) @@ -298,7 +289,7 @@ class MUserFeedNotification(mongo.Document): return r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL) - r.publish(user.username, 'notification:%s,%s' % (story['story_hash'], story['story_title'])) + r.publish(user.username, "notification:%s,%s" % (story["story_hash"], story["story_title"])) def send_ios(self, story, user, usersub): if not self.is_ios: @@ -319,45 +310,42 @@ class MUserFeedNotification(mongo.Document): # 7. cat aps.pem aps_key.noenc.pem > aps.p12.pem # 8. Verify: openssl s_client -connect gateway.push.apple.com:2195 -cert aps.p12.pem # 9. Deploy: aps -l work -t apns,repo,celery - apns = APNsClient( - '/srv/newsblur/config/certificates/aps.p12.pem', use_sandbox=tokens.use_sandbox - ) + apns = APNsClient("/srv/newsblur/config/certificates/aps.p12.pem", use_sandbox=tokens.use_sandbox) - notification_title_only = is_true(user.profile.preference_value('notification_title_only')) + notification_title_only = is_true(user.profile.preference_value("notification_title_only")) title, subtitle, body = self.title_and_body(story, usersub, notification_title_only) image_url = None - if len(story['image_urls']): - image_url = story['image_urls'][0] + if len(story["image_urls"]): + image_url = story["image_urls"][0] # print image_url confirmed_ios_tokens = [] for token in tokens.ios_tokens: logging.user( user, - '~BMStory notification by iOS: ~FY~SB%s~SN~BM~FY/~SB%s' - % (story['story_title'][:50], usersub.feed.feed_title[:50]), + "~BMStory notification by iOS: ~FY~SB%s~SN~BM~FY/~SB%s" + % (story["story_title"][:50], usersub.feed.feed_title[:50]), ) payload = Payload( - alert={'title': title, 'subtitle': subtitle, 'body': body}, + alert={"title": title, "subtitle": subtitle, "body": body}, category="STORY_CATEGORY", mutable_content=True, custom={ - 'story_hash': story['story_hash'], - 'story_feed_id': story['story_feed_id'], - 'image_url': image_url, + "story_hash": story["story_hash"], + "story_feed_id": story["story_feed_id"], + "image_url": image_url, }, ) try: apns.send_notification(token, payload, topic="com.newsblur.NewsBlur") except (BadDeviceToken, Unregistered, DeviceTokenNotForTopic): - logging.user(user, '~BMiOS token expired: ~FR~SB%s' % (token[:50])) + logging.user(user, "~BMiOS token expired: ~FR~SB%s" % (token[:50])) else: confirmed_ios_tokens.append(token) if settings.DEBUG: logging.user( user, - '~BMiOS token good: ~FB~SB%s / %s' - % (token[:50], len(confirmed_ios_tokens)), + "~BMiOS token good: ~FB~SB%s / %s" % (token[:50], len(confirmed_ios_tokens)), ) if len(confirmed_ios_tokens) < len(tokens.ios_tokens): @@ -379,11 +367,14 @@ class 
MUserFeedNotification(mongo.Document): r.expire(emails_sent_date_key, 60 * 60 * 24) # Keep for a day count = int(r.hget(emails_sent_date_key, usersub.user_id) or 0) if count > settings.MAX_EMAILS_SENT_PER_DAY_PER_USER: - logging.user(usersub.user, "~BMSent too many email Story notifications by email: ~FR~SB%s~SN~FR emails" % (count)) + logging.user( + usersub.user, + "~BMSent too many email Story notifications by email: ~FR~SB%s~SN~FR emails" % (count), + ) return feed = usersub.feed - story_content = self.sanitize_story(story['story_content']) + story_content = self.sanitize_story(story["story_content"]) params = { "story": story, @@ -392,14 +383,14 @@ class MUserFeedNotification(mongo.Document): "feed_title": usersub.user_title or feed.feed_title, "favicon_border": feed.favicon_color, } - from_address = 'notifications@newsblur.com' - to_address = '%s <%s>' % (usersub.user.username, usersub.user.email) - text = render_to_string('mail/email_story_notification.txt', params) - html = render_to_string('mail/email_story_notification.xhtml', params) - subject = '%s: %s' % (usersub.user_title or usersub.feed.feed_title, story['story_title']) - subject = subject.replace('\n', ' ') + from_address = "notifications@newsblur.com" + to_address = "%s <%s>" % (usersub.user.username, usersub.user.email) + text = render_to_string("mail/email_story_notification.txt", params) + html = render_to_string("mail/email_story_notification.xhtml", params) + subject = "%s: %s" % (usersub.user_title or usersub.feed.feed_title, story["story_title"]) + subject = subject.replace("\n", " ") msg = EmailMultiAlternatives( - subject, text, from_email='NewsBlur <%s>' % from_address, to=[to_address] + subject, text, from_email="NewsBlur <%s>" % from_address, to=[to_address] ) msg.attach_alternative(html, "text/html") # try: @@ -409,8 +400,8 @@ class MUserFeedNotification(mongo.Document): # return logging.user( usersub.user, - '~BMStory notification by email: ~FY~SB%s~SN~BM~FY/~SB%s' - % (story['story_title'][:50], usersub.feed.feed_title[:50]), + "~BMStory notification by email: ~FY~SB%s~SN~BM~FY/~SB%s" + % (story["story_title"][:50], usersub.feed.feed_title[:50]), ) def sanitize_story(self, story_content): @@ -419,15 +410,15 @@ class MUserFeedNotification(mongo.Document): # Convert videos in newsletters to images for iframe in soup("iframe"): - url = dict(iframe.attrs).get('src', "") + url = dict(iframe.attrs).get("src", "") youtube_id = self.extract_youtube_id(url) if youtube_id: - a = soup.new_tag('a', href=url) + a = soup.new_tag("a", href=url) img = soup.new_tag( - 'img', + "img", style="display: block; 'background-image': \"url(https://%s/img/reader/youtube_play.png), url(http://img.youtube.com/vi/%s/0.jpg)\"" % (fqdn, youtube_id), - src='http://img.youtube.com/vi/%s/0.jpg' % youtube_id, + src="http://img.youtube.com/vi/%s/0.jpg" % youtube_id, ) a.insert(0, img) iframe.replaceWith(a) @@ -439,20 +430,20 @@ class MUserFeedNotification(mongo.Document): def extract_youtube_id(self, url): youtube_id = None - if 'youtube.com' in url: + if "youtube.com" in url: youtube_parts = urllib.parse.urlparse(url) - if '/embed/' in youtube_parts.path: - youtube_id = youtube_parts.path.replace('/embed/', '') + if "/embed/" in youtube_parts.path: + youtube_id = youtube_parts.path.replace("/embed/", "") return youtube_id def story_score(self, story, classifiers): score = compute_story_score( story, - classifier_titles=classifiers.get('titles', []), - classifier_authors=classifiers.get('authors', []), - 
classifier_tags=classifiers.get('tags', []), - classifier_feeds=classifiers.get('feeds', []), + classifier_titles=classifiers.get("titles", []), + classifier_authors=classifiers.get("authors", []), + classifier_tags=classifiers.get("tags", []), + classifier_feeds=classifiers.get("feeds", []), ) return score diff --git a/apps/notifications/urls.py b/apps/notifications/urls.py index a304edb6f..53bd617fd 100644 --- a/apps/notifications/urls.py +++ b/apps/notifications/urls.py @@ -3,9 +3,9 @@ from apps.notifications import views from oauth2_provider import views as op_views urlpatterns = [ - url(r'^$', views.notifications_by_feed, name='notifications-by-feed'), - url(r'^feed/?$', views.set_notifications_for_feed, name='set-notifications-for-feed'), - url(r'^apns_token/?$', views.set_apns_token, name='set-apns-token'), - url(r'^android_token/?$', views.set_android_token, name='set-android-token'), - url(r'^force_push/?$', views.force_push, name='force-push-notification'), -] \ No newline at end of file + url(r"^$", views.notifications_by_feed, name="notifications-by-feed"), + url(r"^feed/?$", views.set_notifications_for_feed, name="set-notifications-for-feed"), + url(r"^apns_token/?$", views.set_apns_token, name="set-apns-token"), + url(r"^android_token/?$", views.set_android_token, name="set-android-token"), + url(r"^force_push/?$", views.force_push, name="force-push-notification"), +] diff --git a/apps/notifications/views.py b/apps/notifications/views.py index b31315f57..8c2662278 100644 --- a/apps/notifications/views.py +++ b/apps/notifications/views.py @@ -17,82 +17,90 @@ def notifications_by_feed(request): return notifications_by_feed + @ajax_login_required @json.json_view def set_notifications_for_feed(request): user = get_user(request) - feed_id = request.POST['feed_id'] - notification_types = request.POST.getlist('notification_types') or request.POST.getlist('notification_types[]') - notification_filter = request.POST.get('notification_filter') - + feed_id = request.POST["feed_id"] + notification_types = request.POST.getlist("notification_types") or request.POST.getlist( + "notification_types[]" + ) + notification_filter = request.POST.get("notification_filter") + try: notification = MUserFeedNotification.objects.get(user_id=user.pk, feed_id=feed_id) except MUserFeedNotification.DoesNotExist: params = { - "user_id": user.pk, + "user_id": user.pk, "feed_id": feed_id, } notification = MUserFeedNotification.objects.create(**params) - + web_was_off = not notification.is_web notification.is_focus = bool(notification_filter == "focus") - notification.is_email = bool('email' in notification_types) - notification.is_ios = bool('ios' in notification_types) - notification.is_android = bool('android' in notification_types) - notification.is_web = bool('web' in notification_types) + notification.is_email = bool("email" in notification_types) + notification.is_ios = bool("ios" in notification_types) + notification.is_android = bool("android" in notification_types) + notification.is_web = bool("web" in notification_types) notification.save() - - if (not notification.is_email and - not notification.is_ios and - not notification.is_android and - not notification.is_web): + + if ( + not notification.is_email + and not notification.is_ios + and not notification.is_android + and not notification.is_web + ): notification.delete() - + r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL) if web_was_off and notification.is_web: - r.publish(user.username, 'notification:setup:%s' % feed_id) - + 
r.publish(user.username, "notification:setup:%s" % feed_id) + notifications_by_feed = MUserFeedNotification.feeds_for_user(user.pk) return {"notifications_by_feed": notifications_by_feed} + @ajax_login_required @json.json_view def set_apns_token(request): """ - Apple Push Notification Service, token is sent by the iOS app. Used to send + Apple Push Notification Service, token is sent by the iOS app. Used to send push notifications to iOS. """ user = get_user(request) tokens = MUserNotificationTokens.get_tokens_for_user(user.pk) - apns_token = request.POST['apns_token'] - + apns_token = request.POST["apns_token"] + logging.user(user, "~FCUpdating APNS push token") if apns_token not in tokens.ios_tokens: tokens.ios_tokens.append(apns_token) tokens.save() - return {'message': 'Token saved.'} - - return {'message': 'Token already saved.'} + return {"message": "Token saved."} + + return {"message": "Token already saved."} + @ajax_login_required @json.json_view def set_android_token(request): """ - Android's push notification tokens. Not sure why I can't find this function in + Android's push notification tokens. Not sure why I can't find this function in the Android code. """ user = get_user(request) tokens = MUserNotificationTokens.get_tokens_for_user(user.pk) - token = request.POST['token'] - + token = request.POST["token"] + logging.user(user, "~FCUpdating Android push token") if token not in tokens.android_tokens: tokens.android_tokens.append(token) tokens.save() - return {'message': 'Token saved.'} - - return {'message': 'Token already saved.'} + return {"message": "Token saved."} + + return {"message": "Token already saved."} + @required_params(feed_id=int) @staff_member_required @@ -102,10 +110,12 @@ def force_push(request): Intended to force a push notification for a feed for testing. Handier than the console. """ user = get_user(request) - feed_id = request.GET['feed_id'] - count = int(request.GET.get('count', 1)) - + feed_id = request.GET["feed_id"] + count = int(request.GET.get("count", 1)) + logging.user(user, "~BM~FWForce pushing %s stories: ~SB%s" % (count, Feed.get_by_id(feed_id))) - sent_count, user_count = MUserFeedNotification.push_feed_notifications(feed_id, new_stories=count, force=True) - - return {"message": "Pushed %s notifications to %s users" % (sent_count, user_count)} \ No newline at end of file + sent_count, user_count = MUserFeedNotification.push_feed_notifications( + feed_id, new_stories=count, force=True + ) + + return {"message": "Pushed %s notifications to %s users" % (sent_count, user_count)} diff --git a/apps/oauth/models.py b/apps/oauth/models.py index c6ff4f1c4..c1522cf11 100644 --- a/apps/oauth/models.py +++ b/apps/oauth/models.py @@ -1 +1 @@ -# No models for OAuth. Use MSocialServices model in social. \ No newline at end of file +# No models for OAuth. Use MSocialServices model in social. 
diff --git a/apps/oauth/urls.py b/apps/oauth/urls.py index 66742e5cf..7dcb326e1 100644 --- a/apps/oauth/urls.py +++ b/apps/oauth/urls.py @@ -3,33 +3,43 @@ from apps.oauth import views from oauth2_provider import views as op_views urlpatterns = [ - url(r'^twitter_connect/?$', views.twitter_connect, name='twitter-connect'), - url(r'^facebook_connect/?$', views.facebook_connect, name='facebook-connect'), - url(r'^twitter_disconnect/?$', views.twitter_disconnect, name='twitter-disconnect'), - url(r'^facebook_disconnect/?$', views.facebook_disconnect, name='facebook-disconnect'), - url(r'^follow_twitter_account/?$', views.follow_twitter_account, name='social-follow-twitter'), - url(r'^unfollow_twitter_account/?$', views.unfollow_twitter_account, name='social-unfollow-twitter'), - + url(r"^twitter_connect/?$", views.twitter_connect, name="twitter-connect"), + url(r"^facebook_connect/?$", views.facebook_connect, name="facebook-connect"), + url(r"^twitter_disconnect/?$", views.twitter_disconnect, name="twitter-disconnect"), + url(r"^facebook_disconnect/?$", views.facebook_disconnect, name="facebook-disconnect"), + url(r"^follow_twitter_account/?$", views.follow_twitter_account, name="social-follow-twitter"), + url(r"^unfollow_twitter_account/?$", views.unfollow_twitter_account, name="social-unfollow-twitter"), # Django OAuth Toolkit - url(r'^status/?$', views.ifttt_status, name="ifttt-status"), - url(r'^authorize/?$', op_views.AuthorizationView.as_view(), name="oauth-authorize"), - url(r'^token/?$', op_views.TokenView.as_view(), name="oauth-token"), - url(r'^oauth2/authorize/?$', op_views.AuthorizationView.as_view(), name="ifttt-authorize"), - url(r'^oauth2/token/?$', op_views.TokenView.as_view(), name="ifttt-token"), - url(r'^user/info/?$', views.api_user_info, name="ifttt-user-info"), - url(r'^triggers/(?P<trigger_slug>new-unread-(focus-)?story)/fields/feed_or_folder/options/?$', - views.api_feed_list, name="ifttt-trigger-feedlist"), - url(r'^triggers/(?P<trigger_slug>new-unread-(focus-)?story)/?$', - views.api_unread_story, name="ifttt-trigger-unreadstory"), - url(r'^triggers/new-saved-story/fields/story_tag/options/?$', - views.api_saved_tag_list, name="ifttt-trigger-taglist"), - url(r'^triggers/new-saved-story/?$', views.api_saved_story, name="ifttt-trigger-saved"), - url(r'^triggers/new-shared-story/fields/blurblog_user/options/?$', - views.api_shared_usernames, name="ifttt-trigger-blurbloglist"), - url(r'^triggers/new-shared-story/?$', views.api_shared_story, name="ifttt-trigger-shared"), - url(r'^actions/share-story/?$', views.api_share_new_story, name="ifttt-action-share"), - url(r'^actions/save-story/?$', views.api_save_new_story, name="ifttt-action-saved"), - url(r'^actions/add-site/?$', views.api_save_new_subscription, name="ifttt-action-subscription"), - url(r'^actions/add-site/fields/folder/options/?$', - views.api_folder_list, name="ifttt-action-folderlist"), + url(r"^status/?$", views.ifttt_status, name="ifttt-status"), + url(r"^authorize/?$", op_views.AuthorizationView.as_view(), name="oauth-authorize"), + url(r"^token/?$", op_views.TokenView.as_view(), name="oauth-token"), + url(r"^oauth2/authorize/?$", op_views.AuthorizationView.as_view(), name="ifttt-authorize"), + url(r"^oauth2/token/?$", op_views.TokenView.as_view(), name="ifttt-token"), + url(r"^user/info/?$", views.api_user_info, name="ifttt-user-info"), + url( + r"^triggers/(?P<trigger_slug>new-unread-(focus-)?story)/fields/feed_or_folder/options/?$", + views.api_feed_list, + name="ifttt-trigger-feedlist", + ), + url( +
r"^triggers/(?Pnew-unread-(focus-)?story)/?$", + views.api_unread_story, + name="ifttt-trigger-unreadstory", + ), + url( + r"^triggers/new-saved-story/fields/story_tag/options/?$", + views.api_saved_tag_list, + name="ifttt-trigger-taglist", + ), + url(r"^triggers/new-saved-story/?$", views.api_saved_story, name="ifttt-trigger-saved"), + url( + r"^triggers/new-shared-story/fields/blurblog_user/options/?$", + views.api_shared_usernames, + name="ifttt-trigger-blurbloglist", + ), + url(r"^triggers/new-shared-story/?$", views.api_shared_story, name="ifttt-trigger-shared"), + url(r"^actions/share-story/?$", views.api_share_new_story, name="ifttt-action-share"), + url(r"^actions/save-story/?$", views.api_save_new_story, name="ifttt-action-saved"), + url(r"^actions/add-site/?$", views.api_save_new_subscription, name="ifttt-action-subscription"), + url(r"^actions/add-site/fields/folder/options/?$", views.api_folder_list, name="ifttt-action-folderlist"), ] diff --git a/apps/oauth/views.py b/apps/oauth/views.py index 3c436f257..c4374b21e 100644 --- a/apps/oauth/views.py +++ b/apps/oauth/views.py @@ -24,22 +24,23 @@ from utils import urlnorm from utils import json_functions as json from vendor import facebook + @login_required -@render_to('social/social_connect.xhtml') +@render_to("social/social_connect.xhtml") def twitter_connect(request): twitter_consumer_key = settings.TWITTER_CONSUMER_KEY twitter_consumer_secret = settings.TWITTER_CONSUMER_SECRET - - oauth_token = request.GET.get('oauth_token') - oauth_verifier = request.GET.get('oauth_verifier') - denied = request.GET.get('denied') + + oauth_token = request.GET.get("oauth_token") + oauth_verifier = request.GET.get("oauth_verifier") + denied = request.GET.get("denied") if denied: logging.user(request, "~BB~FRDenied Twitter connect") - return {'error': 'Denied! Try connecting again.'} + return {"error": "Denied! Try connecting again."} elif oauth_token and oauth_verifier: try: auth = tweepy.OAuthHandler(twitter_consumer_key, twitter_consumer_secret) - auth.request_token = request.session['twitter_request_token'] + auth.request_token = request.session["twitter_request_token"] # auth.set_request_token(oauth_token, oauth_verifier) auth.get_access_token(oauth_verifier) api = tweepy.API(auth) @@ -54,9 +55,13 @@ def twitter_connect(request): try: user = User.objects.get(pk=existing_user[0].user_id) logging.user(request, "~BB~FRFailed Twitter connect, another user: %s" % user.username) - return dict(error=("Another user (%s, %s) has " - "already connected with those Twitter credentials." - % (user.username, user.email or "no email"))) + return dict( + error=( + "Another user (%s, %s) has " + "already connected with those Twitter credentials." 
+ % (user.username, user.email or "no email") + ) + ) except User.DoesNotExist: existing_user.delete() @@ -68,42 +73,43 @@ def twitter_connect(request): social_services.save() SyncTwitterFriends.delay(user_id=request.user.pk) - + logging.user(request, "~BB~FRFinishing Twitter connect") return {} else: # Start the OAuth process auth = tweepy.OAuthHandler(twitter_consumer_key, twitter_consumer_secret) auth_url = auth.get_authorization_url() - request.session['twitter_request_token'] = auth.request_token + request.session["twitter_request_token"] = auth.request_token logging.user(request, "~BB~FRStarting Twitter connect: %s" % auth.request_token) - return {'next': auth_url} + return {"next": auth_url} @login_required -@render_to('social/social_connect.xhtml') +@render_to("social/social_connect.xhtml") def facebook_connect(request): facebook_app_id = settings.FACEBOOK_APP_ID facebook_secret = settings.FACEBOOK_SECRET - + args = { "client_id": facebook_app_id, - "redirect_uri": "https://" + Site.objects.get_current().domain + '/oauth/facebook_connect', + "redirect_uri": "https://" + Site.objects.get_current().domain + "/oauth/facebook_connect", "scope": "user_friends", "display": "popup", } - verification_code = request.GET.get('code') + verification_code = request.GET.get("code") if verification_code: args["client_secret"] = facebook_secret args["code"] = verification_code - uri = "https://graph.facebook.com/oauth/access_token?" + \ - urllib.parse.urlencode(args) + uri = "https://graph.facebook.com/oauth/access_token?" + urllib.parse.urlencode(args) response_text = urllib.request.urlopen(uri).read() response = json.decode(response_text) - + if "access_token" not in response: - logging.user(request, "~BB~FRFailed Facebook connect, no access_token. (%s): %s" % (args, response)) + logging.user( + request, "~BB~FRFailed Facebook connect, no access_token. (%s): %s" % (args, response) + ) return dict(error="Facebook has returned an error. Try connecting again.") access_token = response["access_token"] @@ -119,9 +125,13 @@ def facebook_connect(request): try: user = User.objects.get(pk=existing_user[0].user_id) logging.user(request, "~BB~FRFailed FB connect, another user: %s" % user.username) - return dict(error=("Another user (%s, %s) has " - "already connected with those Facebook credentials." - % (user.username, user.email or "no email"))) + return dict( + error=( + "Another user (%s, %s) has " + "already connected with those Facebook credentials." + % (user.username, user.email or "no email") + ) + ) except User.DoesNotExist: existing_user.delete() @@ -130,48 +140,51 @@ def facebook_connect(request): social_services.facebook_access_token = access_token social_services.syncing_facebook = True social_services.save() - + SyncFacebookFriends.delay(user_id=request.user.pk) - + logging.user(request, "~BB~FRFinishing Facebook connect") return {} - elif request.GET.get('error'): - logging.user(request, "~BB~FRFailed Facebook connect, error: %s" % request.GET.get('error')) - return {'error': '%s... Try connecting again.' % request.GET.get('error')} + elif request.GET.get("error"): + logging.user(request, "~BB~FRFailed Facebook connect, error: %s" % request.GET.get("error")) + return {"error": "%s... Try connecting again." % request.GET.get("error")} else: # Start the OAuth process logging.user(request, "~BB~FRStarting Facebook connect") url = "https://www.facebook.com/dialog/oauth?" 
+ urllib.parse.urlencode(args) - return {'next': url} + return {"next": url} + @ajax_login_required def twitter_disconnect(request): logging.user(request, "~BB~FRDisconnecting Twitter") social_services = MSocialServices.objects.get(user_id=request.user.pk) social_services.disconnect_twitter() - - return HttpResponseRedirect(reverse('load-user-friends')) + + return HttpResponseRedirect(reverse("load-user-friends")) + @ajax_login_required def facebook_disconnect(request): logging.user(request, "~BB~FRDisconnecting Facebook") social_services = MSocialServices.objects.get(user_id=request.user.pk) social_services.disconnect_facebook() - - return HttpResponseRedirect(reverse('load-user-friends')) - + + return HttpResponseRedirect(reverse("load-user-friends")) + + @ajax_login_required @json.json_view def follow_twitter_account(request): - username = request.POST['username'] + username = request.POST["username"] code = 1 message = "OK" - + logging.user(request, "~BB~FR~SKFollowing Twitter: %s" % username) - - if username not in ['samuelclay', 'newsblur']: + + if username not in ["samuelclay", "newsblur"]: return HttpResponseForbidden() - + social_services = MSocialServices.objects.get(user_id=request.user.pk) try: api = social_services.twitter_api() @@ -179,21 +192,22 @@ def follow_twitter_account(request): except tweepy.TweepError as e: code = -1 message = e - - return {'code': code, 'message': message} - + + return {"code": code, "message": message} + + @ajax_login_required @json.json_view def unfollow_twitter_account(request): - username = request.POST['username'] + username = request.POST["username"] code = 1 message = "OK" - + logging.user(request, "~BB~FRUnfollowing Twitter: %s" % username) - - if username not in ['samuelclay', 'newsblur']: + + if username not in ["samuelclay", "newsblur"]: return HttpResponseForbidden() - + social_services = MSocialServices.objects.get(user_id=request.user.pk) try: api = social_services.twitter_api() @@ -201,18 +215,25 @@ def unfollow_twitter_account(request): except tweepy.TweepError as e: code = -1 message = e - - return {'code': code, 'message': message} + + return {"code": code, "message": message} + @oauth_login_required def api_user_info(request): user = request.user - - return json.json_response(request, {"data": { - "name": user.username, - "id": user.pk, - }}) - + + return json.json_response( + request, + { + "data": { + "name": user.username, + "id": user.pk, + } + }, + ) + + @oauth_login_required @json.json_view def api_feed_list(request, trigger_slug=None): @@ -220,18 +241,16 @@ def api_feed_list(request, trigger_slug=None): try: usf = UserSubscriptionFolders.objects.get(user=user) except UserSubscriptionFolders.DoesNotExist: - return {"errors": [{ - 'message': 'Could not find feeds for user.' 
- }]} + return {"errors": [{"message": "Could not find feeds for user."}]} flat_folders = usf.flatten_folders() titles = [dict(label=" - Folder: All Site Stories", value="all")] feeds = {} - - user_subs = UserSubscription.objects.select_related('feed').filter(user=user, active=True) - + + user_subs = UserSubscription.objects.select_related("feed").filter(user=user, active=True) + for sub in user_subs: feeds[sub.feed_id] = sub.canonical() - + for folder_title in sorted(flat_folders.keys()): if folder_title and folder_title != " ": titles.append(dict(label=" - Folder: %s" % folder_title, value=folder_title, optgroup=True)) @@ -239,53 +258,62 @@ def api_feed_list(request, trigger_slug=None): titles.append(dict(label=" - Folder: Top Level", value="Top Level", optgroup=True)) folder_contents = [] for feed_id in flat_folders[folder_title]: - if feed_id not in feeds: continue + if feed_id not in feeds: + continue feed = feeds[feed_id] - folder_contents.append(dict(label=feed['feed_title'], value=str(feed['id']))) - folder_contents = sorted(folder_contents, key=lambda f: f['label'].lower()) + folder_contents.append(dict(label=feed["feed_title"], value=str(feed["id"]))) + folder_contents = sorted(folder_contents, key=lambda f: f["label"].lower()) titles.extend(folder_contents) - + return {"data": titles} - + + @oauth_login_required @json.json_view def api_folder_list(request, trigger_slug=None): user = request.user usf = UserSubscriptionFolders.objects.get(user=user) flat_folders = usf.flatten_folders() - if 'add-new-subscription' in request.path: + if "add-new-subscription" in request.path: titles = [] else: titles = [dict(label="All Site Stories", value="all")] - + for folder_title in sorted(flat_folders.keys()): if folder_title and folder_title != " ": titles.append(dict(label=folder_title, value=folder_title)) else: titles.append(dict(label="Top Level", value="Top Level")) - + return {"data": titles} + @oauth_login_required @json.json_view def api_saved_tag_list(request): user = request.user starred_counts, starred_count = MStarredStoryCounts.user_counts(user.pk, include_total=True) tags = [] - + for tag in starred_counts: - if not tag['tag'] or tag['tag'] == "": continue - tags.append(dict(label="%s (%s %s)" % (tag['tag'], tag['count'], - 'story' if tag['count'] == 1 else 'stories'), - value=tag['tag'])) - tags = sorted(tags, key=lambda t: t['value'].lower()) - catchall = dict(label="All Saved Stories (%s %s)" % (starred_count, - 'story' if starred_count == 1 else 'stories'), - value="all") + if not tag["tag"] or tag["tag"] == "": + continue + tags.append( + dict( + label="%s (%s %s)" % (tag["tag"], tag["count"], "story" if tag["count"] == 1 else "stories"), + value=tag["tag"], + ) + ) + tags = sorted(tags, key=lambda t: t["value"].lower()) + catchall = dict( + label="All Saved Stories (%s %s)" % (starred_count, "story" if starred_count == 1 else "stories"), + value="all", + ) tags.insert(0, catchall) - + return {"data": tags} + @oauth_login_required @json.json_view def api_shared_usernames(request): @@ -294,28 +322,36 @@ def api_shared_usernames(request): blurblogs = [] for social_feed in social_feeds: - if not social_feed['shared_stories_count']: continue - blurblogs.append(dict(label="%s (%s %s)" % (social_feed['username'], - social_feed['shared_stories_count'], - 'story' if social_feed['shared_stories_count'] == 1 else 'stories'), - value="%s" % social_feed['user_id'])) - blurblogs = sorted(blurblogs, key=lambda b: b['label'].lower()) - catchall = dict(label="All Shared Stories", - 
value="all") + if not social_feed["shared_stories_count"]: + continue + blurblogs.append( + dict( + label="%s (%s %s)" + % ( + social_feed["username"], + social_feed["shared_stories_count"], + "story" if social_feed["shared_stories_count"] == 1 else "stories", + ), + value="%s" % social_feed["user_id"], + ) + ) + blurblogs = sorted(blurblogs, key=lambda b: b["label"].lower()) + catchall = dict(label="All Shared Stories", value="all") blurblogs.insert(0, catchall) - + return {"data": blurblogs} + @oauth_login_required @json.json_view def api_unread_story(request, trigger_slug=None): user = request.user body = request.body_json - after = body.get('after', None) - before = body.get('before', None) - limit = body.get('limit', 50) - fields = body.get('triggerFields') - feed_or_folder = fields['feed_or_folder'] + after = body.get("after", None) + before = body.get("before", None) + limit = body.get("limit", 50) + fields = body.get("triggerFields") + feed_or_folder = fields["feed_or_folder"] entries = [] if isinstance(feed_or_folder, int) or feed_or_folder.isdigit(): @@ -326,8 +362,7 @@ def api_unread_story(request, trigger_slug=None): return dict(data=[]) found_feed_ids = [feed_id] found_trained_feed_ids = [feed_id] if usersub.is_trained else [] - stories = usersub.get_stories(order="newest", read_filter="unread", - offset=0, limit=limit) + stories = usersub.get_stories(order="newest", read_filter="unread", offset=0, limit=limit) else: folder_title = feed_or_folder if folder_title == "Top Level": @@ -337,11 +372,10 @@ def api_unread_story(request, trigger_slug=None): feed_ids = None if folder_title != "all": feed_ids = flat_folders.get(folder_title) - usersubs = UserSubscription.subs_for_feeds(user.pk, feed_ids=feed_ids, - read_filter="unread") + usersubs = UserSubscription.subs_for_feeds(user.pk, feed_ids=feed_ids, read_filter="unread") feed_ids = [sub.feed_id for sub in usersubs] params = { - "user_id": user.pk, + "user_id": user.pk, "feed_ids": feed_ids, "offset": 0, "limit": limit, @@ -351,261 +385,321 @@ def api_unread_story(request, trigger_slug=None): "cutoff_date": user.profile.unread_cutoff, } story_hashes, unread_feed_story_hashes = UserSubscription.feed_stories(**params) - mstories = MStory.objects(story_hash__in=story_hashes).order_by('-story_date') + mstories = MStory.objects(story_hash__in=story_hashes).order_by("-story_date") stories = Feed.format_stories(mstories) - found_feed_ids = list(set([story['story_feed_id'] for story in stories])) + found_feed_ids = list(set([story["story_feed_id"] for story in stories])) trained_feed_ids = [sub.feed_id for sub in usersubs if sub.is_trained] found_trained_feed_ids = list(set(trained_feed_ids) & set(found_feed_ids)) - + if found_trained_feed_ids: - classifier_feeds = list(MClassifierFeed.objects(user_id=user.pk, - feed_id__in=found_trained_feed_ids)) - classifier_authors = list(MClassifierAuthor.objects(user_id=user.pk, - feed_id__in=found_trained_feed_ids)) - classifier_titles = list(MClassifierTitle.objects(user_id=user.pk, - feed_id__in=found_trained_feed_ids)) - classifier_tags = list(MClassifierTag.objects(user_id=user.pk, - feed_id__in=found_trained_feed_ids)) - feeds = dict([(f.pk, { - "title": f.feed_title, - "website": f.feed_link, - "address": f.feed_address, - }) for f in Feed.objects.filter(pk__in=found_feed_ids)]) + classifier_feeds = list(MClassifierFeed.objects(user_id=user.pk, feed_id__in=found_trained_feed_ids)) + classifier_authors = list( + MClassifierAuthor.objects(user_id=user.pk, feed_id__in=found_trained_feed_ids) + 
) + classifier_titles = list( + MClassifierTitle.objects(user_id=user.pk, feed_id__in=found_trained_feed_ids) + ) + classifier_tags = list(MClassifierTag.objects(user_id=user.pk, feed_id__in=found_trained_feed_ids)) + feeds = dict( + [ + ( + f.pk, + { + "title": f.feed_title, + "website": f.feed_link, + "address": f.feed_address, + }, + ) + for f in Feed.objects.filter(pk__in=found_feed_ids) + ] + ) for story in stories: - if before and int(story['story_date'].strftime("%s")) > before: continue - if after and int(story['story_date'].strftime("%s")) < after: continue + if before and int(story["story_date"].strftime("%s")) > before: + continue + if after and int(story["story_date"].strftime("%s")) < after: + continue score = 0 - if found_trained_feed_ids and story['story_feed_id'] in found_trained_feed_ids: - score = compute_story_score(story, classifier_titles=classifier_titles, - classifier_authors=classifier_authors, - classifier_tags=classifier_tags, - classifier_feeds=classifier_feeds) - if score < 0: continue - if trigger_slug == "new-unread-focus-story" and score < 1: continue - feed = feeds.get(story['story_feed_id'], None) - entries.append({ - "StoryTitle": story['story_title'], - "StoryContent": story['story_content'], - "StoryURL": story['story_permalink'], - "StoryAuthor": story['story_authors'], - "PublishedAt": story['story_date'].strftime("%Y-%m-%dT%H:%M:%SZ"), - "StoryScore": score, - "Site": feed and feed['title'], - "SiteURL": feed and feed['website'], - "SiteRSS": feed and feed['address'], - "meta": { - "id": story['story_hash'], - "timestamp": int(story['story_date'].strftime("%s")) - }, - }) - + if found_trained_feed_ids and story["story_feed_id"] in found_trained_feed_ids: + score = compute_story_score( + story, + classifier_titles=classifier_titles, + classifier_authors=classifier_authors, + classifier_tags=classifier_tags, + classifier_feeds=classifier_feeds, + ) + if score < 0: + continue + if trigger_slug == "new-unread-focus-story" and score < 1: + continue + feed = feeds.get(story["story_feed_id"], None) + entries.append( + { + "StoryTitle": story["story_title"], + "StoryContent": story["story_content"], + "StoryURL": story["story_permalink"], + "StoryAuthor": story["story_authors"], + "PublishedAt": story["story_date"].strftime("%Y-%m-%dT%H:%M:%SZ"), + "StoryScore": score, + "Site": feed and feed["title"], + "SiteURL": feed and feed["website"], + "SiteRSS": feed and feed["address"], + "meta": {"id": story["story_hash"], "timestamp": int(story["story_date"].strftime("%s"))}, + } + ) + if after: - entries = sorted(entries, key=lambda s: s['meta']['timestamp']) - - logging.user(request, "~FYChecking unread%s stories with ~SB~FCIFTTT~SN~FY: ~SB%s~SN - ~SB%s~SN stories" % (" ~SBfocus~SN" if trigger_slug == "new-unread-focus-story" else "", feed_or_folder, len(entries))) - + entries = sorted(entries, key=lambda s: s["meta"]["timestamp"]) + + logging.user( + request, + "~FYChecking unread%s stories with ~SB~FCIFTTT~SN~FY: ~SB%s~SN - ~SB%s~SN stories" + % (" ~SBfocus~SN" if trigger_slug == "new-unread-focus-story" else "", feed_or_folder, len(entries)), + ) + return {"data": entries[:limit]} + @oauth_login_required @json.json_view def api_saved_story(request): user = request.user body = request.body_json - after = body.get('after', None) - before = body.get('before', None) - limit = body.get('limit', 50) - fields = body.get('triggerFields') - story_tag = fields['story_tag'] + after = body.get("after", None) + before = body.get("before", None) + limit = body.get("limit", 
50) + fields = body.get("triggerFields") + story_tag = fields["story_tag"] entries = [] - + if story_tag == "all": story_tag = "" - + params = dict(user_id=user.pk) if story_tag: params.update(dict(user_tags__contains=story_tag)) - mstories = MStarredStory.objects(**params).order_by('-starred_date')[:limit] - stories = Feed.format_stories(mstories) - - found_feed_ids = list(set([story['story_feed_id'] for story in stories])) - feeds = dict([(f.pk, { - "title": f.feed_title, - "website": f.feed_link, - "address": f.feed_address, - }) for f in Feed.objects.filter(pk__in=found_feed_ids)]) + mstories = MStarredStory.objects(**params).order_by("-starred_date")[:limit] + stories = Feed.format_stories(mstories) + + found_feed_ids = list(set([story["story_feed_id"] for story in stories])) + feeds = dict( + [ + ( + f.pk, + { + "title": f.feed_title, + "website": f.feed_link, + "address": f.feed_address, + }, + ) + for f in Feed.objects.filter(pk__in=found_feed_ids) + ] + ) for story in stories: - if before and int(story['story_date'].strftime("%s")) > before: continue - if after and int(story['story_date'].strftime("%s")) < after: continue - feed = feeds.get(story['story_feed_id'], None) - entries.append({ - "StoryTitle": story['story_title'], - "StoryContent": story['story_content'], - "StoryURL": story['story_permalink'], - "StoryAuthor": story['story_authors'], - "PublishedAt": story['story_date'].strftime("%Y-%m-%dT%H:%M:%SZ"), - "SavedAt": story['starred_date'].strftime("%Y-%m-%dT%H:%M:%SZ"), - "Tags": ', '.join(story['user_tags']), - "Site": feed and feed['title'], - "SiteURL": feed and feed['website'], - "SiteRSS": feed and feed['address'], - "meta": { - "id": story['story_hash'], - "timestamp": int(story['starred_date'].strftime("%s")) - }, - }) + if before and int(story["story_date"].strftime("%s")) > before: + continue + if after and int(story["story_date"].strftime("%s")) < after: + continue + feed = feeds.get(story["story_feed_id"], None) + entries.append( + { + "StoryTitle": story["story_title"], + "StoryContent": story["story_content"], + "StoryURL": story["story_permalink"], + "StoryAuthor": story["story_authors"], + "PublishedAt": story["story_date"].strftime("%Y-%m-%dT%H:%M:%SZ"), + "SavedAt": story["starred_date"].strftime("%Y-%m-%dT%H:%M:%SZ"), + "Tags": ", ".join(story["user_tags"]), + "Site": feed and feed["title"], + "SiteURL": feed and feed["website"], + "SiteRSS": feed and feed["address"], + "meta": {"id": story["story_hash"], "timestamp": int(story["starred_date"].strftime("%s"))}, + } + ) if after: - entries = sorted(entries, key=lambda s: s['meta']['timestamp']) - - logging.user(request, "~FCChecking saved stories from ~SBIFTTT~SB: ~SB%s~SN - ~SB%s~SN stories" % (story_tag if story_tag else "[All stories]", len(entries))) - + entries = sorted(entries, key=lambda s: s["meta"]["timestamp"]) + + logging.user( + request, + "~FCChecking saved stories from ~SBIFTTT~SB: ~SB%s~SN - ~SB%s~SN stories" + % (story_tag if story_tag else "[All stories]", len(entries)), + ) + return {"data": entries} - + + @oauth_login_required @json.json_view def api_shared_story(request): user = request.user body = request.body_json - after = body.get('after', None) - before = body.get('before', None) - limit = body.get('limit', 50) - fields = body.get('triggerFields') - blurblog_user = fields['blurblog_user'] + after = body.get("after", None) + before = body.get("before", None) + limit = body.get("limit", 50) + fields = body.get("triggerFields") + blurblog_user = fields["blurblog_user"] entries = [] 
- + if isinstance(blurblog_user, int) or blurblog_user.isdigit(): social_user_ids = [int(blurblog_user)] elif blurblog_user == "all": socialsubs = MSocialSubscription.objects.filter(user_id=user.pk) social_user_ids = [ss.subscription_user_id for ss in socialsubs] - mstories = MSharedStory.objects( - user_id__in=social_user_ids - ).order_by('-shared_date')[:limit] + mstories = MSharedStory.objects(user_id__in=social_user_ids).order_by("-shared_date")[:limit] stories = Feed.format_stories(mstories) - - found_feed_ids = list(set([story['story_feed_id'] for story in stories])) - share_user_ids = list(set([story['user_id'] for story in stories])) - users = dict([(u.pk, u.username) - for u in User.objects.filter(pk__in=share_user_ids).only('pk', 'username')]) - feeds = dict([(f.pk, { - "title": f.feed_title, - "website": f.feed_link, - "address": f.feed_address, - }) for f in Feed.objects.filter(pk__in=found_feed_ids)]) - - classifier_feeds = list(MClassifierFeed.objects(user_id=user.pk, - social_user_id__in=social_user_ids)) - classifier_authors = list(MClassifierAuthor.objects(user_id=user.pk, - social_user_id__in=social_user_ids)) - classifier_titles = list(MClassifierTitle.objects(user_id=user.pk, - social_user_id__in=social_user_ids)) - classifier_tags = list(MClassifierTag.objects(user_id=user.pk, - social_user_id__in=social_user_ids)) + + found_feed_ids = list(set([story["story_feed_id"] for story in stories])) + share_user_ids = list(set([story["user_id"] for story in stories])) + users = dict( + [(u.pk, u.username) for u in User.objects.filter(pk__in=share_user_ids).only("pk", "username")] + ) + feeds = dict( + [ + ( + f.pk, + { + "title": f.feed_title, + "website": f.feed_link, + "address": f.feed_address, + }, + ) + for f in Feed.objects.filter(pk__in=found_feed_ids) + ] + ) + + classifier_feeds = list(MClassifierFeed.objects(user_id=user.pk, social_user_id__in=social_user_ids)) + classifier_authors = list(MClassifierAuthor.objects(user_id=user.pk, social_user_id__in=social_user_ids)) + classifier_titles = list(MClassifierTitle.objects(user_id=user.pk, social_user_id__in=social_user_ids)) + classifier_tags = list(MClassifierTag.objects(user_id=user.pk, social_user_id__in=social_user_ids)) # Merge with feed specific classifiers - classifier_feeds = classifier_feeds + list(MClassifierFeed.objects(user_id=user.pk, - feed_id__in=found_feed_ids)) - classifier_authors = classifier_authors + list(MClassifierAuthor.objects(user_id=user.pk, - feed_id__in=found_feed_ids)) - classifier_titles = classifier_titles + list(MClassifierTitle.objects(user_id=user.pk, - feed_id__in=found_feed_ids)) - classifier_tags = classifier_tags + list(MClassifierTag.objects(user_id=user.pk, - feed_id__in=found_feed_ids)) - + classifier_feeds = classifier_feeds + list( + MClassifierFeed.objects(user_id=user.pk, feed_id__in=found_feed_ids) + ) + classifier_authors = classifier_authors + list( + MClassifierAuthor.objects(user_id=user.pk, feed_id__in=found_feed_ids) + ) + classifier_titles = classifier_titles + list( + MClassifierTitle.objects(user_id=user.pk, feed_id__in=found_feed_ids) + ) + classifier_tags = classifier_tags + list( + MClassifierTag.objects(user_id=user.pk, feed_id__in=found_feed_ids) + ) + for story in stories: - if before and int(story['shared_date'].strftime("%s")) > before: continue - if after and int(story['shared_date'].strftime("%s")) < after: continue - score = compute_story_score(story, classifier_titles=classifier_titles, - classifier_authors=classifier_authors, - 
classifier_tags=classifier_tags, - classifier_feeds=classifier_feeds) - if score < 0: continue - feed = feeds.get(story['story_feed_id'], None) - entries.append({ - "StoryTitle": story['story_title'], - "StoryContent": story['story_content'], - "StoryURL": story['story_permalink'], - "StoryAuthor": story['story_authors'], - "PublishedAt": story['story_date'].strftime("%Y-%m-%dT%H:%M:%SZ"), - "StoryScore": score, - "Comments": story['comments'], - "Username": users.get(story['user_id']), - "SharedAt": story['shared_date'].strftime("%Y-%m-%dT%H:%M:%SZ"), - "Site": feed and feed['title'], - "SiteURL": feed and feed['website'], - "SiteRSS": feed and feed['address'], - "meta": { - "id": story['story_hash'], - "timestamp": int(story['shared_date'].strftime("%s")) - }, - }) + if before and int(story["shared_date"].strftime("%s")) > before: + continue + if after and int(story["shared_date"].strftime("%s")) < after: + continue + score = compute_story_score( + story, + classifier_titles=classifier_titles, + classifier_authors=classifier_authors, + classifier_tags=classifier_tags, + classifier_feeds=classifier_feeds, + ) + if score < 0: + continue + feed = feeds.get(story["story_feed_id"], None) + entries.append( + { + "StoryTitle": story["story_title"], + "StoryContent": story["story_content"], + "StoryURL": story["story_permalink"], + "StoryAuthor": story["story_authors"], + "PublishedAt": story["story_date"].strftime("%Y-%m-%dT%H:%M:%SZ"), + "StoryScore": score, + "Comments": story["comments"], + "Username": users.get(story["user_id"]), + "SharedAt": story["shared_date"].strftime("%Y-%m-%dT%H:%M:%SZ"), + "Site": feed and feed["title"], + "SiteURL": feed and feed["website"], + "SiteRSS": feed and feed["address"], + "meta": {"id": story["story_hash"], "timestamp": int(story["shared_date"].strftime("%s"))}, + } + ) if after: - entries = sorted(entries, key=lambda s: s['meta']['timestamp']) - - logging.user(request, "~FMChecking shared stories from ~SB~FCIFTTT~SN~FM: ~SB~FM%s~FM~SN - ~SB%s~SN stories" % (blurblog_user, len(entries))) + entries = sorted(entries, key=lambda s: s["meta"]["timestamp"]) + + logging.user( + request, + "~FMChecking shared stories from ~SB~FCIFTTT~SN~FM: ~SB~FM%s~FM~SN - ~SB%s~SN stories" + % (blurblog_user, len(entries)), + ) return {"data": entries} + @json.json_view def ifttt_status(request): logging.user(request, "~FCChecking ~SBIFTTT~SN status") - return {"data": { - "status": "OK", - "time": datetime.datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%SZ"), - }} + return { + "data": { + "status": "OK", + "time": datetime.datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%SZ"), + } + } + @oauth_login_required @json.json_view def api_share_new_story(request): user = request.user body = request.body_json - fields = body.get('actionFields') - story_url = urlnorm.normalize(fields['story_url']) - story_content = fields.get('story_content', "") - story_title = fields.get('story_title', "") - story_author = fields.get('story_author', "") - comments = fields.get('comments', None) - + fields = body.get("actionFields") + story_url = urlnorm.normalize(fields["story_url"]) + story_content = fields.get("story_content", "") + story_title = fields.get("story_title", "") + story_author = fields.get("story_author", "") + comments = fields.get("comments", None) + logging.user(request.user, "~FBFinding feed (api_share_new_story): %s" % story_url) original_feed = Feed.get_feed_from_url(story_url, create=True, fetch=True) story_hash = MStory.guid_hash_unsaved(story_url) - feed_id = (original_feed and 
original_feed.pk or 0) + feed_id = original_feed and original_feed.pk or 0 if not user.profile.is_premium and MSharedStory.feed_quota(user.pk, story_hash, feed_id=feed_id): - return {"errors": [{ - 'message': 'Only premium users can share multiple stories per day from the same site.' - }]} - + return { + "errors": [ + {"message": "Only premium users can share multiple stories per day from the same site."} + ] + } + quota = 3 if MSharedStory.feed_quota(user.pk, story_hash, quota=quota): - logging.user(request, "~BM~FRNOT ~FYSharing story from ~SB~FCIFTTT~FY, over quota: ~SB%s: %s" % (story_url, comments)) - return {"errors": [{ - 'message': 'You can only share %s stories per day.' % quota - }]} - + logging.user( + request, + "~BM~FRNOT ~FYSharing story from ~SB~FCIFTTT~FY, over quota: ~SB%s: %s" % (story_url, comments), + ) + return {"errors": [{"message": "You can only share %s stories per day." % quota}]} + if not story_content or not story_title: ti = TextImporter(feed=original_feed, story_url=story_url, request=request) original_story = ti.fetch(return_document=True) if original_story: - story_url = original_story['url'] + story_url = original_story["url"] if not story_content: - story_content = original_story['content'] + story_content = original_story["content"] if not story_title: - story_title = original_story['title'] - + story_title = original_story["title"] + if story_content: story_content = lxml.html.fromstring(story_content) story_content.make_links_absolute(story_url) story_content = lxml.html.tostring(story_content) - - shared_story = MSharedStory.objects.filter(user_id=user.pk, - story_feed_id=original_feed and original_feed.pk or 0, - story_guid=story_url).limit(1).first() + + shared_story = ( + MSharedStory.objects.filter( + user_id=user.pk, story_feed_id=original_feed and original_feed.pk or 0, story_guid=story_url + ) + .limit(1) + .first() + ) if not shared_story: - title_max = MSharedStory._fields['story_title'].max_length + title_max = MSharedStory._fields["story_title"].max_length story_db = { "story_guid": story_url, "story_permalink": story_url, @@ -624,107 +718,121 @@ def api_share_new_story(request): for socialsub in socialsubs: socialsub.needs_unread_recalc = True socialsub.save() - logging.user(request, "~BM~FYSharing story from ~SB~FCIFTTT~FY: ~SB%s: %s" % (story_url, comments)) + logging.user( + request, "~BM~FYSharing story from ~SB~FCIFTTT~FY: ~SB%s: %s" % (story_url, comments) + ) except NotUniqueError: - logging.user(request, "~BM~FY~SBAlready~SN shared story from ~SB~FCIFTTT~FY: ~SB%s: %s" % (story_url, comments)) + logging.user( + request, + "~BM~FY~SBAlready~SN shared story from ~SB~FCIFTTT~FY: ~SB%s: %s" % (story_url, comments), + ) else: - logging.user(request, "~BM~FY~SBAlready~SN shared story from ~SB~FCIFTTT~FY: ~SB%s: %s" % (story_url, comments)) - + logging.user( + request, "~BM~FY~SBAlready~SN shared story from ~SB~FCIFTTT~FY: ~SB%s: %s" % (story_url, comments) + ) + try: - socialsub = MSocialSubscription.objects.get(user_id=user.pk, - subscription_user_id=user.pk) + socialsub = MSocialSubscription.objects.get(user_id=user.pk, subscription_user_id=user.pk) except MSocialSubscription.DoesNotExist: socialsub = None - + if socialsub and shared_story: - socialsub.mark_story_ids_as_read([shared_story.story_hash], - shared_story.story_feed_id, - request=request) + socialsub.mark_story_ids_as_read( + [shared_story.story_hash], shared_story.story_feed_id, request=request + ) elif shared_story: RUserStory.mark_read(user.pk, shared_story.story_feed_id, 
shared_story.story_hash) - + if shared_story: shared_story.publish_update_to_subscribers() - - return {"data": [{ - "id": shared_story and shared_story.story_guid, - "url": shared_story and shared_story.blurblog_permalink() - }]} + + return { + "data": [ + { + "id": shared_story and shared_story.story_guid, + "url": shared_story and shared_story.blurblog_permalink(), + } + ] + } + @oauth_login_required @json.json_view def api_save_new_story(request): user = request.user body = request.body_json - fields = body.get('actionFields') - story_url = urlnorm.normalize(fields['story_url']) - story_content = fields.get('story_content', "") - story_title = fields.get('story_title', "") - story_author = fields.get('story_author', "") - user_tags = fields.get('user_tags', "") + fields = body.get("actionFields") + story_url = urlnorm.normalize(fields["story_url"]) + story_content = fields.get("story_content", "") + story_title = fields.get("story_title", "") + story_author = fields.get("story_author", "") + user_tags = fields.get("user_tags", "") story = None - + logging.user(request.user, "~FBFinding feed (api_save_new_story): %s" % story_url) original_feed = Feed.get_feed_from_url(story_url) if not story_content or not story_title: ti = TextImporter(feed=original_feed, story_url=story_url, request=request) original_story = ti.fetch(return_document=True) if original_story: - story_url = original_story['url'] + story_url = original_story["url"] if not story_content: - story_content = original_story['content'] + story_content = original_story["content"] if not story_title: - story_title = original_story['title'] + story_title = original_story["title"] try: story_db = { "user_id": user.pk, "starred_date": datetime.datetime.now(), "story_date": datetime.datetime.now(), - "story_title": story_title or '[Untitled]', + "story_title": story_title or "[Untitled]", "story_permalink": story_url, "story_guid": story_url, "story_content": story_content, "story_author_name": story_author, "story_feed_id": original_feed and original_feed.pk or 0, - "user_tags": [tag for tag in user_tags.split(',')] + "user_tags": [tag for tag in user_tags.split(",")], } story = MStarredStory.objects.create(**story_db) - logging.user(request, "~FCStarring by ~SBIFTTT~SN: ~SB%s~SN in ~SB%s" % (story_db['story_title'][:50], original_feed and original_feed)) + logging.user( + request, + "~FCStarring by ~SBIFTTT~SN: ~SB%s~SN in ~SB%s" + % (story_db["story_title"][:50], original_feed and original_feed), + ) MStarredStoryCounts.count_for_user(user.pk) except OperationError: - logging.user(request, "~FCAlready starred by ~SBIFTTT~SN: ~SB%s" % (story_db['story_title'][:50])) + logging.user(request, "~FCAlready starred by ~SBIFTTT~SN: ~SB%s" % (story_db["story_title"][:50])) pass - - return {"data": [{ - "id": story and story.id, - "url": story and story.story_permalink - }]} + + return {"data": [{"id": story and story.id, "url": story and story.story_permalink}]} + @oauth_login_required @json.json_view def api_save_new_subscription(request): user = request.user body = request.body_json - fields = body.get('actionFields') - url = urlnorm.normalize(fields['url']) - folder = fields['folder'] - + fields = body.get("actionFields") + url = urlnorm.normalize(fields["url"]) + folder = fields["folder"] + if folder == "Top Level": folder = " " - + code, message, us = UserSubscription.add_subscription( - user=user, - feed_address=url, - folder=folder, - bookmarklet=True + user=user, feed_address=url, folder=folder, bookmarklet=True ) - + 
logging.user(request, "~FRAdding URL from ~FC~SBIFTTT~SN~FR: ~SB%s (in %s)" % (url, folder)) if us and us.feed: url = us.feed.feed_address - return {"data": [{ - "id": us and us.feed_id, - "url": url, - }]} + return { + "data": [ + { + "id": us and us.feed_id, + "url": url, + } + ] + } diff --git a/apps/profile/factories.py b/apps/profile/factories.py index b5b57d51b..3c20c2520 100644 --- a/apps/profile/factories.py +++ b/apps/profile/factories.py @@ -3,18 +3,19 @@ from factory.django import DjangoModelFactory from django.contrib.auth.models import User from apps.profile.models import Profile + class UserFactory(DjangoModelFactory): - first_name = factory.Faker('first_name') - last_name = factory.Faker('last_name') - username = factory.Faker('email') - date_joined = factory.Faker('date_time') + first_name = factory.Faker("first_name") + last_name = factory.Faker("last_name") + username = factory.Faker("email") + date_joined = factory.Faker("date_time") class Meta: model = User - class ProfileFactory(DjangoModelFactory): user = factory.SubFactory(UserFactory) + class Meta: model = Profile diff --git a/apps/profile/forms.py b/apps/profile/forms.py index b47a06a3d..f5adc12a3 100644 --- a/apps/profile/forms.py +++ b/apps/profile/forms.py @@ -14,135 +14,133 @@ PLANS = [ ("newsblur-premium-pro", mark_safe("$299 / year (~$25/month)")), ] + class HorizRadioRenderer(forms.RadioSelect): - """ this overrides widget method to put radio buttons horizontally - instead of vertically. + """this overrides widget method to put radio buttons horizontally + instead of vertically. """ + def render(self, name, value, attrs=None, renderer=None): - """Outputs radios""" - choices = '\n'.join(['%s\n' % w for w in self]) - return mark_safe('
%s' % choices)
+        """Outputs radios"""
+        choices = "\n".join(["%s\n" % w for w in self])
+        return mark_safe('%s
' % choices) + class StripePlusPaymentForm(StripePaymentForm): def __init__(self, *args, **kwargs): - email = kwargs.pop('email') - plan = kwargs.pop('plan', '') + email = kwargs.pop("email") + plan = kwargs.pop("plan", "") super(StripePlusPaymentForm, self).__init__(*args, **kwargs) - self.fields['email'].initial = email + self.fields["email"].initial = email if plan: - self.fields['plan'].initial = plan + self.fields["plan"].initial = plan - email = forms.EmailField(widget=forms.TextInput(attrs=dict(maxlength=75)), - label='Email address', - required=False) - plan = forms.ChoiceField(required=False, widget=forms.RadioSelect, - choices=PLANS, label='Plan') + email = forms.EmailField( + widget=forms.TextInput(attrs=dict(maxlength=75)), label="Email address", required=False + ) + plan = forms.ChoiceField(required=False, widget=forms.RadioSelect, choices=PLANS, label="Plan") class DeleteAccountForm(forms.Form): - password = forms.CharField(widget=forms.PasswordInput(), - label="Confirm your password", - required=False) - confirm = forms.CharField(label="Type \"Delete\" to confirm", - widget=forms.TextInput(), - required=False) + password = forms.CharField(widget=forms.PasswordInput(), label="Confirm your password", required=False) + confirm = forms.CharField(label='Type "Delete" to confirm', widget=forms.TextInput(), required=False) def __init__(self, *args, **kwargs): - self.user = kwargs.pop('user') + self.user = kwargs.pop("user") super(DeleteAccountForm, self).__init__(*args, **kwargs) - + def clean_password(self): - user_auth = authenticate(username=self.user.username, - password=self.cleaned_data['password']) + user_auth = authenticate(username=self.user.username, password=self.cleaned_data["password"]) if not user_auth: user_auth = blank_authenticate(username=self.user.username) - - if not user_auth: - raise forms.ValidationError('Your password doesn\'t match.') - return self.cleaned_data['password'] + if not user_auth: + raise forms.ValidationError("Your password doesn't match.") + + return self.cleaned_data["password"] def clean_confirm(self): - if self.cleaned_data.get('confirm', "").lower() != "delete": + if self.cleaned_data.get("confirm", "").lower() != "delete": raise forms.ValidationError('Please type "DELETE" to confirm deletion.') - return self.cleaned_data['confirm'] + return self.cleaned_data["confirm"] + class ForgotPasswordForm(forms.Form): - email = forms.CharField(widget=forms.TextInput(), - label="Your email address", - required=False) + email = forms.CharField(widget=forms.TextInput(), label="Your email address", required=False) def __init__(self, *args, **kwargs): super(ForgotPasswordForm, self).__init__(*args, **kwargs) - + def clean_email(self): - if not self.cleaned_data['email']: - raise forms.ValidationError('Please enter in an email address.') + if not self.cleaned_data["email"]: + raise forms.ValidationError("Please enter in an email address.") try: - User.objects.get(email__iexact=self.cleaned_data['email']) + User.objects.get(email__iexact=self.cleaned_data["email"]) except User.MultipleObjectsReturned: pass except User.DoesNotExist: - raise forms.ValidationError('No user has that email address.') + raise forms.ValidationError("No user has that email address.") + + return self.cleaned_data["email"] - return self.cleaned_data['email'] class ForgotPasswordReturnForm(forms.Form): - password = forms.CharField(widget=forms.PasswordInput(), - label="Your new password", - required=False) + password = forms.CharField(widget=forms.PasswordInput(), label="Your new 
password", required=False) + class AccountSettingsForm(forms.Form): use_required_attribute = False - username = forms.RegexField(regex=r'^\w+$', - max_length=30, - widget=forms.TextInput(attrs={'class': 'NB-input'}), - label='username', - required=False, - error_messages={ - 'invalid': "Your username may only contain letters and numbers." - }) - email = forms.EmailField(widget=forms.TextInput(attrs={'maxlength': 75, 'class': 'NB-input'}), - label='email address', - required=True, - error_messages={'required': 'Please enter an email.'}) - new_password = forms.CharField(widget=forms.PasswordInput(attrs={'class': 'NB-input'}), - label='password', - required=False) - # error_messages={'required': 'Please enter a password.'}) - old_password = forms.CharField(widget=forms.PasswordInput(attrs={'class': 'NB-input'}), - label='password', - required=False) - custom_js = forms.CharField(widget=forms.TextInput(attrs={'class': 'NB-input'}), - label='custom_js', - required=False) - custom_css = forms.CharField(widget=forms.TextInput(attrs={'class': 'NB-input'}), - label='custom_css', - required=False) - + username = forms.RegexField( + regex=r"^\w+$", + max_length=30, + widget=forms.TextInput(attrs={"class": "NB-input"}), + label="username", + required=False, + error_messages={"invalid": "Your username may only contain letters and numbers."}, + ) + email = forms.EmailField( + widget=forms.TextInput(attrs={"maxlength": 75, "class": "NB-input"}), + label="email address", + required=True, + error_messages={"required": "Please enter an email."}, + ) + new_password = forms.CharField( + widget=forms.PasswordInput(attrs={"class": "NB-input"}), label="password", required=False + ) + # error_messages={'required': 'Please enter a password.'}) + old_password = forms.CharField( + widget=forms.PasswordInput(attrs={"class": "NB-input"}), label="password", required=False + ) + custom_js = forms.CharField( + widget=forms.TextInput(attrs={"class": "NB-input"}), label="custom_js", required=False + ) + custom_css = forms.CharField( + widget=forms.TextInput(attrs={"class": "NB-input"}), label="custom_css", required=False + ) + def __init__(self, user, *args, **kwargs): self.user = user super(AccountSettingsForm, self).__init__(*args, **kwargs) - + def clean_username(self): - username = self.cleaned_data['username'] + username = self.cleaned_data["username"] return username def clean_password(self): - if not self.cleaned_data['password']: + if not self.cleaned_data["password"]: return "" - return self.cleaned_data['password'] - + return self.cleaned_data["password"] + def clean_email(self): - return self.cleaned_data['email'] - + return self.cleaned_data["email"] + def clean(self): - username = self.cleaned_data.get('username', '') - new_password = self.cleaned_data.get('new_password', '') - old_password = self.cleaned_data.get('old_password', '') - email = self.cleaned_data.get('email', None) - + username = self.cleaned_data.get("username", "") + new_password = self.cleaned_data.get("new_password", "") + old_password = self.cleaned_data.get("old_password", "") + email = self.cleaned_data.get("email", None) + if username and self.user.username != username: try: User.objects.get(username__iexact=username) @@ -150,26 +148,28 @@ class AccountSettingsForm(forms.Form): pass else: raise forms.ValidationError("This username is already taken. 
Try something different.") - + if self.user.email != email: if email and User.objects.filter(email__iexact=email).count(): - raise forms.ValidationError("This email is already being used by another account. Try something different.") - + raise forms.ValidationError( + "This email is already being used by another account. Try something different." + ) + if old_password or new_password: code = change_password(self.user, old_password, new_password, only_check=True) if code <= 0: - raise forms.ValidationError("Your old password is incorrect.") + raise forms.ValidationError("Your old password is incorrect.") return self.cleaned_data - + def save(self, profile_callback=None): - username = self.cleaned_data['username'] - new_password = self.cleaned_data.get('new_password', None) - old_password = self.cleaned_data.get('old_password', None) - email = self.cleaned_data.get('email', None) - custom_css = self.cleaned_data.get('custom_css', None) - custom_js = self.cleaned_data.get('custom_js', None) - + username = self.cleaned_data["username"] + new_password = self.cleaned_data.get("new_password", None) + old_password = self.cleaned_data.get("old_password", None) + email = self.cleaned_data.get("email", None) + custom_css = self.cleaned_data.get("custom_css", None) + custom_js = self.cleaned_data.get("custom_js", None) + if username and self.user.username != username: change_password(self.user, self.user.username, username) self.user.username = username @@ -178,28 +178,26 @@ class AccountSettingsForm(forms.Form): social_profile.username = username social_profile.save() - self.user.profile.update_email(email) - + if old_password or new_password: change_password(self.user, old_password, new_password) - + MCustomStyling.save_user(self.user.pk, custom_css, custom_js) - + + class RedeemCodeForm(forms.Form): use_required_attribute = False - gift_code = forms.CharField(widget=forms.TextInput(), - label="Gift code", - required=True) - + gift_code = forms.CharField(widget=forms.TextInput(), label="Gift code", required=True) + def clean_gift_code(self): - gift_code = self.cleaned_data['gift_code'] - - gift_code = re.sub(r'[^a-zA-Z0-9]', '', gift_code).lower() + gift_code = self.cleaned_data["gift_code"] + + gift_code = re.sub(r"[^a-zA-Z0-9]", "", gift_code).lower() if len(gift_code) != 12: - raise forms.ValidationError('Your gift code should be 12 characters long.') - + raise forms.ValidationError("Your gift code should be 12 characters long.") + newsblur_gift_code = MGiftCode.objects.filter(gift_code__iexact=gift_code) if newsblur_gift_code: @@ -208,15 +206,17 @@ class RedeemCodeForm(forms.Form): return newsblur_gift_code.gift_code else: # Thinkup / Good Web Bundle - req = requests.get('https://www.thinkup.com/join/api/bundle/', params={'code': gift_code}) + req = requests.get("https://www.thinkup.com/join/api/bundle/", params={"code": gift_code}) response = req.json() - - is_valid = response.get('is_valid', None) + + is_valid = response.get("is_valid", None) if is_valid: return gift_code elif is_valid == False: - raise forms.ValidationError('Your gift code is invalid. Check it for errors.') - elif response.get('error', None): - raise forms.ValidationError('Your gift code is invalid, says the server: %s' % response['error']) - + raise forms.ValidationError("Your gift code is invalid. 
Check it for errors.") + elif response.get("error", None): + raise forms.ValidationError( + "Your gift code is invalid, says the server: %s" % response["error"] + ) + return gift_code diff --git a/apps/profile/management/commands/check_db.py b/apps/profile/management/commands/check_db.py index 941f5b57e..c8aa983a2 100644 --- a/apps/profile/management/commands/check_db.py +++ b/apps/profile/management/commands/check_db.py @@ -3,10 +3,10 @@ from django.core.management.base import BaseCommand from django.db import connections from django.db.utils import OperationalError -class Command(BaseCommand): +class Command(BaseCommand): def handle(self, *args, **options): - db_conn = connections['default'] + db_conn = connections["default"] connected = False while not connected: try: diff --git a/apps/profile/management/commands/fp.py b/apps/profile/management/commands/fp.py index 21b055f56..1f7ed64f8 100644 --- a/apps/profile/management/commands/fp.py +++ b/apps/profile/management/commands/fp.py @@ -1,15 +1,15 @@ from django.core.management.base import BaseCommand from django.contrib.auth.models import User -class Command(BaseCommand): +class Command(BaseCommand): def add_arguments(self, parser): parser.add_argument("-u", "--username", dest="username", nargs=1, help="Specify user id or username") parser.add_argument("-e", "--email", dest="email", nargs=1, help="Specify email if it doesn't exist") def handle(self, *args, **options): - username = options.get('username') - email = options.get('email') + username = options.get("username") + email = options.get("email") user = None if username: try: @@ -30,11 +30,9 @@ class Command(BaseCommand): user = users[0] except User.DoesNotExist: print(" ---> No email found at: %s" % email) - + if user: email = options.get("email") or user.email user.profile.send_forgot_password_email(email) else: print(" ---> No user/email found at: %s/%s" % (username, email)) - - \ No newline at end of file diff --git a/apps/profile/management/commands/reimport_paypal_history.py b/apps/profile/management/commands/reimport_paypal_history.py index 10c4afd6b..4b6e943b4 100644 --- a/apps/profile/management/commands/reimport_paypal_history.py +++ b/apps/profile/management/commands/reimport_paypal_history.py @@ -7,25 +7,46 @@ from django.contrib.auth.models import User from utils import log as logging from apps.profile.models import Profile, PaymentHistory -class Command(BaseCommand): +class Command(BaseCommand): def add_arguments(self, parser): - parser.add_argument("-d", "--days", dest="days", nargs=1, type=int, default=365, help="Number of days to go back") - parser.add_argument("-o", "--offset", dest="offset", nargs=1, type=int, default=0, help="Offset customer (in date DESC)") - parser.add_argument("-f", "--force", dest="force", nargs=1, type=bool, default=False, help="Force reimport for every user") - + parser.add_argument( + "-d", "--days", dest="days", nargs=1, type=int, default=365, help="Number of days to go back" + ) + parser.add_argument( + "-o", + "--offset", + dest="offset", + nargs=1, + type=int, + default=0, + help="Offset customer (in date DESC)", + ) + parser.add_argument( + "-f", + "--force", + dest="force", + nargs=1, + type=bool, + default=False, + help="Force reimport for every user", + ) + def handle(self, *args, **options): stripe.api_key = settings.STRIPE_SECRET - week = datetime.datetime.now() - datetime.timedelta(days=int(options.get('days'))) + week = datetime.datetime.now() - datetime.timedelta(days=int(options.get("days"))) failed = [] limit = 100 - offset 
= options.get('offset') - + offset = options.get("offset") + while True: logging.debug(" ---> At %s" % offset) - user_ids = PaymentHistory.objects.filter(payment_provider='paypal', - payment_date__gte=week).values('user_id').distinct()[offset:offset+limit] - user_ids = [u['user_id'] for u in user_ids] + user_ids = ( + PaymentHistory.objects.filter(payment_provider="paypal", payment_date__gte=week) + .values("user_id") + .distinct()[offset : offset + limit] + ) + user_ids = [u["user_id"] for u in user_ids] if not len(user_ids): logging.debug("At %s, finished" % offset) break @@ -36,7 +57,7 @@ class Command(BaseCommand): except User.DoesNotExist: logging.debug(" ***> Couldn't find paypal user_id=%s" % user_id) failed.append(user_id) - + if not user.profile.is_premium: user.profile.activate_premium() elif user.payments.all().count() != 1: @@ -45,10 +66,9 @@ class Command(BaseCommand): user.profile.setup_premium_history() elif user.profile.premium_expire > datetime.datetime.now() + datetime.timedelta(days=365): user.profile.setup_premium_history() - elif options.get('force'): + elif options.get("force"): user.profile.setup_premium_history() else: logging.debug(" ---> %s is fine" % user.username) return failed - diff --git a/apps/profile/management/commands/reimport_stripe_history.py b/apps/profile/management/commands/reimport_stripe_history.py index fe00e10bd..6f8e803e4 100644 --- a/apps/profile/management/commands/reimport_stripe_history.py +++ b/apps/profile/management/commands/reimport_stripe_history.py @@ -6,16 +6,29 @@ from django.core.management.base import BaseCommand from utils import log as logging from apps.profile.models import Profile + class Command(BaseCommand): - def add_arguments(self, parser) - parser.add_argument("-d", "--days", dest="days", nargs=1, type='int', default=365, help="Number of days to go back") - parser.add_argument("-l", "--limit", dest="limit", nargs=1, type='int', default=100, help="Charges per batch") - parser.add_argument("-s", "--start", dest="start", nargs=1, type='string', default=None, help="Offset customer_id (starting_after)") + def add_arguments(self, parser): + parser.add_argument( + "-d", "--days", dest="days", nargs=1, type="int", default=365, help="Number of days to go back" + ) + parser.add_argument( + "-l", "--limit", dest="limit", nargs=1, type="int", default=100, help="Charges per batch" + ) + parser.add_argument( + "-s", + "--start", + dest="start", + nargs=1, + type="string", + default=None, + help="Offset customer_id (starting_after)", + ) def handle(self, *args, **options): - limit = options.get('limit') - days = int(options.get('days')) - starting_after = options.get('start') - - Profile.reimport_stripe_history(limit, days, starting_after) \ No newline at end of file + limit = options.get("limit") + days = int(options.get("days")) + starting_after = options.get("start") + + Profile.reimport_stripe_history(limit, days, starting_after) diff --git a/apps/profile/management/commands/remove_last_user.py b/apps/profile/management/commands/remove_last_user.py index 3e6a07883..f61b9b188 100644 --- a/apps/profile/management/commands/remove_last_user.py +++ b/apps/profile/management/commands/remove_last_user.py @@ -5,11 +5,12 @@ from django.contrib.auth.models import User from django.core.management.base import BaseCommand from apps.profile.models import Profile -class Command(BaseCommand): + +class Command(BaseCommand): def handle(self, *args, **options): user = User.objects.last() - profile = Profile.objects.get(user=user) + profile = 
Profile.objects.get(user=user) profile.delete() user.delete() - print("User and profile for user {0} deleted".format(user)) \ No newline at end of file + print("User and profile for user {0} deleted".format(user)) diff --git a/apps/profile/middleware.py b/apps/profile/middleware.py index 4a0d23e16..9710f466c 100644 --- a/apps/profile/middleware.py +++ b/apps/profile/middleware.py @@ -19,16 +19,16 @@ class LastSeenMiddleware(object): def process_response(self, request, response): if ( ( - request.path == '/' - or request.path.startswith('/reader/refresh_feeds') - or request.path.startswith('/reader/load_feeds') - or request.path.startswith('/reader/feeds') + request.path == "/" + or request.path.startswith("/reader/refresh_feeds") + or request.path.startswith("/reader/load_feeds") + or request.path.startswith("/reader/feeds") ) - and hasattr(request, 'user') + and hasattr(request, "user") and request.user.is_authenticated ): hour_ago = datetime.datetime.utcnow() - datetime.timedelta(minutes=60) - ip = request.META.get('HTTP_X_FORWARDED_FOR', None) or request.META['REMOTE_ADDR'] + ip = request.META.get("HTTP_X_FORWARDED_FOR", None) or request.META["REMOTE_ADDR"] if request.user.profile.last_seen_on < hour_ago: logging.user( request, "~FG~BBRepeat visitor: ~SB%s (%s)" % (request.user.profile.last_seen_on, ip) @@ -50,11 +50,11 @@ class LastSeenMiddleware(object): def __call__(self, request): response = None - if hasattr(self, 'process_request'): + if hasattr(self, "process_request"): response = self.process_request(request) if not response: response = self.get_response(request) - if hasattr(self, 'process_response'): + if hasattr(self, "process_response"): response = self.process_response(request, response) return response @@ -65,31 +65,31 @@ class DBProfilerMiddleware: self.get_response = get_response def process_request(self, request): - setattr(request, 'activated_segments', []) + setattr(request, "activated_segments", []) if ( - # request.path.startswith('/reader/feed') or - request.path.startswith('/reader/feed/') + # request.path.startswith('/reader/feed') or + request.path.startswith("/reader/feed/") ) and random.random() < 0.05: - request.activated_segments.append('db_profiler') + request.activated_segments.append("db_profiler") connection.use_debug_cursor = True - setattr(settings, 'ORIGINAL_DEBUG', settings.DEBUG) + setattr(settings, "ORIGINAL_DEBUG", settings.DEBUG) settings.DEBUG = True def process_celery(self): - setattr(self, 'activated_segments', []) + setattr(self, "activated_segments", []) if random.random() < 0.01 or settings.DEBUG_QUERIES: - self.activated_segments.append('db_profiler') + self.activated_segments.append("db_profiler") connection.use_debug_cursor = True - setattr(settings, 'ORIGINAL_DEBUG', settings.DEBUG) + setattr(settings, "ORIGINAL_DEBUG", settings.DEBUG) settings.DEBUG = True return self def process_exception(self, request, exception): - if hasattr(request, 'sql_times_elapsed'): + if hasattr(request, "sql_times_elapsed"): self._save_times(request.sql_times_elapsed) def process_response(self, request, response): - if hasattr(request, 'sql_times_elapsed'): + if hasattr(request, "sql_times_elapsed"): # middleware = SQLLogToConsoleMiddleware() # middleware.process_celery(self) # logging.debug(" ---> ~FGProfiling~FB app: %s" % request.sql_times_elapsed) @@ -99,16 +99,16 @@ class DBProfilerMiddleware: def process_celery_finished(self): middleware = SQLLogToConsoleMiddleware() middleware.process_celery(self) - if hasattr(self, 'sql_times_elapsed'): + if 
hasattr(self, "sql_times_elapsed"): logging.debug(" ---> ~FGProfiling~FB task: %s" % self.sql_times_elapsed) - self._save_times(self.sql_times_elapsed, 'task_') + self._save_times(self.sql_times_elapsed, "task_") def process_request_finished(self): middleware = SQLLogToConsoleMiddleware() middleware.process_celery(self) - if hasattr(self, 'sql_times_elapsed'): + if hasattr(self, "sql_times_elapsed"): logging.debug(" ---> ~FGProfiling~FB app: %s" % self.sql_times_elapsed) - self._save_times(self.sql_times_elapsed, 'app_') + self._save_times(self.sql_times_elapsed, "app_") def _save_times(self, db_times, prefix=""): if not db_times: @@ -118,7 +118,7 @@ class DBProfilerMiddleware: pipe = r.pipeline() minute = round_time(round_to=60) for db, duration in list(db_times.items()): - key = "DB:%s%s:%s" % (prefix, db, minute.strftime('%s')) + key = "DB:%s%s:%s" % (prefix, db, minute.strftime("%s")) pipe.incr("%s:c" % key) pipe.expireat("%s:c" % key, (minute + datetime.timedelta(days=2)).strftime("%s")) if duration: @@ -128,11 +128,11 @@ class DBProfilerMiddleware: def __call__(self, request): response = None - if hasattr(self, 'process_request'): + if hasattr(self, "process_request"): response = self.process_request(request) if not response: response = self.get_response(request) - if hasattr(self, 'process_response'): + if hasattr(self, "process_response"): response = self.process_response(request, response) return response @@ -144,7 +144,7 @@ class SQLLogToConsoleMiddleware: def activated(self, request): return settings.DEBUG_QUERIES or ( - hasattr(request, 'activated_segments') and 'db_profiler' in request.activated_segments + hasattr(request, "activated_segments") and "db_profiler" in request.activated_segments ) def process_response(self, request, response): @@ -152,38 +152,39 @@ class SQLLogToConsoleMiddleware: return response if connection.queries: queries = connection.queries - if getattr(connection, 'queriesx', False): + if getattr(connection, "queriesx", False): queries.extend(connection.queriesx) connection.queriesx = [] - time_elapsed = sum([float(q['time']) for q in connection.queries]) + time_elapsed = sum([float(q["time"]) for q in connection.queries]) for query in queries: - sql_time = float(query['time']) - query['color'] = '~FC' if sql_time < 0.015 else '~FK~SB' if sql_time < 0.05 else '~FR~SB' - if query.get('mongo'): - query['sql'] = "~FM%s %s: %s" % (query['mongo']['op'], query['mongo']['collection'], query['mongo']['query']) - elif query.get('redis_user'): - query['sql'] = "~FC%s" % (query['redis_user']['query']) - elif query.get('redis_story'): - query['sql'] = "~FC%s" % (query['redis_story']['query']) - elif query.get('redis_session'): - query['sql'] = "~FC%s" % (query['redis_session']['query']) - elif query.get('redis_pubsub'): - query['sql'] = "~FC%s" % (query['redis_pubsub']['query']) - elif query.get('db_redis'): - query['sql'] = "~FC%s" % (query['db_redis']['query']) - elif 'sql' not in query: + sql_time = float(query["time"]) + query["color"] = "~FC" if sql_time < 0.015 else "~FK~SB" if sql_time < 0.05 else "~FR~SB" + if query.get("mongo"): + query["sql"] = "~FM%s %s: %s" % ( + query["mongo"]["op"], + query["mongo"]["collection"], + query["mongo"]["query"], + ) + elif query.get("redis_user"): + query["sql"] = "~FC%s" % (query["redis_user"]["query"]) + elif query.get("redis_story"): + query["sql"] = "~FC%s" % (query["redis_story"]["query"]) + elif query.get("redis_session"): + query["sql"] = "~FC%s" % (query["redis_session"]["query"]) + elif query.get("redis_pubsub"): + 
query["sql"] = "~FC%s" % (query["redis_pubsub"]["query"]) + elif query.get("db_redis"): + query["sql"] = "~FC%s" % (query["db_redis"]["query"]) + elif "sql" not in query: logging.debug(" ***> Query log missing: %s" % query) else: - query['sql'] = re.sub(r'SELECT (.*?) FROM', 'SELECT * FROM', query['sql']) - query['sql'] = re.sub(r'SELECT', '~FYSELECT', query['sql']) - query['sql'] = re.sub(r'INSERT', '~FGINSERT', query['sql']) - query['sql'] = re.sub(r'UPDATE', '~FY~SBUPDATE', query['sql']) - query['sql'] = re.sub(r'DELETE', '~FR~SBDELETE', query['sql']) + query["sql"] = re.sub(r"SELECT (.*?) FROM", "SELECT * FROM", query["sql"]) + query["sql"] = re.sub(r"SELECT", "~FYSELECT", query["sql"]) + query["sql"] = re.sub(r"INSERT", "~FGINSERT", query["sql"]) + query["sql"] = re.sub(r"UPDATE", "~FY~SBUPDATE", query["sql"]) + query["sql"] = re.sub(r"DELETE", "~FR~SBDELETE", query["sql"]) - if ( - settings.DEBUG_QUERIES - and not getattr(settings, 'DEBUG_QUERIES_SUMMARY_ONLY', False) - ): + if settings.DEBUG_QUERIES and not getattr(settings, "DEBUG_QUERIES_SUMMARY_ONLY", False): t = Template( "{% for sql in sqllog %}{% if not forloop.first %} {% endif %}[{{forloop.counter}}] {{sql.color}}{{sql.time}}~SN~FW: {{sql.sql|safe}}{% if not forloop.last %}\n{% endif %}{% endfor %}" ) @@ -191,51 +192,51 @@ class SQLLogToConsoleMiddleware: t.render( Context( { - 'sqllog': queries, - 'count': len(queries), - 'time': time_elapsed, + "sqllog": queries, + "count": len(queries), + "time": time_elapsed, } ) ) ) times_elapsed = { - 'sql': sum( + "sql": sum( [ - float(q['time']) + float(q["time"]) for q in queries - if not q.get('mongo') - and not q.get('redis_user') - and not q.get('redis_story') - and not q.get('redis_session') - and not q.get('redis_pubsub') + if not q.get("mongo") + and not q.get("redis_user") + and not q.get("redis_story") + and not q.get("redis_session") + and not q.get("redis_pubsub") ] ), - 'mongo': sum([float(q['time']) for q in queries if q.get('mongo')]), - 'redis_user': sum([float(q['time']) for q in queries if q.get('redis_user')]), - 'redis_story': sum([float(q['time']) for q in queries if q.get('redis_story')]), - 'redis_session': sum([float(q['time']) for q in queries if q.get('redis_session')]), - 'redis_pubsub': sum([float(q['time']) for q in queries if q.get('redis_pubsub')]), + "mongo": sum([float(q["time"]) for q in queries if q.get("mongo")]), + "redis_user": sum([float(q["time"]) for q in queries if q.get("redis_user")]), + "redis_story": sum([float(q["time"]) for q in queries if q.get("redis_story")]), + "redis_session": sum([float(q["time"]) for q in queries if q.get("redis_session")]), + "redis_pubsub": sum([float(q["time"]) for q in queries if q.get("redis_pubsub")]), } - setattr(request, 'sql_times_elapsed', times_elapsed) + setattr(request, "sql_times_elapsed", times_elapsed) else: print(" ***> No queries") - if not getattr(settings, 'ORIGINAL_DEBUG', settings.DEBUG): + if not getattr(settings, "ORIGINAL_DEBUG", settings.DEBUG): settings.DEBUG = False return response def process_celery(self, profiler): self.process_response(profiler, None) - if not getattr(settings, 'ORIGINAL_DEBUG', settings.DEBUG): + if not getattr(settings, "ORIGINAL_DEBUG", settings.DEBUG): settings.DEBUG = False def __call__(self, request): response = None - if hasattr(self, 'process_request'): + if hasattr(self, "process_request"): response = self.process_request(request) if not response: response = self.get_response(request) - if hasattr(self, 'process_response'): + if hasattr(self, 
"process_response"): response = self.process_response(request, response) return response @@ -246,7 +247,7 @@ SIMPSONS_QUOTES = [ ("Ralph", "Me fail English? That's unpossible."), ( "Lionel Hutz", - "This is the greatest case of false advertising I've seen since I sued the movie \"The Never Ending Story.\"", + 'This is the greatest case of false advertising I\'ve seen since I sued the movie "The Never Ending Story."', ), ("Sideshow Bob", "No children have ever meddled with the Republican Party and lived to tell about it."), ( @@ -261,7 +262,7 @@ SIMPSONS_QUOTES = [ ), ( "Comic Book Guy", - "Your questions have become more redundant and annoying than the last three \"Highlander\" movies.", + 'Your questions have become more redundant and annoying than the last three "Highlander" movies.', ), ("Chief Wiggum", "Uh, no, you got the wrong number. This is 9-1...2."), ( @@ -282,11 +283,11 @@ SIMPSONS_QUOTES = [ ), ( "Lionel Hutz", - "Well, he's kind of had it in for me ever since I accidentally ran over his dog. Actually, replace \"accidentally\" with \"repeatedly\" and replace \"dog\" with \"son.\"", + 'Well, he\'s kind of had it in for me ever since I accidentally ran over his dog. Actually, replace "accidentally" with "repeatedly" and replace "dog" with "son."', ), ( "Comic Book Guy", - "Last night's \"Itchy and Scratchy Show\" was, without a doubt, the worst episode *ever.* Rest assured, I was on the Internet within minutes, registering my disgust throughout the world.", + 'Last night\'s "Itchy and Scratchy Show" was, without a doubt, the worst episode *ever.* Rest assured, I was on the Internet within minutes, registering my disgust throughout the world.', ), ("Homer", "I'm normally not a praying man, but if you're up there, please save me, Superman."), ("Homer", "Save me, Jeebus."), @@ -307,7 +308,7 @@ SIMPSONS_QUOTES = [ ("Homer", "Fame was like a drug. But what was even more like a drug were the drugs."), ( "Homer", - "Books are useless! I only ever read one book, \"To Kill A Mockingbird,\" and it gave me absolutely no insight on how to kill mockingbirds! Sure it taught me not to judge a man by the color of his skin...but what good does *that* do me?", + 'Books are useless! I only ever read one book, "To Kill A Mockingbird," and it gave me absolutely no insight on how to kill mockingbirds! Sure it taught me not to judge a man by the color of his skin...but what good does *that* do me?', ), ( "Chief Wiggum", @@ -325,8 +326,8 @@ SIMPSONS_QUOTES = [ "Homer", "You know, the one with all the well meaning rules that don't work out in real life, uh, Christianity.", ), - ("Smithers", "Uh, no, they're saying \"Boo-urns, Boo-urns.\""), - ("Hans Moleman", "I was saying \"Boo-urns.\""), + ("Smithers", 'Uh, no, they\'re saying "Boo-urns, Boo-urns."'), + ("Hans Moleman", 'I was saying "Boo-urns."'), ("Homer", "Kids, you tried your best and you failed miserably. The lesson is, never try."), ("Homer", "Here's to alcohol, the cause of - and solution to - all life's problems."), ( @@ -350,7 +351,7 @@ SIMPSONS_QUOTES = [ ), ( "Troy McClure", - "Hi. I'm Troy McClure. You may remember me from such self-help tapes as \"Smoke Yourself Thin\" and \"Get Some Confidence, Stupid!\"", + 'Hi. I\'m Troy McClure. You may remember me from such self-help tapes as "Smoke Yourself Thin" and "Get Some Confidence, Stupid!"', ), ("Homer", "A woman is a lot like a refrigerator. 
Six feet tall, 300 pounds...it makes ice."), ( @@ -425,7 +426,7 @@ SIMPSONS_QUOTES = [ ("Barney", "Jesus must be spinning in his grave!"), ( "Superintendent Chalmers", - "\"Thank the Lord\"? That sounded like a prayer. A prayer in a public school. God has no place within these walls, just like facts don't have a place within an organized religion.", + '"Thank the Lord"? That sounded like a prayer. A prayer in a public school. God has no place within these walls, just like facts don\'t have a place within an organized religion.', ), ("Mr Burns", "[answering the phone] Ahoy hoy?"), ("Comic Book Guy", "Oh, a *sarcasm* detector. Oh, that's a *really* useful invention!"), @@ -487,18 +488,18 @@ class SimpsonsMiddleware: def process_response(self, request, response): quote = random.choice(SIMPSONS_QUOTES) - source = quote[0].replace(' ', '-') + source = quote[0].replace(" ", "-") response["X-%s" % source] = quote[1] return response def __call__(self, request): response = None - if hasattr(self, 'process_request'): + if hasattr(self, "process_request"): response = self.process_request(request) if not response: response = self.get_response(request) - if hasattr(self, 'process_response'): + if hasattr(self, "process_response"): response = self.process_response(request, response) return response @@ -515,11 +516,11 @@ class ServerHostnameMiddleware: def __call__(self, request): response = None - if hasattr(self, 'process_request'): + if hasattr(self, "process_request"): response = self.process_request(request) if not response: response = self.get_response(request) - if hasattr(self, 'process_response'): + if hasattr(self, "process_response"): response = self.process_response(request, response) return response @@ -530,7 +531,7 @@ class TimingMiddleware: self.get_response = get_response def process_request(self, request): - setattr(request, 'start_time', time.time()) + setattr(request, "start_time", time.time()) def __call__(self, request): response = self.process_request(request) @@ -541,8 +542,8 @@ class TimingMiddleware: BANNED_USER_AGENTS = ( - 'feed reader-background', - 'missing', + "feed reader-background", + "missing", ) BANNED_USERNAMES = () @@ -553,46 +554,46 @@ class UserAgentBanMiddleware: self.get_response = get_response def process_request(self, request): - user_agent = request.environ.get('HTTP_USER_AGENT', 'missing').lower() + user_agent = request.environ.get("HTTP_USER_AGENT", "missing").lower() - if 'profile' in request.path: + if "profile" in request.path: return - if 'haproxy' in request.path: + if "haproxy" in request.path: return - if 'dbcheck' in request.path: + if "dbcheck" in request.path: return - if 'account' in request.path: + if "account" in request.path: return - if 'push' in request.path: + if "push" in request.path: return - if getattr(settings, 'TEST_DEBUG'): + if getattr(settings, "TEST_DEBUG"): return if any(ua in user_agent for ua in BANNED_USER_AGENTS): - data = {'error': 'User agent banned: %s' % user_agent, 'code': -1} + data = {"error": "User agent banned: %s" % user_agent, "code": -1} logging.user( request, "~FB~SN~BBBanned UA: ~SB%s / %s (%s)" % (user_agent, request.path, request.META) ) - return HttpResponse(json.encode(data), status=403, content_type='text/json') + return HttpResponse(json.encode(data), status=403, content_type="text/json") if request.user.is_authenticated and any( username == request.user.username for username in BANNED_USERNAMES ): - data = {'error': 'User banned: %s' % request.user.username, 'code': -1} + data = {"error": "User banned: %s" 
% request.user.username, "code": -1} logging.user( request, "~FB~SN~BBBanned Username: ~SB%s / %s (%s)" % (request.user, request.path, request.META), ) - return HttpResponse(json.encode(data), status=403, content_type='text/json') + return HttpResponse(json.encode(data), status=403, content_type="text/json") def __call__(self, request): response = None - if hasattr(self, 'process_request'): + if hasattr(self, "process_request"): response = self.process_request(request) if not response: response = self.get_response(request) - if hasattr(self, 'process_response'): + if hasattr(self, "process_response"): response = self.process_response(request, response) return response diff --git a/apps/profile/migrations/0001_initial.py b/apps/profile/migrations/0001_initial.py index 82d4a4fe7..ac7841d50 100644 --- a/apps/profile/migrations/0001_initial.py +++ b/apps/profile/migrations/0001_initial.py @@ -8,7 +8,6 @@ import vendor.timezones.fields class Migration(migrations.Migration): - initial = True dependencies = [ @@ -17,51 +16,528 @@ class Migration(migrations.Migration): operations = [ migrations.CreateModel( - name='PaymentHistory', + name="PaymentHistory", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('payment_date', models.DateTimeField()), - ('payment_amount', models.IntegerField()), - ('payment_provider', models.CharField(max_length=20)), - ('payment_identifier', models.CharField(max_length=100, null=True)), - ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='payments', to=settings.AUTH_USER_MODEL)), + ( + "id", + models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID"), + ), + ("payment_date", models.DateTimeField()), + ("payment_amount", models.IntegerField()), + ("payment_provider", models.CharField(max_length=20)), + ("payment_identifier", models.CharField(max_length=100, null=True)), + ( + "user", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + related_name="payments", + to=settings.AUTH_USER_MODEL, + ), + ), ], options={ - 'ordering': ['-payment_date'], + "ordering": ["-payment_date"], }, ), migrations.CreateModel( - name='Profile', + name="Profile", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('is_premium', models.BooleanField(default=False)), - ('premium_expire', models.DateTimeField(blank=True, null=True)), - ('send_emails', models.BooleanField(default=True)), - ('preferences', models.TextField(default='{}')), - ('view_settings', models.TextField(default='{}')), - ('collapsed_folders', models.TextField(default='[]')), - ('feed_pane_size', models.IntegerField(default=242)), - ('tutorial_finished', models.BooleanField(default=False)), - ('hide_getting_started', models.NullBooleanField(default=False)), - ('has_setup_feeds', models.NullBooleanField(default=False)), - ('has_found_friends', models.NullBooleanField(default=False)), - ('has_trained_intelligence', models.NullBooleanField(default=False)), - ('last_seen_on', models.DateTimeField(default=datetime.datetime.now)), - ('last_seen_ip', models.CharField(blank=True, max_length=50, null=True)), - ('dashboard_date', models.DateTimeField(default=datetime.datetime.now)), - ('timezone', vendor.timezones.fields.TimeZoneField(choices=[('Africa/Abidjan', '(GMT+0000) Africa/Abidjan'), ('Africa/Accra', '(GMT+0000) Africa/Accra'), ('Africa/Addis_Ababa', '(GMT+0300) Africa/Addis_Ababa'), ('Africa/Algiers', '(GMT+0100) 
Africa/Algiers'), ('Africa/Asmara', '(GMT+0300) Africa/Asmara'), ('Africa/Bamako', '(GMT+0000) Africa/Bamako'), ('Africa/Bangui', '(GMT+0100) Africa/Bangui'), ('Africa/Banjul', '(GMT+0000) Africa/Banjul'), ('Africa/Bissau', '(GMT+0000) Africa/Bissau'), ('Africa/Blantyre', '(GMT+0200) Africa/Blantyre'), ('Africa/Brazzaville', '(GMT+0100) Africa/Brazzaville'), ('Africa/Bujumbura', '(GMT+0200) Africa/Bujumbura'), ('Africa/Cairo', '(GMT+0200) Africa/Cairo'), ('Africa/Casablanca', '(GMT+0100) Africa/Casablanca'), ('Africa/Ceuta', '(GMT+0200) Africa/Ceuta'), ('Africa/Conakry', '(GMT+0000) Africa/Conakry'), ('Africa/Dakar', '(GMT+0000) Africa/Dakar'), ('Africa/Dar_es_Salaam', '(GMT+0300) Africa/Dar_es_Salaam'), ('Africa/Djibouti', '(GMT+0300) Africa/Djibouti'), ('Africa/Douala', '(GMT+0100) Africa/Douala'), ('Africa/El_Aaiun', '(GMT+0100) Africa/El_Aaiun'), ('Africa/Freetown', '(GMT+0000) Africa/Freetown'), ('Africa/Gaborone', '(GMT+0200) Africa/Gaborone'), ('Africa/Harare', '(GMT+0200) Africa/Harare'), ('Africa/Johannesburg', '(GMT+0200) Africa/Johannesburg'), ('Africa/Juba', '(GMT+0300) Africa/Juba'), ('Africa/Kampala', '(GMT+0300) Africa/Kampala'), ('Africa/Khartoum', '(GMT+0200) Africa/Khartoum'), ('Africa/Kigali', '(GMT+0200) Africa/Kigali'), ('Africa/Kinshasa', '(GMT+0100) Africa/Kinshasa'), ('Africa/Lagos', '(GMT+0100) Africa/Lagos'), ('Africa/Libreville', '(GMT+0100) Africa/Libreville'), ('Africa/Lome', '(GMT+0000) Africa/Lome'), ('Africa/Luanda', '(GMT+0100) Africa/Luanda'), ('Africa/Lubumbashi', '(GMT+0200) Africa/Lubumbashi'), ('Africa/Lusaka', '(GMT+0200) Africa/Lusaka'), ('Africa/Malabo', '(GMT+0100) Africa/Malabo'), ('Africa/Maputo', '(GMT+0200) Africa/Maputo'), ('Africa/Maseru', '(GMT+0200) Africa/Maseru'), ('Africa/Mbabane', '(GMT+0200) Africa/Mbabane'), ('Africa/Mogadishu', '(GMT+0300) Africa/Mogadishu'), ('Africa/Monrovia', '(GMT+0000) Africa/Monrovia'), ('Africa/Nairobi', '(GMT+0300) Africa/Nairobi'), ('Africa/Ndjamena', '(GMT+0100) Africa/Ndjamena'), ('Africa/Niamey', '(GMT+0100) Africa/Niamey'), ('Africa/Nouakchott', '(GMT+0000) Africa/Nouakchott'), ('Africa/Ouagadougou', '(GMT+0000) Africa/Ouagadougou'), ('Africa/Porto-Novo', '(GMT+0100) Africa/Porto-Novo'), ('Africa/Sao_Tome', '(GMT+0100) Africa/Sao_Tome'), ('Africa/Tripoli', '(GMT+0200) Africa/Tripoli'), ('Africa/Tunis', '(GMT+0100) Africa/Tunis'), ('Africa/Windhoek', '(GMT+0200) Africa/Windhoek'), ('America/Adak', '(GMT-0900) America/Adak'), ('America/Anchorage', '(GMT-0800) America/Anchorage'), ('America/Anguilla', '(GMT-0400) America/Anguilla'), ('America/Antigua', '(GMT-0400) America/Antigua'), ('America/Araguaina', '(GMT-0300) America/Araguaina'), ('America/Argentina/Buenos_Aires', '(GMT-0300) America/Argentina/Buenos_Aires'), ('America/Argentina/Catamarca', '(GMT-0300) America/Argentina/Catamarca'), ('America/Argentina/Cordoba', '(GMT-0300) America/Argentina/Cordoba'), ('America/Argentina/Jujuy', '(GMT-0300) America/Argentina/Jujuy'), ('America/Argentina/La_Rioja', '(GMT-0300) America/Argentina/La_Rioja'), ('America/Argentina/Mendoza', '(GMT-0300) America/Argentina/Mendoza'), ('America/Argentina/Rio_Gallegos', '(GMT-0300) America/Argentina/Rio_Gallegos'), ('America/Argentina/Salta', '(GMT-0300) America/Argentina/Salta'), ('America/Argentina/San_Juan', '(GMT-0300) America/Argentina/San_Juan'), ('America/Argentina/San_Luis', '(GMT-0300) America/Argentina/San_Luis'), ('America/Argentina/Tucuman', '(GMT-0300) America/Argentina/Tucuman'), ('America/Argentina/Ushuaia', '(GMT-0300) America/Argentina/Ushuaia'), 
('America/Aruba', '(GMT-0400) America/Aruba'), ('America/Asuncion', '(GMT-0400) America/Asuncion'), ('America/Atikokan', '(GMT-0500) America/Atikokan'), ('America/Bahia', '(GMT-0300) America/Bahia'), ('America/Bahia_Banderas', '(GMT-0500) America/Bahia_Banderas'), ('America/Barbados', '(GMT-0400) America/Barbados'), ('America/Belem', '(GMT-0300) America/Belem'), ('America/Belize', '(GMT-0600) America/Belize'), ('America/Blanc-Sablon', '(GMT-0400) America/Blanc-Sablon'), ('America/Boa_Vista', '(GMT-0400) America/Boa_Vista'), ('America/Bogota', '(GMT-0500) America/Bogota'), ('America/Boise', '(GMT-0600) America/Boise'), ('America/Cambridge_Bay', '(GMT-0600) America/Cambridge_Bay'), ('America/Campo_Grande', '(GMT-0400) America/Campo_Grande'), ('America/Cancun', '(GMT-0500) America/Cancun'), ('America/Caracas', '(GMT-0400) America/Caracas'), ('America/Cayenne', '(GMT-0300) America/Cayenne'), ('America/Cayman', '(GMT-0500) America/Cayman'), ('America/Chicago', '(GMT-0500) America/Chicago'), ('America/Chihuahua', '(GMT-0600) America/Chihuahua'), ('America/Costa_Rica', '(GMT-0600) America/Costa_Rica'), ('America/Creston', '(GMT-0700) America/Creston'), ('America/Cuiaba', '(GMT-0400) America/Cuiaba'), ('America/Curacao', '(GMT-0400) America/Curacao'), ('America/Danmarkshavn', '(GMT+0000) America/Danmarkshavn'), ('America/Dawson', '(GMT-0700) America/Dawson'), ('America/Dawson_Creek', '(GMT-0700) America/Dawson_Creek'), ('America/Denver', '(GMT-0600) America/Denver'), ('America/Detroit', '(GMT-0400) America/Detroit'), ('America/Dominica', '(GMT-0400) America/Dominica'), ('America/Edmonton', '(GMT-0600) America/Edmonton'), ('America/Eirunepe', '(GMT-0500) America/Eirunepe'), ('America/El_Salvador', '(GMT-0600) America/El_Salvador'), ('America/Fort_Nelson', '(GMT-0700) America/Fort_Nelson'), ('America/Fortaleza', '(GMT-0300) America/Fortaleza'), ('America/Glace_Bay', '(GMT-0300) America/Glace_Bay'), ('America/Godthab', '(GMT-0200) America/Godthab'), ('America/Goose_Bay', '(GMT-0300) America/Goose_Bay'), ('America/Grand_Turk', '(GMT-0400) America/Grand_Turk'), ('America/Grenada', '(GMT-0400) America/Grenada'), ('America/Guadeloupe', '(GMT-0400) America/Guadeloupe'), ('America/Guatemala', '(GMT-0600) America/Guatemala'), ('America/Guayaquil', '(GMT-0500) America/Guayaquil'), ('America/Guyana', '(GMT-0400) America/Guyana'), ('America/Halifax', '(GMT-0300) America/Halifax'), ('America/Havana', '(GMT-0400) America/Havana'), ('America/Hermosillo', '(GMT-0700) America/Hermosillo'), ('America/Indiana/Indianapolis', '(GMT-0400) America/Indiana/Indianapolis'), ('America/Indiana/Knox', '(GMT-0500) America/Indiana/Knox'), ('America/Indiana/Marengo', '(GMT-0400) America/Indiana/Marengo'), ('America/Indiana/Petersburg', '(GMT-0400) America/Indiana/Petersburg'), ('America/Indiana/Tell_City', '(GMT-0500) America/Indiana/Tell_City'), ('America/Indiana/Vevay', '(GMT-0400) America/Indiana/Vevay'), ('America/Indiana/Vincennes', '(GMT-0400) America/Indiana/Vincennes'), ('America/Indiana/Winamac', '(GMT-0400) America/Indiana/Winamac'), ('America/Inuvik', '(GMT-0600) America/Inuvik'), ('America/Iqaluit', '(GMT-0400) America/Iqaluit'), ('America/Jamaica', '(GMT-0500) America/Jamaica'), ('America/Juneau', '(GMT-0800) America/Juneau'), ('America/Kentucky/Louisville', '(GMT-0400) America/Kentucky/Louisville'), ('America/Kentucky/Monticello', '(GMT-0400) America/Kentucky/Monticello'), ('America/Kralendijk', '(GMT-0400) America/Kralendijk'), ('America/La_Paz', '(GMT-0400) America/La_Paz'), ('America/Lima', '(GMT-0500) 
America/Lima'), ('America/Los_Angeles', '(GMT-0700) America/Los_Angeles'), ('America/Lower_Princes', '(GMT-0400) America/Lower_Princes'), ('America/Maceio', '(GMT-0300) America/Maceio'), ('America/Managua', '(GMT-0600) America/Managua'), ('America/Manaus', '(GMT-0400) America/Manaus'), ('America/Marigot', '(GMT-0400) America/Marigot'), ('America/Martinique', '(GMT-0400) America/Martinique'), ('America/Matamoros', '(GMT-0500) America/Matamoros'), ('America/Mazatlan', '(GMT-0600) America/Mazatlan'), ('America/Menominee', '(GMT-0500) America/Menominee'), ('America/Merida', '(GMT-0500) America/Merida'), ('America/Metlakatla', '(GMT-0800) America/Metlakatla'), ('America/Mexico_City', '(GMT-0500) America/Mexico_City'), ('America/Miquelon', '(GMT-0200) America/Miquelon'), ('America/Moncton', '(GMT-0300) America/Moncton'), ('America/Monterrey', '(GMT-0500) America/Monterrey'), ('America/Montevideo', '(GMT-0300) America/Montevideo'), ('America/Montserrat', '(GMT-0400) America/Montserrat'), ('America/Nassau', '(GMT-0400) America/Nassau'), ('America/New_York', '(GMT-0400) America/New_York'), ('America/Nipigon', '(GMT-0400) America/Nipigon'), ('America/Nome', '(GMT-0800) America/Nome'), ('America/Noronha', '(GMT-0200) America/Noronha'), ('America/North_Dakota/Beulah', '(GMT-0500) America/North_Dakota/Beulah'), ('America/North_Dakota/Center', '(GMT-0500) America/North_Dakota/Center'), ('America/North_Dakota/New_Salem', '(GMT-0500) America/North_Dakota/New_Salem'), ('America/Ojinaga', '(GMT-0600) America/Ojinaga'), ('America/Panama', '(GMT-0500) America/Panama'), ('America/Pangnirtung', '(GMT-0400) America/Pangnirtung'), ('America/Paramaribo', '(GMT-0300) America/Paramaribo'), ('America/Phoenix', '(GMT-0700) America/Phoenix'), ('America/Port-au-Prince', '(GMT-0400) America/Port-au-Prince'), ('America/Port_of_Spain', '(GMT-0400) America/Port_of_Spain'), ('America/Porto_Velho', '(GMT-0400) America/Porto_Velho'), ('America/Puerto_Rico', '(GMT-0400) America/Puerto_Rico'), ('America/Punta_Arenas', '(GMT-0300) America/Punta_Arenas'), ('America/Rainy_River', '(GMT-0500) America/Rainy_River'), ('America/Rankin_Inlet', '(GMT-0500) America/Rankin_Inlet'), ('America/Recife', '(GMT-0300) America/Recife'), ('America/Regina', '(GMT-0600) America/Regina'), ('America/Resolute', '(GMT-0500) America/Resolute'), ('America/Rio_Branco', '(GMT-0500) America/Rio_Branco'), ('America/Santarem', '(GMT-0300) America/Santarem'), ('America/Santiago', '(GMT-0400) America/Santiago'), ('America/Santo_Domingo', '(GMT-0400) America/Santo_Domingo'), ('America/Sao_Paulo', '(GMT-0300) America/Sao_Paulo'), ('America/Scoresbysund', '(GMT+0000) America/Scoresbysund'), ('America/Sitka', '(GMT-0800) America/Sitka'), ('America/St_Barthelemy', '(GMT-0400) America/St_Barthelemy'), ('America/St_Johns', '(GMT-0230) America/St_Johns'), ('America/St_Kitts', '(GMT-0400) America/St_Kitts'), ('America/St_Lucia', '(GMT-0400) America/St_Lucia'), ('America/St_Thomas', '(GMT-0400) America/St_Thomas'), ('America/St_Vincent', '(GMT-0400) America/St_Vincent'), ('America/Swift_Current', '(GMT-0600) America/Swift_Current'), ('America/Tegucigalpa', '(GMT-0600) America/Tegucigalpa'), ('America/Thule', '(GMT-0300) America/Thule'), ('America/Thunder_Bay', '(GMT-0400) America/Thunder_Bay'), ('America/Tijuana', '(GMT-0700) America/Tijuana'), ('America/Toronto', '(GMT-0400) America/Toronto'), ('America/Tortola', '(GMT-0400) America/Tortola'), ('America/Vancouver', '(GMT-0700) America/Vancouver'), ('America/Whitehorse', '(GMT-0700) America/Whitehorse'), 
('America/Winnipeg', '(GMT-0500) America/Winnipeg'), ('America/Yakutat', '(GMT-0800) America/Yakutat'), ('America/Yellowknife', '(GMT-0600) America/Yellowknife'), ('Antarctica/Casey', '(GMT+1100) Antarctica/Casey'), ('Antarctica/Davis', '(GMT+0700) Antarctica/Davis'), ('Antarctica/DumontDUrville', '(GMT+1000) Antarctica/DumontDUrville'), ('Antarctica/Macquarie', '(GMT+1100) Antarctica/Macquarie'), ('Antarctica/Mawson', '(GMT+0500) Antarctica/Mawson'), ('Antarctica/McMurdo', '(GMT+1200) Antarctica/McMurdo'), ('Antarctica/Palmer', '(GMT-0300) Antarctica/Palmer'), ('Antarctica/Rothera', '(GMT-0300) Antarctica/Rothera'), ('Antarctica/Syowa', '(GMT+0300) Antarctica/Syowa'), ('Antarctica/Troll', '(GMT+0200) Antarctica/Troll'), ('Antarctica/Vostok', '(GMT+0600) Antarctica/Vostok'), ('Arctic/Longyearbyen', '(GMT+0200) Arctic/Longyearbyen'), ('Asia/Aden', '(GMT+0300) Asia/Aden'), ('Asia/Almaty', '(GMT+0600) Asia/Almaty'), ('Asia/Amman', '(GMT+0300) Asia/Amman'), ('Asia/Anadyr', '(GMT+1200) Asia/Anadyr'), ('Asia/Aqtau', '(GMT+0500) Asia/Aqtau'), ('Asia/Aqtobe', '(GMT+0500) Asia/Aqtobe'), ('Asia/Ashgabat', '(GMT+0500) Asia/Ashgabat'), ('Asia/Atyrau', '(GMT+0500) Asia/Atyrau'), ('Asia/Baghdad', '(GMT+0300) Asia/Baghdad'), ('Asia/Bahrain', '(GMT+0300) Asia/Bahrain'), ('Asia/Baku', '(GMT+0400) Asia/Baku'), ('Asia/Bangkok', '(GMT+0700) Asia/Bangkok'), ('Asia/Barnaul', '(GMT+0700) Asia/Barnaul'), ('Asia/Beirut', '(GMT+0300) Asia/Beirut'), ('Asia/Bishkek', '(GMT+0600) Asia/Bishkek'), ('Asia/Brunei', '(GMT+0800) Asia/Brunei'), ('Asia/Chita', '(GMT+0900) Asia/Chita'), ('Asia/Choibalsan', '(GMT+0800) Asia/Choibalsan'), ('Asia/Colombo', '(GMT+0530) Asia/Colombo'), ('Asia/Damascus', '(GMT+0300) Asia/Damascus'), ('Asia/Dhaka', '(GMT+0600) Asia/Dhaka'), ('Asia/Dili', '(GMT+0900) Asia/Dili'), ('Asia/Dubai', '(GMT+0400) Asia/Dubai'), ('Asia/Dushanbe', '(GMT+0500) Asia/Dushanbe'), ('Asia/Famagusta', '(GMT+0300) Asia/Famagusta'), ('Asia/Gaza', '(GMT+0300) Asia/Gaza'), ('Asia/Hebron', '(GMT+0300) Asia/Hebron'), ('Asia/Ho_Chi_Minh', '(GMT+0700) Asia/Ho_Chi_Minh'), ('Asia/Hong_Kong', '(GMT+0800) Asia/Hong_Kong'), ('Asia/Hovd', '(GMT+0700) Asia/Hovd'), ('Asia/Irkutsk', '(GMT+0800) Asia/Irkutsk'), ('Asia/Jakarta', '(GMT+0700) Asia/Jakarta'), ('Asia/Jayapura', '(GMT+0900) Asia/Jayapura'), ('Asia/Jerusalem', '(GMT+0300) Asia/Jerusalem'), ('Asia/Kabul', '(GMT+0430) Asia/Kabul'), ('Asia/Kamchatka', '(GMT+1200) Asia/Kamchatka'), ('Asia/Karachi', '(GMT+0500) Asia/Karachi'), ('Asia/Kathmandu', '(GMT+0545) Asia/Kathmandu'), ('Asia/Khandyga', '(GMT+0900) Asia/Khandyga'), ('Asia/Kolkata', '(GMT+0530) Asia/Kolkata'), ('Asia/Krasnoyarsk', '(GMT+0700) Asia/Krasnoyarsk'), ('Asia/Kuala_Lumpur', '(GMT+0800) Asia/Kuala_Lumpur'), ('Asia/Kuching', '(GMT+0800) Asia/Kuching'), ('Asia/Kuwait', '(GMT+0300) Asia/Kuwait'), ('Asia/Macau', '(GMT+0800) Asia/Macau'), ('Asia/Magadan', '(GMT+1100) Asia/Magadan'), ('Asia/Makassar', '(GMT+0800) Asia/Makassar'), ('Asia/Manila', '(GMT+0800) Asia/Manila'), ('Asia/Muscat', '(GMT+0400) Asia/Muscat'), ('Asia/Nicosia', '(GMT+0300) Asia/Nicosia'), ('Asia/Novokuznetsk', '(GMT+0700) Asia/Novokuznetsk'), ('Asia/Novosibirsk', '(GMT+0700) Asia/Novosibirsk'), ('Asia/Omsk', '(GMT+0600) Asia/Omsk'), ('Asia/Oral', '(GMT+0500) Asia/Oral'), ('Asia/Phnom_Penh', '(GMT+0700) Asia/Phnom_Penh'), ('Asia/Pontianak', '(GMT+0700) Asia/Pontianak'), ('Asia/Pyongyang', '(GMT+0830) Asia/Pyongyang'), ('Asia/Qatar', '(GMT+0300) Asia/Qatar'), ('Asia/Qyzylorda', '(GMT+0600) Asia/Qyzylorda'), ('Asia/Riyadh', '(GMT+0300) Asia/Riyadh'), 
('Asia/Sakhalin', '(GMT+1100) Asia/Sakhalin'), ('Asia/Samarkand', '(GMT+0500) Asia/Samarkand'), ('Asia/Seoul', '(GMT+0900) Asia/Seoul'), ('Asia/Shanghai', '(GMT+0800) Asia/Shanghai'), ('Asia/Singapore', '(GMT+0800) Asia/Singapore'), ('Asia/Srednekolymsk', '(GMT+1100) Asia/Srednekolymsk'), ('Asia/Taipei', '(GMT+0800) Asia/Taipei'), ('Asia/Tashkent', '(GMT+0500) Asia/Tashkent'), ('Asia/Tbilisi', '(GMT+0400) Asia/Tbilisi'), ('Asia/Tehran', '(GMT+0430) Asia/Tehran'), ('Asia/Thimphu', '(GMT+0600) Asia/Thimphu'), ('Asia/Tokyo', '(GMT+0900) Asia/Tokyo'), ('Asia/Tomsk', '(GMT+0700) Asia/Tomsk'), ('Asia/Ulaanbaatar', '(GMT+0800) Asia/Ulaanbaatar'), ('Asia/Urumqi', '(GMT+0600) Asia/Urumqi'), ('Asia/Ust-Nera', '(GMT+1000) Asia/Ust-Nera'), ('Asia/Vientiane', '(GMT+0700) Asia/Vientiane'), ('Asia/Vladivostok', '(GMT+1000) Asia/Vladivostok'), ('Asia/Yakutsk', '(GMT+0900) Asia/Yakutsk'), ('Asia/Yangon', '(GMT+0630) Asia/Yangon'), ('Asia/Yekaterinburg', '(GMT+0500) Asia/Yekaterinburg'), ('Asia/Yerevan', '(GMT+0400) Asia/Yerevan'), ('Atlantic/Azores', '(GMT+0000) Atlantic/Azores'), ('Atlantic/Bermuda', '(GMT-0300) Atlantic/Bermuda'), ('Atlantic/Canary', '(GMT+0100) Atlantic/Canary'), ('Atlantic/Cape_Verde', '(GMT-0100) Atlantic/Cape_Verde'), ('Atlantic/Faroe', '(GMT+0100) Atlantic/Faroe'), ('Atlantic/Madeira', '(GMT+0100) Atlantic/Madeira'), ('Atlantic/Reykjavik', '(GMT+0000) Atlantic/Reykjavik'), ('Atlantic/South_Georgia', '(GMT-0200) Atlantic/South_Georgia'), ('Atlantic/St_Helena', '(GMT+0000) Atlantic/St_Helena'), ('Atlantic/Stanley', '(GMT-0300) Atlantic/Stanley'), ('Australia/Adelaide', '(GMT+0930) Australia/Adelaide'), ('Australia/Brisbane', '(GMT+1000) Australia/Brisbane'), ('Australia/Broken_Hill', '(GMT+0930) Australia/Broken_Hill'), ('Australia/Currie', '(GMT+1000) Australia/Currie'), ('Australia/Darwin', '(GMT+0930) Australia/Darwin'), ('Australia/Eucla', '(GMT+0845) Australia/Eucla'), ('Australia/Hobart', '(GMT+1000) Australia/Hobart'), ('Australia/Lindeman', '(GMT+1000) Australia/Lindeman'), ('Australia/Lord_Howe', '(GMT+1030) Australia/Lord_Howe'), ('Australia/Melbourne', '(GMT+1000) Australia/Melbourne'), ('Australia/Perth', '(GMT+0800) Australia/Perth'), ('Australia/Sydney', '(GMT+1000) Australia/Sydney'), ('Canada/Atlantic', '(GMT-0300) Canada/Atlantic'), ('Canada/Central', '(GMT-0500) Canada/Central'), ('Canada/Eastern', '(GMT-0400) Canada/Eastern'), ('Canada/Mountain', '(GMT-0600) Canada/Mountain'), ('Canada/Newfoundland', '(GMT-0230) Canada/Newfoundland'), ('Canada/Pacific', '(GMT-0700) Canada/Pacific'), ('Europe/Amsterdam', '(GMT+0200) Europe/Amsterdam'), ('Europe/Andorra', '(GMT+0200) Europe/Andorra'), ('Europe/Astrakhan', '(GMT+0400) Europe/Astrakhan'), ('Europe/Athens', '(GMT+0300) Europe/Athens'), ('Europe/Belgrade', '(GMT+0200) Europe/Belgrade'), ('Europe/Berlin', '(GMT+0200) Europe/Berlin'), ('Europe/Bratislava', '(GMT+0200) Europe/Bratislava'), ('Europe/Brussels', '(GMT+0200) Europe/Brussels'), ('Europe/Bucharest', '(GMT+0300) Europe/Bucharest'), ('Europe/Budapest', '(GMT+0200) Europe/Budapest'), ('Europe/Busingen', '(GMT+0200) Europe/Busingen'), ('Europe/Chisinau', '(GMT+0300) Europe/Chisinau'), ('Europe/Copenhagen', '(GMT+0200) Europe/Copenhagen'), ('Europe/Dublin', '(GMT+0100) Europe/Dublin'), ('Europe/Gibraltar', '(GMT+0200) Europe/Gibraltar'), ('Europe/Guernsey', '(GMT+0100) Europe/Guernsey'), ('Europe/Helsinki', '(GMT+0300) Europe/Helsinki'), ('Europe/Isle_of_Man', '(GMT+0100) Europe/Isle_of_Man'), ('Europe/Istanbul', '(GMT+0300) Europe/Istanbul'), ('Europe/Jersey', 
'(GMT+0100) Europe/Jersey'), ('Europe/Kaliningrad', '(GMT+0200) Europe/Kaliningrad'), ('Europe/Kiev', '(GMT+0300) Europe/Kiev'), ('Europe/Kirov', '(GMT+0300) Europe/Kirov'), ('Europe/Lisbon', '(GMT+0100) Europe/Lisbon'), ('Europe/Ljubljana', '(GMT+0200) Europe/Ljubljana'), ('Europe/London', '(GMT+0100) Europe/London'), ('Europe/Luxembourg', '(GMT+0200) Europe/Luxembourg'), ('Europe/Madrid', '(GMT+0200) Europe/Madrid'), ('Europe/Malta', '(GMT+0200) Europe/Malta'), ('Europe/Mariehamn', '(GMT+0300) Europe/Mariehamn'), ('Europe/Minsk', '(GMT+0300) Europe/Minsk'), ('Europe/Monaco', '(GMT+0200) Europe/Monaco'), ('Europe/Moscow', '(GMT+0300) Europe/Moscow'), ('Europe/Oslo', '(GMT+0200) Europe/Oslo'), ('Europe/Paris', '(GMT+0200) Europe/Paris'), ('Europe/Podgorica', '(GMT+0200) Europe/Podgorica'), ('Europe/Prague', '(GMT+0200) Europe/Prague'), ('Europe/Riga', '(GMT+0300) Europe/Riga'), ('Europe/Rome', '(GMT+0200) Europe/Rome'), ('Europe/Samara', '(GMT+0400) Europe/Samara'), ('Europe/San_Marino', '(GMT+0200) Europe/San_Marino'), ('Europe/Sarajevo', '(GMT+0200) Europe/Sarajevo'), ('Europe/Saratov', '(GMT+0400) Europe/Saratov'), ('Europe/Simferopol', '(GMT+0300) Europe/Simferopol'), ('Europe/Skopje', '(GMT+0200) Europe/Skopje'), ('Europe/Sofia', '(GMT+0300) Europe/Sofia'), ('Europe/Stockholm', '(GMT+0200) Europe/Stockholm'), ('Europe/Tallinn', '(GMT+0300) Europe/Tallinn'), ('Europe/Tirane', '(GMT+0200) Europe/Tirane'), ('Europe/Ulyanovsk', '(GMT+0400) Europe/Ulyanovsk'), ('Europe/Uzhgorod', '(GMT+0300) Europe/Uzhgorod'), ('Europe/Vaduz', '(GMT+0200) Europe/Vaduz'), ('Europe/Vatican', '(GMT+0200) Europe/Vatican'), ('Europe/Vienna', '(GMT+0200) Europe/Vienna'), ('Europe/Vilnius', '(GMT+0300) Europe/Vilnius'), ('Europe/Volgograd', '(GMT+0300) Europe/Volgograd'), ('Europe/Warsaw', '(GMT+0200) Europe/Warsaw'), ('Europe/Zagreb', '(GMT+0200) Europe/Zagreb'), ('Europe/Zaporozhye', '(GMT+0300) Europe/Zaporozhye'), ('Europe/Zurich', '(GMT+0200) Europe/Zurich'), ('GMT', '(GMT+0000) GMT'), ('Indian/Antananarivo', '(GMT+0300) Indian/Antananarivo'), ('Indian/Chagos', '(GMT+0600) Indian/Chagos'), ('Indian/Christmas', '(GMT+0700) Indian/Christmas'), ('Indian/Cocos', '(GMT+0630) Indian/Cocos'), ('Indian/Comoro', '(GMT+0300) Indian/Comoro'), ('Indian/Kerguelen', '(GMT+0500) Indian/Kerguelen'), ('Indian/Mahe', '(GMT+0400) Indian/Mahe'), ('Indian/Maldives', '(GMT+0500) Indian/Maldives'), ('Indian/Mauritius', '(GMT+0400) Indian/Mauritius'), ('Indian/Mayotte', '(GMT+0300) Indian/Mayotte'), ('Indian/Reunion', '(GMT+0400) Indian/Reunion'), ('Pacific/Apia', '(GMT+1300) Pacific/Apia'), ('Pacific/Auckland', '(GMT+1200) Pacific/Auckland'), ('Pacific/Bougainville', '(GMT+1100) Pacific/Bougainville'), ('Pacific/Chatham', '(GMT+1245) Pacific/Chatham'), ('Pacific/Chuuk', '(GMT+1000) Pacific/Chuuk'), ('Pacific/Easter', '(GMT-0600) Pacific/Easter'), ('Pacific/Efate', '(GMT+1100) Pacific/Efate'), ('Pacific/Enderbury', '(GMT+1300) Pacific/Enderbury'), ('Pacific/Fakaofo', '(GMT+1300) Pacific/Fakaofo'), ('Pacific/Fiji', '(GMT+1200) Pacific/Fiji'), ('Pacific/Funafuti', '(GMT+1200) Pacific/Funafuti'), ('Pacific/Galapagos', '(GMT-0600) Pacific/Galapagos'), ('Pacific/Gambier', '(GMT-0900) Pacific/Gambier'), ('Pacific/Guadalcanal', '(GMT+1100) Pacific/Guadalcanal'), ('Pacific/Guam', '(GMT+1000) Pacific/Guam'), ('Pacific/Honolulu', '(GMT-1000) Pacific/Honolulu'), ('Pacific/Kiritimati', '(GMT+1400) Pacific/Kiritimati'), ('Pacific/Kosrae', '(GMT+1100) Pacific/Kosrae'), ('Pacific/Kwajalein', '(GMT+1200) Pacific/Kwajalein'), ('Pacific/Majuro', 
'(GMT+1200) Pacific/Majuro'), ('Pacific/Marquesas', '(GMT-0930) Pacific/Marquesas'), ('Pacific/Midway', '(GMT-1100) Pacific/Midway'), ('Pacific/Nauru', '(GMT+1200) Pacific/Nauru'), ('Pacific/Niue', '(GMT-1100) Pacific/Niue'), ('Pacific/Norfolk', '(GMT+1100) Pacific/Norfolk'), ('Pacific/Noumea', '(GMT+1100) Pacific/Noumea'), ('Pacific/Pago_Pago', '(GMT-1100) Pacific/Pago_Pago'), ('Pacific/Palau', '(GMT+0900) Pacific/Palau'), ('Pacific/Pitcairn', '(GMT-0800) Pacific/Pitcairn'), ('Pacific/Pohnpei', '(GMT+1100) Pacific/Pohnpei'), ('Pacific/Port_Moresby', '(GMT+1000) Pacific/Port_Moresby'), ('Pacific/Rarotonga', '(GMT-1000) Pacific/Rarotonga'), ('Pacific/Saipan', '(GMT+1000) Pacific/Saipan'), ('Pacific/Tahiti', '(GMT-1000) Pacific/Tahiti'), ('Pacific/Tarawa', '(GMT+1200) Pacific/Tarawa'), ('Pacific/Tongatapu', '(GMT+1300) Pacific/Tongatapu'), ('Pacific/Wake', '(GMT+1200) Pacific/Wake'), ('Pacific/Wallis', '(GMT+1200) Pacific/Wallis'), ('US/Alaska', '(GMT-0800) US/Alaska'), ('US/Arizona', '(GMT-0700) US/Arizona'), ('US/Central', '(GMT-0500) US/Central'), ('US/Eastern', '(GMT-0400) US/Eastern'), ('US/Hawaii', '(GMT-1000) US/Hawaii'), ('US/Mountain', '(GMT-0600) US/Mountain'), ('US/Pacific', '(GMT-0700) US/Pacific'), ('UTC', '(GMT+0000) UTC')], default='America/New_York', max_length=100)), - ('secret_token', models.CharField(blank=True, max_length=12, null=True)), - ('stripe_4_digits', models.CharField(blank=True, max_length=4, null=True)), - ('stripe_id', models.CharField(blank=True, max_length=24, null=True)), - ('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='profile', to=settings.AUTH_USER_MODEL)), + ( + "id", + models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID"), + ), + ("is_premium", models.BooleanField(default=False)), + ("premium_expire", models.DateTimeField(blank=True, null=True)), + ("send_emails", models.BooleanField(default=True)), + ("preferences", models.TextField(default="{}")), + ("view_settings", models.TextField(default="{}")), + ("collapsed_folders", models.TextField(default="[]")), + ("feed_pane_size", models.IntegerField(default=242)), + ("tutorial_finished", models.BooleanField(default=False)), + ("hide_getting_started", models.NullBooleanField(default=False)), + ("has_setup_feeds", models.NullBooleanField(default=False)), + ("has_found_friends", models.NullBooleanField(default=False)), + ("has_trained_intelligence", models.NullBooleanField(default=False)), + ("last_seen_on", models.DateTimeField(default=datetime.datetime.now)), + ("last_seen_ip", models.CharField(blank=True, max_length=50, null=True)), + ("dashboard_date", models.DateTimeField(default=datetime.datetime.now)), + ( + "timezone", + vendor.timezones.fields.TimeZoneField( + choices=[ + ("Africa/Abidjan", "(GMT+0000) Africa/Abidjan"), + ("Africa/Accra", "(GMT+0000) Africa/Accra"), + ("Africa/Addis_Ababa", "(GMT+0300) Africa/Addis_Ababa"), + ("Africa/Algiers", "(GMT+0100) Africa/Algiers"), + ("Africa/Asmara", "(GMT+0300) Africa/Asmara"), + ("Africa/Bamako", "(GMT+0000) Africa/Bamako"), + ("Africa/Bangui", "(GMT+0100) Africa/Bangui"), + ("Africa/Banjul", "(GMT+0000) Africa/Banjul"), + ("Africa/Bissau", "(GMT+0000) Africa/Bissau"), + ("Africa/Blantyre", "(GMT+0200) Africa/Blantyre"), + ("Africa/Brazzaville", "(GMT+0100) Africa/Brazzaville"), + ("Africa/Bujumbura", "(GMT+0200) Africa/Bujumbura"), + ("Africa/Cairo", "(GMT+0200) Africa/Cairo"), + ("Africa/Casablanca", "(GMT+0100) Africa/Casablanca"), + ("Africa/Ceuta", "(GMT+0200) 
Africa/Ceuta"), + ("Africa/Conakry", "(GMT+0000) Africa/Conakry"), + ("Africa/Dakar", "(GMT+0000) Africa/Dakar"), + ("Africa/Dar_es_Salaam", "(GMT+0300) Africa/Dar_es_Salaam"), + ("Africa/Djibouti", "(GMT+0300) Africa/Djibouti"), + ("Africa/Douala", "(GMT+0100) Africa/Douala"), + ("Africa/El_Aaiun", "(GMT+0100) Africa/El_Aaiun"), + ("Africa/Freetown", "(GMT+0000) Africa/Freetown"), + ("Africa/Gaborone", "(GMT+0200) Africa/Gaborone"), + ("Africa/Harare", "(GMT+0200) Africa/Harare"), + ("Africa/Johannesburg", "(GMT+0200) Africa/Johannesburg"), + ("Africa/Juba", "(GMT+0300) Africa/Juba"), + ("Africa/Kampala", "(GMT+0300) Africa/Kampala"), + ("Africa/Khartoum", "(GMT+0200) Africa/Khartoum"), + ("Africa/Kigali", "(GMT+0200) Africa/Kigali"), + ("Africa/Kinshasa", "(GMT+0100) Africa/Kinshasa"), + ("Africa/Lagos", "(GMT+0100) Africa/Lagos"), + ("Africa/Libreville", "(GMT+0100) Africa/Libreville"), + ("Africa/Lome", "(GMT+0000) Africa/Lome"), + ("Africa/Luanda", "(GMT+0100) Africa/Luanda"), + ("Africa/Lubumbashi", "(GMT+0200) Africa/Lubumbashi"), + ("Africa/Lusaka", "(GMT+0200) Africa/Lusaka"), + ("Africa/Malabo", "(GMT+0100) Africa/Malabo"), + ("Africa/Maputo", "(GMT+0200) Africa/Maputo"), + ("Africa/Maseru", "(GMT+0200) Africa/Maseru"), + ("Africa/Mbabane", "(GMT+0200) Africa/Mbabane"), + ("Africa/Mogadishu", "(GMT+0300) Africa/Mogadishu"), + ("Africa/Monrovia", "(GMT+0000) Africa/Monrovia"), + ("Africa/Nairobi", "(GMT+0300) Africa/Nairobi"), + ("Africa/Ndjamena", "(GMT+0100) Africa/Ndjamena"), + ("Africa/Niamey", "(GMT+0100) Africa/Niamey"), + ("Africa/Nouakchott", "(GMT+0000) Africa/Nouakchott"), + ("Africa/Ouagadougou", "(GMT+0000) Africa/Ouagadougou"), + ("Africa/Porto-Novo", "(GMT+0100) Africa/Porto-Novo"), + ("Africa/Sao_Tome", "(GMT+0100) Africa/Sao_Tome"), + ("Africa/Tripoli", "(GMT+0200) Africa/Tripoli"), + ("Africa/Tunis", "(GMT+0100) Africa/Tunis"), + ("Africa/Windhoek", "(GMT+0200) Africa/Windhoek"), + ("America/Adak", "(GMT-0900) America/Adak"), + ("America/Anchorage", "(GMT-0800) America/Anchorage"), + ("America/Anguilla", "(GMT-0400) America/Anguilla"), + ("America/Antigua", "(GMT-0400) America/Antigua"), + ("America/Araguaina", "(GMT-0300) America/Araguaina"), + ("America/Argentina/Buenos_Aires", "(GMT-0300) America/Argentina/Buenos_Aires"), + ("America/Argentina/Catamarca", "(GMT-0300) America/Argentina/Catamarca"), + ("America/Argentina/Cordoba", "(GMT-0300) America/Argentina/Cordoba"), + ("America/Argentina/Jujuy", "(GMT-0300) America/Argentina/Jujuy"), + ("America/Argentina/La_Rioja", "(GMT-0300) America/Argentina/La_Rioja"), + ("America/Argentina/Mendoza", "(GMT-0300) America/Argentina/Mendoza"), + ("America/Argentina/Rio_Gallegos", "(GMT-0300) America/Argentina/Rio_Gallegos"), + ("America/Argentina/Salta", "(GMT-0300) America/Argentina/Salta"), + ("America/Argentina/San_Juan", "(GMT-0300) America/Argentina/San_Juan"), + ("America/Argentina/San_Luis", "(GMT-0300) America/Argentina/San_Luis"), + ("America/Argentina/Tucuman", "(GMT-0300) America/Argentina/Tucuman"), + ("America/Argentina/Ushuaia", "(GMT-0300) America/Argentina/Ushuaia"), + ("America/Aruba", "(GMT-0400) America/Aruba"), + ("America/Asuncion", "(GMT-0400) America/Asuncion"), + ("America/Atikokan", "(GMT-0500) America/Atikokan"), + ("America/Bahia", "(GMT-0300) America/Bahia"), + ("America/Bahia_Banderas", "(GMT-0500) America/Bahia_Banderas"), + ("America/Barbados", "(GMT-0400) America/Barbados"), + ("America/Belem", "(GMT-0300) America/Belem"), + ("America/Belize", "(GMT-0600) America/Belize"), + 
("America/Blanc-Sablon", "(GMT-0400) America/Blanc-Sablon"), + ("America/Boa_Vista", "(GMT-0400) America/Boa_Vista"), + ("America/Bogota", "(GMT-0500) America/Bogota"), + ("America/Boise", "(GMT-0600) America/Boise"), + ("America/Cambridge_Bay", "(GMT-0600) America/Cambridge_Bay"), + ("America/Campo_Grande", "(GMT-0400) America/Campo_Grande"), + ("America/Cancun", "(GMT-0500) America/Cancun"), + ("America/Caracas", "(GMT-0400) America/Caracas"), + ("America/Cayenne", "(GMT-0300) America/Cayenne"), + ("America/Cayman", "(GMT-0500) America/Cayman"), + ("America/Chicago", "(GMT-0500) America/Chicago"), + ("America/Chihuahua", "(GMT-0600) America/Chihuahua"), + ("America/Costa_Rica", "(GMT-0600) America/Costa_Rica"), + ("America/Creston", "(GMT-0700) America/Creston"), + ("America/Cuiaba", "(GMT-0400) America/Cuiaba"), + ("America/Curacao", "(GMT-0400) America/Curacao"), + ("America/Danmarkshavn", "(GMT+0000) America/Danmarkshavn"), + ("America/Dawson", "(GMT-0700) America/Dawson"), + ("America/Dawson_Creek", "(GMT-0700) America/Dawson_Creek"), + ("America/Denver", "(GMT-0600) America/Denver"), + ("America/Detroit", "(GMT-0400) America/Detroit"), + ("America/Dominica", "(GMT-0400) America/Dominica"), + ("America/Edmonton", "(GMT-0600) America/Edmonton"), + ("America/Eirunepe", "(GMT-0500) America/Eirunepe"), + ("America/El_Salvador", "(GMT-0600) America/El_Salvador"), + ("America/Fort_Nelson", "(GMT-0700) America/Fort_Nelson"), + ("America/Fortaleza", "(GMT-0300) America/Fortaleza"), + ("America/Glace_Bay", "(GMT-0300) America/Glace_Bay"), + ("America/Godthab", "(GMT-0200) America/Godthab"), + ("America/Goose_Bay", "(GMT-0300) America/Goose_Bay"), + ("America/Grand_Turk", "(GMT-0400) America/Grand_Turk"), + ("America/Grenada", "(GMT-0400) America/Grenada"), + ("America/Guadeloupe", "(GMT-0400) America/Guadeloupe"), + ("America/Guatemala", "(GMT-0600) America/Guatemala"), + ("America/Guayaquil", "(GMT-0500) America/Guayaquil"), + ("America/Guyana", "(GMT-0400) America/Guyana"), + ("America/Halifax", "(GMT-0300) America/Halifax"), + ("America/Havana", "(GMT-0400) America/Havana"), + ("America/Hermosillo", "(GMT-0700) America/Hermosillo"), + ("America/Indiana/Indianapolis", "(GMT-0400) America/Indiana/Indianapolis"), + ("America/Indiana/Knox", "(GMT-0500) America/Indiana/Knox"), + ("America/Indiana/Marengo", "(GMT-0400) America/Indiana/Marengo"), + ("America/Indiana/Petersburg", "(GMT-0400) America/Indiana/Petersburg"), + ("America/Indiana/Tell_City", "(GMT-0500) America/Indiana/Tell_City"), + ("America/Indiana/Vevay", "(GMT-0400) America/Indiana/Vevay"), + ("America/Indiana/Vincennes", "(GMT-0400) America/Indiana/Vincennes"), + ("America/Indiana/Winamac", "(GMT-0400) America/Indiana/Winamac"), + ("America/Inuvik", "(GMT-0600) America/Inuvik"), + ("America/Iqaluit", "(GMT-0400) America/Iqaluit"), + ("America/Jamaica", "(GMT-0500) America/Jamaica"), + ("America/Juneau", "(GMT-0800) America/Juneau"), + ("America/Kentucky/Louisville", "(GMT-0400) America/Kentucky/Louisville"), + ("America/Kentucky/Monticello", "(GMT-0400) America/Kentucky/Monticello"), + ("America/Kralendijk", "(GMT-0400) America/Kralendijk"), + ("America/La_Paz", "(GMT-0400) America/La_Paz"), + ("America/Lima", "(GMT-0500) America/Lima"), + ("America/Los_Angeles", "(GMT-0700) America/Los_Angeles"), + ("America/Lower_Princes", "(GMT-0400) America/Lower_Princes"), + ("America/Maceio", "(GMT-0300) America/Maceio"), + ("America/Managua", "(GMT-0600) America/Managua"), + ("America/Manaus", "(GMT-0400) America/Manaus"), + 
("America/Marigot", "(GMT-0400) America/Marigot"), + ("America/Martinique", "(GMT-0400) America/Martinique"), + ("America/Matamoros", "(GMT-0500) America/Matamoros"), + ("America/Mazatlan", "(GMT-0600) America/Mazatlan"), + ("America/Menominee", "(GMT-0500) America/Menominee"), + ("America/Merida", "(GMT-0500) America/Merida"), + ("America/Metlakatla", "(GMT-0800) America/Metlakatla"), + ("America/Mexico_City", "(GMT-0500) America/Mexico_City"), + ("America/Miquelon", "(GMT-0200) America/Miquelon"), + ("America/Moncton", "(GMT-0300) America/Moncton"), + ("America/Monterrey", "(GMT-0500) America/Monterrey"), + ("America/Montevideo", "(GMT-0300) America/Montevideo"), + ("America/Montserrat", "(GMT-0400) America/Montserrat"), + ("America/Nassau", "(GMT-0400) America/Nassau"), + ("America/New_York", "(GMT-0400) America/New_York"), + ("America/Nipigon", "(GMT-0400) America/Nipigon"), + ("America/Nome", "(GMT-0800) America/Nome"), + ("America/Noronha", "(GMT-0200) America/Noronha"), + ("America/North_Dakota/Beulah", "(GMT-0500) America/North_Dakota/Beulah"), + ("America/North_Dakota/Center", "(GMT-0500) America/North_Dakota/Center"), + ("America/North_Dakota/New_Salem", "(GMT-0500) America/North_Dakota/New_Salem"), + ("America/Ojinaga", "(GMT-0600) America/Ojinaga"), + ("America/Panama", "(GMT-0500) America/Panama"), + ("America/Pangnirtung", "(GMT-0400) America/Pangnirtung"), + ("America/Paramaribo", "(GMT-0300) America/Paramaribo"), + ("America/Phoenix", "(GMT-0700) America/Phoenix"), + ("America/Port-au-Prince", "(GMT-0400) America/Port-au-Prince"), + ("America/Port_of_Spain", "(GMT-0400) America/Port_of_Spain"), + ("America/Porto_Velho", "(GMT-0400) America/Porto_Velho"), + ("America/Puerto_Rico", "(GMT-0400) America/Puerto_Rico"), + ("America/Punta_Arenas", "(GMT-0300) America/Punta_Arenas"), + ("America/Rainy_River", "(GMT-0500) America/Rainy_River"), + ("America/Rankin_Inlet", "(GMT-0500) America/Rankin_Inlet"), + ("America/Recife", "(GMT-0300) America/Recife"), + ("America/Regina", "(GMT-0600) America/Regina"), + ("America/Resolute", "(GMT-0500) America/Resolute"), + ("America/Rio_Branco", "(GMT-0500) America/Rio_Branco"), + ("America/Santarem", "(GMT-0300) America/Santarem"), + ("America/Santiago", "(GMT-0400) America/Santiago"), + ("America/Santo_Domingo", "(GMT-0400) America/Santo_Domingo"), + ("America/Sao_Paulo", "(GMT-0300) America/Sao_Paulo"), + ("America/Scoresbysund", "(GMT+0000) America/Scoresbysund"), + ("America/Sitka", "(GMT-0800) America/Sitka"), + ("America/St_Barthelemy", "(GMT-0400) America/St_Barthelemy"), + ("America/St_Johns", "(GMT-0230) America/St_Johns"), + ("America/St_Kitts", "(GMT-0400) America/St_Kitts"), + ("America/St_Lucia", "(GMT-0400) America/St_Lucia"), + ("America/St_Thomas", "(GMT-0400) America/St_Thomas"), + ("America/St_Vincent", "(GMT-0400) America/St_Vincent"), + ("America/Swift_Current", "(GMT-0600) America/Swift_Current"), + ("America/Tegucigalpa", "(GMT-0600) America/Tegucigalpa"), + ("America/Thule", "(GMT-0300) America/Thule"), + ("America/Thunder_Bay", "(GMT-0400) America/Thunder_Bay"), + ("America/Tijuana", "(GMT-0700) America/Tijuana"), + ("America/Toronto", "(GMT-0400) America/Toronto"), + ("America/Tortola", "(GMT-0400) America/Tortola"), + ("America/Vancouver", "(GMT-0700) America/Vancouver"), + ("America/Whitehorse", "(GMT-0700) America/Whitehorse"), + ("America/Winnipeg", "(GMT-0500) America/Winnipeg"), + ("America/Yakutat", "(GMT-0800) America/Yakutat"), + ("America/Yellowknife", "(GMT-0600) America/Yellowknife"), + 
("Antarctica/Casey", "(GMT+1100) Antarctica/Casey"), + ("Antarctica/Davis", "(GMT+0700) Antarctica/Davis"), + ("Antarctica/DumontDUrville", "(GMT+1000) Antarctica/DumontDUrville"), + ("Antarctica/Macquarie", "(GMT+1100) Antarctica/Macquarie"), + ("Antarctica/Mawson", "(GMT+0500) Antarctica/Mawson"), + ("Antarctica/McMurdo", "(GMT+1200) Antarctica/McMurdo"), + ("Antarctica/Palmer", "(GMT-0300) Antarctica/Palmer"), + ("Antarctica/Rothera", "(GMT-0300) Antarctica/Rothera"), + ("Antarctica/Syowa", "(GMT+0300) Antarctica/Syowa"), + ("Antarctica/Troll", "(GMT+0200) Antarctica/Troll"), + ("Antarctica/Vostok", "(GMT+0600) Antarctica/Vostok"), + ("Arctic/Longyearbyen", "(GMT+0200) Arctic/Longyearbyen"), + ("Asia/Aden", "(GMT+0300) Asia/Aden"), + ("Asia/Almaty", "(GMT+0600) Asia/Almaty"), + ("Asia/Amman", "(GMT+0300) Asia/Amman"), + ("Asia/Anadyr", "(GMT+1200) Asia/Anadyr"), + ("Asia/Aqtau", "(GMT+0500) Asia/Aqtau"), + ("Asia/Aqtobe", "(GMT+0500) Asia/Aqtobe"), + ("Asia/Ashgabat", "(GMT+0500) Asia/Ashgabat"), + ("Asia/Atyrau", "(GMT+0500) Asia/Atyrau"), + ("Asia/Baghdad", "(GMT+0300) Asia/Baghdad"), + ("Asia/Bahrain", "(GMT+0300) Asia/Bahrain"), + ("Asia/Baku", "(GMT+0400) Asia/Baku"), + ("Asia/Bangkok", "(GMT+0700) Asia/Bangkok"), + ("Asia/Barnaul", "(GMT+0700) Asia/Barnaul"), + ("Asia/Beirut", "(GMT+0300) Asia/Beirut"), + ("Asia/Bishkek", "(GMT+0600) Asia/Bishkek"), + ("Asia/Brunei", "(GMT+0800) Asia/Brunei"), + ("Asia/Chita", "(GMT+0900) Asia/Chita"), + ("Asia/Choibalsan", "(GMT+0800) Asia/Choibalsan"), + ("Asia/Colombo", "(GMT+0530) Asia/Colombo"), + ("Asia/Damascus", "(GMT+0300) Asia/Damascus"), + ("Asia/Dhaka", "(GMT+0600) Asia/Dhaka"), + ("Asia/Dili", "(GMT+0900) Asia/Dili"), + ("Asia/Dubai", "(GMT+0400) Asia/Dubai"), + ("Asia/Dushanbe", "(GMT+0500) Asia/Dushanbe"), + ("Asia/Famagusta", "(GMT+0300) Asia/Famagusta"), + ("Asia/Gaza", "(GMT+0300) Asia/Gaza"), + ("Asia/Hebron", "(GMT+0300) Asia/Hebron"), + ("Asia/Ho_Chi_Minh", "(GMT+0700) Asia/Ho_Chi_Minh"), + ("Asia/Hong_Kong", "(GMT+0800) Asia/Hong_Kong"), + ("Asia/Hovd", "(GMT+0700) Asia/Hovd"), + ("Asia/Irkutsk", "(GMT+0800) Asia/Irkutsk"), + ("Asia/Jakarta", "(GMT+0700) Asia/Jakarta"), + ("Asia/Jayapura", "(GMT+0900) Asia/Jayapura"), + ("Asia/Jerusalem", "(GMT+0300) Asia/Jerusalem"), + ("Asia/Kabul", "(GMT+0430) Asia/Kabul"), + ("Asia/Kamchatka", "(GMT+1200) Asia/Kamchatka"), + ("Asia/Karachi", "(GMT+0500) Asia/Karachi"), + ("Asia/Kathmandu", "(GMT+0545) Asia/Kathmandu"), + ("Asia/Khandyga", "(GMT+0900) Asia/Khandyga"), + ("Asia/Kolkata", "(GMT+0530) Asia/Kolkata"), + ("Asia/Krasnoyarsk", "(GMT+0700) Asia/Krasnoyarsk"), + ("Asia/Kuala_Lumpur", "(GMT+0800) Asia/Kuala_Lumpur"), + ("Asia/Kuching", "(GMT+0800) Asia/Kuching"), + ("Asia/Kuwait", "(GMT+0300) Asia/Kuwait"), + ("Asia/Macau", "(GMT+0800) Asia/Macau"), + ("Asia/Magadan", "(GMT+1100) Asia/Magadan"), + ("Asia/Makassar", "(GMT+0800) Asia/Makassar"), + ("Asia/Manila", "(GMT+0800) Asia/Manila"), + ("Asia/Muscat", "(GMT+0400) Asia/Muscat"), + ("Asia/Nicosia", "(GMT+0300) Asia/Nicosia"), + ("Asia/Novokuznetsk", "(GMT+0700) Asia/Novokuznetsk"), + ("Asia/Novosibirsk", "(GMT+0700) Asia/Novosibirsk"), + ("Asia/Omsk", "(GMT+0600) Asia/Omsk"), + ("Asia/Oral", "(GMT+0500) Asia/Oral"), + ("Asia/Phnom_Penh", "(GMT+0700) Asia/Phnom_Penh"), + ("Asia/Pontianak", "(GMT+0700) Asia/Pontianak"), + ("Asia/Pyongyang", "(GMT+0830) Asia/Pyongyang"), + ("Asia/Qatar", "(GMT+0300) Asia/Qatar"), + ("Asia/Qyzylorda", "(GMT+0600) Asia/Qyzylorda"), + ("Asia/Riyadh", "(GMT+0300) Asia/Riyadh"), + ("Asia/Sakhalin", 
"(GMT+1100) Asia/Sakhalin"), + ("Asia/Samarkand", "(GMT+0500) Asia/Samarkand"), + ("Asia/Seoul", "(GMT+0900) Asia/Seoul"), + ("Asia/Shanghai", "(GMT+0800) Asia/Shanghai"), + ("Asia/Singapore", "(GMT+0800) Asia/Singapore"), + ("Asia/Srednekolymsk", "(GMT+1100) Asia/Srednekolymsk"), + ("Asia/Taipei", "(GMT+0800) Asia/Taipei"), + ("Asia/Tashkent", "(GMT+0500) Asia/Tashkent"), + ("Asia/Tbilisi", "(GMT+0400) Asia/Tbilisi"), + ("Asia/Tehran", "(GMT+0430) Asia/Tehran"), + ("Asia/Thimphu", "(GMT+0600) Asia/Thimphu"), + ("Asia/Tokyo", "(GMT+0900) Asia/Tokyo"), + ("Asia/Tomsk", "(GMT+0700) Asia/Tomsk"), + ("Asia/Ulaanbaatar", "(GMT+0800) Asia/Ulaanbaatar"), + ("Asia/Urumqi", "(GMT+0600) Asia/Urumqi"), + ("Asia/Ust-Nera", "(GMT+1000) Asia/Ust-Nera"), + ("Asia/Vientiane", "(GMT+0700) Asia/Vientiane"), + ("Asia/Vladivostok", "(GMT+1000) Asia/Vladivostok"), + ("Asia/Yakutsk", "(GMT+0900) Asia/Yakutsk"), + ("Asia/Yangon", "(GMT+0630) Asia/Yangon"), + ("Asia/Yekaterinburg", "(GMT+0500) Asia/Yekaterinburg"), + ("Asia/Yerevan", "(GMT+0400) Asia/Yerevan"), + ("Atlantic/Azores", "(GMT+0000) Atlantic/Azores"), + ("Atlantic/Bermuda", "(GMT-0300) Atlantic/Bermuda"), + ("Atlantic/Canary", "(GMT+0100) Atlantic/Canary"), + ("Atlantic/Cape_Verde", "(GMT-0100) Atlantic/Cape_Verde"), + ("Atlantic/Faroe", "(GMT+0100) Atlantic/Faroe"), + ("Atlantic/Madeira", "(GMT+0100) Atlantic/Madeira"), + ("Atlantic/Reykjavik", "(GMT+0000) Atlantic/Reykjavik"), + ("Atlantic/South_Georgia", "(GMT-0200) Atlantic/South_Georgia"), + ("Atlantic/St_Helena", "(GMT+0000) Atlantic/St_Helena"), + ("Atlantic/Stanley", "(GMT-0300) Atlantic/Stanley"), + ("Australia/Adelaide", "(GMT+0930) Australia/Adelaide"), + ("Australia/Brisbane", "(GMT+1000) Australia/Brisbane"), + ("Australia/Broken_Hill", "(GMT+0930) Australia/Broken_Hill"), + ("Australia/Currie", "(GMT+1000) Australia/Currie"), + ("Australia/Darwin", "(GMT+0930) Australia/Darwin"), + ("Australia/Eucla", "(GMT+0845) Australia/Eucla"), + ("Australia/Hobart", "(GMT+1000) Australia/Hobart"), + ("Australia/Lindeman", "(GMT+1000) Australia/Lindeman"), + ("Australia/Lord_Howe", "(GMT+1030) Australia/Lord_Howe"), + ("Australia/Melbourne", "(GMT+1000) Australia/Melbourne"), + ("Australia/Perth", "(GMT+0800) Australia/Perth"), + ("Australia/Sydney", "(GMT+1000) Australia/Sydney"), + ("Canada/Atlantic", "(GMT-0300) Canada/Atlantic"), + ("Canada/Central", "(GMT-0500) Canada/Central"), + ("Canada/Eastern", "(GMT-0400) Canada/Eastern"), + ("Canada/Mountain", "(GMT-0600) Canada/Mountain"), + ("Canada/Newfoundland", "(GMT-0230) Canada/Newfoundland"), + ("Canada/Pacific", "(GMT-0700) Canada/Pacific"), + ("Europe/Amsterdam", "(GMT+0200) Europe/Amsterdam"), + ("Europe/Andorra", "(GMT+0200) Europe/Andorra"), + ("Europe/Astrakhan", "(GMT+0400) Europe/Astrakhan"), + ("Europe/Athens", "(GMT+0300) Europe/Athens"), + ("Europe/Belgrade", "(GMT+0200) Europe/Belgrade"), + ("Europe/Berlin", "(GMT+0200) Europe/Berlin"), + ("Europe/Bratislava", "(GMT+0200) Europe/Bratislava"), + ("Europe/Brussels", "(GMT+0200) Europe/Brussels"), + ("Europe/Bucharest", "(GMT+0300) Europe/Bucharest"), + ("Europe/Budapest", "(GMT+0200) Europe/Budapest"), + ("Europe/Busingen", "(GMT+0200) Europe/Busingen"), + ("Europe/Chisinau", "(GMT+0300) Europe/Chisinau"), + ("Europe/Copenhagen", "(GMT+0200) Europe/Copenhagen"), + ("Europe/Dublin", "(GMT+0100) Europe/Dublin"), + ("Europe/Gibraltar", "(GMT+0200) Europe/Gibraltar"), + ("Europe/Guernsey", "(GMT+0100) Europe/Guernsey"), + ("Europe/Helsinki", "(GMT+0300) Europe/Helsinki"), + 
("Europe/Isle_of_Man", "(GMT+0100) Europe/Isle_of_Man"), + ("Europe/Istanbul", "(GMT+0300) Europe/Istanbul"), + ("Europe/Jersey", "(GMT+0100) Europe/Jersey"), + ("Europe/Kaliningrad", "(GMT+0200) Europe/Kaliningrad"), + ("Europe/Kiev", "(GMT+0300) Europe/Kiev"), + ("Europe/Kirov", "(GMT+0300) Europe/Kirov"), + ("Europe/Lisbon", "(GMT+0100) Europe/Lisbon"), + ("Europe/Ljubljana", "(GMT+0200) Europe/Ljubljana"), + ("Europe/London", "(GMT+0100) Europe/London"), + ("Europe/Luxembourg", "(GMT+0200) Europe/Luxembourg"), + ("Europe/Madrid", "(GMT+0200) Europe/Madrid"), + ("Europe/Malta", "(GMT+0200) Europe/Malta"), + ("Europe/Mariehamn", "(GMT+0300) Europe/Mariehamn"), + ("Europe/Minsk", "(GMT+0300) Europe/Minsk"), + ("Europe/Monaco", "(GMT+0200) Europe/Monaco"), + ("Europe/Moscow", "(GMT+0300) Europe/Moscow"), + ("Europe/Oslo", "(GMT+0200) Europe/Oslo"), + ("Europe/Paris", "(GMT+0200) Europe/Paris"), + ("Europe/Podgorica", "(GMT+0200) Europe/Podgorica"), + ("Europe/Prague", "(GMT+0200) Europe/Prague"), + ("Europe/Riga", "(GMT+0300) Europe/Riga"), + ("Europe/Rome", "(GMT+0200) Europe/Rome"), + ("Europe/Samara", "(GMT+0400) Europe/Samara"), + ("Europe/San_Marino", "(GMT+0200) Europe/San_Marino"), + ("Europe/Sarajevo", "(GMT+0200) Europe/Sarajevo"), + ("Europe/Saratov", "(GMT+0400) Europe/Saratov"), + ("Europe/Simferopol", "(GMT+0300) Europe/Simferopol"), + ("Europe/Skopje", "(GMT+0200) Europe/Skopje"), + ("Europe/Sofia", "(GMT+0300) Europe/Sofia"), + ("Europe/Stockholm", "(GMT+0200) Europe/Stockholm"), + ("Europe/Tallinn", "(GMT+0300) Europe/Tallinn"), + ("Europe/Tirane", "(GMT+0200) Europe/Tirane"), + ("Europe/Ulyanovsk", "(GMT+0400) Europe/Ulyanovsk"), + ("Europe/Uzhgorod", "(GMT+0300) Europe/Uzhgorod"), + ("Europe/Vaduz", "(GMT+0200) Europe/Vaduz"), + ("Europe/Vatican", "(GMT+0200) Europe/Vatican"), + ("Europe/Vienna", "(GMT+0200) Europe/Vienna"), + ("Europe/Vilnius", "(GMT+0300) Europe/Vilnius"), + ("Europe/Volgograd", "(GMT+0300) Europe/Volgograd"), + ("Europe/Warsaw", "(GMT+0200) Europe/Warsaw"), + ("Europe/Zagreb", "(GMT+0200) Europe/Zagreb"), + ("Europe/Zaporozhye", "(GMT+0300) Europe/Zaporozhye"), + ("Europe/Zurich", "(GMT+0200) Europe/Zurich"), + ("GMT", "(GMT+0000) GMT"), + ("Indian/Antananarivo", "(GMT+0300) Indian/Antananarivo"), + ("Indian/Chagos", "(GMT+0600) Indian/Chagos"), + ("Indian/Christmas", "(GMT+0700) Indian/Christmas"), + ("Indian/Cocos", "(GMT+0630) Indian/Cocos"), + ("Indian/Comoro", "(GMT+0300) Indian/Comoro"), + ("Indian/Kerguelen", "(GMT+0500) Indian/Kerguelen"), + ("Indian/Mahe", "(GMT+0400) Indian/Mahe"), + ("Indian/Maldives", "(GMT+0500) Indian/Maldives"), + ("Indian/Mauritius", "(GMT+0400) Indian/Mauritius"), + ("Indian/Mayotte", "(GMT+0300) Indian/Mayotte"), + ("Indian/Reunion", "(GMT+0400) Indian/Reunion"), + ("Pacific/Apia", "(GMT+1300) Pacific/Apia"), + ("Pacific/Auckland", "(GMT+1200) Pacific/Auckland"), + ("Pacific/Bougainville", "(GMT+1100) Pacific/Bougainville"), + ("Pacific/Chatham", "(GMT+1245) Pacific/Chatham"), + ("Pacific/Chuuk", "(GMT+1000) Pacific/Chuuk"), + ("Pacific/Easter", "(GMT-0600) Pacific/Easter"), + ("Pacific/Efate", "(GMT+1100) Pacific/Efate"), + ("Pacific/Enderbury", "(GMT+1300) Pacific/Enderbury"), + ("Pacific/Fakaofo", "(GMT+1300) Pacific/Fakaofo"), + ("Pacific/Fiji", "(GMT+1200) Pacific/Fiji"), + ("Pacific/Funafuti", "(GMT+1200) Pacific/Funafuti"), + ("Pacific/Galapagos", "(GMT-0600) Pacific/Galapagos"), + ("Pacific/Gambier", "(GMT-0900) Pacific/Gambier"), + ("Pacific/Guadalcanal", "(GMT+1100) Pacific/Guadalcanal"), + ("Pacific/Guam", 
"(GMT+1000) Pacific/Guam"), + ("Pacific/Honolulu", "(GMT-1000) Pacific/Honolulu"), + ("Pacific/Kiritimati", "(GMT+1400) Pacific/Kiritimati"), + ("Pacific/Kosrae", "(GMT+1100) Pacific/Kosrae"), + ("Pacific/Kwajalein", "(GMT+1200) Pacific/Kwajalein"), + ("Pacific/Majuro", "(GMT+1200) Pacific/Majuro"), + ("Pacific/Marquesas", "(GMT-0930) Pacific/Marquesas"), + ("Pacific/Midway", "(GMT-1100) Pacific/Midway"), + ("Pacific/Nauru", "(GMT+1200) Pacific/Nauru"), + ("Pacific/Niue", "(GMT-1100) Pacific/Niue"), + ("Pacific/Norfolk", "(GMT+1100) Pacific/Norfolk"), + ("Pacific/Noumea", "(GMT+1100) Pacific/Noumea"), + ("Pacific/Pago_Pago", "(GMT-1100) Pacific/Pago_Pago"), + ("Pacific/Palau", "(GMT+0900) Pacific/Palau"), + ("Pacific/Pitcairn", "(GMT-0800) Pacific/Pitcairn"), + ("Pacific/Pohnpei", "(GMT+1100) Pacific/Pohnpei"), + ("Pacific/Port_Moresby", "(GMT+1000) Pacific/Port_Moresby"), + ("Pacific/Rarotonga", "(GMT-1000) Pacific/Rarotonga"), + ("Pacific/Saipan", "(GMT+1000) Pacific/Saipan"), + ("Pacific/Tahiti", "(GMT-1000) Pacific/Tahiti"), + ("Pacific/Tarawa", "(GMT+1200) Pacific/Tarawa"), + ("Pacific/Tongatapu", "(GMT+1300) Pacific/Tongatapu"), + ("Pacific/Wake", "(GMT+1200) Pacific/Wake"), + ("Pacific/Wallis", "(GMT+1200) Pacific/Wallis"), + ("US/Alaska", "(GMT-0800) US/Alaska"), + ("US/Arizona", "(GMT-0700) US/Arizona"), + ("US/Central", "(GMT-0500) US/Central"), + ("US/Eastern", "(GMT-0400) US/Eastern"), + ("US/Hawaii", "(GMT-1000) US/Hawaii"), + ("US/Mountain", "(GMT-0600) US/Mountain"), + ("US/Pacific", "(GMT-0700) US/Pacific"), + ("UTC", "(GMT+0000) UTC"), + ], + default="America/New_York", + max_length=100, + ), + ), + ("secret_token", models.CharField(blank=True, max_length=12, null=True)), + ("stripe_4_digits", models.CharField(blank=True, max_length=4, null=True)), + ("stripe_id", models.CharField(blank=True, max_length=24, null=True)), + ( + "user", + models.OneToOneField( + on_delete=django.db.models.deletion.CASCADE, + related_name="profile", + to=settings.AUTH_USER_MODEL, + ), + ), ], ), migrations.CreateModel( - name='StripeIds', + name="StripeIds", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('stripe_id', models.CharField(blank=True, max_length=24, null=True)), - ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='stripe_ids', to=settings.AUTH_USER_MODEL)), + ( + "id", + models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID"), + ), + ("stripe_id", models.CharField(blank=True, max_length=24, null=True)), + ( + "user", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + related_name="stripe_ids", + to=settings.AUTH_USER_MODEL, + ), + ), ], ), ] diff --git a/apps/profile/migrations/0002_auto_20200620_0803.py b/apps/profile/migrations/0002_auto_20200620_0803.py index 821480914..945ec7d9f 100644 --- a/apps/profile/migrations/0002_auto_20200620_0803.py +++ b/apps/profile/migrations/0002_auto_20200620_0803.py @@ -6,15 +6,19 @@ import django.db.models.deletion class Migration(migrations.Migration): - dependencies = [ - ('profile', '0001_initial'), + ("profile", "0001_initial"), ] operations = [ migrations.AlterField( - model_name='stripeids', - name='user', - field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='stripe_ids', to=settings.AUTH_USER_MODEL), + model_name="stripeids", + name="user", + field=models.ForeignKey( + null=True, + on_delete=django.db.models.deletion.CASCADE, + 
related_name="stripe_ids", + to=settings.AUTH_USER_MODEL, + ), ), ] diff --git a/apps/profile/migrations/0003_auto_20201005_0932.py b/apps/profile/migrations/0003_auto_20201005_0932.py index 28c8dd8a2..db8f3c7a7 100644 --- a/apps/profile/migrations/0003_auto_20201005_0932.py +++ b/apps/profile/migrations/0003_auto_20201005_0932.py @@ -5,15 +5,458 @@ import vendor.timezones.fields class Migration(migrations.Migration): - dependencies = [ - ('profile', '0002_auto_20200620_0803'), + ("profile", "0002_auto_20200620_0803"), ] operations = [ migrations.AlterField( - model_name='profile', - name='timezone', - field=vendor.timezones.fields.TimeZoneField(choices=[('Africa/Abidjan', '(GMT+0000) Africa/Abidjan'), ('Africa/Accra', '(GMT+0000) Africa/Accra'), ('Africa/Addis_Ababa', '(GMT+0300) Africa/Addis_Ababa'), ('Africa/Algiers', '(GMT+0100) Africa/Algiers'), ('Africa/Asmara', '(GMT+0300) Africa/Asmara'), ('Africa/Bamako', '(GMT+0000) Africa/Bamako'), ('Africa/Bangui', '(GMT+0100) Africa/Bangui'), ('Africa/Banjul', '(GMT+0000) Africa/Banjul'), ('Africa/Bissau', '(GMT+0000) Africa/Bissau'), ('Africa/Blantyre', '(GMT+0200) Africa/Blantyre'), ('Africa/Brazzaville', '(GMT+0100) Africa/Brazzaville'), ('Africa/Bujumbura', '(GMT+0200) Africa/Bujumbura'), ('Africa/Cairo', '(GMT+0200) Africa/Cairo'), ('Africa/Casablanca', '(GMT+0100) Africa/Casablanca'), ('Africa/Ceuta', '(GMT+0200) Africa/Ceuta'), ('Africa/Conakry', '(GMT+0000) Africa/Conakry'), ('Africa/Dakar', '(GMT+0000) Africa/Dakar'), ('Africa/Dar_es_Salaam', '(GMT+0300) Africa/Dar_es_Salaam'), ('Africa/Djibouti', '(GMT+0300) Africa/Djibouti'), ('Africa/Douala', '(GMT+0100) Africa/Douala'), ('Africa/El_Aaiun', '(GMT+0100) Africa/El_Aaiun'), ('Africa/Freetown', '(GMT+0000) Africa/Freetown'), ('Africa/Gaborone', '(GMT+0200) Africa/Gaborone'), ('Africa/Harare', '(GMT+0200) Africa/Harare'), ('Africa/Johannesburg', '(GMT+0200) Africa/Johannesburg'), ('Africa/Juba', '(GMT+0300) Africa/Juba'), ('Africa/Kampala', '(GMT+0300) Africa/Kampala'), ('Africa/Khartoum', '(GMT+0200) Africa/Khartoum'), ('Africa/Kigali', '(GMT+0200) Africa/Kigali'), ('Africa/Kinshasa', '(GMT+0100) Africa/Kinshasa'), ('Africa/Lagos', '(GMT+0100) Africa/Lagos'), ('Africa/Libreville', '(GMT+0100) Africa/Libreville'), ('Africa/Lome', '(GMT+0000) Africa/Lome'), ('Africa/Luanda', '(GMT+0100) Africa/Luanda'), ('Africa/Lubumbashi', '(GMT+0200) Africa/Lubumbashi'), ('Africa/Lusaka', '(GMT+0200) Africa/Lusaka'), ('Africa/Malabo', '(GMT+0100) Africa/Malabo'), ('Africa/Maputo', '(GMT+0200) Africa/Maputo'), ('Africa/Maseru', '(GMT+0200) Africa/Maseru'), ('Africa/Mbabane', '(GMT+0200) Africa/Mbabane'), ('Africa/Mogadishu', '(GMT+0300) Africa/Mogadishu'), ('Africa/Monrovia', '(GMT+0000) Africa/Monrovia'), ('Africa/Nairobi', '(GMT+0300) Africa/Nairobi'), ('Africa/Ndjamena', '(GMT+0100) Africa/Ndjamena'), ('Africa/Niamey', '(GMT+0100) Africa/Niamey'), ('Africa/Nouakchott', '(GMT+0000) Africa/Nouakchott'), ('Africa/Ouagadougou', '(GMT+0000) Africa/Ouagadougou'), ('Africa/Porto-Novo', '(GMT+0100) Africa/Porto-Novo'), ('Africa/Sao_Tome', '(GMT+0100) Africa/Sao_Tome'), ('Africa/Tripoli', '(GMT+0200) Africa/Tripoli'), ('Africa/Tunis', '(GMT+0100) Africa/Tunis'), ('Africa/Windhoek', '(GMT+0200) Africa/Windhoek'), ('America/Adak', '(GMT-0900) America/Adak'), ('America/Anchorage', '(GMT-0800) America/Anchorage'), ('America/Anguilla', '(GMT-0400) America/Anguilla'), ('America/Antigua', '(GMT-0400) America/Antigua'), ('America/Araguaina', '(GMT-0300) America/Araguaina'), ('America/Argentina/Buenos_Aires', 
'(GMT-0300) America/Argentina/Buenos_Aires'), ('America/Argentina/Catamarca', '(GMT-0300) America/Argentina/Catamarca'), ('America/Argentina/Cordoba', '(GMT-0300) America/Argentina/Cordoba'), ('America/Argentina/Jujuy', '(GMT-0300) America/Argentina/Jujuy'), ('America/Argentina/La_Rioja', '(GMT-0300) America/Argentina/La_Rioja'), ('America/Argentina/Mendoza', '(GMT-0300) America/Argentina/Mendoza'), ('America/Argentina/Rio_Gallegos', '(GMT-0300) America/Argentina/Rio_Gallegos'), ('America/Argentina/Salta', '(GMT-0300) America/Argentina/Salta'), ('America/Argentina/San_Juan', '(GMT-0300) America/Argentina/San_Juan'), ('America/Argentina/San_Luis', '(GMT-0300) America/Argentina/San_Luis'), ('America/Argentina/Tucuman', '(GMT-0300) America/Argentina/Tucuman'), ('America/Argentina/Ushuaia', '(GMT-0300) America/Argentina/Ushuaia'), ('America/Aruba', '(GMT-0400) America/Aruba'), ('America/Asuncion', '(GMT-0300) America/Asuncion'), ('America/Atikokan', '(GMT-0500) America/Atikokan'), ('America/Bahia', '(GMT-0300) America/Bahia'), ('America/Bahia_Banderas', '(GMT-0500) America/Bahia_Banderas'), ('America/Barbados', '(GMT-0400) America/Barbados'), ('America/Belem', '(GMT-0300) America/Belem'), ('America/Belize', '(GMT-0600) America/Belize'), ('America/Blanc-Sablon', '(GMT-0400) America/Blanc-Sablon'), ('America/Boa_Vista', '(GMT-0400) America/Boa_Vista'), ('America/Bogota', '(GMT-0500) America/Bogota'), ('America/Boise', '(GMT-0600) America/Boise'), ('America/Cambridge_Bay', '(GMT-0600) America/Cambridge_Bay'), ('America/Campo_Grande', '(GMT-0400) America/Campo_Grande'), ('America/Cancun', '(GMT-0500) America/Cancun'), ('America/Caracas', '(GMT-0400) America/Caracas'), ('America/Cayenne', '(GMT-0300) America/Cayenne'), ('America/Cayman', '(GMT-0500) America/Cayman'), ('America/Chicago', '(GMT-0500) America/Chicago'), ('America/Chihuahua', '(GMT-0600) America/Chihuahua'), ('America/Costa_Rica', '(GMT-0600) America/Costa_Rica'), ('America/Creston', '(GMT-0700) America/Creston'), ('America/Cuiaba', '(GMT-0400) America/Cuiaba'), ('America/Curacao', '(GMT-0400) America/Curacao'), ('America/Danmarkshavn', '(GMT+0000) America/Danmarkshavn'), ('America/Dawson', '(GMT-0700) America/Dawson'), ('America/Dawson_Creek', '(GMT-0700) America/Dawson_Creek'), ('America/Denver', '(GMT-0600) America/Denver'), ('America/Detroit', '(GMT-0400) America/Detroit'), ('America/Dominica', '(GMT-0400) America/Dominica'), ('America/Edmonton', '(GMT-0600) America/Edmonton'), ('America/Eirunepe', '(GMT-0500) America/Eirunepe'), ('America/El_Salvador', '(GMT-0600) America/El_Salvador'), ('America/Fort_Nelson', '(GMT-0700) America/Fort_Nelson'), ('America/Fortaleza', '(GMT-0300) America/Fortaleza'), ('America/Glace_Bay', '(GMT-0300) America/Glace_Bay'), ('America/Godthab', '(GMT-0200) America/Godthab'), ('America/Goose_Bay', '(GMT-0300) America/Goose_Bay'), ('America/Grand_Turk', '(GMT-0400) America/Grand_Turk'), ('America/Grenada', '(GMT-0400) America/Grenada'), ('America/Guadeloupe', '(GMT-0400) America/Guadeloupe'), ('America/Guatemala', '(GMT-0600) America/Guatemala'), ('America/Guayaquil', '(GMT-0500) America/Guayaquil'), ('America/Guyana', '(GMT-0400) America/Guyana'), ('America/Halifax', '(GMT-0300) America/Halifax'), ('America/Havana', '(GMT-0400) America/Havana'), ('America/Hermosillo', '(GMT-0700) America/Hermosillo'), ('America/Indiana/Indianapolis', '(GMT-0400) America/Indiana/Indianapolis'), ('America/Indiana/Knox', '(GMT-0500) America/Indiana/Knox'), ('America/Indiana/Marengo', '(GMT-0400) America/Indiana/Marengo'), 
('America/Indiana/Petersburg', '(GMT-0400) America/Indiana/Petersburg'), ('America/Indiana/Tell_City', '(GMT-0500) America/Indiana/Tell_City'), ('America/Indiana/Vevay', '(GMT-0400) America/Indiana/Vevay'), ('America/Indiana/Vincennes', '(GMT-0400) America/Indiana/Vincennes'), ('America/Indiana/Winamac', '(GMT-0400) America/Indiana/Winamac'), ('America/Inuvik', '(GMT-0600) America/Inuvik'), ('America/Iqaluit', '(GMT-0400) America/Iqaluit'), ('America/Jamaica', '(GMT-0500) America/Jamaica'), ('America/Juneau', '(GMT-0800) America/Juneau'), ('America/Kentucky/Louisville', '(GMT-0400) America/Kentucky/Louisville'), ('America/Kentucky/Monticello', '(GMT-0400) America/Kentucky/Monticello'), ('America/Kralendijk', '(GMT-0400) America/Kralendijk'), ('America/La_Paz', '(GMT-0400) America/La_Paz'), ('America/Lima', '(GMT-0500) America/Lima'), ('America/Los_Angeles', '(GMT-0700) America/Los_Angeles'), ('America/Lower_Princes', '(GMT-0400) America/Lower_Princes'), ('America/Maceio', '(GMT-0300) America/Maceio'), ('America/Managua', '(GMT-0600) America/Managua'), ('America/Manaus', '(GMT-0400) America/Manaus'), ('America/Marigot', '(GMT-0400) America/Marigot'), ('America/Martinique', '(GMT-0400) America/Martinique'), ('America/Matamoros', '(GMT-0500) America/Matamoros'), ('America/Mazatlan', '(GMT-0600) America/Mazatlan'), ('America/Menominee', '(GMT-0500) America/Menominee'), ('America/Merida', '(GMT-0500) America/Merida'), ('America/Metlakatla', '(GMT-0800) America/Metlakatla'), ('America/Mexico_City', '(GMT-0500) America/Mexico_City'), ('America/Miquelon', '(GMT-0200) America/Miquelon'), ('America/Moncton', '(GMT-0300) America/Moncton'), ('America/Monterrey', '(GMT-0500) America/Monterrey'), ('America/Montevideo', '(GMT-0300) America/Montevideo'), ('America/Montserrat', '(GMT-0400) America/Montserrat'), ('America/Nassau', '(GMT-0400) America/Nassau'), ('America/New_York', '(GMT-0400) America/New_York'), ('America/Nipigon', '(GMT-0400) America/Nipigon'), ('America/Nome', '(GMT-0800) America/Nome'), ('America/Noronha', '(GMT-0200) America/Noronha'), ('America/North_Dakota/Beulah', '(GMT-0500) America/North_Dakota/Beulah'), ('America/North_Dakota/Center', '(GMT-0500) America/North_Dakota/Center'), ('America/North_Dakota/New_Salem', '(GMT-0500) America/North_Dakota/New_Salem'), ('America/Ojinaga', '(GMT-0600) America/Ojinaga'), ('America/Panama', '(GMT-0500) America/Panama'), ('America/Pangnirtung', '(GMT-0400) America/Pangnirtung'), ('America/Paramaribo', '(GMT-0300) America/Paramaribo'), ('America/Phoenix', '(GMT-0700) America/Phoenix'), ('America/Port-au-Prince', '(GMT-0400) America/Port-au-Prince'), ('America/Port_of_Spain', '(GMT-0400) America/Port_of_Spain'), ('America/Porto_Velho', '(GMT-0400) America/Porto_Velho'), ('America/Puerto_Rico', '(GMT-0400) America/Puerto_Rico'), ('America/Punta_Arenas', '(GMT-0300) America/Punta_Arenas'), ('America/Rainy_River', '(GMT-0500) America/Rainy_River'), ('America/Rankin_Inlet', '(GMT-0500) America/Rankin_Inlet'), ('America/Recife', '(GMT-0300) America/Recife'), ('America/Regina', '(GMT-0600) America/Regina'), ('America/Resolute', '(GMT-0500) America/Resolute'), ('America/Rio_Branco', '(GMT-0500) America/Rio_Branco'), ('America/Santarem', '(GMT-0300) America/Santarem'), ('America/Santiago', '(GMT-0300) America/Santiago'), ('America/Santo_Domingo', '(GMT-0400) America/Santo_Domingo'), ('America/Sao_Paulo', '(GMT-0300) America/Sao_Paulo'), ('America/Scoresbysund', '(GMT+0000) America/Scoresbysund'), ('America/Sitka', '(GMT-0800) America/Sitka'), 
('America/St_Barthelemy', '(GMT-0400) America/St_Barthelemy'), ('America/St_Johns', '(GMT-0230) America/St_Johns'), ('America/St_Kitts', '(GMT-0400) America/St_Kitts'), ('America/St_Lucia', '(GMT-0400) America/St_Lucia'), ('America/St_Thomas', '(GMT-0400) America/St_Thomas'), ('America/St_Vincent', '(GMT-0400) America/St_Vincent'), ('America/Swift_Current', '(GMT-0600) America/Swift_Current'), ('America/Tegucigalpa', '(GMT-0600) America/Tegucigalpa'), ('America/Thule', '(GMT-0300) America/Thule'), ('America/Thunder_Bay', '(GMT-0400) America/Thunder_Bay'), ('America/Tijuana', '(GMT-0700) America/Tijuana'), ('America/Toronto', '(GMT-0400) America/Toronto'), ('America/Tortola', '(GMT-0400) America/Tortola'), ('America/Vancouver', '(GMT-0700) America/Vancouver'), ('America/Whitehorse', '(GMT-0700) America/Whitehorse'), ('America/Winnipeg', '(GMT-0500) America/Winnipeg'), ('America/Yakutat', '(GMT-0800) America/Yakutat'), ('America/Yellowknife', '(GMT-0600) America/Yellowknife'), ('Antarctica/Casey', '(GMT+1100) Antarctica/Casey'), ('Antarctica/Davis', '(GMT+0700) Antarctica/Davis'), ('Antarctica/DumontDUrville', '(GMT+1000) Antarctica/DumontDUrville'), ('Antarctica/Macquarie', '(GMT+1100) Antarctica/Macquarie'), ('Antarctica/Mawson', '(GMT+0500) Antarctica/Mawson'), ('Antarctica/McMurdo', '(GMT+1300) Antarctica/McMurdo'), ('Antarctica/Palmer', '(GMT-0300) Antarctica/Palmer'), ('Antarctica/Rothera', '(GMT-0300) Antarctica/Rothera'), ('Antarctica/Syowa', '(GMT+0300) Antarctica/Syowa'), ('Antarctica/Troll', '(GMT+0200) Antarctica/Troll'), ('Antarctica/Vostok', '(GMT+0600) Antarctica/Vostok'), ('Arctic/Longyearbyen', '(GMT+0200) Arctic/Longyearbyen'), ('Asia/Aden', '(GMT+0300) Asia/Aden'), ('Asia/Almaty', '(GMT+0600) Asia/Almaty'), ('Asia/Amman', '(GMT+0300) Asia/Amman'), ('Asia/Anadyr', '(GMT+1200) Asia/Anadyr'), ('Asia/Aqtau', '(GMT+0500) Asia/Aqtau'), ('Asia/Aqtobe', '(GMT+0500) Asia/Aqtobe'), ('Asia/Ashgabat', '(GMT+0500) Asia/Ashgabat'), ('Asia/Atyrau', '(GMT+0500) Asia/Atyrau'), ('Asia/Baghdad', '(GMT+0300) Asia/Baghdad'), ('Asia/Bahrain', '(GMT+0300) Asia/Bahrain'), ('Asia/Baku', '(GMT+0400) Asia/Baku'), ('Asia/Bangkok', '(GMT+0700) Asia/Bangkok'), ('Asia/Barnaul', '(GMT+0700) Asia/Barnaul'), ('Asia/Beirut', '(GMT+0300) Asia/Beirut'), ('Asia/Bishkek', '(GMT+0600) Asia/Bishkek'), ('Asia/Brunei', '(GMT+0800) Asia/Brunei'), ('Asia/Chita', '(GMT+0900) Asia/Chita'), ('Asia/Choibalsan', '(GMT+0800) Asia/Choibalsan'), ('Asia/Colombo', '(GMT+0530) Asia/Colombo'), ('Asia/Damascus', '(GMT+0300) Asia/Damascus'), ('Asia/Dhaka', '(GMT+0600) Asia/Dhaka'), ('Asia/Dili', '(GMT+0900) Asia/Dili'), ('Asia/Dubai', '(GMT+0400) Asia/Dubai'), ('Asia/Dushanbe', '(GMT+0500) Asia/Dushanbe'), ('Asia/Famagusta', '(GMT+0300) Asia/Famagusta'), ('Asia/Gaza', '(GMT+0300) Asia/Gaza'), ('Asia/Hebron', '(GMT+0300) Asia/Hebron'), ('Asia/Ho_Chi_Minh', '(GMT+0700) Asia/Ho_Chi_Minh'), ('Asia/Hong_Kong', '(GMT+0800) Asia/Hong_Kong'), ('Asia/Hovd', '(GMT+0700) Asia/Hovd'), ('Asia/Irkutsk', '(GMT+0800) Asia/Irkutsk'), ('Asia/Jakarta', '(GMT+0700) Asia/Jakarta'), ('Asia/Jayapura', '(GMT+0900) Asia/Jayapura'), ('Asia/Jerusalem', '(GMT+0300) Asia/Jerusalem'), ('Asia/Kabul', '(GMT+0430) Asia/Kabul'), ('Asia/Kamchatka', '(GMT+1200) Asia/Kamchatka'), ('Asia/Karachi', '(GMT+0500) Asia/Karachi'), ('Asia/Kathmandu', '(GMT+0545) Asia/Kathmandu'), ('Asia/Khandyga', '(GMT+0900) Asia/Khandyga'), ('Asia/Kolkata', '(GMT+0530) Asia/Kolkata'), ('Asia/Krasnoyarsk', '(GMT+0700) Asia/Krasnoyarsk'), ('Asia/Kuala_Lumpur', '(GMT+0800) 
Asia/Kuala_Lumpur'), ('Asia/Kuching', '(GMT+0800) Asia/Kuching'), ('Asia/Kuwait', '(GMT+0300) Asia/Kuwait'), ('Asia/Macau', '(GMT+0800) Asia/Macau'), ('Asia/Magadan', '(GMT+1100) Asia/Magadan'), ('Asia/Makassar', '(GMT+0800) Asia/Makassar'), ('Asia/Manila', '(GMT+0800) Asia/Manila'), ('Asia/Muscat', '(GMT+0400) Asia/Muscat'), ('Asia/Nicosia', '(GMT+0300) Asia/Nicosia'), ('Asia/Novokuznetsk', '(GMT+0700) Asia/Novokuznetsk'), ('Asia/Novosibirsk', '(GMT+0700) Asia/Novosibirsk'), ('Asia/Omsk', '(GMT+0600) Asia/Omsk'), ('Asia/Oral', '(GMT+0500) Asia/Oral'), ('Asia/Phnom_Penh', '(GMT+0700) Asia/Phnom_Penh'), ('Asia/Pontianak', '(GMT+0700) Asia/Pontianak'), ('Asia/Pyongyang', '(GMT+0830) Asia/Pyongyang'), ('Asia/Qatar', '(GMT+0300) Asia/Qatar'), ('Asia/Qyzylorda', '(GMT+0600) Asia/Qyzylorda'), ('Asia/Riyadh', '(GMT+0300) Asia/Riyadh'), ('Asia/Sakhalin', '(GMT+1100) Asia/Sakhalin'), ('Asia/Samarkand', '(GMT+0500) Asia/Samarkand'), ('Asia/Seoul', '(GMT+0900) Asia/Seoul'), ('Asia/Shanghai', '(GMT+0800) Asia/Shanghai'), ('Asia/Singapore', '(GMT+0800) Asia/Singapore'), ('Asia/Srednekolymsk', '(GMT+1100) Asia/Srednekolymsk'), ('Asia/Taipei', '(GMT+0800) Asia/Taipei'), ('Asia/Tashkent', '(GMT+0500) Asia/Tashkent'), ('Asia/Tbilisi', '(GMT+0400) Asia/Tbilisi'), ('Asia/Tehran', '(GMT+0330) Asia/Tehran'), ('Asia/Thimphu', '(GMT+0600) Asia/Thimphu'), ('Asia/Tokyo', '(GMT+0900) Asia/Tokyo'), ('Asia/Tomsk', '(GMT+0700) Asia/Tomsk'), ('Asia/Ulaanbaatar', '(GMT+0800) Asia/Ulaanbaatar'), ('Asia/Urumqi', '(GMT+0600) Asia/Urumqi'), ('Asia/Ust-Nera', '(GMT+1000) Asia/Ust-Nera'), ('Asia/Vientiane', '(GMT+0700) Asia/Vientiane'), ('Asia/Vladivostok', '(GMT+1000) Asia/Vladivostok'), ('Asia/Yakutsk', '(GMT+0900) Asia/Yakutsk'), ('Asia/Yangon', '(GMT+0630) Asia/Yangon'), ('Asia/Yekaterinburg', '(GMT+0500) Asia/Yekaterinburg'), ('Asia/Yerevan', '(GMT+0400) Asia/Yerevan'), ('Atlantic/Azores', '(GMT+0000) Atlantic/Azores'), ('Atlantic/Bermuda', '(GMT-0300) Atlantic/Bermuda'), ('Atlantic/Canary', '(GMT+0100) Atlantic/Canary'), ('Atlantic/Cape_Verde', '(GMT-0100) Atlantic/Cape_Verde'), ('Atlantic/Faroe', '(GMT+0100) Atlantic/Faroe'), ('Atlantic/Madeira', '(GMT+0100) Atlantic/Madeira'), ('Atlantic/Reykjavik', '(GMT+0000) Atlantic/Reykjavik'), ('Atlantic/South_Georgia', '(GMT-0200) Atlantic/South_Georgia'), ('Atlantic/St_Helena', '(GMT+0000) Atlantic/St_Helena'), ('Atlantic/Stanley', '(GMT-0300) Atlantic/Stanley'), ('Australia/Adelaide', '(GMT+1030) Australia/Adelaide'), ('Australia/Brisbane', '(GMT+1000) Australia/Brisbane'), ('Australia/Broken_Hill', '(GMT+1030) Australia/Broken_Hill'), ('Australia/Currie', '(GMT+1100) Australia/Currie'), ('Australia/Darwin', '(GMT+0930) Australia/Darwin'), ('Australia/Eucla', '(GMT+0845) Australia/Eucla'), ('Australia/Hobart', '(GMT+1100) Australia/Hobart'), ('Australia/Lindeman', '(GMT+1000) Australia/Lindeman'), ('Australia/Lord_Howe', '(GMT+1100) Australia/Lord_Howe'), ('Australia/Melbourne', '(GMT+1100) Australia/Melbourne'), ('Australia/Perth', '(GMT+0800) Australia/Perth'), ('Australia/Sydney', '(GMT+1100) Australia/Sydney'), ('Canada/Atlantic', '(GMT-0300) Canada/Atlantic'), ('Canada/Central', '(GMT-0500) Canada/Central'), ('Canada/Eastern', '(GMT-0400) Canada/Eastern'), ('Canada/Mountain', '(GMT-0600) Canada/Mountain'), ('Canada/Newfoundland', '(GMT-0230) Canada/Newfoundland'), ('Canada/Pacific', '(GMT-0700) Canada/Pacific'), ('Europe/Amsterdam', '(GMT+0200) Europe/Amsterdam'), ('Europe/Andorra', '(GMT+0200) Europe/Andorra'), ('Europe/Astrakhan', '(GMT+0400) Europe/Astrakhan'), 
('Europe/Athens', '(GMT+0300) Europe/Athens'), ('Europe/Belgrade', '(GMT+0200) Europe/Belgrade'), ('Europe/Berlin', '(GMT+0200) Europe/Berlin'), ('Europe/Bratislava', '(GMT+0200) Europe/Bratislava'), ('Europe/Brussels', '(GMT+0200) Europe/Brussels'), ('Europe/Bucharest', '(GMT+0300) Europe/Bucharest'), ('Europe/Budapest', '(GMT+0200) Europe/Budapest'), ('Europe/Busingen', '(GMT+0200) Europe/Busingen'), ('Europe/Chisinau', '(GMT+0300) Europe/Chisinau'), ('Europe/Copenhagen', '(GMT+0200) Europe/Copenhagen'), ('Europe/Dublin', '(GMT+0100) Europe/Dublin'), ('Europe/Gibraltar', '(GMT+0200) Europe/Gibraltar'), ('Europe/Guernsey', '(GMT+0100) Europe/Guernsey'), ('Europe/Helsinki', '(GMT+0300) Europe/Helsinki'), ('Europe/Isle_of_Man', '(GMT+0100) Europe/Isle_of_Man'), ('Europe/Istanbul', '(GMT+0300) Europe/Istanbul'), ('Europe/Jersey', '(GMT+0100) Europe/Jersey'), ('Europe/Kaliningrad', '(GMT+0200) Europe/Kaliningrad'), ('Europe/Kiev', '(GMT+0300) Europe/Kiev'), ('Europe/Kirov', '(GMT+0300) Europe/Kirov'), ('Europe/Lisbon', '(GMT+0100) Europe/Lisbon'), ('Europe/Ljubljana', '(GMT+0200) Europe/Ljubljana'), ('Europe/London', '(GMT+0100) Europe/London'), ('Europe/Luxembourg', '(GMT+0200) Europe/Luxembourg'), ('Europe/Madrid', '(GMT+0200) Europe/Madrid'), ('Europe/Malta', '(GMT+0200) Europe/Malta'), ('Europe/Mariehamn', '(GMT+0300) Europe/Mariehamn'), ('Europe/Minsk', '(GMT+0300) Europe/Minsk'), ('Europe/Monaco', '(GMT+0200) Europe/Monaco'), ('Europe/Moscow', '(GMT+0300) Europe/Moscow'), ('Europe/Oslo', '(GMT+0200) Europe/Oslo'), ('Europe/Paris', '(GMT+0200) Europe/Paris'), ('Europe/Podgorica', '(GMT+0200) Europe/Podgorica'), ('Europe/Prague', '(GMT+0200) Europe/Prague'), ('Europe/Riga', '(GMT+0300) Europe/Riga'), ('Europe/Rome', '(GMT+0200) Europe/Rome'), ('Europe/Samara', '(GMT+0400) Europe/Samara'), ('Europe/San_Marino', '(GMT+0200) Europe/San_Marino'), ('Europe/Sarajevo', '(GMT+0200) Europe/Sarajevo'), ('Europe/Saratov', '(GMT+0400) Europe/Saratov'), ('Europe/Simferopol', '(GMT+0300) Europe/Simferopol'), ('Europe/Skopje', '(GMT+0200) Europe/Skopje'), ('Europe/Sofia', '(GMT+0300) Europe/Sofia'), ('Europe/Stockholm', '(GMT+0200) Europe/Stockholm'), ('Europe/Tallinn', '(GMT+0300) Europe/Tallinn'), ('Europe/Tirane', '(GMT+0200) Europe/Tirane'), ('Europe/Ulyanovsk', '(GMT+0400) Europe/Ulyanovsk'), ('Europe/Uzhgorod', '(GMT+0300) Europe/Uzhgorod'), ('Europe/Vaduz', '(GMT+0200) Europe/Vaduz'), ('Europe/Vatican', '(GMT+0200) Europe/Vatican'), ('Europe/Vienna', '(GMT+0200) Europe/Vienna'), ('Europe/Vilnius', '(GMT+0300) Europe/Vilnius'), ('Europe/Volgograd', '(GMT+0300) Europe/Volgograd'), ('Europe/Warsaw', '(GMT+0200) Europe/Warsaw'), ('Europe/Zagreb', '(GMT+0200) Europe/Zagreb'), ('Europe/Zaporozhye', '(GMT+0300) Europe/Zaporozhye'), ('Europe/Zurich', '(GMT+0200) Europe/Zurich'), ('GMT', '(GMT+0000) GMT'), ('Indian/Antananarivo', '(GMT+0300) Indian/Antananarivo'), ('Indian/Chagos', '(GMT+0600) Indian/Chagos'), ('Indian/Christmas', '(GMT+0700) Indian/Christmas'), ('Indian/Cocos', '(GMT+0630) Indian/Cocos'), ('Indian/Comoro', '(GMT+0300) Indian/Comoro'), ('Indian/Kerguelen', '(GMT+0500) Indian/Kerguelen'), ('Indian/Mahe', '(GMT+0400) Indian/Mahe'), ('Indian/Maldives', '(GMT+0500) Indian/Maldives'), ('Indian/Mauritius', '(GMT+0400) Indian/Mauritius'), ('Indian/Mayotte', '(GMT+0300) Indian/Mayotte'), ('Indian/Reunion', '(GMT+0400) Indian/Reunion'), ('Pacific/Apia', '(GMT+1400) Pacific/Apia'), ('Pacific/Auckland', '(GMT+1300) Pacific/Auckland'), ('Pacific/Bougainville', '(GMT+1100) Pacific/Bougainville'), 
('Pacific/Chatham', '(GMT+1345) Pacific/Chatham'), ('Pacific/Chuuk', '(GMT+1000) Pacific/Chuuk'), ('Pacific/Easter', '(GMT-0500) Pacific/Easter'), ('Pacific/Efate', '(GMT+1100) Pacific/Efate'), ('Pacific/Enderbury', '(GMT+1300) Pacific/Enderbury'), ('Pacific/Fakaofo', '(GMT+1300) Pacific/Fakaofo'), ('Pacific/Fiji', '(GMT+1200) Pacific/Fiji'), ('Pacific/Funafuti', '(GMT+1200) Pacific/Funafuti'), ('Pacific/Galapagos', '(GMT-0600) Pacific/Galapagos'), ('Pacific/Gambier', '(GMT-0900) Pacific/Gambier'), ('Pacific/Guadalcanal', '(GMT+1100) Pacific/Guadalcanal'), ('Pacific/Guam', '(GMT+1000) Pacific/Guam'), ('Pacific/Honolulu', '(GMT-1000) Pacific/Honolulu'), ('Pacific/Kiritimati', '(GMT+1400) Pacific/Kiritimati'), ('Pacific/Kosrae', '(GMT+1100) Pacific/Kosrae'), ('Pacific/Kwajalein', '(GMT+1200) Pacific/Kwajalein'), ('Pacific/Majuro', '(GMT+1200) Pacific/Majuro'), ('Pacific/Marquesas', '(GMT-0930) Pacific/Marquesas'), ('Pacific/Midway', '(GMT-1100) Pacific/Midway'), ('Pacific/Nauru', '(GMT+1200) Pacific/Nauru'), ('Pacific/Niue', '(GMT-1100) Pacific/Niue'), ('Pacific/Norfolk', '(GMT+1100) Pacific/Norfolk'), ('Pacific/Noumea', '(GMT+1100) Pacific/Noumea'), ('Pacific/Pago_Pago', '(GMT-1100) Pacific/Pago_Pago'), ('Pacific/Palau', '(GMT+0900) Pacific/Palau'), ('Pacific/Pitcairn', '(GMT-0800) Pacific/Pitcairn'), ('Pacific/Pohnpei', '(GMT+1100) Pacific/Pohnpei'), ('Pacific/Port_Moresby', '(GMT+1000) Pacific/Port_Moresby'), ('Pacific/Rarotonga', '(GMT-1000) Pacific/Rarotonga'), ('Pacific/Saipan', '(GMT+1000) Pacific/Saipan'), ('Pacific/Tahiti', '(GMT-1000) Pacific/Tahiti'), ('Pacific/Tarawa', '(GMT+1200) Pacific/Tarawa'), ('Pacific/Tongatapu', '(GMT+1300) Pacific/Tongatapu'), ('Pacific/Wake', '(GMT+1200) Pacific/Wake'), ('Pacific/Wallis', '(GMT+1200) Pacific/Wallis'), ('US/Alaska', '(GMT-0800) US/Alaska'), ('US/Arizona', '(GMT-0700) US/Arizona'), ('US/Central', '(GMT-0500) US/Central'), ('US/Eastern', '(GMT-0400) US/Eastern'), ('US/Hawaii', '(GMT-1000) US/Hawaii'), ('US/Mountain', '(GMT-0600) US/Mountain'), ('US/Pacific', '(GMT-0700) US/Pacific'), ('UTC', '(GMT+0000) UTC')], default='America/New_York', max_length=100), + model_name="profile", + name="timezone", + field=vendor.timezones.fields.TimeZoneField( + choices=[ + ("Africa/Abidjan", "(GMT+0000) Africa/Abidjan"), + ("Africa/Accra", "(GMT+0000) Africa/Accra"), + ("Africa/Addis_Ababa", "(GMT+0300) Africa/Addis_Ababa"), + ("Africa/Algiers", "(GMT+0100) Africa/Algiers"), + ("Africa/Asmara", "(GMT+0300) Africa/Asmara"), + ("Africa/Bamako", "(GMT+0000) Africa/Bamako"), + ("Africa/Bangui", "(GMT+0100) Africa/Bangui"), + ("Africa/Banjul", "(GMT+0000) Africa/Banjul"), + ("Africa/Bissau", "(GMT+0000) Africa/Bissau"), + ("Africa/Blantyre", "(GMT+0200) Africa/Blantyre"), + ("Africa/Brazzaville", "(GMT+0100) Africa/Brazzaville"), + ("Africa/Bujumbura", "(GMT+0200) Africa/Bujumbura"), + ("Africa/Cairo", "(GMT+0200) Africa/Cairo"), + ("Africa/Casablanca", "(GMT+0100) Africa/Casablanca"), + ("Africa/Ceuta", "(GMT+0200) Africa/Ceuta"), + ("Africa/Conakry", "(GMT+0000) Africa/Conakry"), + ("Africa/Dakar", "(GMT+0000) Africa/Dakar"), + ("Africa/Dar_es_Salaam", "(GMT+0300) Africa/Dar_es_Salaam"), + ("Africa/Djibouti", "(GMT+0300) Africa/Djibouti"), + ("Africa/Douala", "(GMT+0100) Africa/Douala"), + ("Africa/El_Aaiun", "(GMT+0100) Africa/El_Aaiun"), + ("Africa/Freetown", "(GMT+0000) Africa/Freetown"), + ("Africa/Gaborone", "(GMT+0200) Africa/Gaborone"), + ("Africa/Harare", "(GMT+0200) Africa/Harare"), + ("Africa/Johannesburg", "(GMT+0200) Africa/Johannesburg"), + 
("Africa/Juba", "(GMT+0300) Africa/Juba"), + ("Africa/Kampala", "(GMT+0300) Africa/Kampala"), + ("Africa/Khartoum", "(GMT+0200) Africa/Khartoum"), + ("Africa/Kigali", "(GMT+0200) Africa/Kigali"), + ("Africa/Kinshasa", "(GMT+0100) Africa/Kinshasa"), + ("Africa/Lagos", "(GMT+0100) Africa/Lagos"), + ("Africa/Libreville", "(GMT+0100) Africa/Libreville"), + ("Africa/Lome", "(GMT+0000) Africa/Lome"), + ("Africa/Luanda", "(GMT+0100) Africa/Luanda"), + ("Africa/Lubumbashi", "(GMT+0200) Africa/Lubumbashi"), + ("Africa/Lusaka", "(GMT+0200) Africa/Lusaka"), + ("Africa/Malabo", "(GMT+0100) Africa/Malabo"), + ("Africa/Maputo", "(GMT+0200) Africa/Maputo"), + ("Africa/Maseru", "(GMT+0200) Africa/Maseru"), + ("Africa/Mbabane", "(GMT+0200) Africa/Mbabane"), + ("Africa/Mogadishu", "(GMT+0300) Africa/Mogadishu"), + ("Africa/Monrovia", "(GMT+0000) Africa/Monrovia"), + ("Africa/Nairobi", "(GMT+0300) Africa/Nairobi"), + ("Africa/Ndjamena", "(GMT+0100) Africa/Ndjamena"), + ("Africa/Niamey", "(GMT+0100) Africa/Niamey"), + ("Africa/Nouakchott", "(GMT+0000) Africa/Nouakchott"), + ("Africa/Ouagadougou", "(GMT+0000) Africa/Ouagadougou"), + ("Africa/Porto-Novo", "(GMT+0100) Africa/Porto-Novo"), + ("Africa/Sao_Tome", "(GMT+0100) Africa/Sao_Tome"), + ("Africa/Tripoli", "(GMT+0200) Africa/Tripoli"), + ("Africa/Tunis", "(GMT+0100) Africa/Tunis"), + ("Africa/Windhoek", "(GMT+0200) Africa/Windhoek"), + ("America/Adak", "(GMT-0900) America/Adak"), + ("America/Anchorage", "(GMT-0800) America/Anchorage"), + ("America/Anguilla", "(GMT-0400) America/Anguilla"), + ("America/Antigua", "(GMT-0400) America/Antigua"), + ("America/Araguaina", "(GMT-0300) America/Araguaina"), + ("America/Argentina/Buenos_Aires", "(GMT-0300) America/Argentina/Buenos_Aires"), + ("America/Argentina/Catamarca", "(GMT-0300) America/Argentina/Catamarca"), + ("America/Argentina/Cordoba", "(GMT-0300) America/Argentina/Cordoba"), + ("America/Argentina/Jujuy", "(GMT-0300) America/Argentina/Jujuy"), + ("America/Argentina/La_Rioja", "(GMT-0300) America/Argentina/La_Rioja"), + ("America/Argentina/Mendoza", "(GMT-0300) America/Argentina/Mendoza"), + ("America/Argentina/Rio_Gallegos", "(GMT-0300) America/Argentina/Rio_Gallegos"), + ("America/Argentina/Salta", "(GMT-0300) America/Argentina/Salta"), + ("America/Argentina/San_Juan", "(GMT-0300) America/Argentina/San_Juan"), + ("America/Argentina/San_Luis", "(GMT-0300) America/Argentina/San_Luis"), + ("America/Argentina/Tucuman", "(GMT-0300) America/Argentina/Tucuman"), + ("America/Argentina/Ushuaia", "(GMT-0300) America/Argentina/Ushuaia"), + ("America/Aruba", "(GMT-0400) America/Aruba"), + ("America/Asuncion", "(GMT-0300) America/Asuncion"), + ("America/Atikokan", "(GMT-0500) America/Atikokan"), + ("America/Bahia", "(GMT-0300) America/Bahia"), + ("America/Bahia_Banderas", "(GMT-0500) America/Bahia_Banderas"), + ("America/Barbados", "(GMT-0400) America/Barbados"), + ("America/Belem", "(GMT-0300) America/Belem"), + ("America/Belize", "(GMT-0600) America/Belize"), + ("America/Blanc-Sablon", "(GMT-0400) America/Blanc-Sablon"), + ("America/Boa_Vista", "(GMT-0400) America/Boa_Vista"), + ("America/Bogota", "(GMT-0500) America/Bogota"), + ("America/Boise", "(GMT-0600) America/Boise"), + ("America/Cambridge_Bay", "(GMT-0600) America/Cambridge_Bay"), + ("America/Campo_Grande", "(GMT-0400) America/Campo_Grande"), + ("America/Cancun", "(GMT-0500) America/Cancun"), + ("America/Caracas", "(GMT-0400) America/Caracas"), + ("America/Cayenne", "(GMT-0300) America/Cayenne"), + ("America/Cayman", "(GMT-0500) America/Cayman"), + 
("America/Chicago", "(GMT-0500) America/Chicago"), + ("America/Chihuahua", "(GMT-0600) America/Chihuahua"), + ("America/Costa_Rica", "(GMT-0600) America/Costa_Rica"), + ("America/Creston", "(GMT-0700) America/Creston"), + ("America/Cuiaba", "(GMT-0400) America/Cuiaba"), + ("America/Curacao", "(GMT-0400) America/Curacao"), + ("America/Danmarkshavn", "(GMT+0000) America/Danmarkshavn"), + ("America/Dawson", "(GMT-0700) America/Dawson"), + ("America/Dawson_Creek", "(GMT-0700) America/Dawson_Creek"), + ("America/Denver", "(GMT-0600) America/Denver"), + ("America/Detroit", "(GMT-0400) America/Detroit"), + ("America/Dominica", "(GMT-0400) America/Dominica"), + ("America/Edmonton", "(GMT-0600) America/Edmonton"), + ("America/Eirunepe", "(GMT-0500) America/Eirunepe"), + ("America/El_Salvador", "(GMT-0600) America/El_Salvador"), + ("America/Fort_Nelson", "(GMT-0700) America/Fort_Nelson"), + ("America/Fortaleza", "(GMT-0300) America/Fortaleza"), + ("America/Glace_Bay", "(GMT-0300) America/Glace_Bay"), + ("America/Godthab", "(GMT-0200) America/Godthab"), + ("America/Goose_Bay", "(GMT-0300) America/Goose_Bay"), + ("America/Grand_Turk", "(GMT-0400) America/Grand_Turk"), + ("America/Grenada", "(GMT-0400) America/Grenada"), + ("America/Guadeloupe", "(GMT-0400) America/Guadeloupe"), + ("America/Guatemala", "(GMT-0600) America/Guatemala"), + ("America/Guayaquil", "(GMT-0500) America/Guayaquil"), + ("America/Guyana", "(GMT-0400) America/Guyana"), + ("America/Halifax", "(GMT-0300) America/Halifax"), + ("America/Havana", "(GMT-0400) America/Havana"), + ("America/Hermosillo", "(GMT-0700) America/Hermosillo"), + ("America/Indiana/Indianapolis", "(GMT-0400) America/Indiana/Indianapolis"), + ("America/Indiana/Knox", "(GMT-0500) America/Indiana/Knox"), + ("America/Indiana/Marengo", "(GMT-0400) America/Indiana/Marengo"), + ("America/Indiana/Petersburg", "(GMT-0400) America/Indiana/Petersburg"), + ("America/Indiana/Tell_City", "(GMT-0500) America/Indiana/Tell_City"), + ("America/Indiana/Vevay", "(GMT-0400) America/Indiana/Vevay"), + ("America/Indiana/Vincennes", "(GMT-0400) America/Indiana/Vincennes"), + ("America/Indiana/Winamac", "(GMT-0400) America/Indiana/Winamac"), + ("America/Inuvik", "(GMT-0600) America/Inuvik"), + ("America/Iqaluit", "(GMT-0400) America/Iqaluit"), + ("America/Jamaica", "(GMT-0500) America/Jamaica"), + ("America/Juneau", "(GMT-0800) America/Juneau"), + ("America/Kentucky/Louisville", "(GMT-0400) America/Kentucky/Louisville"), + ("America/Kentucky/Monticello", "(GMT-0400) America/Kentucky/Monticello"), + ("America/Kralendijk", "(GMT-0400) America/Kralendijk"), + ("America/La_Paz", "(GMT-0400) America/La_Paz"), + ("America/Lima", "(GMT-0500) America/Lima"), + ("America/Los_Angeles", "(GMT-0700) America/Los_Angeles"), + ("America/Lower_Princes", "(GMT-0400) America/Lower_Princes"), + ("America/Maceio", "(GMT-0300) America/Maceio"), + ("America/Managua", "(GMT-0600) America/Managua"), + ("America/Manaus", "(GMT-0400) America/Manaus"), + ("America/Marigot", "(GMT-0400) America/Marigot"), + ("America/Martinique", "(GMT-0400) America/Martinique"), + ("America/Matamoros", "(GMT-0500) America/Matamoros"), + ("America/Mazatlan", "(GMT-0600) America/Mazatlan"), + ("America/Menominee", "(GMT-0500) America/Menominee"), + ("America/Merida", "(GMT-0500) America/Merida"), + ("America/Metlakatla", "(GMT-0800) America/Metlakatla"), + ("America/Mexico_City", "(GMT-0500) America/Mexico_City"), + ("America/Miquelon", "(GMT-0200) America/Miquelon"), + ("America/Moncton", "(GMT-0300) America/Moncton"), + 
("America/Monterrey", "(GMT-0500) America/Monterrey"), + ("America/Montevideo", "(GMT-0300) America/Montevideo"), + ("America/Montserrat", "(GMT-0400) America/Montserrat"), + ("America/Nassau", "(GMT-0400) America/Nassau"), + ("America/New_York", "(GMT-0400) America/New_York"), + ("America/Nipigon", "(GMT-0400) America/Nipigon"), + ("America/Nome", "(GMT-0800) America/Nome"), + ("America/Noronha", "(GMT-0200) America/Noronha"), + ("America/North_Dakota/Beulah", "(GMT-0500) America/North_Dakota/Beulah"), + ("America/North_Dakota/Center", "(GMT-0500) America/North_Dakota/Center"), + ("America/North_Dakota/New_Salem", "(GMT-0500) America/North_Dakota/New_Salem"), + ("America/Ojinaga", "(GMT-0600) America/Ojinaga"), + ("America/Panama", "(GMT-0500) America/Panama"), + ("America/Pangnirtung", "(GMT-0400) America/Pangnirtung"), + ("America/Paramaribo", "(GMT-0300) America/Paramaribo"), + ("America/Phoenix", "(GMT-0700) America/Phoenix"), + ("America/Port-au-Prince", "(GMT-0400) America/Port-au-Prince"), + ("America/Port_of_Spain", "(GMT-0400) America/Port_of_Spain"), + ("America/Porto_Velho", "(GMT-0400) America/Porto_Velho"), + ("America/Puerto_Rico", "(GMT-0400) America/Puerto_Rico"), + ("America/Punta_Arenas", "(GMT-0300) America/Punta_Arenas"), + ("America/Rainy_River", "(GMT-0500) America/Rainy_River"), + ("America/Rankin_Inlet", "(GMT-0500) America/Rankin_Inlet"), + ("America/Recife", "(GMT-0300) America/Recife"), + ("America/Regina", "(GMT-0600) America/Regina"), + ("America/Resolute", "(GMT-0500) America/Resolute"), + ("America/Rio_Branco", "(GMT-0500) America/Rio_Branco"), + ("America/Santarem", "(GMT-0300) America/Santarem"), + ("America/Santiago", "(GMT-0300) America/Santiago"), + ("America/Santo_Domingo", "(GMT-0400) America/Santo_Domingo"), + ("America/Sao_Paulo", "(GMT-0300) America/Sao_Paulo"), + ("America/Scoresbysund", "(GMT+0000) America/Scoresbysund"), + ("America/Sitka", "(GMT-0800) America/Sitka"), + ("America/St_Barthelemy", "(GMT-0400) America/St_Barthelemy"), + ("America/St_Johns", "(GMT-0230) America/St_Johns"), + ("America/St_Kitts", "(GMT-0400) America/St_Kitts"), + ("America/St_Lucia", "(GMT-0400) America/St_Lucia"), + ("America/St_Thomas", "(GMT-0400) America/St_Thomas"), + ("America/St_Vincent", "(GMT-0400) America/St_Vincent"), + ("America/Swift_Current", "(GMT-0600) America/Swift_Current"), + ("America/Tegucigalpa", "(GMT-0600) America/Tegucigalpa"), + ("America/Thule", "(GMT-0300) America/Thule"), + ("America/Thunder_Bay", "(GMT-0400) America/Thunder_Bay"), + ("America/Tijuana", "(GMT-0700) America/Tijuana"), + ("America/Toronto", "(GMT-0400) America/Toronto"), + ("America/Tortola", "(GMT-0400) America/Tortola"), + ("America/Vancouver", "(GMT-0700) America/Vancouver"), + ("America/Whitehorse", "(GMT-0700) America/Whitehorse"), + ("America/Winnipeg", "(GMT-0500) America/Winnipeg"), + ("America/Yakutat", "(GMT-0800) America/Yakutat"), + ("America/Yellowknife", "(GMT-0600) America/Yellowknife"), + ("Antarctica/Casey", "(GMT+1100) Antarctica/Casey"), + ("Antarctica/Davis", "(GMT+0700) Antarctica/Davis"), + ("Antarctica/DumontDUrville", "(GMT+1000) Antarctica/DumontDUrville"), + ("Antarctica/Macquarie", "(GMT+1100) Antarctica/Macquarie"), + ("Antarctica/Mawson", "(GMT+0500) Antarctica/Mawson"), + ("Antarctica/McMurdo", "(GMT+1300) Antarctica/McMurdo"), + ("Antarctica/Palmer", "(GMT-0300) Antarctica/Palmer"), + ("Antarctica/Rothera", "(GMT-0300) Antarctica/Rothera"), + ("Antarctica/Syowa", "(GMT+0300) Antarctica/Syowa"), + ("Antarctica/Troll", "(GMT+0200) 
Antarctica/Troll"), + ("Antarctica/Vostok", "(GMT+0600) Antarctica/Vostok"), + ("Arctic/Longyearbyen", "(GMT+0200) Arctic/Longyearbyen"), + ("Asia/Aden", "(GMT+0300) Asia/Aden"), + ("Asia/Almaty", "(GMT+0600) Asia/Almaty"), + ("Asia/Amman", "(GMT+0300) Asia/Amman"), + ("Asia/Anadyr", "(GMT+1200) Asia/Anadyr"), + ("Asia/Aqtau", "(GMT+0500) Asia/Aqtau"), + ("Asia/Aqtobe", "(GMT+0500) Asia/Aqtobe"), + ("Asia/Ashgabat", "(GMT+0500) Asia/Ashgabat"), + ("Asia/Atyrau", "(GMT+0500) Asia/Atyrau"), + ("Asia/Baghdad", "(GMT+0300) Asia/Baghdad"), + ("Asia/Bahrain", "(GMT+0300) Asia/Bahrain"), + ("Asia/Baku", "(GMT+0400) Asia/Baku"), + ("Asia/Bangkok", "(GMT+0700) Asia/Bangkok"), + ("Asia/Barnaul", "(GMT+0700) Asia/Barnaul"), + ("Asia/Beirut", "(GMT+0300) Asia/Beirut"), + ("Asia/Bishkek", "(GMT+0600) Asia/Bishkek"), + ("Asia/Brunei", "(GMT+0800) Asia/Brunei"), + ("Asia/Chita", "(GMT+0900) Asia/Chita"), + ("Asia/Choibalsan", "(GMT+0800) Asia/Choibalsan"), + ("Asia/Colombo", "(GMT+0530) Asia/Colombo"), + ("Asia/Damascus", "(GMT+0300) Asia/Damascus"), + ("Asia/Dhaka", "(GMT+0600) Asia/Dhaka"), + ("Asia/Dili", "(GMT+0900) Asia/Dili"), + ("Asia/Dubai", "(GMT+0400) Asia/Dubai"), + ("Asia/Dushanbe", "(GMT+0500) Asia/Dushanbe"), + ("Asia/Famagusta", "(GMT+0300) Asia/Famagusta"), + ("Asia/Gaza", "(GMT+0300) Asia/Gaza"), + ("Asia/Hebron", "(GMT+0300) Asia/Hebron"), + ("Asia/Ho_Chi_Minh", "(GMT+0700) Asia/Ho_Chi_Minh"), + ("Asia/Hong_Kong", "(GMT+0800) Asia/Hong_Kong"), + ("Asia/Hovd", "(GMT+0700) Asia/Hovd"), + ("Asia/Irkutsk", "(GMT+0800) Asia/Irkutsk"), + ("Asia/Jakarta", "(GMT+0700) Asia/Jakarta"), + ("Asia/Jayapura", "(GMT+0900) Asia/Jayapura"), + ("Asia/Jerusalem", "(GMT+0300) Asia/Jerusalem"), + ("Asia/Kabul", "(GMT+0430) Asia/Kabul"), + ("Asia/Kamchatka", "(GMT+1200) Asia/Kamchatka"), + ("Asia/Karachi", "(GMT+0500) Asia/Karachi"), + ("Asia/Kathmandu", "(GMT+0545) Asia/Kathmandu"), + ("Asia/Khandyga", "(GMT+0900) Asia/Khandyga"), + ("Asia/Kolkata", "(GMT+0530) Asia/Kolkata"), + ("Asia/Krasnoyarsk", "(GMT+0700) Asia/Krasnoyarsk"), + ("Asia/Kuala_Lumpur", "(GMT+0800) Asia/Kuala_Lumpur"), + ("Asia/Kuching", "(GMT+0800) Asia/Kuching"), + ("Asia/Kuwait", "(GMT+0300) Asia/Kuwait"), + ("Asia/Macau", "(GMT+0800) Asia/Macau"), + ("Asia/Magadan", "(GMT+1100) Asia/Magadan"), + ("Asia/Makassar", "(GMT+0800) Asia/Makassar"), + ("Asia/Manila", "(GMT+0800) Asia/Manila"), + ("Asia/Muscat", "(GMT+0400) Asia/Muscat"), + ("Asia/Nicosia", "(GMT+0300) Asia/Nicosia"), + ("Asia/Novokuznetsk", "(GMT+0700) Asia/Novokuznetsk"), + ("Asia/Novosibirsk", "(GMT+0700) Asia/Novosibirsk"), + ("Asia/Omsk", "(GMT+0600) Asia/Omsk"), + ("Asia/Oral", "(GMT+0500) Asia/Oral"), + ("Asia/Phnom_Penh", "(GMT+0700) Asia/Phnom_Penh"), + ("Asia/Pontianak", "(GMT+0700) Asia/Pontianak"), + ("Asia/Pyongyang", "(GMT+0830) Asia/Pyongyang"), + ("Asia/Qatar", "(GMT+0300) Asia/Qatar"), + ("Asia/Qyzylorda", "(GMT+0600) Asia/Qyzylorda"), + ("Asia/Riyadh", "(GMT+0300) Asia/Riyadh"), + ("Asia/Sakhalin", "(GMT+1100) Asia/Sakhalin"), + ("Asia/Samarkand", "(GMT+0500) Asia/Samarkand"), + ("Asia/Seoul", "(GMT+0900) Asia/Seoul"), + ("Asia/Shanghai", "(GMT+0800) Asia/Shanghai"), + ("Asia/Singapore", "(GMT+0800) Asia/Singapore"), + ("Asia/Srednekolymsk", "(GMT+1100) Asia/Srednekolymsk"), + ("Asia/Taipei", "(GMT+0800) Asia/Taipei"), + ("Asia/Tashkent", "(GMT+0500) Asia/Tashkent"), + ("Asia/Tbilisi", "(GMT+0400) Asia/Tbilisi"), + ("Asia/Tehran", "(GMT+0330) Asia/Tehran"), + ("Asia/Thimphu", "(GMT+0600) Asia/Thimphu"), + ("Asia/Tokyo", "(GMT+0900) Asia/Tokyo"), + 
("Asia/Tomsk", "(GMT+0700) Asia/Tomsk"), + ("Asia/Ulaanbaatar", "(GMT+0800) Asia/Ulaanbaatar"), + ("Asia/Urumqi", "(GMT+0600) Asia/Urumqi"), + ("Asia/Ust-Nera", "(GMT+1000) Asia/Ust-Nera"), + ("Asia/Vientiane", "(GMT+0700) Asia/Vientiane"), + ("Asia/Vladivostok", "(GMT+1000) Asia/Vladivostok"), + ("Asia/Yakutsk", "(GMT+0900) Asia/Yakutsk"), + ("Asia/Yangon", "(GMT+0630) Asia/Yangon"), + ("Asia/Yekaterinburg", "(GMT+0500) Asia/Yekaterinburg"), + ("Asia/Yerevan", "(GMT+0400) Asia/Yerevan"), + ("Atlantic/Azores", "(GMT+0000) Atlantic/Azores"), + ("Atlantic/Bermuda", "(GMT-0300) Atlantic/Bermuda"), + ("Atlantic/Canary", "(GMT+0100) Atlantic/Canary"), + ("Atlantic/Cape_Verde", "(GMT-0100) Atlantic/Cape_Verde"), + ("Atlantic/Faroe", "(GMT+0100) Atlantic/Faroe"), + ("Atlantic/Madeira", "(GMT+0100) Atlantic/Madeira"), + ("Atlantic/Reykjavik", "(GMT+0000) Atlantic/Reykjavik"), + ("Atlantic/South_Georgia", "(GMT-0200) Atlantic/South_Georgia"), + ("Atlantic/St_Helena", "(GMT+0000) Atlantic/St_Helena"), + ("Atlantic/Stanley", "(GMT-0300) Atlantic/Stanley"), + ("Australia/Adelaide", "(GMT+1030) Australia/Adelaide"), + ("Australia/Brisbane", "(GMT+1000) Australia/Brisbane"), + ("Australia/Broken_Hill", "(GMT+1030) Australia/Broken_Hill"), + ("Australia/Currie", "(GMT+1100) Australia/Currie"), + ("Australia/Darwin", "(GMT+0930) Australia/Darwin"), + ("Australia/Eucla", "(GMT+0845) Australia/Eucla"), + ("Australia/Hobart", "(GMT+1100) Australia/Hobart"), + ("Australia/Lindeman", "(GMT+1000) Australia/Lindeman"), + ("Australia/Lord_Howe", "(GMT+1100) Australia/Lord_Howe"), + ("Australia/Melbourne", "(GMT+1100) Australia/Melbourne"), + ("Australia/Perth", "(GMT+0800) Australia/Perth"), + ("Australia/Sydney", "(GMT+1100) Australia/Sydney"), + ("Canada/Atlantic", "(GMT-0300) Canada/Atlantic"), + ("Canada/Central", "(GMT-0500) Canada/Central"), + ("Canada/Eastern", "(GMT-0400) Canada/Eastern"), + ("Canada/Mountain", "(GMT-0600) Canada/Mountain"), + ("Canada/Newfoundland", "(GMT-0230) Canada/Newfoundland"), + ("Canada/Pacific", "(GMT-0700) Canada/Pacific"), + ("Europe/Amsterdam", "(GMT+0200) Europe/Amsterdam"), + ("Europe/Andorra", "(GMT+0200) Europe/Andorra"), + ("Europe/Astrakhan", "(GMT+0400) Europe/Astrakhan"), + ("Europe/Athens", "(GMT+0300) Europe/Athens"), + ("Europe/Belgrade", "(GMT+0200) Europe/Belgrade"), + ("Europe/Berlin", "(GMT+0200) Europe/Berlin"), + ("Europe/Bratislava", "(GMT+0200) Europe/Bratislava"), + ("Europe/Brussels", "(GMT+0200) Europe/Brussels"), + ("Europe/Bucharest", "(GMT+0300) Europe/Bucharest"), + ("Europe/Budapest", "(GMT+0200) Europe/Budapest"), + ("Europe/Busingen", "(GMT+0200) Europe/Busingen"), + ("Europe/Chisinau", "(GMT+0300) Europe/Chisinau"), + ("Europe/Copenhagen", "(GMT+0200) Europe/Copenhagen"), + ("Europe/Dublin", "(GMT+0100) Europe/Dublin"), + ("Europe/Gibraltar", "(GMT+0200) Europe/Gibraltar"), + ("Europe/Guernsey", "(GMT+0100) Europe/Guernsey"), + ("Europe/Helsinki", "(GMT+0300) Europe/Helsinki"), + ("Europe/Isle_of_Man", "(GMT+0100) Europe/Isle_of_Man"), + ("Europe/Istanbul", "(GMT+0300) Europe/Istanbul"), + ("Europe/Jersey", "(GMT+0100) Europe/Jersey"), + ("Europe/Kaliningrad", "(GMT+0200) Europe/Kaliningrad"), + ("Europe/Kiev", "(GMT+0300) Europe/Kiev"), + ("Europe/Kirov", "(GMT+0300) Europe/Kirov"), + ("Europe/Lisbon", "(GMT+0100) Europe/Lisbon"), + ("Europe/Ljubljana", "(GMT+0200) Europe/Ljubljana"), + ("Europe/London", "(GMT+0100) Europe/London"), + ("Europe/Luxembourg", "(GMT+0200) Europe/Luxembourg"), + ("Europe/Madrid", "(GMT+0200) Europe/Madrid"), + 
("Europe/Malta", "(GMT+0200) Europe/Malta"), + ("Europe/Mariehamn", "(GMT+0300) Europe/Mariehamn"), + ("Europe/Minsk", "(GMT+0300) Europe/Minsk"), + ("Europe/Monaco", "(GMT+0200) Europe/Monaco"), + ("Europe/Moscow", "(GMT+0300) Europe/Moscow"), + ("Europe/Oslo", "(GMT+0200) Europe/Oslo"), + ("Europe/Paris", "(GMT+0200) Europe/Paris"), + ("Europe/Podgorica", "(GMT+0200) Europe/Podgorica"), + ("Europe/Prague", "(GMT+0200) Europe/Prague"), + ("Europe/Riga", "(GMT+0300) Europe/Riga"), + ("Europe/Rome", "(GMT+0200) Europe/Rome"), + ("Europe/Samara", "(GMT+0400) Europe/Samara"), + ("Europe/San_Marino", "(GMT+0200) Europe/San_Marino"), + ("Europe/Sarajevo", "(GMT+0200) Europe/Sarajevo"), + ("Europe/Saratov", "(GMT+0400) Europe/Saratov"), + ("Europe/Simferopol", "(GMT+0300) Europe/Simferopol"), + ("Europe/Skopje", "(GMT+0200) Europe/Skopje"), + ("Europe/Sofia", "(GMT+0300) Europe/Sofia"), + ("Europe/Stockholm", "(GMT+0200) Europe/Stockholm"), + ("Europe/Tallinn", "(GMT+0300) Europe/Tallinn"), + ("Europe/Tirane", "(GMT+0200) Europe/Tirane"), + ("Europe/Ulyanovsk", "(GMT+0400) Europe/Ulyanovsk"), + ("Europe/Uzhgorod", "(GMT+0300) Europe/Uzhgorod"), + ("Europe/Vaduz", "(GMT+0200) Europe/Vaduz"), + ("Europe/Vatican", "(GMT+0200) Europe/Vatican"), + ("Europe/Vienna", "(GMT+0200) Europe/Vienna"), + ("Europe/Vilnius", "(GMT+0300) Europe/Vilnius"), + ("Europe/Volgograd", "(GMT+0300) Europe/Volgograd"), + ("Europe/Warsaw", "(GMT+0200) Europe/Warsaw"), + ("Europe/Zagreb", "(GMT+0200) Europe/Zagreb"), + ("Europe/Zaporozhye", "(GMT+0300) Europe/Zaporozhye"), + ("Europe/Zurich", "(GMT+0200) Europe/Zurich"), + ("GMT", "(GMT+0000) GMT"), + ("Indian/Antananarivo", "(GMT+0300) Indian/Antananarivo"), + ("Indian/Chagos", "(GMT+0600) Indian/Chagos"), + ("Indian/Christmas", "(GMT+0700) Indian/Christmas"), + ("Indian/Cocos", "(GMT+0630) Indian/Cocos"), + ("Indian/Comoro", "(GMT+0300) Indian/Comoro"), + ("Indian/Kerguelen", "(GMT+0500) Indian/Kerguelen"), + ("Indian/Mahe", "(GMT+0400) Indian/Mahe"), + ("Indian/Maldives", "(GMT+0500) Indian/Maldives"), + ("Indian/Mauritius", "(GMT+0400) Indian/Mauritius"), + ("Indian/Mayotte", "(GMT+0300) Indian/Mayotte"), + ("Indian/Reunion", "(GMT+0400) Indian/Reunion"), + ("Pacific/Apia", "(GMT+1400) Pacific/Apia"), + ("Pacific/Auckland", "(GMT+1300) Pacific/Auckland"), + ("Pacific/Bougainville", "(GMT+1100) Pacific/Bougainville"), + ("Pacific/Chatham", "(GMT+1345) Pacific/Chatham"), + ("Pacific/Chuuk", "(GMT+1000) Pacific/Chuuk"), + ("Pacific/Easter", "(GMT-0500) Pacific/Easter"), + ("Pacific/Efate", "(GMT+1100) Pacific/Efate"), + ("Pacific/Enderbury", "(GMT+1300) Pacific/Enderbury"), + ("Pacific/Fakaofo", "(GMT+1300) Pacific/Fakaofo"), + ("Pacific/Fiji", "(GMT+1200) Pacific/Fiji"), + ("Pacific/Funafuti", "(GMT+1200) Pacific/Funafuti"), + ("Pacific/Galapagos", "(GMT-0600) Pacific/Galapagos"), + ("Pacific/Gambier", "(GMT-0900) Pacific/Gambier"), + ("Pacific/Guadalcanal", "(GMT+1100) Pacific/Guadalcanal"), + ("Pacific/Guam", "(GMT+1000) Pacific/Guam"), + ("Pacific/Honolulu", "(GMT-1000) Pacific/Honolulu"), + ("Pacific/Kiritimati", "(GMT+1400) Pacific/Kiritimati"), + ("Pacific/Kosrae", "(GMT+1100) Pacific/Kosrae"), + ("Pacific/Kwajalein", "(GMT+1200) Pacific/Kwajalein"), + ("Pacific/Majuro", "(GMT+1200) Pacific/Majuro"), + ("Pacific/Marquesas", "(GMT-0930) Pacific/Marquesas"), + ("Pacific/Midway", "(GMT-1100) Pacific/Midway"), + ("Pacific/Nauru", "(GMT+1200) Pacific/Nauru"), + ("Pacific/Niue", "(GMT-1100) Pacific/Niue"), + ("Pacific/Norfolk", "(GMT+1100) Pacific/Norfolk"), + 
("Pacific/Noumea", "(GMT+1100) Pacific/Noumea"), + ("Pacific/Pago_Pago", "(GMT-1100) Pacific/Pago_Pago"), + ("Pacific/Palau", "(GMT+0900) Pacific/Palau"), + ("Pacific/Pitcairn", "(GMT-0800) Pacific/Pitcairn"), + ("Pacific/Pohnpei", "(GMT+1100) Pacific/Pohnpei"), + ("Pacific/Port_Moresby", "(GMT+1000) Pacific/Port_Moresby"), + ("Pacific/Rarotonga", "(GMT-1000) Pacific/Rarotonga"), + ("Pacific/Saipan", "(GMT+1000) Pacific/Saipan"), + ("Pacific/Tahiti", "(GMT-1000) Pacific/Tahiti"), + ("Pacific/Tarawa", "(GMT+1200) Pacific/Tarawa"), + ("Pacific/Tongatapu", "(GMT+1300) Pacific/Tongatapu"), + ("Pacific/Wake", "(GMT+1200) Pacific/Wake"), + ("Pacific/Wallis", "(GMT+1200) Pacific/Wallis"), + ("US/Alaska", "(GMT-0800) US/Alaska"), + ("US/Arizona", "(GMT-0700) US/Arizona"), + ("US/Central", "(GMT-0500) US/Central"), + ("US/Eastern", "(GMT-0400) US/Eastern"), + ("US/Hawaii", "(GMT-1000) US/Hawaii"), + ("US/Mountain", "(GMT-0600) US/Mountain"), + ("US/Pacific", "(GMT-0700) US/Pacific"), + ("UTC", "(GMT+0000) UTC"), + ], + default="America/New_York", + max_length=100, + ), ), ] diff --git a/apps/profile/migrations/0004_auto_20220110_2106.py b/apps/profile/migrations/0004_auto_20220110_2106.py index 5aaed426c..676d762a5 100644 --- a/apps/profile/migrations/0004_auto_20220110_2106.py +++ b/apps/profile/migrations/0004_auto_20220110_2106.py @@ -5,40 +5,484 @@ import vendor.timezones.fields class Migration(migrations.Migration): - dependencies = [ - ('profile', '0003_auto_20201005_0932'), + ("profile", "0003_auto_20201005_0932"), ] operations = [ migrations.AddField( - model_name='profile', - name='is_pro', + model_name="profile", + name="is_pro", field=models.BooleanField(blank=True, default=False, null=True), ), migrations.AlterField( - model_name='profile', - name='has_found_friends', + model_name="profile", + name="has_found_friends", field=models.BooleanField(blank=True, default=False, null=True), ), migrations.AlterField( - model_name='profile', - name='has_setup_feeds', + model_name="profile", + name="has_setup_feeds", field=models.BooleanField(blank=True, default=False, null=True), ), migrations.AlterField( - model_name='profile', - name='has_trained_intelligence', + model_name="profile", + name="has_trained_intelligence", field=models.BooleanField(blank=True, default=False, null=True), ), migrations.AlterField( - model_name='profile', - name='hide_getting_started', + model_name="profile", + name="hide_getting_started", field=models.BooleanField(blank=True, default=False, null=True), ), migrations.AlterField( - model_name='profile', - name='timezone', - field=vendor.timezones.fields.TimeZoneField(choices=[('Africa/Abidjan', '(GMT+0000) Africa/Abidjan'), ('Africa/Accra', '(GMT+0000) Africa/Accra'), ('Africa/Addis_Ababa', '(GMT+0300) Africa/Addis_Ababa'), ('Africa/Algiers', '(GMT+0100) Africa/Algiers'), ('Africa/Asmara', '(GMT+0300) Africa/Asmara'), ('Africa/Bamako', '(GMT+0000) Africa/Bamako'), ('Africa/Bangui', '(GMT+0100) Africa/Bangui'), ('Africa/Banjul', '(GMT+0000) Africa/Banjul'), ('Africa/Bissau', '(GMT+0000) Africa/Bissau'), ('Africa/Blantyre', '(GMT+0200) Africa/Blantyre'), ('Africa/Brazzaville', '(GMT+0100) Africa/Brazzaville'), ('Africa/Bujumbura', '(GMT+0200) Africa/Bujumbura'), ('Africa/Cairo', '(GMT+0200) Africa/Cairo'), ('Africa/Casablanca', '(GMT+0100) Africa/Casablanca'), ('Africa/Ceuta', '(GMT+0100) Africa/Ceuta'), ('Africa/Conakry', '(GMT+0000) Africa/Conakry'), ('Africa/Dakar', '(GMT+0000) Africa/Dakar'), ('Africa/Dar_es_Salaam', '(GMT+0300) Africa/Dar_es_Salaam'), 
('Africa/Djibouti', '(GMT+0300) Africa/Djibouti'), ('Africa/Douala', '(GMT+0100) Africa/Douala'), ('Africa/El_Aaiun', '(GMT+0100) Africa/El_Aaiun'), ('Africa/Freetown', '(GMT+0000) Africa/Freetown'), ('Africa/Gaborone', '(GMT+0200) Africa/Gaborone'), ('Africa/Harare', '(GMT+0200) Africa/Harare'), ('Africa/Johannesburg', '(GMT+0200) Africa/Johannesburg'), ('Africa/Juba', '(GMT+0300) Africa/Juba'), ('Africa/Kampala', '(GMT+0300) Africa/Kampala'), ('Africa/Khartoum', '(GMT+0200) Africa/Khartoum'), ('Africa/Kigali', '(GMT+0200) Africa/Kigali'), ('Africa/Kinshasa', '(GMT+0100) Africa/Kinshasa'), ('Africa/Lagos', '(GMT+0100) Africa/Lagos'), ('Africa/Libreville', '(GMT+0100) Africa/Libreville'), ('Africa/Lome', '(GMT+0000) Africa/Lome'), ('Africa/Luanda', '(GMT+0100) Africa/Luanda'), ('Africa/Lubumbashi', '(GMT+0200) Africa/Lubumbashi'), ('Africa/Lusaka', '(GMT+0200) Africa/Lusaka'), ('Africa/Malabo', '(GMT+0100) Africa/Malabo'), ('Africa/Maputo', '(GMT+0200) Africa/Maputo'), ('Africa/Maseru', '(GMT+0200) Africa/Maseru'), ('Africa/Mbabane', '(GMT+0200) Africa/Mbabane'), ('Africa/Mogadishu', '(GMT+0300) Africa/Mogadishu'), ('Africa/Monrovia', '(GMT+0000) Africa/Monrovia'), ('Africa/Nairobi', '(GMT+0300) Africa/Nairobi'), ('Africa/Ndjamena', '(GMT+0100) Africa/Ndjamena'), ('Africa/Niamey', '(GMT+0100) Africa/Niamey'), ('Africa/Nouakchott', '(GMT+0000) Africa/Nouakchott'), ('Africa/Ouagadougou', '(GMT+0000) Africa/Ouagadougou'), ('Africa/Porto-Novo', '(GMT+0100) Africa/Porto-Novo'), ('Africa/Sao_Tome', '(GMT+0000) Africa/Sao_Tome'), ('Africa/Tripoli', '(GMT+0200) Africa/Tripoli'), ('Africa/Tunis', '(GMT+0100) Africa/Tunis'), ('Africa/Windhoek', '(GMT+0200) Africa/Windhoek'), ('America/Adak', '(GMT-1000) America/Adak'), ('America/Anchorage', '(GMT-0900) America/Anchorage'), ('America/Anguilla', '(GMT-0400) America/Anguilla'), ('America/Antigua', '(GMT-0400) America/Antigua'), ('America/Araguaina', '(GMT-0300) America/Araguaina'), ('America/Argentina/Buenos_Aires', '(GMT-0300) America/Argentina/Buenos_Aires'), ('America/Argentina/Catamarca', '(GMT-0300) America/Argentina/Catamarca'), ('America/Argentina/Cordoba', '(GMT-0300) America/Argentina/Cordoba'), ('America/Argentina/Jujuy', '(GMT-0300) America/Argentina/Jujuy'), ('America/Argentina/La_Rioja', '(GMT-0300) America/Argentina/La_Rioja'), ('America/Argentina/Mendoza', '(GMT-0300) America/Argentina/Mendoza'), ('America/Argentina/Rio_Gallegos', '(GMT-0300) America/Argentina/Rio_Gallegos'), ('America/Argentina/Salta', '(GMT-0300) America/Argentina/Salta'), ('America/Argentina/San_Juan', '(GMT-0300) America/Argentina/San_Juan'), ('America/Argentina/San_Luis', '(GMT-0300) America/Argentina/San_Luis'), ('America/Argentina/Tucuman', '(GMT-0300) America/Argentina/Tucuman'), ('America/Argentina/Ushuaia', '(GMT-0300) America/Argentina/Ushuaia'), ('America/Aruba', '(GMT-0400) America/Aruba'), ('America/Asuncion', '(GMT-0300) America/Asuncion'), ('America/Atikokan', '(GMT-0500) America/Atikokan'), ('America/Bahia', '(GMT-0300) America/Bahia'), ('America/Bahia_Banderas', '(GMT-0600) America/Bahia_Banderas'), ('America/Barbados', '(GMT-0400) America/Barbados'), ('America/Belem', '(GMT-0300) America/Belem'), ('America/Belize', '(GMT-0600) America/Belize'), ('America/Blanc-Sablon', '(GMT-0400) America/Blanc-Sablon'), ('America/Boa_Vista', '(GMT-0400) America/Boa_Vista'), ('America/Bogota', '(GMT-0500) America/Bogota'), ('America/Boise', '(GMT-0700) America/Boise'), ('America/Cambridge_Bay', '(GMT-0700) America/Cambridge_Bay'), ('America/Campo_Grande', '(GMT-0400) 
America/Campo_Grande'), ('America/Cancun', '(GMT-0500) America/Cancun'), ('America/Caracas', '(GMT-0400) America/Caracas'), ('America/Cayenne', '(GMT-0300) America/Cayenne'), ('America/Cayman', '(GMT-0500) America/Cayman'), ('America/Chicago', '(GMT-0600) America/Chicago'), ('America/Chihuahua', '(GMT-0700) America/Chihuahua'), ('America/Costa_Rica', '(GMT-0600) America/Costa_Rica'), ('America/Creston', '(GMT-0700) America/Creston'), ('America/Cuiaba', '(GMT-0400) America/Cuiaba'), ('America/Curacao', '(GMT-0400) America/Curacao'), ('America/Danmarkshavn', '(GMT+0000) America/Danmarkshavn'), ('America/Dawson', '(GMT-0700) America/Dawson'), ('America/Dawson_Creek', '(GMT-0700) America/Dawson_Creek'), ('America/Denver', '(GMT-0700) America/Denver'), ('America/Detroit', '(GMT-0500) America/Detroit'), ('America/Dominica', '(GMT-0400) America/Dominica'), ('America/Edmonton', '(GMT-0700) America/Edmonton'), ('America/Eirunepe', '(GMT-0500) America/Eirunepe'), ('America/El_Salvador', '(GMT-0600) America/El_Salvador'), ('America/Fort_Nelson', '(GMT-0700) America/Fort_Nelson'), ('America/Fortaleza', '(GMT-0300) America/Fortaleza'), ('America/Glace_Bay', '(GMT-0400) America/Glace_Bay'), ('America/Goose_Bay', '(GMT-0400) America/Goose_Bay'), ('America/Grand_Turk', '(GMT-0500) America/Grand_Turk'), ('America/Grenada', '(GMT-0400) America/Grenada'), ('America/Guadeloupe', '(GMT-0400) America/Guadeloupe'), ('America/Guatemala', '(GMT-0600) America/Guatemala'), ('America/Guayaquil', '(GMT-0500) America/Guayaquil'), ('America/Guyana', '(GMT-0400) America/Guyana'), ('America/Halifax', '(GMT-0400) America/Halifax'), ('America/Havana', '(GMT-0500) America/Havana'), ('America/Hermosillo', '(GMT-0700) America/Hermosillo'), ('America/Indiana/Indianapolis', '(GMT-0500) America/Indiana/Indianapolis'), ('America/Indiana/Knox', '(GMT-0600) America/Indiana/Knox'), ('America/Indiana/Marengo', '(GMT-0500) America/Indiana/Marengo'), ('America/Indiana/Petersburg', '(GMT-0500) America/Indiana/Petersburg'), ('America/Indiana/Tell_City', '(GMT-0600) America/Indiana/Tell_City'), ('America/Indiana/Vevay', '(GMT-0500) America/Indiana/Vevay'), ('America/Indiana/Vincennes', '(GMT-0500) America/Indiana/Vincennes'), ('America/Indiana/Winamac', '(GMT-0500) America/Indiana/Winamac'), ('America/Inuvik', '(GMT-0700) America/Inuvik'), ('America/Iqaluit', '(GMT-0500) America/Iqaluit'), ('America/Jamaica', '(GMT-0500) America/Jamaica'), ('America/Juneau', '(GMT-0900) America/Juneau'), ('America/Kentucky/Louisville', '(GMT-0500) America/Kentucky/Louisville'), ('America/Kentucky/Monticello', '(GMT-0500) America/Kentucky/Monticello'), ('America/Kralendijk', '(GMT-0400) America/Kralendijk'), ('America/La_Paz', '(GMT-0400) America/La_Paz'), ('America/Lima', '(GMT-0500) America/Lima'), ('America/Los_Angeles', '(GMT-0800) America/Los_Angeles'), ('America/Lower_Princes', '(GMT-0400) America/Lower_Princes'), ('America/Maceio', '(GMT-0300) America/Maceio'), ('America/Managua', '(GMT-0600) America/Managua'), ('America/Manaus', '(GMT-0400) America/Manaus'), ('America/Marigot', '(GMT-0400) America/Marigot'), ('America/Martinique', '(GMT-0400) America/Martinique'), ('America/Matamoros', '(GMT-0600) America/Matamoros'), ('America/Mazatlan', '(GMT-0700) America/Mazatlan'), ('America/Menominee', '(GMT-0600) America/Menominee'), ('America/Merida', '(GMT-0600) America/Merida'), ('America/Metlakatla', '(GMT-0900) America/Metlakatla'), ('America/Mexico_City', '(GMT-0600) America/Mexico_City'), ('America/Miquelon', '(GMT-0300) America/Miquelon'), 
('America/Moncton', '(GMT-0400) America/Moncton'), ('America/Monterrey', '(GMT-0600) America/Monterrey'), ('America/Montevideo', '(GMT-0300) America/Montevideo'), ('America/Montserrat', '(GMT-0400) America/Montserrat'), ('America/Nassau', '(GMT-0500) America/Nassau'), ('America/New_York', '(GMT-0500) America/New_York'), ('America/Nipigon', '(GMT-0500) America/Nipigon'), ('America/Nome', '(GMT-0900) America/Nome'), ('America/Noronha', '(GMT-0200) America/Noronha'), ('America/North_Dakota/Beulah', '(GMT-0600) America/North_Dakota/Beulah'), ('America/North_Dakota/Center', '(GMT-0600) America/North_Dakota/Center'), ('America/North_Dakota/New_Salem', '(GMT-0600) America/North_Dakota/New_Salem'), ('America/Nuuk', '(GMT-0300) America/Nuuk'), ('America/Ojinaga', '(GMT-0700) America/Ojinaga'), ('America/Panama', '(GMT-0500) America/Panama'), ('America/Pangnirtung', '(GMT-0500) America/Pangnirtung'), ('America/Paramaribo', '(GMT-0300) America/Paramaribo'), ('America/Phoenix', '(GMT-0700) America/Phoenix'), ('America/Port-au-Prince', '(GMT-0500) America/Port-au-Prince'), ('America/Port_of_Spain', '(GMT-0400) America/Port_of_Spain'), ('America/Porto_Velho', '(GMT-0400) America/Porto_Velho'), ('America/Puerto_Rico', '(GMT-0400) America/Puerto_Rico'), ('America/Punta_Arenas', '(GMT-0300) America/Punta_Arenas'), ('America/Rainy_River', '(GMT-0600) America/Rainy_River'), ('America/Rankin_Inlet', '(GMT-0600) America/Rankin_Inlet'), ('America/Recife', '(GMT-0300) America/Recife'), ('America/Regina', '(GMT-0600) America/Regina'), ('America/Resolute', '(GMT-0600) America/Resolute'), ('America/Rio_Branco', '(GMT-0500) America/Rio_Branco'), ('America/Santarem', '(GMT-0300) America/Santarem'), ('America/Santiago', '(GMT-0300) America/Santiago'), ('America/Santo_Domingo', '(GMT-0400) America/Santo_Domingo'), ('America/Sao_Paulo', '(GMT-0300) America/Sao_Paulo'), ('America/Scoresbysund', '(GMT-0100) America/Scoresbysund'), ('America/Sitka', '(GMT-0900) America/Sitka'), ('America/St_Barthelemy', '(GMT-0400) America/St_Barthelemy'), ('America/St_Johns', '(GMT-0330) America/St_Johns'), ('America/St_Kitts', '(GMT-0400) America/St_Kitts'), ('America/St_Lucia', '(GMT-0400) America/St_Lucia'), ('America/St_Thomas', '(GMT-0400) America/St_Thomas'), ('America/St_Vincent', '(GMT-0400) America/St_Vincent'), ('America/Swift_Current', '(GMT-0600) America/Swift_Current'), ('America/Tegucigalpa', '(GMT-0600) America/Tegucigalpa'), ('America/Thule', '(GMT-0400) America/Thule'), ('America/Thunder_Bay', '(GMT-0500) America/Thunder_Bay'), ('America/Tijuana', '(GMT-0800) America/Tijuana'), ('America/Toronto', '(GMT-0500) America/Toronto'), ('America/Tortola', '(GMT-0400) America/Tortola'), ('America/Vancouver', '(GMT-0800) America/Vancouver'), ('America/Whitehorse', '(GMT-0700) America/Whitehorse'), ('America/Winnipeg', '(GMT-0600) America/Winnipeg'), ('America/Yakutat', '(GMT-0900) America/Yakutat'), ('America/Yellowknife', '(GMT-0700) America/Yellowknife'), ('Antarctica/Casey', '(GMT+1100) Antarctica/Casey'), ('Antarctica/Davis', '(GMT+0700) Antarctica/Davis'), ('Antarctica/DumontDUrville', '(GMT+1000) Antarctica/DumontDUrville'), ('Antarctica/Macquarie', '(GMT+1100) Antarctica/Macquarie'), ('Antarctica/Mawson', '(GMT+0500) Antarctica/Mawson'), ('Antarctica/McMurdo', '(GMT+1300) Antarctica/McMurdo'), ('Antarctica/Palmer', '(GMT-0300) Antarctica/Palmer'), ('Antarctica/Rothera', '(GMT-0300) Antarctica/Rothera'), ('Antarctica/Syowa', '(GMT+0300) Antarctica/Syowa'), ('Antarctica/Troll', '(GMT+0000) Antarctica/Troll'), 
('Antarctica/Vostok', '(GMT+0600) Antarctica/Vostok'), ('Arctic/Longyearbyen', '(GMT+0100) Arctic/Longyearbyen'), ('Asia/Aden', '(GMT+0300) Asia/Aden'), ('Asia/Almaty', '(GMT+0600) Asia/Almaty'), ('Asia/Amman', '(GMT+0200) Asia/Amman'), ('Asia/Anadyr', '(GMT+1200) Asia/Anadyr'), ('Asia/Aqtau', '(GMT+0500) Asia/Aqtau'), ('Asia/Aqtobe', '(GMT+0500) Asia/Aqtobe'), ('Asia/Ashgabat', '(GMT+0500) Asia/Ashgabat'), ('Asia/Atyrau', '(GMT+0500) Asia/Atyrau'), ('Asia/Baghdad', '(GMT+0300) Asia/Baghdad'), ('Asia/Bahrain', '(GMT+0300) Asia/Bahrain'), ('Asia/Baku', '(GMT+0400) Asia/Baku'), ('Asia/Bangkok', '(GMT+0700) Asia/Bangkok'), ('Asia/Barnaul', '(GMT+0700) Asia/Barnaul'), ('Asia/Beirut', '(GMT+0200) Asia/Beirut'), ('Asia/Bishkek', '(GMT+0600) Asia/Bishkek'), ('Asia/Brunei', '(GMT+0800) Asia/Brunei'), ('Asia/Chita', '(GMT+0900) Asia/Chita'), ('Asia/Choibalsan', '(GMT+0800) Asia/Choibalsan'), ('Asia/Colombo', '(GMT+0530) Asia/Colombo'), ('Asia/Damascus', '(GMT+0200) Asia/Damascus'), ('Asia/Dhaka', '(GMT+0600) Asia/Dhaka'), ('Asia/Dili', '(GMT+0900) Asia/Dili'), ('Asia/Dubai', '(GMT+0400) Asia/Dubai'), ('Asia/Dushanbe', '(GMT+0500) Asia/Dushanbe'), ('Asia/Famagusta', '(GMT+0200) Asia/Famagusta'), ('Asia/Gaza', '(GMT+0200) Asia/Gaza'), ('Asia/Hebron', '(GMT+0200) Asia/Hebron'), ('Asia/Ho_Chi_Minh', '(GMT+0700) Asia/Ho_Chi_Minh'), ('Asia/Hong_Kong', '(GMT+0800) Asia/Hong_Kong'), ('Asia/Hovd', '(GMT+0700) Asia/Hovd'), ('Asia/Irkutsk', '(GMT+0800) Asia/Irkutsk'), ('Asia/Jakarta', '(GMT+0700) Asia/Jakarta'), ('Asia/Jayapura', '(GMT+0900) Asia/Jayapura'), ('Asia/Jerusalem', '(GMT+0200) Asia/Jerusalem'), ('Asia/Kabul', '(GMT+0430) Asia/Kabul'), ('Asia/Kamchatka', '(GMT+1200) Asia/Kamchatka'), ('Asia/Karachi', '(GMT+0500) Asia/Karachi'), ('Asia/Kathmandu', '(GMT+0545) Asia/Kathmandu'), ('Asia/Khandyga', '(GMT+0900) Asia/Khandyga'), ('Asia/Kolkata', '(GMT+0530) Asia/Kolkata'), ('Asia/Krasnoyarsk', '(GMT+0700) Asia/Krasnoyarsk'), ('Asia/Kuala_Lumpur', '(GMT+0800) Asia/Kuala_Lumpur'), ('Asia/Kuching', '(GMT+0800) Asia/Kuching'), ('Asia/Kuwait', '(GMT+0300) Asia/Kuwait'), ('Asia/Macau', '(GMT+0800) Asia/Macau'), ('Asia/Magadan', '(GMT+1100) Asia/Magadan'), ('Asia/Makassar', '(GMT+0800) Asia/Makassar'), ('Asia/Manila', '(GMT+0800) Asia/Manila'), ('Asia/Muscat', '(GMT+0400) Asia/Muscat'), ('Asia/Nicosia', '(GMT+0200) Asia/Nicosia'), ('Asia/Novokuznetsk', '(GMT+0700) Asia/Novokuznetsk'), ('Asia/Novosibirsk', '(GMT+0700) Asia/Novosibirsk'), ('Asia/Omsk', '(GMT+0600) Asia/Omsk'), ('Asia/Oral', '(GMT+0500) Asia/Oral'), ('Asia/Phnom_Penh', '(GMT+0700) Asia/Phnom_Penh'), ('Asia/Pontianak', '(GMT+0700) Asia/Pontianak'), ('Asia/Pyongyang', '(GMT+0900) Asia/Pyongyang'), ('Asia/Qatar', '(GMT+0300) Asia/Qatar'), ('Asia/Qostanay', '(GMT+0600) Asia/Qostanay'), ('Asia/Qyzylorda', '(GMT+0500) Asia/Qyzylorda'), ('Asia/Riyadh', '(GMT+0300) Asia/Riyadh'), ('Asia/Sakhalin', '(GMT+1100) Asia/Sakhalin'), ('Asia/Samarkand', '(GMT+0500) Asia/Samarkand'), ('Asia/Seoul', '(GMT+0900) Asia/Seoul'), ('Asia/Shanghai', '(GMT+0800) Asia/Shanghai'), ('Asia/Singapore', '(GMT+0800) Asia/Singapore'), ('Asia/Srednekolymsk', '(GMT+1100) Asia/Srednekolymsk'), ('Asia/Taipei', '(GMT+0800) Asia/Taipei'), ('Asia/Tashkent', '(GMT+0500) Asia/Tashkent'), ('Asia/Tbilisi', '(GMT+0400) Asia/Tbilisi'), ('Asia/Tehran', '(GMT+0330) Asia/Tehran'), ('Asia/Thimphu', '(GMT+0600) Asia/Thimphu'), ('Asia/Tokyo', '(GMT+0900) Asia/Tokyo'), ('Asia/Tomsk', '(GMT+0700) Asia/Tomsk'), ('Asia/Ulaanbaatar', '(GMT+0800) Asia/Ulaanbaatar'), ('Asia/Urumqi', '(GMT+0600) 
Asia/Urumqi'), ('Asia/Ust-Nera', '(GMT+1000) Asia/Ust-Nera'), ('Asia/Vientiane', '(GMT+0700) Asia/Vientiane'), ('Asia/Vladivostok', '(GMT+1000) Asia/Vladivostok'), ('Asia/Yakutsk', '(GMT+0900) Asia/Yakutsk'), ('Asia/Yangon', '(GMT+0630) Asia/Yangon'), ('Asia/Yekaterinburg', '(GMT+0500) Asia/Yekaterinburg'), ('Asia/Yerevan', '(GMT+0400) Asia/Yerevan'), ('Atlantic/Azores', '(GMT-0100) Atlantic/Azores'), ('Atlantic/Bermuda', '(GMT-0400) Atlantic/Bermuda'), ('Atlantic/Canary', '(GMT+0000) Atlantic/Canary'), ('Atlantic/Cape_Verde', '(GMT-0100) Atlantic/Cape_Verde'), ('Atlantic/Faroe', '(GMT+0000) Atlantic/Faroe'), ('Atlantic/Madeira', '(GMT+0000) Atlantic/Madeira'), ('Atlantic/Reykjavik', '(GMT+0000) Atlantic/Reykjavik'), ('Atlantic/South_Georgia', '(GMT-0200) Atlantic/South_Georgia'), ('Atlantic/St_Helena', '(GMT+0000) Atlantic/St_Helena'), ('Atlantic/Stanley', '(GMT-0300) Atlantic/Stanley'), ('Australia/Adelaide', '(GMT+1030) Australia/Adelaide'), ('Australia/Brisbane', '(GMT+1000) Australia/Brisbane'), ('Australia/Broken_Hill', '(GMT+1030) Australia/Broken_Hill'), ('Australia/Currie', '(GMT+1100) Australia/Currie'), ('Australia/Darwin', '(GMT+0930) Australia/Darwin'), ('Australia/Eucla', '(GMT+0845) Australia/Eucla'), ('Australia/Hobart', '(GMT+1100) Australia/Hobart'), ('Australia/Lindeman', '(GMT+1000) Australia/Lindeman'), ('Australia/Lord_Howe', '(GMT+1100) Australia/Lord_Howe'), ('Australia/Melbourne', '(GMT+1100) Australia/Melbourne'), ('Australia/Perth', '(GMT+0800) Australia/Perth'), ('Australia/Sydney', '(GMT+1100) Australia/Sydney'), ('Canada/Atlantic', '(GMT-0400) Canada/Atlantic'), ('Canada/Central', '(GMT-0600) Canada/Central'), ('Canada/Eastern', '(GMT-0500) Canada/Eastern'), ('Canada/Mountain', '(GMT-0700) Canada/Mountain'), ('Canada/Newfoundland', '(GMT-0330) Canada/Newfoundland'), ('Canada/Pacific', '(GMT-0800) Canada/Pacific'), ('Europe/Amsterdam', '(GMT+0100) Europe/Amsterdam'), ('Europe/Andorra', '(GMT+0100) Europe/Andorra'), ('Europe/Astrakhan', '(GMT+0400) Europe/Astrakhan'), ('Europe/Athens', '(GMT+0200) Europe/Athens'), ('Europe/Belgrade', '(GMT+0100) Europe/Belgrade'), ('Europe/Berlin', '(GMT+0100) Europe/Berlin'), ('Europe/Bratislava', '(GMT+0100) Europe/Bratislava'), ('Europe/Brussels', '(GMT+0100) Europe/Brussels'), ('Europe/Bucharest', '(GMT+0200) Europe/Bucharest'), ('Europe/Budapest', '(GMT+0100) Europe/Budapest'), ('Europe/Busingen', '(GMT+0100) Europe/Busingen'), ('Europe/Chisinau', '(GMT+0200) Europe/Chisinau'), ('Europe/Copenhagen', '(GMT+0100) Europe/Copenhagen'), ('Europe/Dublin', '(GMT+0000) Europe/Dublin'), ('Europe/Gibraltar', '(GMT+0100) Europe/Gibraltar'), ('Europe/Guernsey', '(GMT+0000) Europe/Guernsey'), ('Europe/Helsinki', '(GMT+0200) Europe/Helsinki'), ('Europe/Isle_of_Man', '(GMT+0000) Europe/Isle_of_Man'), ('Europe/Istanbul', '(GMT+0300) Europe/Istanbul'), ('Europe/Jersey', '(GMT+0000) Europe/Jersey'), ('Europe/Kaliningrad', '(GMT+0200) Europe/Kaliningrad'), ('Europe/Kiev', '(GMT+0200) Europe/Kiev'), ('Europe/Kirov', '(GMT+0300) Europe/Kirov'), ('Europe/Lisbon', '(GMT+0000) Europe/Lisbon'), ('Europe/Ljubljana', '(GMT+0100) Europe/Ljubljana'), ('Europe/London', '(GMT+0000) Europe/London'), ('Europe/Luxembourg', '(GMT+0100) Europe/Luxembourg'), ('Europe/Madrid', '(GMT+0100) Europe/Madrid'), ('Europe/Malta', '(GMT+0100) Europe/Malta'), ('Europe/Mariehamn', '(GMT+0200) Europe/Mariehamn'), ('Europe/Minsk', '(GMT+0300) Europe/Minsk'), ('Europe/Monaco', '(GMT+0100) Europe/Monaco'), ('Europe/Moscow', '(GMT+0300) Europe/Moscow'), ('Europe/Oslo', 
'(GMT+0100) Europe/Oslo'), ('Europe/Paris', '(GMT+0100) Europe/Paris'), ('Europe/Podgorica', '(GMT+0100) Europe/Podgorica'), ('Europe/Prague', '(GMT+0100) Europe/Prague'), ('Europe/Riga', '(GMT+0200) Europe/Riga'), ('Europe/Rome', '(GMT+0100) Europe/Rome'), ('Europe/Samara', '(GMT+0400) Europe/Samara'), ('Europe/San_Marino', '(GMT+0100) Europe/San_Marino'), ('Europe/Sarajevo', '(GMT+0100) Europe/Sarajevo'), ('Europe/Saratov', '(GMT+0400) Europe/Saratov'), ('Europe/Simferopol', '(GMT+0300) Europe/Simferopol'), ('Europe/Skopje', '(GMT+0100) Europe/Skopje'), ('Europe/Sofia', '(GMT+0200) Europe/Sofia'), ('Europe/Stockholm', '(GMT+0100) Europe/Stockholm'), ('Europe/Tallinn', '(GMT+0200) Europe/Tallinn'), ('Europe/Tirane', '(GMT+0100) Europe/Tirane'), ('Europe/Ulyanovsk', '(GMT+0400) Europe/Ulyanovsk'), ('Europe/Uzhgorod', '(GMT+0200) Europe/Uzhgorod'), ('Europe/Vaduz', '(GMT+0100) Europe/Vaduz'), ('Europe/Vatican', '(GMT+0100) Europe/Vatican'), ('Europe/Vienna', '(GMT+0100) Europe/Vienna'), ('Europe/Vilnius', '(GMT+0200) Europe/Vilnius'), ('Europe/Volgograd', '(GMT+0400) Europe/Volgograd'), ('Europe/Warsaw', '(GMT+0100) Europe/Warsaw'), ('Europe/Zagreb', '(GMT+0100) Europe/Zagreb'), ('Europe/Zaporozhye', '(GMT+0200) Europe/Zaporozhye'), ('Europe/Zurich', '(GMT+0100) Europe/Zurich'), ('GMT', '(GMT+0000) GMT'), ('Indian/Antananarivo', '(GMT+0300) Indian/Antananarivo'), ('Indian/Chagos', '(GMT+0600) Indian/Chagos'), ('Indian/Christmas', '(GMT+0700) Indian/Christmas'), ('Indian/Cocos', '(GMT+0630) Indian/Cocos'), ('Indian/Comoro', '(GMT+0300) Indian/Comoro'), ('Indian/Kerguelen', '(GMT+0500) Indian/Kerguelen'), ('Indian/Mahe', '(GMT+0400) Indian/Mahe'), ('Indian/Maldives', '(GMT+0500) Indian/Maldives'), ('Indian/Mauritius', '(GMT+0400) Indian/Mauritius'), ('Indian/Mayotte', '(GMT+0300) Indian/Mayotte'), ('Indian/Reunion', '(GMT+0400) Indian/Reunion'), ('Pacific/Apia', '(GMT+1400) Pacific/Apia'), ('Pacific/Auckland', '(GMT+1300) Pacific/Auckland'), ('Pacific/Bougainville', '(GMT+1100) Pacific/Bougainville'), ('Pacific/Chatham', '(GMT+1345) Pacific/Chatham'), ('Pacific/Chuuk', '(GMT+1000) Pacific/Chuuk'), ('Pacific/Easter', '(GMT-0500) Pacific/Easter'), ('Pacific/Efate', '(GMT+1100) Pacific/Efate'), ('Pacific/Enderbury', '(GMT+1300) Pacific/Enderbury'), ('Pacific/Fakaofo', '(GMT+1300) Pacific/Fakaofo'), ('Pacific/Fiji', '(GMT+1300) Pacific/Fiji'), ('Pacific/Funafuti', '(GMT+1200) Pacific/Funafuti'), ('Pacific/Galapagos', '(GMT-0600) Pacific/Galapagos'), ('Pacific/Gambier', '(GMT-0900) Pacific/Gambier'), ('Pacific/Guadalcanal', '(GMT+1100) Pacific/Guadalcanal'), ('Pacific/Guam', '(GMT+1000) Pacific/Guam'), ('Pacific/Honolulu', '(GMT-1000) Pacific/Honolulu'), ('Pacific/Kiritimati', '(GMT+1400) Pacific/Kiritimati'), ('Pacific/Kosrae', '(GMT+1100) Pacific/Kosrae'), ('Pacific/Kwajalein', '(GMT+1200) Pacific/Kwajalein'), ('Pacific/Majuro', '(GMT+1200) Pacific/Majuro'), ('Pacific/Marquesas', '(GMT-0930) Pacific/Marquesas'), ('Pacific/Midway', '(GMT-1100) Pacific/Midway'), ('Pacific/Nauru', '(GMT+1200) Pacific/Nauru'), ('Pacific/Niue', '(GMT-1100) Pacific/Niue'), ('Pacific/Norfolk', '(GMT+1200) Pacific/Norfolk'), ('Pacific/Noumea', '(GMT+1100) Pacific/Noumea'), ('Pacific/Pago_Pago', '(GMT-1100) Pacific/Pago_Pago'), ('Pacific/Palau', '(GMT+0900) Pacific/Palau'), ('Pacific/Pitcairn', '(GMT-0800) Pacific/Pitcairn'), ('Pacific/Pohnpei', '(GMT+1100) Pacific/Pohnpei'), ('Pacific/Port_Moresby', '(GMT+1000) Pacific/Port_Moresby'), ('Pacific/Rarotonga', '(GMT-1000) Pacific/Rarotonga'), ('Pacific/Saipan', '(GMT+1000) 
Pacific/Saipan'), ('Pacific/Tahiti', '(GMT-1000) Pacific/Tahiti'), ('Pacific/Tarawa', '(GMT+1200) Pacific/Tarawa'), ('Pacific/Tongatapu', '(GMT+1300) Pacific/Tongatapu'), ('Pacific/Wake', '(GMT+1200) Pacific/Wake'), ('Pacific/Wallis', '(GMT+1200) Pacific/Wallis'), ('US/Alaska', '(GMT-0900) US/Alaska'), ('US/Arizona', '(GMT-0700) US/Arizona'), ('US/Central', '(GMT-0600) US/Central'), ('US/Eastern', '(GMT-0500) US/Eastern'), ('US/Hawaii', '(GMT-1000) US/Hawaii'), ('US/Mountain', '(GMT-0700) US/Mountain'), ('US/Pacific', '(GMT-0800) US/Pacific'), ('UTC', '(GMT+0000) UTC')], default='America/New_York', max_length=100), + model_name="profile", + name="timezone", + field=vendor.timezones.fields.TimeZoneField( + choices=[ + ("Africa/Abidjan", "(GMT+0000) Africa/Abidjan"), + ("Africa/Accra", "(GMT+0000) Africa/Accra"), + ("Africa/Addis_Ababa", "(GMT+0300) Africa/Addis_Ababa"), + ("Africa/Algiers", "(GMT+0100) Africa/Algiers"), + ("Africa/Asmara", "(GMT+0300) Africa/Asmara"), + ("Africa/Bamako", "(GMT+0000) Africa/Bamako"), + ("Africa/Bangui", "(GMT+0100) Africa/Bangui"), + ("Africa/Banjul", "(GMT+0000) Africa/Banjul"), + ("Africa/Bissau", "(GMT+0000) Africa/Bissau"), + ("Africa/Blantyre", "(GMT+0200) Africa/Blantyre"), + ("Africa/Brazzaville", "(GMT+0100) Africa/Brazzaville"), + ("Africa/Bujumbura", "(GMT+0200) Africa/Bujumbura"), + ("Africa/Cairo", "(GMT+0200) Africa/Cairo"), + ("Africa/Casablanca", "(GMT+0100) Africa/Casablanca"), + ("Africa/Ceuta", "(GMT+0100) Africa/Ceuta"), + ("Africa/Conakry", "(GMT+0000) Africa/Conakry"), + ("Africa/Dakar", "(GMT+0000) Africa/Dakar"), + ("Africa/Dar_es_Salaam", "(GMT+0300) Africa/Dar_es_Salaam"), + ("Africa/Djibouti", "(GMT+0300) Africa/Djibouti"), + ("Africa/Douala", "(GMT+0100) Africa/Douala"), + ("Africa/El_Aaiun", "(GMT+0100) Africa/El_Aaiun"), + ("Africa/Freetown", "(GMT+0000) Africa/Freetown"), + ("Africa/Gaborone", "(GMT+0200) Africa/Gaborone"), + ("Africa/Harare", "(GMT+0200) Africa/Harare"), + ("Africa/Johannesburg", "(GMT+0200) Africa/Johannesburg"), + ("Africa/Juba", "(GMT+0300) Africa/Juba"), + ("Africa/Kampala", "(GMT+0300) Africa/Kampala"), + ("Africa/Khartoum", "(GMT+0200) Africa/Khartoum"), + ("Africa/Kigali", "(GMT+0200) Africa/Kigali"), + ("Africa/Kinshasa", "(GMT+0100) Africa/Kinshasa"), + ("Africa/Lagos", "(GMT+0100) Africa/Lagos"), + ("Africa/Libreville", "(GMT+0100) Africa/Libreville"), + ("Africa/Lome", "(GMT+0000) Africa/Lome"), + ("Africa/Luanda", "(GMT+0100) Africa/Luanda"), + ("Africa/Lubumbashi", "(GMT+0200) Africa/Lubumbashi"), + ("Africa/Lusaka", "(GMT+0200) Africa/Lusaka"), + ("Africa/Malabo", "(GMT+0100) Africa/Malabo"), + ("Africa/Maputo", "(GMT+0200) Africa/Maputo"), + ("Africa/Maseru", "(GMT+0200) Africa/Maseru"), + ("Africa/Mbabane", "(GMT+0200) Africa/Mbabane"), + ("Africa/Mogadishu", "(GMT+0300) Africa/Mogadishu"), + ("Africa/Monrovia", "(GMT+0000) Africa/Monrovia"), + ("Africa/Nairobi", "(GMT+0300) Africa/Nairobi"), + ("Africa/Ndjamena", "(GMT+0100) Africa/Ndjamena"), + ("Africa/Niamey", "(GMT+0100) Africa/Niamey"), + ("Africa/Nouakchott", "(GMT+0000) Africa/Nouakchott"), + ("Africa/Ouagadougou", "(GMT+0000) Africa/Ouagadougou"), + ("Africa/Porto-Novo", "(GMT+0100) Africa/Porto-Novo"), + ("Africa/Sao_Tome", "(GMT+0000) Africa/Sao_Tome"), + ("Africa/Tripoli", "(GMT+0200) Africa/Tripoli"), + ("Africa/Tunis", "(GMT+0100) Africa/Tunis"), + ("Africa/Windhoek", "(GMT+0200) Africa/Windhoek"), + ("America/Adak", "(GMT-1000) America/Adak"), + ("America/Anchorage", "(GMT-0900) America/Anchorage"), + ("America/Anguilla", 
"(GMT-0400) America/Anguilla"), + ("America/Antigua", "(GMT-0400) America/Antigua"), + ("America/Araguaina", "(GMT-0300) America/Araguaina"), + ("America/Argentina/Buenos_Aires", "(GMT-0300) America/Argentina/Buenos_Aires"), + ("America/Argentina/Catamarca", "(GMT-0300) America/Argentina/Catamarca"), + ("America/Argentina/Cordoba", "(GMT-0300) America/Argentina/Cordoba"), + ("America/Argentina/Jujuy", "(GMT-0300) America/Argentina/Jujuy"), + ("America/Argentina/La_Rioja", "(GMT-0300) America/Argentina/La_Rioja"), + ("America/Argentina/Mendoza", "(GMT-0300) America/Argentina/Mendoza"), + ("America/Argentina/Rio_Gallegos", "(GMT-0300) America/Argentina/Rio_Gallegos"), + ("America/Argentina/Salta", "(GMT-0300) America/Argentina/Salta"), + ("America/Argentina/San_Juan", "(GMT-0300) America/Argentina/San_Juan"), + ("America/Argentina/San_Luis", "(GMT-0300) America/Argentina/San_Luis"), + ("America/Argentina/Tucuman", "(GMT-0300) America/Argentina/Tucuman"), + ("America/Argentina/Ushuaia", "(GMT-0300) America/Argentina/Ushuaia"), + ("America/Aruba", "(GMT-0400) America/Aruba"), + ("America/Asuncion", "(GMT-0300) America/Asuncion"), + ("America/Atikokan", "(GMT-0500) America/Atikokan"), + ("America/Bahia", "(GMT-0300) America/Bahia"), + ("America/Bahia_Banderas", "(GMT-0600) America/Bahia_Banderas"), + ("America/Barbados", "(GMT-0400) America/Barbados"), + ("America/Belem", "(GMT-0300) America/Belem"), + ("America/Belize", "(GMT-0600) America/Belize"), + ("America/Blanc-Sablon", "(GMT-0400) America/Blanc-Sablon"), + ("America/Boa_Vista", "(GMT-0400) America/Boa_Vista"), + ("America/Bogota", "(GMT-0500) America/Bogota"), + ("America/Boise", "(GMT-0700) America/Boise"), + ("America/Cambridge_Bay", "(GMT-0700) America/Cambridge_Bay"), + ("America/Campo_Grande", "(GMT-0400) America/Campo_Grande"), + ("America/Cancun", "(GMT-0500) America/Cancun"), + ("America/Caracas", "(GMT-0400) America/Caracas"), + ("America/Cayenne", "(GMT-0300) America/Cayenne"), + ("America/Cayman", "(GMT-0500) America/Cayman"), + ("America/Chicago", "(GMT-0600) America/Chicago"), + ("America/Chihuahua", "(GMT-0700) America/Chihuahua"), + ("America/Costa_Rica", "(GMT-0600) America/Costa_Rica"), + ("America/Creston", "(GMT-0700) America/Creston"), + ("America/Cuiaba", "(GMT-0400) America/Cuiaba"), + ("America/Curacao", "(GMT-0400) America/Curacao"), + ("America/Danmarkshavn", "(GMT+0000) America/Danmarkshavn"), + ("America/Dawson", "(GMT-0700) America/Dawson"), + ("America/Dawson_Creek", "(GMT-0700) America/Dawson_Creek"), + ("America/Denver", "(GMT-0700) America/Denver"), + ("America/Detroit", "(GMT-0500) America/Detroit"), + ("America/Dominica", "(GMT-0400) America/Dominica"), + ("America/Edmonton", "(GMT-0700) America/Edmonton"), + ("America/Eirunepe", "(GMT-0500) America/Eirunepe"), + ("America/El_Salvador", "(GMT-0600) America/El_Salvador"), + ("America/Fort_Nelson", "(GMT-0700) America/Fort_Nelson"), + ("America/Fortaleza", "(GMT-0300) America/Fortaleza"), + ("America/Glace_Bay", "(GMT-0400) America/Glace_Bay"), + ("America/Goose_Bay", "(GMT-0400) America/Goose_Bay"), + ("America/Grand_Turk", "(GMT-0500) America/Grand_Turk"), + ("America/Grenada", "(GMT-0400) America/Grenada"), + ("America/Guadeloupe", "(GMT-0400) America/Guadeloupe"), + ("America/Guatemala", "(GMT-0600) America/Guatemala"), + ("America/Guayaquil", "(GMT-0500) America/Guayaquil"), + ("America/Guyana", "(GMT-0400) America/Guyana"), + ("America/Halifax", "(GMT-0400) America/Halifax"), + ("America/Havana", "(GMT-0500) America/Havana"), + 
("America/Hermosillo", "(GMT-0700) America/Hermosillo"), + ("America/Indiana/Indianapolis", "(GMT-0500) America/Indiana/Indianapolis"), + ("America/Indiana/Knox", "(GMT-0600) America/Indiana/Knox"), + ("America/Indiana/Marengo", "(GMT-0500) America/Indiana/Marengo"), + ("America/Indiana/Petersburg", "(GMT-0500) America/Indiana/Petersburg"), + ("America/Indiana/Tell_City", "(GMT-0600) America/Indiana/Tell_City"), + ("America/Indiana/Vevay", "(GMT-0500) America/Indiana/Vevay"), + ("America/Indiana/Vincennes", "(GMT-0500) America/Indiana/Vincennes"), + ("America/Indiana/Winamac", "(GMT-0500) America/Indiana/Winamac"), + ("America/Inuvik", "(GMT-0700) America/Inuvik"), + ("America/Iqaluit", "(GMT-0500) America/Iqaluit"), + ("America/Jamaica", "(GMT-0500) America/Jamaica"), + ("America/Juneau", "(GMT-0900) America/Juneau"), + ("America/Kentucky/Louisville", "(GMT-0500) America/Kentucky/Louisville"), + ("America/Kentucky/Monticello", "(GMT-0500) America/Kentucky/Monticello"), + ("America/Kralendijk", "(GMT-0400) America/Kralendijk"), + ("America/La_Paz", "(GMT-0400) America/La_Paz"), + ("America/Lima", "(GMT-0500) America/Lima"), + ("America/Los_Angeles", "(GMT-0800) America/Los_Angeles"), + ("America/Lower_Princes", "(GMT-0400) America/Lower_Princes"), + ("America/Maceio", "(GMT-0300) America/Maceio"), + ("America/Managua", "(GMT-0600) America/Managua"), + ("America/Manaus", "(GMT-0400) America/Manaus"), + ("America/Marigot", "(GMT-0400) America/Marigot"), + ("America/Martinique", "(GMT-0400) America/Martinique"), + ("America/Matamoros", "(GMT-0600) America/Matamoros"), + ("America/Mazatlan", "(GMT-0700) America/Mazatlan"), + ("America/Menominee", "(GMT-0600) America/Menominee"), + ("America/Merida", "(GMT-0600) America/Merida"), + ("America/Metlakatla", "(GMT-0900) America/Metlakatla"), + ("America/Mexico_City", "(GMT-0600) America/Mexico_City"), + ("America/Miquelon", "(GMT-0300) America/Miquelon"), + ("America/Moncton", "(GMT-0400) America/Moncton"), + ("America/Monterrey", "(GMT-0600) America/Monterrey"), + ("America/Montevideo", "(GMT-0300) America/Montevideo"), + ("America/Montserrat", "(GMT-0400) America/Montserrat"), + ("America/Nassau", "(GMT-0500) America/Nassau"), + ("America/New_York", "(GMT-0500) America/New_York"), + ("America/Nipigon", "(GMT-0500) America/Nipigon"), + ("America/Nome", "(GMT-0900) America/Nome"), + ("America/Noronha", "(GMT-0200) America/Noronha"), + ("America/North_Dakota/Beulah", "(GMT-0600) America/North_Dakota/Beulah"), + ("America/North_Dakota/Center", "(GMT-0600) America/North_Dakota/Center"), + ("America/North_Dakota/New_Salem", "(GMT-0600) America/North_Dakota/New_Salem"), + ("America/Nuuk", "(GMT-0300) America/Nuuk"), + ("America/Ojinaga", "(GMT-0700) America/Ojinaga"), + ("America/Panama", "(GMT-0500) America/Panama"), + ("America/Pangnirtung", "(GMT-0500) America/Pangnirtung"), + ("America/Paramaribo", "(GMT-0300) America/Paramaribo"), + ("America/Phoenix", "(GMT-0700) America/Phoenix"), + ("America/Port-au-Prince", "(GMT-0500) America/Port-au-Prince"), + ("America/Port_of_Spain", "(GMT-0400) America/Port_of_Spain"), + ("America/Porto_Velho", "(GMT-0400) America/Porto_Velho"), + ("America/Puerto_Rico", "(GMT-0400) America/Puerto_Rico"), + ("America/Punta_Arenas", "(GMT-0300) America/Punta_Arenas"), + ("America/Rainy_River", "(GMT-0600) America/Rainy_River"), + ("America/Rankin_Inlet", "(GMT-0600) America/Rankin_Inlet"), + ("America/Recife", "(GMT-0300) America/Recife"), + ("America/Regina", "(GMT-0600) America/Regina"), + ("America/Resolute", 
"(GMT-0600) America/Resolute"), + ("America/Rio_Branco", "(GMT-0500) America/Rio_Branco"), + ("America/Santarem", "(GMT-0300) America/Santarem"), + ("America/Santiago", "(GMT-0300) America/Santiago"), + ("America/Santo_Domingo", "(GMT-0400) America/Santo_Domingo"), + ("America/Sao_Paulo", "(GMT-0300) America/Sao_Paulo"), + ("America/Scoresbysund", "(GMT-0100) America/Scoresbysund"), + ("America/Sitka", "(GMT-0900) America/Sitka"), + ("America/St_Barthelemy", "(GMT-0400) America/St_Barthelemy"), + ("America/St_Johns", "(GMT-0330) America/St_Johns"), + ("America/St_Kitts", "(GMT-0400) America/St_Kitts"), + ("America/St_Lucia", "(GMT-0400) America/St_Lucia"), + ("America/St_Thomas", "(GMT-0400) America/St_Thomas"), + ("America/St_Vincent", "(GMT-0400) America/St_Vincent"), + ("America/Swift_Current", "(GMT-0600) America/Swift_Current"), + ("America/Tegucigalpa", "(GMT-0600) America/Tegucigalpa"), + ("America/Thule", "(GMT-0400) America/Thule"), + ("America/Thunder_Bay", "(GMT-0500) America/Thunder_Bay"), + ("America/Tijuana", "(GMT-0800) America/Tijuana"), + ("America/Toronto", "(GMT-0500) America/Toronto"), + ("America/Tortola", "(GMT-0400) America/Tortola"), + ("America/Vancouver", "(GMT-0800) America/Vancouver"), + ("America/Whitehorse", "(GMT-0700) America/Whitehorse"), + ("America/Winnipeg", "(GMT-0600) America/Winnipeg"), + ("America/Yakutat", "(GMT-0900) America/Yakutat"), + ("America/Yellowknife", "(GMT-0700) America/Yellowknife"), + ("Antarctica/Casey", "(GMT+1100) Antarctica/Casey"), + ("Antarctica/Davis", "(GMT+0700) Antarctica/Davis"), + ("Antarctica/DumontDUrville", "(GMT+1000) Antarctica/DumontDUrville"), + ("Antarctica/Macquarie", "(GMT+1100) Antarctica/Macquarie"), + ("Antarctica/Mawson", "(GMT+0500) Antarctica/Mawson"), + ("Antarctica/McMurdo", "(GMT+1300) Antarctica/McMurdo"), + ("Antarctica/Palmer", "(GMT-0300) Antarctica/Palmer"), + ("Antarctica/Rothera", "(GMT-0300) Antarctica/Rothera"), + ("Antarctica/Syowa", "(GMT+0300) Antarctica/Syowa"), + ("Antarctica/Troll", "(GMT+0000) Antarctica/Troll"), + ("Antarctica/Vostok", "(GMT+0600) Antarctica/Vostok"), + ("Arctic/Longyearbyen", "(GMT+0100) Arctic/Longyearbyen"), + ("Asia/Aden", "(GMT+0300) Asia/Aden"), + ("Asia/Almaty", "(GMT+0600) Asia/Almaty"), + ("Asia/Amman", "(GMT+0200) Asia/Amman"), + ("Asia/Anadyr", "(GMT+1200) Asia/Anadyr"), + ("Asia/Aqtau", "(GMT+0500) Asia/Aqtau"), + ("Asia/Aqtobe", "(GMT+0500) Asia/Aqtobe"), + ("Asia/Ashgabat", "(GMT+0500) Asia/Ashgabat"), + ("Asia/Atyrau", "(GMT+0500) Asia/Atyrau"), + ("Asia/Baghdad", "(GMT+0300) Asia/Baghdad"), + ("Asia/Bahrain", "(GMT+0300) Asia/Bahrain"), + ("Asia/Baku", "(GMT+0400) Asia/Baku"), + ("Asia/Bangkok", "(GMT+0700) Asia/Bangkok"), + ("Asia/Barnaul", "(GMT+0700) Asia/Barnaul"), + ("Asia/Beirut", "(GMT+0200) Asia/Beirut"), + ("Asia/Bishkek", "(GMT+0600) Asia/Bishkek"), + ("Asia/Brunei", "(GMT+0800) Asia/Brunei"), + ("Asia/Chita", "(GMT+0900) Asia/Chita"), + ("Asia/Choibalsan", "(GMT+0800) Asia/Choibalsan"), + ("Asia/Colombo", "(GMT+0530) Asia/Colombo"), + ("Asia/Damascus", "(GMT+0200) Asia/Damascus"), + ("Asia/Dhaka", "(GMT+0600) Asia/Dhaka"), + ("Asia/Dili", "(GMT+0900) Asia/Dili"), + ("Asia/Dubai", "(GMT+0400) Asia/Dubai"), + ("Asia/Dushanbe", "(GMT+0500) Asia/Dushanbe"), + ("Asia/Famagusta", "(GMT+0200) Asia/Famagusta"), + ("Asia/Gaza", "(GMT+0200) Asia/Gaza"), + ("Asia/Hebron", "(GMT+0200) Asia/Hebron"), + ("Asia/Ho_Chi_Minh", "(GMT+0700) Asia/Ho_Chi_Minh"), + ("Asia/Hong_Kong", "(GMT+0800) Asia/Hong_Kong"), + ("Asia/Hovd", "(GMT+0700) Asia/Hovd"), + 
("Asia/Irkutsk", "(GMT+0800) Asia/Irkutsk"), + ("Asia/Jakarta", "(GMT+0700) Asia/Jakarta"), + ("Asia/Jayapura", "(GMT+0900) Asia/Jayapura"), + ("Asia/Jerusalem", "(GMT+0200) Asia/Jerusalem"), + ("Asia/Kabul", "(GMT+0430) Asia/Kabul"), + ("Asia/Kamchatka", "(GMT+1200) Asia/Kamchatka"), + ("Asia/Karachi", "(GMT+0500) Asia/Karachi"), + ("Asia/Kathmandu", "(GMT+0545) Asia/Kathmandu"), + ("Asia/Khandyga", "(GMT+0900) Asia/Khandyga"), + ("Asia/Kolkata", "(GMT+0530) Asia/Kolkata"), + ("Asia/Krasnoyarsk", "(GMT+0700) Asia/Krasnoyarsk"), + ("Asia/Kuala_Lumpur", "(GMT+0800) Asia/Kuala_Lumpur"), + ("Asia/Kuching", "(GMT+0800) Asia/Kuching"), + ("Asia/Kuwait", "(GMT+0300) Asia/Kuwait"), + ("Asia/Macau", "(GMT+0800) Asia/Macau"), + ("Asia/Magadan", "(GMT+1100) Asia/Magadan"), + ("Asia/Makassar", "(GMT+0800) Asia/Makassar"), + ("Asia/Manila", "(GMT+0800) Asia/Manila"), + ("Asia/Muscat", "(GMT+0400) Asia/Muscat"), + ("Asia/Nicosia", "(GMT+0200) Asia/Nicosia"), + ("Asia/Novokuznetsk", "(GMT+0700) Asia/Novokuznetsk"), + ("Asia/Novosibirsk", "(GMT+0700) Asia/Novosibirsk"), + ("Asia/Omsk", "(GMT+0600) Asia/Omsk"), + ("Asia/Oral", "(GMT+0500) Asia/Oral"), + ("Asia/Phnom_Penh", "(GMT+0700) Asia/Phnom_Penh"), + ("Asia/Pontianak", "(GMT+0700) Asia/Pontianak"), + ("Asia/Pyongyang", "(GMT+0900) Asia/Pyongyang"), + ("Asia/Qatar", "(GMT+0300) Asia/Qatar"), + ("Asia/Qostanay", "(GMT+0600) Asia/Qostanay"), + ("Asia/Qyzylorda", "(GMT+0500) Asia/Qyzylorda"), + ("Asia/Riyadh", "(GMT+0300) Asia/Riyadh"), + ("Asia/Sakhalin", "(GMT+1100) Asia/Sakhalin"), + ("Asia/Samarkand", "(GMT+0500) Asia/Samarkand"), + ("Asia/Seoul", "(GMT+0900) Asia/Seoul"), + ("Asia/Shanghai", "(GMT+0800) Asia/Shanghai"), + ("Asia/Singapore", "(GMT+0800) Asia/Singapore"), + ("Asia/Srednekolymsk", "(GMT+1100) Asia/Srednekolymsk"), + ("Asia/Taipei", "(GMT+0800) Asia/Taipei"), + ("Asia/Tashkent", "(GMT+0500) Asia/Tashkent"), + ("Asia/Tbilisi", "(GMT+0400) Asia/Tbilisi"), + ("Asia/Tehran", "(GMT+0330) Asia/Tehran"), + ("Asia/Thimphu", "(GMT+0600) Asia/Thimphu"), + ("Asia/Tokyo", "(GMT+0900) Asia/Tokyo"), + ("Asia/Tomsk", "(GMT+0700) Asia/Tomsk"), + ("Asia/Ulaanbaatar", "(GMT+0800) Asia/Ulaanbaatar"), + ("Asia/Urumqi", "(GMT+0600) Asia/Urumqi"), + ("Asia/Ust-Nera", "(GMT+1000) Asia/Ust-Nera"), + ("Asia/Vientiane", "(GMT+0700) Asia/Vientiane"), + ("Asia/Vladivostok", "(GMT+1000) Asia/Vladivostok"), + ("Asia/Yakutsk", "(GMT+0900) Asia/Yakutsk"), + ("Asia/Yangon", "(GMT+0630) Asia/Yangon"), + ("Asia/Yekaterinburg", "(GMT+0500) Asia/Yekaterinburg"), + ("Asia/Yerevan", "(GMT+0400) Asia/Yerevan"), + ("Atlantic/Azores", "(GMT-0100) Atlantic/Azores"), + ("Atlantic/Bermuda", "(GMT-0400) Atlantic/Bermuda"), + ("Atlantic/Canary", "(GMT+0000) Atlantic/Canary"), + ("Atlantic/Cape_Verde", "(GMT-0100) Atlantic/Cape_Verde"), + ("Atlantic/Faroe", "(GMT+0000) Atlantic/Faroe"), + ("Atlantic/Madeira", "(GMT+0000) Atlantic/Madeira"), + ("Atlantic/Reykjavik", "(GMT+0000) Atlantic/Reykjavik"), + ("Atlantic/South_Georgia", "(GMT-0200) Atlantic/South_Georgia"), + ("Atlantic/St_Helena", "(GMT+0000) Atlantic/St_Helena"), + ("Atlantic/Stanley", "(GMT-0300) Atlantic/Stanley"), + ("Australia/Adelaide", "(GMT+1030) Australia/Adelaide"), + ("Australia/Brisbane", "(GMT+1000) Australia/Brisbane"), + ("Australia/Broken_Hill", "(GMT+1030) Australia/Broken_Hill"), + ("Australia/Currie", "(GMT+1100) Australia/Currie"), + ("Australia/Darwin", "(GMT+0930) Australia/Darwin"), + ("Australia/Eucla", "(GMT+0845) Australia/Eucla"), + ("Australia/Hobart", "(GMT+1100) Australia/Hobart"), + 
("Australia/Lindeman", "(GMT+1000) Australia/Lindeman"), + ("Australia/Lord_Howe", "(GMT+1100) Australia/Lord_Howe"), + ("Australia/Melbourne", "(GMT+1100) Australia/Melbourne"), + ("Australia/Perth", "(GMT+0800) Australia/Perth"), + ("Australia/Sydney", "(GMT+1100) Australia/Sydney"), + ("Canada/Atlantic", "(GMT-0400) Canada/Atlantic"), + ("Canada/Central", "(GMT-0600) Canada/Central"), + ("Canada/Eastern", "(GMT-0500) Canada/Eastern"), + ("Canada/Mountain", "(GMT-0700) Canada/Mountain"), + ("Canada/Newfoundland", "(GMT-0330) Canada/Newfoundland"), + ("Canada/Pacific", "(GMT-0800) Canada/Pacific"), + ("Europe/Amsterdam", "(GMT+0100) Europe/Amsterdam"), + ("Europe/Andorra", "(GMT+0100) Europe/Andorra"), + ("Europe/Astrakhan", "(GMT+0400) Europe/Astrakhan"), + ("Europe/Athens", "(GMT+0200) Europe/Athens"), + ("Europe/Belgrade", "(GMT+0100) Europe/Belgrade"), + ("Europe/Berlin", "(GMT+0100) Europe/Berlin"), + ("Europe/Bratislava", "(GMT+0100) Europe/Bratislava"), + ("Europe/Brussels", "(GMT+0100) Europe/Brussels"), + ("Europe/Bucharest", "(GMT+0200) Europe/Bucharest"), + ("Europe/Budapest", "(GMT+0100) Europe/Budapest"), + ("Europe/Busingen", "(GMT+0100) Europe/Busingen"), + ("Europe/Chisinau", "(GMT+0200) Europe/Chisinau"), + ("Europe/Copenhagen", "(GMT+0100) Europe/Copenhagen"), + ("Europe/Dublin", "(GMT+0000) Europe/Dublin"), + ("Europe/Gibraltar", "(GMT+0100) Europe/Gibraltar"), + ("Europe/Guernsey", "(GMT+0000) Europe/Guernsey"), + ("Europe/Helsinki", "(GMT+0200) Europe/Helsinki"), + ("Europe/Isle_of_Man", "(GMT+0000) Europe/Isle_of_Man"), + ("Europe/Istanbul", "(GMT+0300) Europe/Istanbul"), + ("Europe/Jersey", "(GMT+0000) Europe/Jersey"), + ("Europe/Kaliningrad", "(GMT+0200) Europe/Kaliningrad"), + ("Europe/Kiev", "(GMT+0200) Europe/Kiev"), + ("Europe/Kirov", "(GMT+0300) Europe/Kirov"), + ("Europe/Lisbon", "(GMT+0000) Europe/Lisbon"), + ("Europe/Ljubljana", "(GMT+0100) Europe/Ljubljana"), + ("Europe/London", "(GMT+0000) Europe/London"), + ("Europe/Luxembourg", "(GMT+0100) Europe/Luxembourg"), + ("Europe/Madrid", "(GMT+0100) Europe/Madrid"), + ("Europe/Malta", "(GMT+0100) Europe/Malta"), + ("Europe/Mariehamn", "(GMT+0200) Europe/Mariehamn"), + ("Europe/Minsk", "(GMT+0300) Europe/Minsk"), + ("Europe/Monaco", "(GMT+0100) Europe/Monaco"), + ("Europe/Moscow", "(GMT+0300) Europe/Moscow"), + ("Europe/Oslo", "(GMT+0100) Europe/Oslo"), + ("Europe/Paris", "(GMT+0100) Europe/Paris"), + ("Europe/Podgorica", "(GMT+0100) Europe/Podgorica"), + ("Europe/Prague", "(GMT+0100) Europe/Prague"), + ("Europe/Riga", "(GMT+0200) Europe/Riga"), + ("Europe/Rome", "(GMT+0100) Europe/Rome"), + ("Europe/Samara", "(GMT+0400) Europe/Samara"), + ("Europe/San_Marino", "(GMT+0100) Europe/San_Marino"), + ("Europe/Sarajevo", "(GMT+0100) Europe/Sarajevo"), + ("Europe/Saratov", "(GMT+0400) Europe/Saratov"), + ("Europe/Simferopol", "(GMT+0300) Europe/Simferopol"), + ("Europe/Skopje", "(GMT+0100) Europe/Skopje"), + ("Europe/Sofia", "(GMT+0200) Europe/Sofia"), + ("Europe/Stockholm", "(GMT+0100) Europe/Stockholm"), + ("Europe/Tallinn", "(GMT+0200) Europe/Tallinn"), + ("Europe/Tirane", "(GMT+0100) Europe/Tirane"), + ("Europe/Ulyanovsk", "(GMT+0400) Europe/Ulyanovsk"), + ("Europe/Uzhgorod", "(GMT+0200) Europe/Uzhgorod"), + ("Europe/Vaduz", "(GMT+0100) Europe/Vaduz"), + ("Europe/Vatican", "(GMT+0100) Europe/Vatican"), + ("Europe/Vienna", "(GMT+0100) Europe/Vienna"), + ("Europe/Vilnius", "(GMT+0200) Europe/Vilnius"), + ("Europe/Volgograd", "(GMT+0400) Europe/Volgograd"), + ("Europe/Warsaw", "(GMT+0100) Europe/Warsaw"), + 
("Europe/Zagreb", "(GMT+0100) Europe/Zagreb"), + ("Europe/Zaporozhye", "(GMT+0200) Europe/Zaporozhye"), + ("Europe/Zurich", "(GMT+0100) Europe/Zurich"), + ("GMT", "(GMT+0000) GMT"), + ("Indian/Antananarivo", "(GMT+0300) Indian/Antananarivo"), + ("Indian/Chagos", "(GMT+0600) Indian/Chagos"), + ("Indian/Christmas", "(GMT+0700) Indian/Christmas"), + ("Indian/Cocos", "(GMT+0630) Indian/Cocos"), + ("Indian/Comoro", "(GMT+0300) Indian/Comoro"), + ("Indian/Kerguelen", "(GMT+0500) Indian/Kerguelen"), + ("Indian/Mahe", "(GMT+0400) Indian/Mahe"), + ("Indian/Maldives", "(GMT+0500) Indian/Maldives"), + ("Indian/Mauritius", "(GMT+0400) Indian/Mauritius"), + ("Indian/Mayotte", "(GMT+0300) Indian/Mayotte"), + ("Indian/Reunion", "(GMT+0400) Indian/Reunion"), + ("Pacific/Apia", "(GMT+1400) Pacific/Apia"), + ("Pacific/Auckland", "(GMT+1300) Pacific/Auckland"), + ("Pacific/Bougainville", "(GMT+1100) Pacific/Bougainville"), + ("Pacific/Chatham", "(GMT+1345) Pacific/Chatham"), + ("Pacific/Chuuk", "(GMT+1000) Pacific/Chuuk"), + ("Pacific/Easter", "(GMT-0500) Pacific/Easter"), + ("Pacific/Efate", "(GMT+1100) Pacific/Efate"), + ("Pacific/Enderbury", "(GMT+1300) Pacific/Enderbury"), + ("Pacific/Fakaofo", "(GMT+1300) Pacific/Fakaofo"), + ("Pacific/Fiji", "(GMT+1300) Pacific/Fiji"), + ("Pacific/Funafuti", "(GMT+1200) Pacific/Funafuti"), + ("Pacific/Galapagos", "(GMT-0600) Pacific/Galapagos"), + ("Pacific/Gambier", "(GMT-0900) Pacific/Gambier"), + ("Pacific/Guadalcanal", "(GMT+1100) Pacific/Guadalcanal"), + ("Pacific/Guam", "(GMT+1000) Pacific/Guam"), + ("Pacific/Honolulu", "(GMT-1000) Pacific/Honolulu"), + ("Pacific/Kiritimati", "(GMT+1400) Pacific/Kiritimati"), + ("Pacific/Kosrae", "(GMT+1100) Pacific/Kosrae"), + ("Pacific/Kwajalein", "(GMT+1200) Pacific/Kwajalein"), + ("Pacific/Majuro", "(GMT+1200) Pacific/Majuro"), + ("Pacific/Marquesas", "(GMT-0930) Pacific/Marquesas"), + ("Pacific/Midway", "(GMT-1100) Pacific/Midway"), + ("Pacific/Nauru", "(GMT+1200) Pacific/Nauru"), + ("Pacific/Niue", "(GMT-1100) Pacific/Niue"), + ("Pacific/Norfolk", "(GMT+1200) Pacific/Norfolk"), + ("Pacific/Noumea", "(GMT+1100) Pacific/Noumea"), + ("Pacific/Pago_Pago", "(GMT-1100) Pacific/Pago_Pago"), + ("Pacific/Palau", "(GMT+0900) Pacific/Palau"), + ("Pacific/Pitcairn", "(GMT-0800) Pacific/Pitcairn"), + ("Pacific/Pohnpei", "(GMT+1100) Pacific/Pohnpei"), + ("Pacific/Port_Moresby", "(GMT+1000) Pacific/Port_Moresby"), + ("Pacific/Rarotonga", "(GMT-1000) Pacific/Rarotonga"), + ("Pacific/Saipan", "(GMT+1000) Pacific/Saipan"), + ("Pacific/Tahiti", "(GMT-1000) Pacific/Tahiti"), + ("Pacific/Tarawa", "(GMT+1200) Pacific/Tarawa"), + ("Pacific/Tongatapu", "(GMT+1300) Pacific/Tongatapu"), + ("Pacific/Wake", "(GMT+1200) Pacific/Wake"), + ("Pacific/Wallis", "(GMT+1200) Pacific/Wallis"), + ("US/Alaska", "(GMT-0900) US/Alaska"), + ("US/Arizona", "(GMT-0700) US/Arizona"), + ("US/Central", "(GMT-0600) US/Central"), + ("US/Eastern", "(GMT-0500) US/Eastern"), + ("US/Hawaii", "(GMT-1000) US/Hawaii"), + ("US/Mountain", "(GMT-0700) US/Mountain"), + ("US/Pacific", "(GMT-0800) US/Pacific"), + ("UTC", "(GMT+0000) UTC"), + ], + default="America/New_York", + max_length=100, + ), ), ] diff --git a/apps/profile/migrations/0005_profile_is_archive.py b/apps/profile/migrations/0005_profile_is_archive.py index 0b87acc91..66f2e1cea 100644 --- a/apps/profile/migrations/0005_profile_is_archive.py +++ b/apps/profile/migrations/0005_profile_is_archive.py @@ -4,15 +4,14 @@ from django.db import migrations, models class Migration(migrations.Migration): - dependencies = [ - 
('profile', '0004_auto_20220110_2106'), + ("profile", "0004_auto_20220110_2106"), ] operations = [ migrations.AddField( - model_name='profile', - name='is_archive', + model_name="profile", + name="is_archive", field=models.BooleanField(blank=True, default=False, null=True), ), ] diff --git a/apps/profile/migrations/0006_profile_days_of_unread.py b/apps/profile/migrations/0006_profile_days_of_unread.py index d7740bbfd..240bcce63 100644 --- a/apps/profile/migrations/0006_profile_days_of_unread.py +++ b/apps/profile/migrations/0006_profile_days_of_unread.py @@ -4,15 +4,14 @@ from django.db import migrations, models class Migration(migrations.Migration): - dependencies = [ - ('profile', '0005_profile_is_archive'), + ("profile", "0005_profile_is_archive"), ] operations = [ migrations.AddField( - model_name='profile', - name='days_of_unread', + model_name="profile", + name="days_of_unread", field=models.IntegerField(default=30, blank=True, null=True), ), ] diff --git a/apps/profile/migrations/0007_auto_20220125_2108.py b/apps/profile/migrations/0007_auto_20220125_2108.py index 624d89d85..a456b15e3 100644 --- a/apps/profile/migrations/0007_auto_20220125_2108.py +++ b/apps/profile/migrations/0007_auto_20220125_2108.py @@ -5,20 +5,464 @@ import vendor.timezones.fields class Migration(migrations.Migration): - dependencies = [ - ('profile', '0006_profile_days_of_unread'), + ("profile", "0006_profile_days_of_unread"), ] operations = [ migrations.AddField( - model_name='profile', - name='premium_renewal', + model_name="profile", + name="premium_renewal", field=models.BooleanField(blank=True, default=False, null=True), ), migrations.AlterField( - model_name='profile', - name='timezone', - field=vendor.timezones.fields.TimeZoneField(choices=[('Africa/Abidjan', '(GMT+0000) Africa/Abidjan'), ('Africa/Accra', '(GMT+0000) Africa/Accra'), ('Africa/Addis_Ababa', '(GMT+0300) Africa/Addis_Ababa'), ('Africa/Algiers', '(GMT+0100) Africa/Algiers'), ('Africa/Asmara', '(GMT+0300) Africa/Asmara'), ('Africa/Bamako', '(GMT+0000) Africa/Bamako'), ('Africa/Bangui', '(GMT+0100) Africa/Bangui'), ('Africa/Banjul', '(GMT+0000) Africa/Banjul'), ('Africa/Bissau', '(GMT+0000) Africa/Bissau'), ('Africa/Blantyre', '(GMT+0200) Africa/Blantyre'), ('Africa/Brazzaville', '(GMT+0100) Africa/Brazzaville'), ('Africa/Bujumbura', '(GMT+0200) Africa/Bujumbura'), ('Africa/Cairo', '(GMT+0200) Africa/Cairo'), ('Africa/Casablanca', '(GMT+0100) Africa/Casablanca'), ('Africa/Ceuta', '(GMT+0100) Africa/Ceuta'), ('Africa/Conakry', '(GMT+0000) Africa/Conakry'), ('Africa/Dakar', '(GMT+0000) Africa/Dakar'), ('Africa/Dar_es_Salaam', '(GMT+0300) Africa/Dar_es_Salaam'), ('Africa/Djibouti', '(GMT+0300) Africa/Djibouti'), ('Africa/Douala', '(GMT+0100) Africa/Douala'), ('Africa/El_Aaiun', '(GMT+0100) Africa/El_Aaiun'), ('Africa/Freetown', '(GMT+0000) Africa/Freetown'), ('Africa/Gaborone', '(GMT+0200) Africa/Gaborone'), ('Africa/Harare', '(GMT+0200) Africa/Harare'), ('Africa/Johannesburg', '(GMT+0200) Africa/Johannesburg'), ('Africa/Juba', '(GMT+0300) Africa/Juba'), ('Africa/Kampala', '(GMT+0300) Africa/Kampala'), ('Africa/Khartoum', '(GMT+0200) Africa/Khartoum'), ('Africa/Kigali', '(GMT+0200) Africa/Kigali'), ('Africa/Kinshasa', '(GMT+0100) Africa/Kinshasa'), ('Africa/Lagos', '(GMT+0100) Africa/Lagos'), ('Africa/Libreville', '(GMT+0100) Africa/Libreville'), ('Africa/Lome', '(GMT+0000) Africa/Lome'), ('Africa/Luanda', '(GMT+0100) Africa/Luanda'), ('Africa/Lubumbashi', '(GMT+0200) Africa/Lubumbashi'), ('Africa/Lusaka', '(GMT+0200) Africa/Lusaka'), 
('Africa/Malabo', '(GMT+0100) Africa/Malabo'), ('Africa/Maputo', '(GMT+0200) Africa/Maputo'), ('Africa/Maseru', '(GMT+0200) Africa/Maseru'), ('Africa/Mbabane', '(GMT+0200) Africa/Mbabane'), ('Africa/Mogadishu', '(GMT+0300) Africa/Mogadishu'), ('Africa/Monrovia', '(GMT+0000) Africa/Monrovia'), ('Africa/Nairobi', '(GMT+0300) Africa/Nairobi'), ('Africa/Ndjamena', '(GMT+0100) Africa/Ndjamena'), ('Africa/Niamey', '(GMT+0100) Africa/Niamey'), ('Africa/Nouakchott', '(GMT+0000) Africa/Nouakchott'), ('Africa/Ouagadougou', '(GMT+0000) Africa/Ouagadougou'), ('Africa/Porto-Novo', '(GMT+0100) Africa/Porto-Novo'), ('Africa/Sao_Tome', '(GMT+0000) Africa/Sao_Tome'), ('Africa/Tripoli', '(GMT+0200) Africa/Tripoli'), ('Africa/Tunis', '(GMT+0100) Africa/Tunis'), ('Africa/Windhoek', '(GMT+0200) Africa/Windhoek'), ('America/Adak', '(GMT-1000) America/Adak'), ('America/Anchorage', '(GMT-0900) America/Anchorage'), ('America/Anguilla', '(GMT-0400) America/Anguilla'), ('America/Antigua', '(GMT-0400) America/Antigua'), ('America/Araguaina', '(GMT-0300) America/Araguaina'), ('America/Argentina/Buenos_Aires', '(GMT-0300) America/Argentina/Buenos_Aires'), ('America/Argentina/Catamarca', '(GMT-0300) America/Argentina/Catamarca'), ('America/Argentina/Cordoba', '(GMT-0300) America/Argentina/Cordoba'), ('America/Argentina/Jujuy', '(GMT-0300) America/Argentina/Jujuy'), ('America/Argentina/La_Rioja', '(GMT-0300) America/Argentina/La_Rioja'), ('America/Argentina/Mendoza', '(GMT-0300) America/Argentina/Mendoza'), ('America/Argentina/Rio_Gallegos', '(GMT-0300) America/Argentina/Rio_Gallegos'), ('America/Argentina/Salta', '(GMT-0300) America/Argentina/Salta'), ('America/Argentina/San_Juan', '(GMT-0300) America/Argentina/San_Juan'), ('America/Argentina/San_Luis', '(GMT-0300) America/Argentina/San_Luis'), ('America/Argentina/Tucuman', '(GMT-0300) America/Argentina/Tucuman'), ('America/Argentina/Ushuaia', '(GMT-0300) America/Argentina/Ushuaia'), ('America/Aruba', '(GMT-0400) America/Aruba'), ('America/Asuncion', '(GMT-0300) America/Asuncion'), ('America/Atikokan', '(GMT-0500) America/Atikokan'), ('America/Bahia', '(GMT-0300) America/Bahia'), ('America/Bahia_Banderas', '(GMT-0600) America/Bahia_Banderas'), ('America/Barbados', '(GMT-0400) America/Barbados'), ('America/Belem', '(GMT-0300) America/Belem'), ('America/Belize', '(GMT-0600) America/Belize'), ('America/Blanc-Sablon', '(GMT-0400) America/Blanc-Sablon'), ('America/Boa_Vista', '(GMT-0400) America/Boa_Vista'), ('America/Bogota', '(GMT-0500) America/Bogota'), ('America/Boise', '(GMT-0700) America/Boise'), ('America/Cambridge_Bay', '(GMT-0700) America/Cambridge_Bay'), ('America/Campo_Grande', '(GMT-0400) America/Campo_Grande'), ('America/Cancun', '(GMT-0500) America/Cancun'), ('America/Caracas', '(GMT-0400) America/Caracas'), ('America/Cayenne', '(GMT-0300) America/Cayenne'), ('America/Cayman', '(GMT-0500) America/Cayman'), ('America/Chicago', '(GMT-0600) America/Chicago'), ('America/Chihuahua', '(GMT-0700) America/Chihuahua'), ('America/Costa_Rica', '(GMT-0600) America/Costa_Rica'), ('America/Creston', '(GMT-0700) America/Creston'), ('America/Cuiaba', '(GMT-0400) America/Cuiaba'), ('America/Curacao', '(GMT-0400) America/Curacao'), ('America/Danmarkshavn', '(GMT+0000) America/Danmarkshavn'), ('America/Dawson', '(GMT-0700) America/Dawson'), ('America/Dawson_Creek', '(GMT-0700) America/Dawson_Creek'), ('America/Denver', '(GMT-0700) America/Denver'), ('America/Detroit', '(GMT-0500) America/Detroit'), ('America/Dominica', '(GMT-0400) America/Dominica'), ('America/Edmonton', 
'(GMT-0700) America/Edmonton'), ('America/Eirunepe', '(GMT-0500) America/Eirunepe'), ('America/El_Salvador', '(GMT-0600) America/El_Salvador'), ('America/Fort_Nelson', '(GMT-0700) America/Fort_Nelson'), ('America/Fortaleza', '(GMT-0300) America/Fortaleza'), ('America/Glace_Bay', '(GMT-0400) America/Glace_Bay'), ('America/Goose_Bay', '(GMT-0400) America/Goose_Bay'), ('America/Grand_Turk', '(GMT-0500) America/Grand_Turk'), ('America/Grenada', '(GMT-0400) America/Grenada'), ('America/Guadeloupe', '(GMT-0400) America/Guadeloupe'), ('America/Guatemala', '(GMT-0600) America/Guatemala'), ('America/Guayaquil', '(GMT-0500) America/Guayaquil'), ('America/Guyana', '(GMT-0400) America/Guyana'), ('America/Halifax', '(GMT-0400) America/Halifax'), ('America/Havana', '(GMT-0500) America/Havana'), ('America/Hermosillo', '(GMT-0700) America/Hermosillo'), ('America/Indiana/Indianapolis', '(GMT-0500) America/Indiana/Indianapolis'), ('America/Indiana/Knox', '(GMT-0600) America/Indiana/Knox'), ('America/Indiana/Marengo', '(GMT-0500) America/Indiana/Marengo'), ('America/Indiana/Petersburg', '(GMT-0500) America/Indiana/Petersburg'), ('America/Indiana/Tell_City', '(GMT-0600) America/Indiana/Tell_City'), ('America/Indiana/Vevay', '(GMT-0500) America/Indiana/Vevay'), ('America/Indiana/Vincennes', '(GMT-0500) America/Indiana/Vincennes'), ('America/Indiana/Winamac', '(GMT-0500) America/Indiana/Winamac'), ('America/Inuvik', '(GMT-0700) America/Inuvik'), ('America/Iqaluit', '(GMT-0500) America/Iqaluit'), ('America/Jamaica', '(GMT-0500) America/Jamaica'), ('America/Juneau', '(GMT-0900) America/Juneau'), ('America/Kentucky/Louisville', '(GMT-0500) America/Kentucky/Louisville'), ('America/Kentucky/Monticello', '(GMT-0500) America/Kentucky/Monticello'), ('America/Kralendijk', '(GMT-0400) America/Kralendijk'), ('America/La_Paz', '(GMT-0400) America/La_Paz'), ('America/Lima', '(GMT-0500) America/Lima'), ('America/Los_Angeles', '(GMT-0800) America/Los_Angeles'), ('America/Lower_Princes', '(GMT-0400) America/Lower_Princes'), ('America/Maceio', '(GMT-0300) America/Maceio'), ('America/Managua', '(GMT-0600) America/Managua'), ('America/Manaus', '(GMT-0400) America/Manaus'), ('America/Marigot', '(GMT-0400) America/Marigot'), ('America/Martinique', '(GMT-0400) America/Martinique'), ('America/Matamoros', '(GMT-0600) America/Matamoros'), ('America/Mazatlan', '(GMT-0700) America/Mazatlan'), ('America/Menominee', '(GMT-0600) America/Menominee'), ('America/Merida', '(GMT-0600) America/Merida'), ('America/Metlakatla', '(GMT-0900) America/Metlakatla'), ('America/Mexico_City', '(GMT-0600) America/Mexico_City'), ('America/Miquelon', '(GMT-0300) America/Miquelon'), ('America/Moncton', '(GMT-0400) America/Moncton'), ('America/Monterrey', '(GMT-0600) America/Monterrey'), ('America/Montevideo', '(GMT-0300) America/Montevideo'), ('America/Montserrat', '(GMT-0400) America/Montserrat'), ('America/Nassau', '(GMT-0500) America/Nassau'), ('America/New_York', '(GMT-0500) America/New_York'), ('America/Nipigon', '(GMT-0500) America/Nipigon'), ('America/Nome', '(GMT-0900) America/Nome'), ('America/Noronha', '(GMT-0200) America/Noronha'), ('America/North_Dakota/Beulah', '(GMT-0600) America/North_Dakota/Beulah'), ('America/North_Dakota/Center', '(GMT-0600) America/North_Dakota/Center'), ('America/North_Dakota/New_Salem', '(GMT-0600) America/North_Dakota/New_Salem'), ('America/Nuuk', '(GMT-0300) America/Nuuk'), ('America/Ojinaga', '(GMT-0700) America/Ojinaga'), ('America/Panama', '(GMT-0500) America/Panama'), ('America/Pangnirtung', '(GMT-0500) 
America/Pangnirtung'), ('America/Paramaribo', '(GMT-0300) America/Paramaribo'), ('America/Phoenix', '(GMT-0700) America/Phoenix'), ('America/Port-au-Prince', '(GMT-0500) America/Port-au-Prince'), ('America/Port_of_Spain', '(GMT-0400) America/Port_of_Spain'), ('America/Porto_Velho', '(GMT-0400) America/Porto_Velho'), ('America/Puerto_Rico', '(GMT-0400) America/Puerto_Rico'), ('America/Punta_Arenas', '(GMT-0300) America/Punta_Arenas'), ('America/Rainy_River', '(GMT-0600) America/Rainy_River'), ('America/Rankin_Inlet', '(GMT-0600) America/Rankin_Inlet'), ('America/Recife', '(GMT-0300) America/Recife'), ('America/Regina', '(GMT-0600) America/Regina'), ('America/Resolute', '(GMT-0600) America/Resolute'), ('America/Rio_Branco', '(GMT-0500) America/Rio_Branco'), ('America/Santarem', '(GMT-0300) America/Santarem'), ('America/Santiago', '(GMT-0300) America/Santiago'), ('America/Santo_Domingo', '(GMT-0400) America/Santo_Domingo'), ('America/Sao_Paulo', '(GMT-0300) America/Sao_Paulo'), ('America/Scoresbysund', '(GMT-0100) America/Scoresbysund'), ('America/Sitka', '(GMT-0900) America/Sitka'), ('America/St_Barthelemy', '(GMT-0400) America/St_Barthelemy'), ('America/St_Johns', '(GMT-0330) America/St_Johns'), ('America/St_Kitts', '(GMT-0400) America/St_Kitts'), ('America/St_Lucia', '(GMT-0400) America/St_Lucia'), ('America/St_Thomas', '(GMT-0400) America/St_Thomas'), ('America/St_Vincent', '(GMT-0400) America/St_Vincent'), ('America/Swift_Current', '(GMT-0600) America/Swift_Current'), ('America/Tegucigalpa', '(GMT-0600) America/Tegucigalpa'), ('America/Thule', '(GMT-0400) America/Thule'), ('America/Thunder_Bay', '(GMT-0500) America/Thunder_Bay'), ('America/Tijuana', '(GMT-0800) America/Tijuana'), ('America/Toronto', '(GMT-0500) America/Toronto'), ('America/Tortola', '(GMT-0400) America/Tortola'), ('America/Vancouver', '(GMT-0800) America/Vancouver'), ('America/Whitehorse', '(GMT-0700) America/Whitehorse'), ('America/Winnipeg', '(GMT-0600) America/Winnipeg'), ('America/Yakutat', '(GMT-0900) America/Yakutat'), ('America/Yellowknife', '(GMT-0700) America/Yellowknife'), ('Antarctica/Casey', '(GMT+1100) Antarctica/Casey'), ('Antarctica/Davis', '(GMT+0700) Antarctica/Davis'), ('Antarctica/DumontDUrville', '(GMT+1000) Antarctica/DumontDUrville'), ('Antarctica/Macquarie', '(GMT+1100) Antarctica/Macquarie'), ('Antarctica/Mawson', '(GMT+0500) Antarctica/Mawson'), ('Antarctica/McMurdo', '(GMT+1300) Antarctica/McMurdo'), ('Antarctica/Palmer', '(GMT-0300) Antarctica/Palmer'), ('Antarctica/Rothera', '(GMT-0300) Antarctica/Rothera'), ('Antarctica/Syowa', '(GMT+0300) Antarctica/Syowa'), ('Antarctica/Troll', '(GMT+0000) Antarctica/Troll'), ('Antarctica/Vostok', '(GMT+0600) Antarctica/Vostok'), ('Arctic/Longyearbyen', '(GMT+0100) Arctic/Longyearbyen'), ('Asia/Aden', '(GMT+0300) Asia/Aden'), ('Asia/Almaty', '(GMT+0600) Asia/Almaty'), ('Asia/Amman', '(GMT+0200) Asia/Amman'), ('Asia/Anadyr', '(GMT+1200) Asia/Anadyr'), ('Asia/Aqtau', '(GMT+0500) Asia/Aqtau'), ('Asia/Aqtobe', '(GMT+0500) Asia/Aqtobe'), ('Asia/Ashgabat', '(GMT+0500) Asia/Ashgabat'), ('Asia/Atyrau', '(GMT+0500) Asia/Atyrau'), ('Asia/Baghdad', '(GMT+0300) Asia/Baghdad'), ('Asia/Bahrain', '(GMT+0300) Asia/Bahrain'), ('Asia/Baku', '(GMT+0400) Asia/Baku'), ('Asia/Bangkok', '(GMT+0700) Asia/Bangkok'), ('Asia/Barnaul', '(GMT+0700) Asia/Barnaul'), ('Asia/Beirut', '(GMT+0200) Asia/Beirut'), ('Asia/Bishkek', '(GMT+0600) Asia/Bishkek'), ('Asia/Brunei', '(GMT+0800) Asia/Brunei'), ('Asia/Chita', '(GMT+0900) Asia/Chita'), ('Asia/Choibalsan', '(GMT+0800) Asia/Choibalsan'), 
('Asia/Colombo', '(GMT+0530) Asia/Colombo'), ('Asia/Damascus', '(GMT+0200) Asia/Damascus'), ('Asia/Dhaka', '(GMT+0600) Asia/Dhaka'), ('Asia/Dili', '(GMT+0900) Asia/Dili'), ('Asia/Dubai', '(GMT+0400) Asia/Dubai'), ('Asia/Dushanbe', '(GMT+0500) Asia/Dushanbe'), ('Asia/Famagusta', '(GMT+0200) Asia/Famagusta'), ('Asia/Gaza', '(GMT+0200) Asia/Gaza'), ('Asia/Hebron', '(GMT+0200) Asia/Hebron'), ('Asia/Ho_Chi_Minh', '(GMT+0700) Asia/Ho_Chi_Minh'), ('Asia/Hong_Kong', '(GMT+0800) Asia/Hong_Kong'), ('Asia/Hovd', '(GMT+0700) Asia/Hovd'), ('Asia/Irkutsk', '(GMT+0800) Asia/Irkutsk'), ('Asia/Jakarta', '(GMT+0700) Asia/Jakarta'), ('Asia/Jayapura', '(GMT+0900) Asia/Jayapura'), ('Asia/Jerusalem', '(GMT+0200) Asia/Jerusalem'), ('Asia/Kabul', '(GMT+0430) Asia/Kabul'), ('Asia/Kamchatka', '(GMT+1200) Asia/Kamchatka'), ('Asia/Karachi', '(GMT+0500) Asia/Karachi'), ('Asia/Kathmandu', '(GMT+0545) Asia/Kathmandu'), ('Asia/Khandyga', '(GMT+0900) Asia/Khandyga'), ('Asia/Kolkata', '(GMT+0530) Asia/Kolkata'), ('Asia/Krasnoyarsk', '(GMT+0700) Asia/Krasnoyarsk'), ('Asia/Kuala_Lumpur', '(GMT+0800) Asia/Kuala_Lumpur'), ('Asia/Kuching', '(GMT+0800) Asia/Kuching'), ('Asia/Kuwait', '(GMT+0300) Asia/Kuwait'), ('Asia/Macau', '(GMT+0800) Asia/Macau'), ('Asia/Magadan', '(GMT+1100) Asia/Magadan'), ('Asia/Makassar', '(GMT+0800) Asia/Makassar'), ('Asia/Manila', '(GMT+0800) Asia/Manila'), ('Asia/Muscat', '(GMT+0400) Asia/Muscat'), ('Asia/Nicosia', '(GMT+0200) Asia/Nicosia'), ('Asia/Novokuznetsk', '(GMT+0700) Asia/Novokuznetsk'), ('Asia/Novosibirsk', '(GMT+0700) Asia/Novosibirsk'), ('Asia/Omsk', '(GMT+0600) Asia/Omsk'), ('Asia/Oral', '(GMT+0500) Asia/Oral'), ('Asia/Phnom_Penh', '(GMT+0700) Asia/Phnom_Penh'), ('Asia/Pontianak', '(GMT+0700) Asia/Pontianak'), ('Asia/Pyongyang', '(GMT+0900) Asia/Pyongyang'), ('Asia/Qatar', '(GMT+0300) Asia/Qatar'), ('Asia/Qostanay', '(GMT+0600) Asia/Qostanay'), ('Asia/Qyzylorda', '(GMT+0500) Asia/Qyzylorda'), ('Asia/Riyadh', '(GMT+0300) Asia/Riyadh'), ('Asia/Sakhalin', '(GMT+1100) Asia/Sakhalin'), ('Asia/Samarkand', '(GMT+0500) Asia/Samarkand'), ('Asia/Seoul', '(GMT+0900) Asia/Seoul'), ('Asia/Shanghai', '(GMT+0800) Asia/Shanghai'), ('Asia/Singapore', '(GMT+0800) Asia/Singapore'), ('Asia/Srednekolymsk', '(GMT+1100) Asia/Srednekolymsk'), ('Asia/Taipei', '(GMT+0800) Asia/Taipei'), ('Asia/Tashkent', '(GMT+0500) Asia/Tashkent'), ('Asia/Tbilisi', '(GMT+0400) Asia/Tbilisi'), ('Asia/Tehran', '(GMT+0330) Asia/Tehran'), ('Asia/Thimphu', '(GMT+0600) Asia/Thimphu'), ('Asia/Tokyo', '(GMT+0900) Asia/Tokyo'), ('Asia/Tomsk', '(GMT+0700) Asia/Tomsk'), ('Asia/Ulaanbaatar', '(GMT+0800) Asia/Ulaanbaatar'), ('Asia/Urumqi', '(GMT+0600) Asia/Urumqi'), ('Asia/Ust-Nera', '(GMT+1000) Asia/Ust-Nera'), ('Asia/Vientiane', '(GMT+0700) Asia/Vientiane'), ('Asia/Vladivostok', '(GMT+1000) Asia/Vladivostok'), ('Asia/Yakutsk', '(GMT+0900) Asia/Yakutsk'), ('Asia/Yangon', '(GMT+0630) Asia/Yangon'), ('Asia/Yekaterinburg', '(GMT+0500) Asia/Yekaterinburg'), ('Asia/Yerevan', '(GMT+0400) Asia/Yerevan'), ('Atlantic/Azores', '(GMT-0100) Atlantic/Azores'), ('Atlantic/Bermuda', '(GMT-0400) Atlantic/Bermuda'), ('Atlantic/Canary', '(GMT+0000) Atlantic/Canary'), ('Atlantic/Cape_Verde', '(GMT-0100) Atlantic/Cape_Verde'), ('Atlantic/Faroe', '(GMT+0000) Atlantic/Faroe'), ('Atlantic/Madeira', '(GMT+0000) Atlantic/Madeira'), ('Atlantic/Reykjavik', '(GMT+0000) Atlantic/Reykjavik'), ('Atlantic/South_Georgia', '(GMT-0200) Atlantic/South_Georgia'), ('Atlantic/St_Helena', '(GMT+0000) Atlantic/St_Helena'), ('Atlantic/Stanley', '(GMT-0300) Atlantic/Stanley'), 
('Australia/Adelaide', '(GMT+1030) Australia/Adelaide'), ('Australia/Brisbane', '(GMT+1000) Australia/Brisbane'), ('Australia/Broken_Hill', '(GMT+1030) Australia/Broken_Hill'), ('Australia/Currie', '(GMT+1100) Australia/Currie'), ('Australia/Darwin', '(GMT+0930) Australia/Darwin'), ('Australia/Eucla', '(GMT+0845) Australia/Eucla'), ('Australia/Hobart', '(GMT+1100) Australia/Hobart'), ('Australia/Lindeman', '(GMT+1000) Australia/Lindeman'), ('Australia/Lord_Howe', '(GMT+1100) Australia/Lord_Howe'), ('Australia/Melbourne', '(GMT+1100) Australia/Melbourne'), ('Australia/Perth', '(GMT+0800) Australia/Perth'), ('Australia/Sydney', '(GMT+1100) Australia/Sydney'), ('Canada/Atlantic', '(GMT-0400) Canada/Atlantic'), ('Canada/Central', '(GMT-0600) Canada/Central'), ('Canada/Eastern', '(GMT-0500) Canada/Eastern'), ('Canada/Mountain', '(GMT-0700) Canada/Mountain'), ('Canada/Newfoundland', '(GMT-0330) Canada/Newfoundland'), ('Canada/Pacific', '(GMT-0800) Canada/Pacific'), ('Europe/Amsterdam', '(GMT+0100) Europe/Amsterdam'), ('Europe/Andorra', '(GMT+0100) Europe/Andorra'), ('Europe/Astrakhan', '(GMT+0400) Europe/Astrakhan'), ('Europe/Athens', '(GMT+0200) Europe/Athens'), ('Europe/Belgrade', '(GMT+0100) Europe/Belgrade'), ('Europe/Berlin', '(GMT+0100) Europe/Berlin'), ('Europe/Bratislava', '(GMT+0100) Europe/Bratislava'), ('Europe/Brussels', '(GMT+0100) Europe/Brussels'), ('Europe/Bucharest', '(GMT+0200) Europe/Bucharest'), ('Europe/Budapest', '(GMT+0100) Europe/Budapest'), ('Europe/Busingen', '(GMT+0100) Europe/Busingen'), ('Europe/Chisinau', '(GMT+0200) Europe/Chisinau'), ('Europe/Copenhagen', '(GMT+0100) Europe/Copenhagen'), ('Europe/Dublin', '(GMT+0000) Europe/Dublin'), ('Europe/Gibraltar', '(GMT+0100) Europe/Gibraltar'), ('Europe/Guernsey', '(GMT+0000) Europe/Guernsey'), ('Europe/Helsinki', '(GMT+0200) Europe/Helsinki'), ('Europe/Isle_of_Man', '(GMT+0000) Europe/Isle_of_Man'), ('Europe/Istanbul', '(GMT+0300) Europe/Istanbul'), ('Europe/Jersey', '(GMT+0000) Europe/Jersey'), ('Europe/Kaliningrad', '(GMT+0200) Europe/Kaliningrad'), ('Europe/Kiev', '(GMT+0200) Europe/Kiev'), ('Europe/Kirov', '(GMT+0300) Europe/Kirov'), ('Europe/Lisbon', '(GMT+0000) Europe/Lisbon'), ('Europe/Ljubljana', '(GMT+0100) Europe/Ljubljana'), ('Europe/London', '(GMT+0000) Europe/London'), ('Europe/Luxembourg', '(GMT+0100) Europe/Luxembourg'), ('Europe/Madrid', '(GMT+0100) Europe/Madrid'), ('Europe/Malta', '(GMT+0100) Europe/Malta'), ('Europe/Mariehamn', '(GMT+0200) Europe/Mariehamn'), ('Europe/Minsk', '(GMT+0300) Europe/Minsk'), ('Europe/Monaco', '(GMT+0100) Europe/Monaco'), ('Europe/Moscow', '(GMT+0300) Europe/Moscow'), ('Europe/Oslo', '(GMT+0100) Europe/Oslo'), ('Europe/Paris', '(GMT+0100) Europe/Paris'), ('Europe/Podgorica', '(GMT+0100) Europe/Podgorica'), ('Europe/Prague', '(GMT+0100) Europe/Prague'), ('Europe/Riga', '(GMT+0200) Europe/Riga'), ('Europe/Rome', '(GMT+0100) Europe/Rome'), ('Europe/Samara', '(GMT+0400) Europe/Samara'), ('Europe/San_Marino', '(GMT+0100) Europe/San_Marino'), ('Europe/Sarajevo', '(GMT+0100) Europe/Sarajevo'), ('Europe/Saratov', '(GMT+0400) Europe/Saratov'), ('Europe/Simferopol', '(GMT+0300) Europe/Simferopol'), ('Europe/Skopje', '(GMT+0100) Europe/Skopje'), ('Europe/Sofia', '(GMT+0200) Europe/Sofia'), ('Europe/Stockholm', '(GMT+0100) Europe/Stockholm'), ('Europe/Tallinn', '(GMT+0200) Europe/Tallinn'), ('Europe/Tirane', '(GMT+0100) Europe/Tirane'), ('Europe/Ulyanovsk', '(GMT+0400) Europe/Ulyanovsk'), ('Europe/Uzhgorod', '(GMT+0200) Europe/Uzhgorod'), ('Europe/Vaduz', '(GMT+0100) Europe/Vaduz'), 
('Europe/Vatican', '(GMT+0100) Europe/Vatican'), ('Europe/Vienna', '(GMT+0100) Europe/Vienna'), ('Europe/Vilnius', '(GMT+0200) Europe/Vilnius'), ('Europe/Volgograd', '(GMT+0400) Europe/Volgograd'), ('Europe/Warsaw', '(GMT+0100) Europe/Warsaw'), ('Europe/Zagreb', '(GMT+0100) Europe/Zagreb'), ('Europe/Zaporozhye', '(GMT+0200) Europe/Zaporozhye'), ('Europe/Zurich', '(GMT+0100) Europe/Zurich'), ('GMT', '(GMT+0000) GMT'), ('Indian/Antananarivo', '(GMT+0300) Indian/Antananarivo'), ('Indian/Chagos', '(GMT+0600) Indian/Chagos'), ('Indian/Christmas', '(GMT+0700) Indian/Christmas'), ('Indian/Cocos', '(GMT+0630) Indian/Cocos'), ('Indian/Comoro', '(GMT+0300) Indian/Comoro'), ('Indian/Kerguelen', '(GMT+0500) Indian/Kerguelen'), ('Indian/Mahe', '(GMT+0400) Indian/Mahe'), ('Indian/Maldives', '(GMT+0500) Indian/Maldives'), ('Indian/Mauritius', '(GMT+0400) Indian/Mauritius'), ('Indian/Mayotte', '(GMT+0300) Indian/Mayotte'), ('Indian/Reunion', '(GMT+0400) Indian/Reunion'), ('Pacific/Apia', '(GMT+1400) Pacific/Apia'), ('Pacific/Auckland', '(GMT+1300) Pacific/Auckland'), ('Pacific/Bougainville', '(GMT+1100) Pacific/Bougainville'), ('Pacific/Chatham', '(GMT+1345) Pacific/Chatham'), ('Pacific/Chuuk', '(GMT+1000) Pacific/Chuuk'), ('Pacific/Easter', '(GMT-0500) Pacific/Easter'), ('Pacific/Efate', '(GMT+1100) Pacific/Efate'), ('Pacific/Enderbury', '(GMT+1300) Pacific/Enderbury'), ('Pacific/Fakaofo', '(GMT+1300) Pacific/Fakaofo'), ('Pacific/Fiji', '(GMT+1200) Pacific/Fiji'), ('Pacific/Funafuti', '(GMT+1200) Pacific/Funafuti'), ('Pacific/Galapagos', '(GMT-0600) Pacific/Galapagos'), ('Pacific/Gambier', '(GMT-0900) Pacific/Gambier'), ('Pacific/Guadalcanal', '(GMT+1100) Pacific/Guadalcanal'), ('Pacific/Guam', '(GMT+1000) Pacific/Guam'), ('Pacific/Honolulu', '(GMT-1000) Pacific/Honolulu'), ('Pacific/Kiritimati', '(GMT+1400) Pacific/Kiritimati'), ('Pacific/Kosrae', '(GMT+1100) Pacific/Kosrae'), ('Pacific/Kwajalein', '(GMT+1200) Pacific/Kwajalein'), ('Pacific/Majuro', '(GMT+1200) Pacific/Majuro'), ('Pacific/Marquesas', '(GMT-0930) Pacific/Marquesas'), ('Pacific/Midway', '(GMT-1100) Pacific/Midway'), ('Pacific/Nauru', '(GMT+1200) Pacific/Nauru'), ('Pacific/Niue', '(GMT-1100) Pacific/Niue'), ('Pacific/Norfolk', '(GMT+1200) Pacific/Norfolk'), ('Pacific/Noumea', '(GMT+1100) Pacific/Noumea'), ('Pacific/Pago_Pago', '(GMT-1100) Pacific/Pago_Pago'), ('Pacific/Palau', '(GMT+0900) Pacific/Palau'), ('Pacific/Pitcairn', '(GMT-0800) Pacific/Pitcairn'), ('Pacific/Pohnpei', '(GMT+1100) Pacific/Pohnpei'), ('Pacific/Port_Moresby', '(GMT+1000) Pacific/Port_Moresby'), ('Pacific/Rarotonga', '(GMT-1000) Pacific/Rarotonga'), ('Pacific/Saipan', '(GMT+1000) Pacific/Saipan'), ('Pacific/Tahiti', '(GMT-1000) Pacific/Tahiti'), ('Pacific/Tarawa', '(GMT+1200) Pacific/Tarawa'), ('Pacific/Tongatapu', '(GMT+1300) Pacific/Tongatapu'), ('Pacific/Wake', '(GMT+1200) Pacific/Wake'), ('Pacific/Wallis', '(GMT+1200) Pacific/Wallis'), ('US/Alaska', '(GMT-0900) US/Alaska'), ('US/Arizona', '(GMT-0700) US/Arizona'), ('US/Central', '(GMT-0600) US/Central'), ('US/Eastern', '(GMT-0500) US/Eastern'), ('US/Hawaii', '(GMT-1000) US/Hawaii'), ('US/Mountain', '(GMT-0700) US/Mountain'), ('US/Pacific', '(GMT-0800) US/Pacific'), ('UTC', '(GMT+0000) UTC')], default='America/New_York', max_length=100), + model_name="profile", + name="timezone", + field=vendor.timezones.fields.TimeZoneField( + choices=[ + ("Africa/Abidjan", "(GMT+0000) Africa/Abidjan"), + ("Africa/Accra", "(GMT+0000) Africa/Accra"), + ("Africa/Addis_Ababa", "(GMT+0300) Africa/Addis_Ababa"), + ("Africa/Algiers", 
"(GMT+0100) Africa/Algiers"), + ("Africa/Asmara", "(GMT+0300) Africa/Asmara"), + ("Africa/Bamako", "(GMT+0000) Africa/Bamako"), + ("Africa/Bangui", "(GMT+0100) Africa/Bangui"), + ("Africa/Banjul", "(GMT+0000) Africa/Banjul"), + ("Africa/Bissau", "(GMT+0000) Africa/Bissau"), + ("Africa/Blantyre", "(GMT+0200) Africa/Blantyre"), + ("Africa/Brazzaville", "(GMT+0100) Africa/Brazzaville"), + ("Africa/Bujumbura", "(GMT+0200) Africa/Bujumbura"), + ("Africa/Cairo", "(GMT+0200) Africa/Cairo"), + ("Africa/Casablanca", "(GMT+0100) Africa/Casablanca"), + ("Africa/Ceuta", "(GMT+0100) Africa/Ceuta"), + ("Africa/Conakry", "(GMT+0000) Africa/Conakry"), + ("Africa/Dakar", "(GMT+0000) Africa/Dakar"), + ("Africa/Dar_es_Salaam", "(GMT+0300) Africa/Dar_es_Salaam"), + ("Africa/Djibouti", "(GMT+0300) Africa/Djibouti"), + ("Africa/Douala", "(GMT+0100) Africa/Douala"), + ("Africa/El_Aaiun", "(GMT+0100) Africa/El_Aaiun"), + ("Africa/Freetown", "(GMT+0000) Africa/Freetown"), + ("Africa/Gaborone", "(GMT+0200) Africa/Gaborone"), + ("Africa/Harare", "(GMT+0200) Africa/Harare"), + ("Africa/Johannesburg", "(GMT+0200) Africa/Johannesburg"), + ("Africa/Juba", "(GMT+0300) Africa/Juba"), + ("Africa/Kampala", "(GMT+0300) Africa/Kampala"), + ("Africa/Khartoum", "(GMT+0200) Africa/Khartoum"), + ("Africa/Kigali", "(GMT+0200) Africa/Kigali"), + ("Africa/Kinshasa", "(GMT+0100) Africa/Kinshasa"), + ("Africa/Lagos", "(GMT+0100) Africa/Lagos"), + ("Africa/Libreville", "(GMT+0100) Africa/Libreville"), + ("Africa/Lome", "(GMT+0000) Africa/Lome"), + ("Africa/Luanda", "(GMT+0100) Africa/Luanda"), + ("Africa/Lubumbashi", "(GMT+0200) Africa/Lubumbashi"), + ("Africa/Lusaka", "(GMT+0200) Africa/Lusaka"), + ("Africa/Malabo", "(GMT+0100) Africa/Malabo"), + ("Africa/Maputo", "(GMT+0200) Africa/Maputo"), + ("Africa/Maseru", "(GMT+0200) Africa/Maseru"), + ("Africa/Mbabane", "(GMT+0200) Africa/Mbabane"), + ("Africa/Mogadishu", "(GMT+0300) Africa/Mogadishu"), + ("Africa/Monrovia", "(GMT+0000) Africa/Monrovia"), + ("Africa/Nairobi", "(GMT+0300) Africa/Nairobi"), + ("Africa/Ndjamena", "(GMT+0100) Africa/Ndjamena"), + ("Africa/Niamey", "(GMT+0100) Africa/Niamey"), + ("Africa/Nouakchott", "(GMT+0000) Africa/Nouakchott"), + ("Africa/Ouagadougou", "(GMT+0000) Africa/Ouagadougou"), + ("Africa/Porto-Novo", "(GMT+0100) Africa/Porto-Novo"), + ("Africa/Sao_Tome", "(GMT+0000) Africa/Sao_Tome"), + ("Africa/Tripoli", "(GMT+0200) Africa/Tripoli"), + ("Africa/Tunis", "(GMT+0100) Africa/Tunis"), + ("Africa/Windhoek", "(GMT+0200) Africa/Windhoek"), + ("America/Adak", "(GMT-1000) America/Adak"), + ("America/Anchorage", "(GMT-0900) America/Anchorage"), + ("America/Anguilla", "(GMT-0400) America/Anguilla"), + ("America/Antigua", "(GMT-0400) America/Antigua"), + ("America/Araguaina", "(GMT-0300) America/Araguaina"), + ("America/Argentina/Buenos_Aires", "(GMT-0300) America/Argentina/Buenos_Aires"), + ("America/Argentina/Catamarca", "(GMT-0300) America/Argentina/Catamarca"), + ("America/Argentina/Cordoba", "(GMT-0300) America/Argentina/Cordoba"), + ("America/Argentina/Jujuy", "(GMT-0300) America/Argentina/Jujuy"), + ("America/Argentina/La_Rioja", "(GMT-0300) America/Argentina/La_Rioja"), + ("America/Argentina/Mendoza", "(GMT-0300) America/Argentina/Mendoza"), + ("America/Argentina/Rio_Gallegos", "(GMT-0300) America/Argentina/Rio_Gallegos"), + ("America/Argentina/Salta", "(GMT-0300) America/Argentina/Salta"), + ("America/Argentina/San_Juan", "(GMT-0300) America/Argentina/San_Juan"), + ("America/Argentina/San_Luis", "(GMT-0300) America/Argentina/San_Luis"), + 
("America/Argentina/Tucuman", "(GMT-0300) America/Argentina/Tucuman"), + ("America/Argentina/Ushuaia", "(GMT-0300) America/Argentina/Ushuaia"), + ("America/Aruba", "(GMT-0400) America/Aruba"), + ("America/Asuncion", "(GMT-0300) America/Asuncion"), + ("America/Atikokan", "(GMT-0500) America/Atikokan"), + ("America/Bahia", "(GMT-0300) America/Bahia"), + ("America/Bahia_Banderas", "(GMT-0600) America/Bahia_Banderas"), + ("America/Barbados", "(GMT-0400) America/Barbados"), + ("America/Belem", "(GMT-0300) America/Belem"), + ("America/Belize", "(GMT-0600) America/Belize"), + ("America/Blanc-Sablon", "(GMT-0400) America/Blanc-Sablon"), + ("America/Boa_Vista", "(GMT-0400) America/Boa_Vista"), + ("America/Bogota", "(GMT-0500) America/Bogota"), + ("America/Boise", "(GMT-0700) America/Boise"), + ("America/Cambridge_Bay", "(GMT-0700) America/Cambridge_Bay"), + ("America/Campo_Grande", "(GMT-0400) America/Campo_Grande"), + ("America/Cancun", "(GMT-0500) America/Cancun"), + ("America/Caracas", "(GMT-0400) America/Caracas"), + ("America/Cayenne", "(GMT-0300) America/Cayenne"), + ("America/Cayman", "(GMT-0500) America/Cayman"), + ("America/Chicago", "(GMT-0600) America/Chicago"), + ("America/Chihuahua", "(GMT-0700) America/Chihuahua"), + ("America/Costa_Rica", "(GMT-0600) America/Costa_Rica"), + ("America/Creston", "(GMT-0700) America/Creston"), + ("America/Cuiaba", "(GMT-0400) America/Cuiaba"), + ("America/Curacao", "(GMT-0400) America/Curacao"), + ("America/Danmarkshavn", "(GMT+0000) America/Danmarkshavn"), + ("America/Dawson", "(GMT-0700) America/Dawson"), + ("America/Dawson_Creek", "(GMT-0700) America/Dawson_Creek"), + ("America/Denver", "(GMT-0700) America/Denver"), + ("America/Detroit", "(GMT-0500) America/Detroit"), + ("America/Dominica", "(GMT-0400) America/Dominica"), + ("America/Edmonton", "(GMT-0700) America/Edmonton"), + ("America/Eirunepe", "(GMT-0500) America/Eirunepe"), + ("America/El_Salvador", "(GMT-0600) America/El_Salvador"), + ("America/Fort_Nelson", "(GMT-0700) America/Fort_Nelson"), + ("America/Fortaleza", "(GMT-0300) America/Fortaleza"), + ("America/Glace_Bay", "(GMT-0400) America/Glace_Bay"), + ("America/Goose_Bay", "(GMT-0400) America/Goose_Bay"), + ("America/Grand_Turk", "(GMT-0500) America/Grand_Turk"), + ("America/Grenada", "(GMT-0400) America/Grenada"), + ("America/Guadeloupe", "(GMT-0400) America/Guadeloupe"), + ("America/Guatemala", "(GMT-0600) America/Guatemala"), + ("America/Guayaquil", "(GMT-0500) America/Guayaquil"), + ("America/Guyana", "(GMT-0400) America/Guyana"), + ("America/Halifax", "(GMT-0400) America/Halifax"), + ("America/Havana", "(GMT-0500) America/Havana"), + ("America/Hermosillo", "(GMT-0700) America/Hermosillo"), + ("America/Indiana/Indianapolis", "(GMT-0500) America/Indiana/Indianapolis"), + ("America/Indiana/Knox", "(GMT-0600) America/Indiana/Knox"), + ("America/Indiana/Marengo", "(GMT-0500) America/Indiana/Marengo"), + ("America/Indiana/Petersburg", "(GMT-0500) America/Indiana/Petersburg"), + ("America/Indiana/Tell_City", "(GMT-0600) America/Indiana/Tell_City"), + ("America/Indiana/Vevay", "(GMT-0500) America/Indiana/Vevay"), + ("America/Indiana/Vincennes", "(GMT-0500) America/Indiana/Vincennes"), + ("America/Indiana/Winamac", "(GMT-0500) America/Indiana/Winamac"), + ("America/Inuvik", "(GMT-0700) America/Inuvik"), + ("America/Iqaluit", "(GMT-0500) America/Iqaluit"), + ("America/Jamaica", "(GMT-0500) America/Jamaica"), + ("America/Juneau", "(GMT-0900) America/Juneau"), + ("America/Kentucky/Louisville", "(GMT-0500) America/Kentucky/Louisville"), + 
("America/Kentucky/Monticello", "(GMT-0500) America/Kentucky/Monticello"), + ("America/Kralendijk", "(GMT-0400) America/Kralendijk"), + ("America/La_Paz", "(GMT-0400) America/La_Paz"), + ("America/Lima", "(GMT-0500) America/Lima"), + ("America/Los_Angeles", "(GMT-0800) America/Los_Angeles"), + ("America/Lower_Princes", "(GMT-0400) America/Lower_Princes"), + ("America/Maceio", "(GMT-0300) America/Maceio"), + ("America/Managua", "(GMT-0600) America/Managua"), + ("America/Manaus", "(GMT-0400) America/Manaus"), + ("America/Marigot", "(GMT-0400) America/Marigot"), + ("America/Martinique", "(GMT-0400) America/Martinique"), + ("America/Matamoros", "(GMT-0600) America/Matamoros"), + ("America/Mazatlan", "(GMT-0700) America/Mazatlan"), + ("America/Menominee", "(GMT-0600) America/Menominee"), + ("America/Merida", "(GMT-0600) America/Merida"), + ("America/Metlakatla", "(GMT-0900) America/Metlakatla"), + ("America/Mexico_City", "(GMT-0600) America/Mexico_City"), + ("America/Miquelon", "(GMT-0300) America/Miquelon"), + ("America/Moncton", "(GMT-0400) America/Moncton"), + ("America/Monterrey", "(GMT-0600) America/Monterrey"), + ("America/Montevideo", "(GMT-0300) America/Montevideo"), + ("America/Montserrat", "(GMT-0400) America/Montserrat"), + ("America/Nassau", "(GMT-0500) America/Nassau"), + ("America/New_York", "(GMT-0500) America/New_York"), + ("America/Nipigon", "(GMT-0500) America/Nipigon"), + ("America/Nome", "(GMT-0900) America/Nome"), + ("America/Noronha", "(GMT-0200) America/Noronha"), + ("America/North_Dakota/Beulah", "(GMT-0600) America/North_Dakota/Beulah"), + ("America/North_Dakota/Center", "(GMT-0600) America/North_Dakota/Center"), + ("America/North_Dakota/New_Salem", "(GMT-0600) America/North_Dakota/New_Salem"), + ("America/Nuuk", "(GMT-0300) America/Nuuk"), + ("America/Ojinaga", "(GMT-0700) America/Ojinaga"), + ("America/Panama", "(GMT-0500) America/Panama"), + ("America/Pangnirtung", "(GMT-0500) America/Pangnirtung"), + ("America/Paramaribo", "(GMT-0300) America/Paramaribo"), + ("America/Phoenix", "(GMT-0700) America/Phoenix"), + ("America/Port-au-Prince", "(GMT-0500) America/Port-au-Prince"), + ("America/Port_of_Spain", "(GMT-0400) America/Port_of_Spain"), + ("America/Porto_Velho", "(GMT-0400) America/Porto_Velho"), + ("America/Puerto_Rico", "(GMT-0400) America/Puerto_Rico"), + ("America/Punta_Arenas", "(GMT-0300) America/Punta_Arenas"), + ("America/Rainy_River", "(GMT-0600) America/Rainy_River"), + ("America/Rankin_Inlet", "(GMT-0600) America/Rankin_Inlet"), + ("America/Recife", "(GMT-0300) America/Recife"), + ("America/Regina", "(GMT-0600) America/Regina"), + ("America/Resolute", "(GMT-0600) America/Resolute"), + ("America/Rio_Branco", "(GMT-0500) America/Rio_Branco"), + ("America/Santarem", "(GMT-0300) America/Santarem"), + ("America/Santiago", "(GMT-0300) America/Santiago"), + ("America/Santo_Domingo", "(GMT-0400) America/Santo_Domingo"), + ("America/Sao_Paulo", "(GMT-0300) America/Sao_Paulo"), + ("America/Scoresbysund", "(GMT-0100) America/Scoresbysund"), + ("America/Sitka", "(GMT-0900) America/Sitka"), + ("America/St_Barthelemy", "(GMT-0400) America/St_Barthelemy"), + ("America/St_Johns", "(GMT-0330) America/St_Johns"), + ("America/St_Kitts", "(GMT-0400) America/St_Kitts"), + ("America/St_Lucia", "(GMT-0400) America/St_Lucia"), + ("America/St_Thomas", "(GMT-0400) America/St_Thomas"), + ("America/St_Vincent", "(GMT-0400) America/St_Vincent"), + ("America/Swift_Current", "(GMT-0600) America/Swift_Current"), + ("America/Tegucigalpa", "(GMT-0600) America/Tegucigalpa"), + 
("America/Thule", "(GMT-0400) America/Thule"), + ("America/Thunder_Bay", "(GMT-0500) America/Thunder_Bay"), + ("America/Tijuana", "(GMT-0800) America/Tijuana"), + ("America/Toronto", "(GMT-0500) America/Toronto"), + ("America/Tortola", "(GMT-0400) America/Tortola"), + ("America/Vancouver", "(GMT-0800) America/Vancouver"), + ("America/Whitehorse", "(GMT-0700) America/Whitehorse"), + ("America/Winnipeg", "(GMT-0600) America/Winnipeg"), + ("America/Yakutat", "(GMT-0900) America/Yakutat"), + ("America/Yellowknife", "(GMT-0700) America/Yellowknife"), + ("Antarctica/Casey", "(GMT+1100) Antarctica/Casey"), + ("Antarctica/Davis", "(GMT+0700) Antarctica/Davis"), + ("Antarctica/DumontDUrville", "(GMT+1000) Antarctica/DumontDUrville"), + ("Antarctica/Macquarie", "(GMT+1100) Antarctica/Macquarie"), + ("Antarctica/Mawson", "(GMT+0500) Antarctica/Mawson"), + ("Antarctica/McMurdo", "(GMT+1300) Antarctica/McMurdo"), + ("Antarctica/Palmer", "(GMT-0300) Antarctica/Palmer"), + ("Antarctica/Rothera", "(GMT-0300) Antarctica/Rothera"), + ("Antarctica/Syowa", "(GMT+0300) Antarctica/Syowa"), + ("Antarctica/Troll", "(GMT+0000) Antarctica/Troll"), + ("Antarctica/Vostok", "(GMT+0600) Antarctica/Vostok"), + ("Arctic/Longyearbyen", "(GMT+0100) Arctic/Longyearbyen"), + ("Asia/Aden", "(GMT+0300) Asia/Aden"), + ("Asia/Almaty", "(GMT+0600) Asia/Almaty"), + ("Asia/Amman", "(GMT+0200) Asia/Amman"), + ("Asia/Anadyr", "(GMT+1200) Asia/Anadyr"), + ("Asia/Aqtau", "(GMT+0500) Asia/Aqtau"), + ("Asia/Aqtobe", "(GMT+0500) Asia/Aqtobe"), + ("Asia/Ashgabat", "(GMT+0500) Asia/Ashgabat"), + ("Asia/Atyrau", "(GMT+0500) Asia/Atyrau"), + ("Asia/Baghdad", "(GMT+0300) Asia/Baghdad"), + ("Asia/Bahrain", "(GMT+0300) Asia/Bahrain"), + ("Asia/Baku", "(GMT+0400) Asia/Baku"), + ("Asia/Bangkok", "(GMT+0700) Asia/Bangkok"), + ("Asia/Barnaul", "(GMT+0700) Asia/Barnaul"), + ("Asia/Beirut", "(GMT+0200) Asia/Beirut"), + ("Asia/Bishkek", "(GMT+0600) Asia/Bishkek"), + ("Asia/Brunei", "(GMT+0800) Asia/Brunei"), + ("Asia/Chita", "(GMT+0900) Asia/Chita"), + ("Asia/Choibalsan", "(GMT+0800) Asia/Choibalsan"), + ("Asia/Colombo", "(GMT+0530) Asia/Colombo"), + ("Asia/Damascus", "(GMT+0200) Asia/Damascus"), + ("Asia/Dhaka", "(GMT+0600) Asia/Dhaka"), + ("Asia/Dili", "(GMT+0900) Asia/Dili"), + ("Asia/Dubai", "(GMT+0400) Asia/Dubai"), + ("Asia/Dushanbe", "(GMT+0500) Asia/Dushanbe"), + ("Asia/Famagusta", "(GMT+0200) Asia/Famagusta"), + ("Asia/Gaza", "(GMT+0200) Asia/Gaza"), + ("Asia/Hebron", "(GMT+0200) Asia/Hebron"), + ("Asia/Ho_Chi_Minh", "(GMT+0700) Asia/Ho_Chi_Minh"), + ("Asia/Hong_Kong", "(GMT+0800) Asia/Hong_Kong"), + ("Asia/Hovd", "(GMT+0700) Asia/Hovd"), + ("Asia/Irkutsk", "(GMT+0800) Asia/Irkutsk"), + ("Asia/Jakarta", "(GMT+0700) Asia/Jakarta"), + ("Asia/Jayapura", "(GMT+0900) Asia/Jayapura"), + ("Asia/Jerusalem", "(GMT+0200) Asia/Jerusalem"), + ("Asia/Kabul", "(GMT+0430) Asia/Kabul"), + ("Asia/Kamchatka", "(GMT+1200) Asia/Kamchatka"), + ("Asia/Karachi", "(GMT+0500) Asia/Karachi"), + ("Asia/Kathmandu", "(GMT+0545) Asia/Kathmandu"), + ("Asia/Khandyga", "(GMT+0900) Asia/Khandyga"), + ("Asia/Kolkata", "(GMT+0530) Asia/Kolkata"), + ("Asia/Krasnoyarsk", "(GMT+0700) Asia/Krasnoyarsk"), + ("Asia/Kuala_Lumpur", "(GMT+0800) Asia/Kuala_Lumpur"), + ("Asia/Kuching", "(GMT+0800) Asia/Kuching"), + ("Asia/Kuwait", "(GMT+0300) Asia/Kuwait"), + ("Asia/Macau", "(GMT+0800) Asia/Macau"), + ("Asia/Magadan", "(GMT+1100) Asia/Magadan"), + ("Asia/Makassar", "(GMT+0800) Asia/Makassar"), + ("Asia/Manila", "(GMT+0800) Asia/Manila"), + ("Asia/Muscat", "(GMT+0400) Asia/Muscat"), + 
("Asia/Nicosia", "(GMT+0200) Asia/Nicosia"), + ("Asia/Novokuznetsk", "(GMT+0700) Asia/Novokuznetsk"), + ("Asia/Novosibirsk", "(GMT+0700) Asia/Novosibirsk"), + ("Asia/Omsk", "(GMT+0600) Asia/Omsk"), + ("Asia/Oral", "(GMT+0500) Asia/Oral"), + ("Asia/Phnom_Penh", "(GMT+0700) Asia/Phnom_Penh"), + ("Asia/Pontianak", "(GMT+0700) Asia/Pontianak"), + ("Asia/Pyongyang", "(GMT+0900) Asia/Pyongyang"), + ("Asia/Qatar", "(GMT+0300) Asia/Qatar"), + ("Asia/Qostanay", "(GMT+0600) Asia/Qostanay"), + ("Asia/Qyzylorda", "(GMT+0500) Asia/Qyzylorda"), + ("Asia/Riyadh", "(GMT+0300) Asia/Riyadh"), + ("Asia/Sakhalin", "(GMT+1100) Asia/Sakhalin"), + ("Asia/Samarkand", "(GMT+0500) Asia/Samarkand"), + ("Asia/Seoul", "(GMT+0900) Asia/Seoul"), + ("Asia/Shanghai", "(GMT+0800) Asia/Shanghai"), + ("Asia/Singapore", "(GMT+0800) Asia/Singapore"), + ("Asia/Srednekolymsk", "(GMT+1100) Asia/Srednekolymsk"), + ("Asia/Taipei", "(GMT+0800) Asia/Taipei"), + ("Asia/Tashkent", "(GMT+0500) Asia/Tashkent"), + ("Asia/Tbilisi", "(GMT+0400) Asia/Tbilisi"), + ("Asia/Tehran", "(GMT+0330) Asia/Tehran"), + ("Asia/Thimphu", "(GMT+0600) Asia/Thimphu"), + ("Asia/Tokyo", "(GMT+0900) Asia/Tokyo"), + ("Asia/Tomsk", "(GMT+0700) Asia/Tomsk"), + ("Asia/Ulaanbaatar", "(GMT+0800) Asia/Ulaanbaatar"), + ("Asia/Urumqi", "(GMT+0600) Asia/Urumqi"), + ("Asia/Ust-Nera", "(GMT+1000) Asia/Ust-Nera"), + ("Asia/Vientiane", "(GMT+0700) Asia/Vientiane"), + ("Asia/Vladivostok", "(GMT+1000) Asia/Vladivostok"), + ("Asia/Yakutsk", "(GMT+0900) Asia/Yakutsk"), + ("Asia/Yangon", "(GMT+0630) Asia/Yangon"), + ("Asia/Yekaterinburg", "(GMT+0500) Asia/Yekaterinburg"), + ("Asia/Yerevan", "(GMT+0400) Asia/Yerevan"), + ("Atlantic/Azores", "(GMT-0100) Atlantic/Azores"), + ("Atlantic/Bermuda", "(GMT-0400) Atlantic/Bermuda"), + ("Atlantic/Canary", "(GMT+0000) Atlantic/Canary"), + ("Atlantic/Cape_Verde", "(GMT-0100) Atlantic/Cape_Verde"), + ("Atlantic/Faroe", "(GMT+0000) Atlantic/Faroe"), + ("Atlantic/Madeira", "(GMT+0000) Atlantic/Madeira"), + ("Atlantic/Reykjavik", "(GMT+0000) Atlantic/Reykjavik"), + ("Atlantic/South_Georgia", "(GMT-0200) Atlantic/South_Georgia"), + ("Atlantic/St_Helena", "(GMT+0000) Atlantic/St_Helena"), + ("Atlantic/Stanley", "(GMT-0300) Atlantic/Stanley"), + ("Australia/Adelaide", "(GMT+1030) Australia/Adelaide"), + ("Australia/Brisbane", "(GMT+1000) Australia/Brisbane"), + ("Australia/Broken_Hill", "(GMT+1030) Australia/Broken_Hill"), + ("Australia/Currie", "(GMT+1100) Australia/Currie"), + ("Australia/Darwin", "(GMT+0930) Australia/Darwin"), + ("Australia/Eucla", "(GMT+0845) Australia/Eucla"), + ("Australia/Hobart", "(GMT+1100) Australia/Hobart"), + ("Australia/Lindeman", "(GMT+1000) Australia/Lindeman"), + ("Australia/Lord_Howe", "(GMT+1100) Australia/Lord_Howe"), + ("Australia/Melbourne", "(GMT+1100) Australia/Melbourne"), + ("Australia/Perth", "(GMT+0800) Australia/Perth"), + ("Australia/Sydney", "(GMT+1100) Australia/Sydney"), + ("Canada/Atlantic", "(GMT-0400) Canada/Atlantic"), + ("Canada/Central", "(GMT-0600) Canada/Central"), + ("Canada/Eastern", "(GMT-0500) Canada/Eastern"), + ("Canada/Mountain", "(GMT-0700) Canada/Mountain"), + ("Canada/Newfoundland", "(GMT-0330) Canada/Newfoundland"), + ("Canada/Pacific", "(GMT-0800) Canada/Pacific"), + ("Europe/Amsterdam", "(GMT+0100) Europe/Amsterdam"), + ("Europe/Andorra", "(GMT+0100) Europe/Andorra"), + ("Europe/Astrakhan", "(GMT+0400) Europe/Astrakhan"), + ("Europe/Athens", "(GMT+0200) Europe/Athens"), + ("Europe/Belgrade", "(GMT+0100) Europe/Belgrade"), + ("Europe/Berlin", "(GMT+0100) Europe/Berlin"), + 
("Europe/Bratislava", "(GMT+0100) Europe/Bratislava"), + ("Europe/Brussels", "(GMT+0100) Europe/Brussels"), + ("Europe/Bucharest", "(GMT+0200) Europe/Bucharest"), + ("Europe/Budapest", "(GMT+0100) Europe/Budapest"), + ("Europe/Busingen", "(GMT+0100) Europe/Busingen"), + ("Europe/Chisinau", "(GMT+0200) Europe/Chisinau"), + ("Europe/Copenhagen", "(GMT+0100) Europe/Copenhagen"), + ("Europe/Dublin", "(GMT+0000) Europe/Dublin"), + ("Europe/Gibraltar", "(GMT+0100) Europe/Gibraltar"), + ("Europe/Guernsey", "(GMT+0000) Europe/Guernsey"), + ("Europe/Helsinki", "(GMT+0200) Europe/Helsinki"), + ("Europe/Isle_of_Man", "(GMT+0000) Europe/Isle_of_Man"), + ("Europe/Istanbul", "(GMT+0300) Europe/Istanbul"), + ("Europe/Jersey", "(GMT+0000) Europe/Jersey"), + ("Europe/Kaliningrad", "(GMT+0200) Europe/Kaliningrad"), + ("Europe/Kiev", "(GMT+0200) Europe/Kiev"), + ("Europe/Kirov", "(GMT+0300) Europe/Kirov"), + ("Europe/Lisbon", "(GMT+0000) Europe/Lisbon"), + ("Europe/Ljubljana", "(GMT+0100) Europe/Ljubljana"), + ("Europe/London", "(GMT+0000) Europe/London"), + ("Europe/Luxembourg", "(GMT+0100) Europe/Luxembourg"), + ("Europe/Madrid", "(GMT+0100) Europe/Madrid"), + ("Europe/Malta", "(GMT+0100) Europe/Malta"), + ("Europe/Mariehamn", "(GMT+0200) Europe/Mariehamn"), + ("Europe/Minsk", "(GMT+0300) Europe/Minsk"), + ("Europe/Monaco", "(GMT+0100) Europe/Monaco"), + ("Europe/Moscow", "(GMT+0300) Europe/Moscow"), + ("Europe/Oslo", "(GMT+0100) Europe/Oslo"), + ("Europe/Paris", "(GMT+0100) Europe/Paris"), + ("Europe/Podgorica", "(GMT+0100) Europe/Podgorica"), + ("Europe/Prague", "(GMT+0100) Europe/Prague"), + ("Europe/Riga", "(GMT+0200) Europe/Riga"), + ("Europe/Rome", "(GMT+0100) Europe/Rome"), + ("Europe/Samara", "(GMT+0400) Europe/Samara"), + ("Europe/San_Marino", "(GMT+0100) Europe/San_Marino"), + ("Europe/Sarajevo", "(GMT+0100) Europe/Sarajevo"), + ("Europe/Saratov", "(GMT+0400) Europe/Saratov"), + ("Europe/Simferopol", "(GMT+0300) Europe/Simferopol"), + ("Europe/Skopje", "(GMT+0100) Europe/Skopje"), + ("Europe/Sofia", "(GMT+0200) Europe/Sofia"), + ("Europe/Stockholm", "(GMT+0100) Europe/Stockholm"), + ("Europe/Tallinn", "(GMT+0200) Europe/Tallinn"), + ("Europe/Tirane", "(GMT+0100) Europe/Tirane"), + ("Europe/Ulyanovsk", "(GMT+0400) Europe/Ulyanovsk"), + ("Europe/Uzhgorod", "(GMT+0200) Europe/Uzhgorod"), + ("Europe/Vaduz", "(GMT+0100) Europe/Vaduz"), + ("Europe/Vatican", "(GMT+0100) Europe/Vatican"), + ("Europe/Vienna", "(GMT+0100) Europe/Vienna"), + ("Europe/Vilnius", "(GMT+0200) Europe/Vilnius"), + ("Europe/Volgograd", "(GMT+0400) Europe/Volgograd"), + ("Europe/Warsaw", "(GMT+0100) Europe/Warsaw"), + ("Europe/Zagreb", "(GMT+0100) Europe/Zagreb"), + ("Europe/Zaporozhye", "(GMT+0200) Europe/Zaporozhye"), + ("Europe/Zurich", "(GMT+0100) Europe/Zurich"), + ("GMT", "(GMT+0000) GMT"), + ("Indian/Antananarivo", "(GMT+0300) Indian/Antananarivo"), + ("Indian/Chagos", "(GMT+0600) Indian/Chagos"), + ("Indian/Christmas", "(GMT+0700) Indian/Christmas"), + ("Indian/Cocos", "(GMT+0630) Indian/Cocos"), + ("Indian/Comoro", "(GMT+0300) Indian/Comoro"), + ("Indian/Kerguelen", "(GMT+0500) Indian/Kerguelen"), + ("Indian/Mahe", "(GMT+0400) Indian/Mahe"), + ("Indian/Maldives", "(GMT+0500) Indian/Maldives"), + ("Indian/Mauritius", "(GMT+0400) Indian/Mauritius"), + ("Indian/Mayotte", "(GMT+0300) Indian/Mayotte"), + ("Indian/Reunion", "(GMT+0400) Indian/Reunion"), + ("Pacific/Apia", "(GMT+1400) Pacific/Apia"), + ("Pacific/Auckland", "(GMT+1300) Pacific/Auckland"), + ("Pacific/Bougainville", "(GMT+1100) Pacific/Bougainville"), + 
("Pacific/Chatham", "(GMT+1345) Pacific/Chatham"), + ("Pacific/Chuuk", "(GMT+1000) Pacific/Chuuk"), + ("Pacific/Easter", "(GMT-0500) Pacific/Easter"), + ("Pacific/Efate", "(GMT+1100) Pacific/Efate"), + ("Pacific/Enderbury", "(GMT+1300) Pacific/Enderbury"), + ("Pacific/Fakaofo", "(GMT+1300) Pacific/Fakaofo"), + ("Pacific/Fiji", "(GMT+1200) Pacific/Fiji"), + ("Pacific/Funafuti", "(GMT+1200) Pacific/Funafuti"), + ("Pacific/Galapagos", "(GMT-0600) Pacific/Galapagos"), + ("Pacific/Gambier", "(GMT-0900) Pacific/Gambier"), + ("Pacific/Guadalcanal", "(GMT+1100) Pacific/Guadalcanal"), + ("Pacific/Guam", "(GMT+1000) Pacific/Guam"), + ("Pacific/Honolulu", "(GMT-1000) Pacific/Honolulu"), + ("Pacific/Kiritimati", "(GMT+1400) Pacific/Kiritimati"), + ("Pacific/Kosrae", "(GMT+1100) Pacific/Kosrae"), + ("Pacific/Kwajalein", "(GMT+1200) Pacific/Kwajalein"), + ("Pacific/Majuro", "(GMT+1200) Pacific/Majuro"), + ("Pacific/Marquesas", "(GMT-0930) Pacific/Marquesas"), + ("Pacific/Midway", "(GMT-1100) Pacific/Midway"), + ("Pacific/Nauru", "(GMT+1200) Pacific/Nauru"), + ("Pacific/Niue", "(GMT-1100) Pacific/Niue"), + ("Pacific/Norfolk", "(GMT+1200) Pacific/Norfolk"), + ("Pacific/Noumea", "(GMT+1100) Pacific/Noumea"), + ("Pacific/Pago_Pago", "(GMT-1100) Pacific/Pago_Pago"), + ("Pacific/Palau", "(GMT+0900) Pacific/Palau"), + ("Pacific/Pitcairn", "(GMT-0800) Pacific/Pitcairn"), + ("Pacific/Pohnpei", "(GMT+1100) Pacific/Pohnpei"), + ("Pacific/Port_Moresby", "(GMT+1000) Pacific/Port_Moresby"), + ("Pacific/Rarotonga", "(GMT-1000) Pacific/Rarotonga"), + ("Pacific/Saipan", "(GMT+1000) Pacific/Saipan"), + ("Pacific/Tahiti", "(GMT-1000) Pacific/Tahiti"), + ("Pacific/Tarawa", "(GMT+1200) Pacific/Tarawa"), + ("Pacific/Tongatapu", "(GMT+1300) Pacific/Tongatapu"), + ("Pacific/Wake", "(GMT+1200) Pacific/Wake"), + ("Pacific/Wallis", "(GMT+1200) Pacific/Wallis"), + ("US/Alaska", "(GMT-0900) US/Alaska"), + ("US/Arizona", "(GMT-0700) US/Arizona"), + ("US/Central", "(GMT-0600) US/Central"), + ("US/Eastern", "(GMT-0500) US/Eastern"), + ("US/Hawaii", "(GMT-1000) US/Hawaii"), + ("US/Mountain", "(GMT-0700) US/Mountain"), + ("US/Pacific", "(GMT-0800) US/Pacific"), + ("UTC", "(GMT+0000) UTC"), + ], + default="America/New_York", + max_length=100, + ), ), ] diff --git a/apps/profile/migrations/0008_profile_paypal_sub_id.py b/apps/profile/migrations/0008_profile_paypal_sub_id.py index 1b700d17c..a974b2cc9 100644 --- a/apps/profile/migrations/0008_profile_paypal_sub_id.py +++ b/apps/profile/migrations/0008_profile_paypal_sub_id.py @@ -4,15 +4,14 @@ from django.db import migrations, models class Migration(migrations.Migration): - dependencies = [ - ('profile', '0007_auto_20220125_2108'), + ("profile", "0007_auto_20220125_2108"), ] operations = [ migrations.AddField( - model_name='profile', - name='paypal_sub_id', + model_name="profile", + name="paypal_sub_id", field=models.CharField(blank=True, max_length=24, null=True), ), ] diff --git a/apps/profile/migrations/0009_paypalids.py b/apps/profile/migrations/0009_paypalids.py index c181c77b3..6480de3a3 100644 --- a/apps/profile/migrations/0009_paypalids.py +++ b/apps/profile/migrations/0009_paypalids.py @@ -6,19 +6,29 @@ import django.db.models.deletion class Migration(migrations.Migration): - dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL), - ('profile', '0008_profile_paypal_sub_id'), + ("profile", "0008_profile_paypal_sub_id"), ] operations = [ migrations.CreateModel( - name='PaypalIds', + name="PaypalIds", fields=[ - ('id', models.AutoField(auto_created=True, 
primary_key=True, serialize=False, verbose_name='ID')), - ('paypal_sub_id', models.CharField(blank=True, max_length=24, null=True)), - ('user', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='paypal_ids', to=settings.AUTH_USER_MODEL)), + ( + "id", + models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID"), + ), + ("paypal_sub_id", models.CharField(blank=True, max_length=24, null=True)), + ( + "user", + models.ForeignKey( + null=True, + on_delete=django.db.models.deletion.CASCADE, + related_name="paypal_ids", + to=settings.AUTH_USER_MODEL, + ), + ), ], ), ] diff --git a/apps/profile/migrations/0010_profile_active_provider.py b/apps/profile/migrations/0010_profile_active_provider.py index 53c49773b..c880e37a9 100644 --- a/apps/profile/migrations/0010_profile_active_provider.py +++ b/apps/profile/migrations/0010_profile_active_provider.py @@ -4,15 +4,14 @@ from django.db import migrations, models class Migration(migrations.Migration): - dependencies = [ - ('profile', '0009_paypalids'), + ("profile", "0009_paypalids"), ] operations = [ migrations.AddField( - model_name='profile', - name='active_provider', + model_name="profile", + name="active_provider", field=models.CharField(blank=True, max_length=24, null=True), ), ] diff --git a/apps/profile/migrations/0011_auto_20220408_1908.py b/apps/profile/migrations/0011_auto_20220408_1908.py index 76f8bc6b1..187fbf0b0 100644 --- a/apps/profile/migrations/0011_auto_20220408_1908.py +++ b/apps/profile/migrations/0011_auto_20220408_1908.py @@ -5,25 +5,469 @@ import vendor.timezones.fields class Migration(migrations.Migration): - dependencies = [ - ('profile', '0010_profile_active_provider'), + ("profile", "0010_profile_active_provider"), ] operations = [ migrations.AddField( - model_name='paymenthistory', - name='refunded', + model_name="paymenthistory", + name="refunded", field=models.BooleanField(blank=True, null=True), ), migrations.AlterField( - model_name='profile', - name='feed_pane_size', + model_name="profile", + name="feed_pane_size", field=models.IntegerField(default=282), ), migrations.AlterField( - model_name='profile', - name='timezone', - field=vendor.timezones.fields.TimeZoneField(choices=[('Africa/Abidjan', '(GMT+0000) Africa/Abidjan'), ('Africa/Accra', '(GMT+0000) Africa/Accra'), ('Africa/Addis_Ababa', '(GMT+0300) Africa/Addis_Ababa'), ('Africa/Algiers', '(GMT+0100) Africa/Algiers'), ('Africa/Asmara', '(GMT+0300) Africa/Asmara'), ('Africa/Bamako', '(GMT+0000) Africa/Bamako'), ('Africa/Bangui', '(GMT+0100) Africa/Bangui'), ('Africa/Banjul', '(GMT+0000) Africa/Banjul'), ('Africa/Bissau', '(GMT+0000) Africa/Bissau'), ('Africa/Blantyre', '(GMT+0200) Africa/Blantyre'), ('Africa/Brazzaville', '(GMT+0100) Africa/Brazzaville'), ('Africa/Bujumbura', '(GMT+0200) Africa/Bujumbura'), ('Africa/Cairo', '(GMT+0200) Africa/Cairo'), ('Africa/Casablanca', '(GMT+0000) Africa/Casablanca'), ('Africa/Ceuta', '(GMT+0200) Africa/Ceuta'), ('Africa/Conakry', '(GMT+0000) Africa/Conakry'), ('Africa/Dakar', '(GMT+0000) Africa/Dakar'), ('Africa/Dar_es_Salaam', '(GMT+0300) Africa/Dar_es_Salaam'), ('Africa/Djibouti', '(GMT+0300) Africa/Djibouti'), ('Africa/Douala', '(GMT+0100) Africa/Douala'), ('Africa/El_Aaiun', '(GMT+0000) Africa/El_Aaiun'), ('Africa/Freetown', '(GMT+0000) Africa/Freetown'), ('Africa/Gaborone', '(GMT+0200) Africa/Gaborone'), ('Africa/Harare', '(GMT+0200) Africa/Harare'), ('Africa/Johannesburg', '(GMT+0200) Africa/Johannesburg'), ('Africa/Juba', '(GMT+0300) Africa/Juba'), 
('Africa/Kampala', '(GMT+0300) Africa/Kampala'), ('Africa/Khartoum', '(GMT+0200) Africa/Khartoum'), ('Africa/Kigali', '(GMT+0200) Africa/Kigali'), ('Africa/Kinshasa', '(GMT+0100) Africa/Kinshasa'), ('Africa/Lagos', '(GMT+0100) Africa/Lagos'), ('Africa/Libreville', '(GMT+0100) Africa/Libreville'), ('Africa/Lome', '(GMT+0000) Africa/Lome'), ('Africa/Luanda', '(GMT+0100) Africa/Luanda'), ('Africa/Lubumbashi', '(GMT+0200) Africa/Lubumbashi'), ('Africa/Lusaka', '(GMT+0200) Africa/Lusaka'), ('Africa/Malabo', '(GMT+0100) Africa/Malabo'), ('Africa/Maputo', '(GMT+0200) Africa/Maputo'), ('Africa/Maseru', '(GMT+0200) Africa/Maseru'), ('Africa/Mbabane', '(GMT+0200) Africa/Mbabane'), ('Africa/Mogadishu', '(GMT+0300) Africa/Mogadishu'), ('Africa/Monrovia', '(GMT+0000) Africa/Monrovia'), ('Africa/Nairobi', '(GMT+0300) Africa/Nairobi'), ('Africa/Ndjamena', '(GMT+0100) Africa/Ndjamena'), ('Africa/Niamey', '(GMT+0100) Africa/Niamey'), ('Africa/Nouakchott', '(GMT+0000) Africa/Nouakchott'), ('Africa/Ouagadougou', '(GMT+0000) Africa/Ouagadougou'), ('Africa/Porto-Novo', '(GMT+0100) Africa/Porto-Novo'), ('Africa/Sao_Tome', '(GMT+0000) Africa/Sao_Tome'), ('Africa/Tripoli', '(GMT+0200) Africa/Tripoli'), ('Africa/Tunis', '(GMT+0100) Africa/Tunis'), ('Africa/Windhoek', '(GMT+0200) Africa/Windhoek'), ('America/Adak', '(GMT-0900) America/Adak'), ('America/Anchorage', '(GMT-0800) America/Anchorage'), ('America/Anguilla', '(GMT-0400) America/Anguilla'), ('America/Antigua', '(GMT-0400) America/Antigua'), ('America/Araguaina', '(GMT-0300) America/Araguaina'), ('America/Argentina/Buenos_Aires', '(GMT-0300) America/Argentina/Buenos_Aires'), ('America/Argentina/Catamarca', '(GMT-0300) America/Argentina/Catamarca'), ('America/Argentina/Cordoba', '(GMT-0300) America/Argentina/Cordoba'), ('America/Argentina/Jujuy', '(GMT-0300) America/Argentina/Jujuy'), ('America/Argentina/La_Rioja', '(GMT-0300) America/Argentina/La_Rioja'), ('America/Argentina/Mendoza', '(GMT-0300) America/Argentina/Mendoza'), ('America/Argentina/Rio_Gallegos', '(GMT-0300) America/Argentina/Rio_Gallegos'), ('America/Argentina/Salta', '(GMT-0300) America/Argentina/Salta'), ('America/Argentina/San_Juan', '(GMT-0300) America/Argentina/San_Juan'), ('America/Argentina/San_Luis', '(GMT-0300) America/Argentina/San_Luis'), ('America/Argentina/Tucuman', '(GMT-0300) America/Argentina/Tucuman'), ('America/Argentina/Ushuaia', '(GMT-0300) America/Argentina/Ushuaia'), ('America/Aruba', '(GMT-0400) America/Aruba'), ('America/Asuncion', '(GMT-0400) America/Asuncion'), ('America/Atikokan', '(GMT-0500) America/Atikokan'), ('America/Bahia', '(GMT-0300) America/Bahia'), ('America/Bahia_Banderas', '(GMT-0500) America/Bahia_Banderas'), ('America/Barbados', '(GMT-0400) America/Barbados'), ('America/Belem', '(GMT-0300) America/Belem'), ('America/Belize', '(GMT-0600) America/Belize'), ('America/Blanc-Sablon', '(GMT-0400) America/Blanc-Sablon'), ('America/Boa_Vista', '(GMT-0400) America/Boa_Vista'), ('America/Bogota', '(GMT-0500) America/Bogota'), ('America/Boise', '(GMT-0600) America/Boise'), ('America/Cambridge_Bay', '(GMT-0600) America/Cambridge_Bay'), ('America/Campo_Grande', '(GMT-0400) America/Campo_Grande'), ('America/Cancun', '(GMT-0500) America/Cancun'), ('America/Caracas', '(GMT-0400) America/Caracas'), ('America/Cayenne', '(GMT-0300) America/Cayenne'), ('America/Cayman', '(GMT-0500) America/Cayman'), ('America/Chicago', '(GMT-0500) America/Chicago'), ('America/Chihuahua', '(GMT-0600) America/Chihuahua'), ('America/Costa_Rica', '(GMT-0600) America/Costa_Rica'), 
('America/Creston', '(GMT-0700) America/Creston'), ('America/Cuiaba', '(GMT-0400) America/Cuiaba'), ('America/Curacao', '(GMT-0400) America/Curacao'), ('America/Danmarkshavn', '(GMT+0000) America/Danmarkshavn'), ('America/Dawson', '(GMT-0700) America/Dawson'), ('America/Dawson_Creek', '(GMT-0700) America/Dawson_Creek'), ('America/Denver', '(GMT-0600) America/Denver'), ('America/Detroit', '(GMT-0400) America/Detroit'), ('America/Dominica', '(GMT-0400) America/Dominica'), ('America/Edmonton', '(GMT-0600) America/Edmonton'), ('America/Eirunepe', '(GMT-0500) America/Eirunepe'), ('America/El_Salvador', '(GMT-0600) America/El_Salvador'), ('America/Fort_Nelson', '(GMT-0700) America/Fort_Nelson'), ('America/Fortaleza', '(GMT-0300) America/Fortaleza'), ('America/Glace_Bay', '(GMT-0300) America/Glace_Bay'), ('America/Goose_Bay', '(GMT-0300) America/Goose_Bay'), ('America/Grand_Turk', '(GMT-0400) America/Grand_Turk'), ('America/Grenada', '(GMT-0400) America/Grenada'), ('America/Guadeloupe', '(GMT-0400) America/Guadeloupe'), ('America/Guatemala', '(GMT-0600) America/Guatemala'), ('America/Guayaquil', '(GMT-0500) America/Guayaquil'), ('America/Guyana', '(GMT-0400) America/Guyana'), ('America/Halifax', '(GMT-0300) America/Halifax'), ('America/Havana', '(GMT-0400) America/Havana'), ('America/Hermosillo', '(GMT-0700) America/Hermosillo'), ('America/Indiana/Indianapolis', '(GMT-0400) America/Indiana/Indianapolis'), ('America/Indiana/Knox', '(GMT-0500) America/Indiana/Knox'), ('America/Indiana/Marengo', '(GMT-0400) America/Indiana/Marengo'), ('America/Indiana/Petersburg', '(GMT-0400) America/Indiana/Petersburg'), ('America/Indiana/Tell_City', '(GMT-0500) America/Indiana/Tell_City'), ('America/Indiana/Vevay', '(GMT-0400) America/Indiana/Vevay'), ('America/Indiana/Vincennes', '(GMT-0400) America/Indiana/Vincennes'), ('America/Indiana/Winamac', '(GMT-0400) America/Indiana/Winamac'), ('America/Inuvik', '(GMT-0600) America/Inuvik'), ('America/Iqaluit', '(GMT-0400) America/Iqaluit'), ('America/Jamaica', '(GMT-0500) America/Jamaica'), ('America/Juneau', '(GMT-0800) America/Juneau'), ('America/Kentucky/Louisville', '(GMT-0400) America/Kentucky/Louisville'), ('America/Kentucky/Monticello', '(GMT-0400) America/Kentucky/Monticello'), ('America/Kralendijk', '(GMT-0400) America/Kralendijk'), ('America/La_Paz', '(GMT-0400) America/La_Paz'), ('America/Lima', '(GMT-0500) America/Lima'), ('America/Los_Angeles', '(GMT-0700) America/Los_Angeles'), ('America/Lower_Princes', '(GMT-0400) America/Lower_Princes'), ('America/Maceio', '(GMT-0300) America/Maceio'), ('America/Managua', '(GMT-0600) America/Managua'), ('America/Manaus', '(GMT-0400) America/Manaus'), ('America/Marigot', '(GMT-0400) America/Marigot'), ('America/Martinique', '(GMT-0400) America/Martinique'), ('America/Matamoros', '(GMT-0500) America/Matamoros'), ('America/Mazatlan', '(GMT-0600) America/Mazatlan'), ('America/Menominee', '(GMT-0500) America/Menominee'), ('America/Merida', '(GMT-0500) America/Merida'), ('America/Metlakatla', '(GMT-0800) America/Metlakatla'), ('America/Mexico_City', '(GMT-0500) America/Mexico_City'), ('America/Miquelon', '(GMT-0200) America/Miquelon'), ('America/Moncton', '(GMT-0300) America/Moncton'), ('America/Monterrey', '(GMT-0500) America/Monterrey'), ('America/Montevideo', '(GMT-0300) America/Montevideo'), ('America/Montserrat', '(GMT-0400) America/Montserrat'), ('America/Nassau', '(GMT-0400) America/Nassau'), ('America/New_York', '(GMT-0400) America/New_York'), ('America/Nipigon', '(GMT-0400) America/Nipigon'), ('America/Nome', 
'(GMT-0800) America/Nome'), ('America/Noronha', '(GMT-0200) America/Noronha'), ('America/North_Dakota/Beulah', '(GMT-0500) America/North_Dakota/Beulah'), ('America/North_Dakota/Center', '(GMT-0500) America/North_Dakota/Center'), ('America/North_Dakota/New_Salem', '(GMT-0500) America/North_Dakota/New_Salem'), ('America/Nuuk', '(GMT-0200) America/Nuuk'), ('America/Ojinaga', '(GMT-0600) America/Ojinaga'), ('America/Panama', '(GMT-0500) America/Panama'), ('America/Pangnirtung', '(GMT-0400) America/Pangnirtung'), ('America/Paramaribo', '(GMT-0300) America/Paramaribo'), ('America/Phoenix', '(GMT-0700) America/Phoenix'), ('America/Port-au-Prince', '(GMT-0400) America/Port-au-Prince'), ('America/Port_of_Spain', '(GMT-0400) America/Port_of_Spain'), ('America/Porto_Velho', '(GMT-0400) America/Porto_Velho'), ('America/Puerto_Rico', '(GMT-0400) America/Puerto_Rico'), ('America/Punta_Arenas', '(GMT-0300) America/Punta_Arenas'), ('America/Rainy_River', '(GMT-0500) America/Rainy_River'), ('America/Rankin_Inlet', '(GMT-0500) America/Rankin_Inlet'), ('America/Recife', '(GMT-0300) America/Recife'), ('America/Regina', '(GMT-0600) America/Regina'), ('America/Resolute', '(GMT-0500) America/Resolute'), ('America/Rio_Branco', '(GMT-0500) America/Rio_Branco'), ('America/Santarem', '(GMT-0300) America/Santarem'), ('America/Santiago', '(GMT-0400) America/Santiago'), ('America/Santo_Domingo', '(GMT-0400) America/Santo_Domingo'), ('America/Sao_Paulo', '(GMT-0300) America/Sao_Paulo'), ('America/Scoresbysund', '(GMT+0000) America/Scoresbysund'), ('America/Sitka', '(GMT-0800) America/Sitka'), ('America/St_Barthelemy', '(GMT-0400) America/St_Barthelemy'), ('America/St_Johns', '(GMT-0230) America/St_Johns'), ('America/St_Kitts', '(GMT-0400) America/St_Kitts'), ('America/St_Lucia', '(GMT-0400) America/St_Lucia'), ('America/St_Thomas', '(GMT-0400) America/St_Thomas'), ('America/St_Vincent', '(GMT-0400) America/St_Vincent'), ('America/Swift_Current', '(GMT-0600) America/Swift_Current'), ('America/Tegucigalpa', '(GMT-0600) America/Tegucigalpa'), ('America/Thule', '(GMT-0300) America/Thule'), ('America/Thunder_Bay', '(GMT-0400) America/Thunder_Bay'), ('America/Tijuana', '(GMT-0700) America/Tijuana'), ('America/Toronto', '(GMT-0400) America/Toronto'), ('America/Tortola', '(GMT-0400) America/Tortola'), ('America/Vancouver', '(GMT-0700) America/Vancouver'), ('America/Whitehorse', '(GMT-0700) America/Whitehorse'), ('America/Winnipeg', '(GMT-0500) America/Winnipeg'), ('America/Yakutat', '(GMT-0800) America/Yakutat'), ('America/Yellowknife', '(GMT-0600) America/Yellowknife'), ('Antarctica/Casey', '(GMT+1100) Antarctica/Casey'), ('Antarctica/Davis', '(GMT+0700) Antarctica/Davis'), ('Antarctica/DumontDUrville', '(GMT+1000) Antarctica/DumontDUrville'), ('Antarctica/Macquarie', '(GMT+1000) Antarctica/Macquarie'), ('Antarctica/Mawson', '(GMT+0500) Antarctica/Mawson'), ('Antarctica/McMurdo', '(GMT+1200) Antarctica/McMurdo'), ('Antarctica/Palmer', '(GMT-0300) Antarctica/Palmer'), ('Antarctica/Rothera', '(GMT-0300) Antarctica/Rothera'), ('Antarctica/Syowa', '(GMT+0300) Antarctica/Syowa'), ('Antarctica/Troll', '(GMT+0200) Antarctica/Troll'), ('Antarctica/Vostok', '(GMT+0600) Antarctica/Vostok'), ('Arctic/Longyearbyen', '(GMT+0200) Arctic/Longyearbyen'), ('Asia/Aden', '(GMT+0300) Asia/Aden'), ('Asia/Almaty', '(GMT+0600) Asia/Almaty'), ('Asia/Amman', '(GMT+0300) Asia/Amman'), ('Asia/Anadyr', '(GMT+1200) Asia/Anadyr'), ('Asia/Aqtau', '(GMT+0500) Asia/Aqtau'), ('Asia/Aqtobe', '(GMT+0500) Asia/Aqtobe'), ('Asia/Ashgabat', '(GMT+0500) 
Asia/Ashgabat'), ('Asia/Atyrau', '(GMT+0500) Asia/Atyrau'), ('Asia/Baghdad', '(GMT+0300) Asia/Baghdad'), ('Asia/Bahrain', '(GMT+0300) Asia/Bahrain'), ('Asia/Baku', '(GMT+0400) Asia/Baku'), ('Asia/Bangkok', '(GMT+0700) Asia/Bangkok'), ('Asia/Barnaul', '(GMT+0700) Asia/Barnaul'), ('Asia/Beirut', '(GMT+0300) Asia/Beirut'), ('Asia/Bishkek', '(GMT+0600) Asia/Bishkek'), ('Asia/Brunei', '(GMT+0800) Asia/Brunei'), ('Asia/Chita', '(GMT+0900) Asia/Chita'), ('Asia/Choibalsan', '(GMT+0800) Asia/Choibalsan'), ('Asia/Colombo', '(GMT+0530) Asia/Colombo'), ('Asia/Damascus', '(GMT+0300) Asia/Damascus'), ('Asia/Dhaka', '(GMT+0600) Asia/Dhaka'), ('Asia/Dili', '(GMT+0900) Asia/Dili'), ('Asia/Dubai', '(GMT+0400) Asia/Dubai'), ('Asia/Dushanbe', '(GMT+0500) Asia/Dushanbe'), ('Asia/Famagusta', '(GMT+0300) Asia/Famagusta'), ('Asia/Gaza', '(GMT+0300) Asia/Gaza'), ('Asia/Hebron', '(GMT+0300) Asia/Hebron'), ('Asia/Ho_Chi_Minh', '(GMT+0700) Asia/Ho_Chi_Minh'), ('Asia/Hong_Kong', '(GMT+0800) Asia/Hong_Kong'), ('Asia/Hovd', '(GMT+0700) Asia/Hovd'), ('Asia/Irkutsk', '(GMT+0800) Asia/Irkutsk'), ('Asia/Jakarta', '(GMT+0700) Asia/Jakarta'), ('Asia/Jayapura', '(GMT+0900) Asia/Jayapura'), ('Asia/Jerusalem', '(GMT+0300) Asia/Jerusalem'), ('Asia/Kabul', '(GMT+0430) Asia/Kabul'), ('Asia/Kamchatka', '(GMT+1200) Asia/Kamchatka'), ('Asia/Karachi', '(GMT+0500) Asia/Karachi'), ('Asia/Kathmandu', '(GMT+0545) Asia/Kathmandu'), ('Asia/Khandyga', '(GMT+0900) Asia/Khandyga'), ('Asia/Kolkata', '(GMT+0530) Asia/Kolkata'), ('Asia/Krasnoyarsk', '(GMT+0700) Asia/Krasnoyarsk'), ('Asia/Kuala_Lumpur', '(GMT+0800) Asia/Kuala_Lumpur'), ('Asia/Kuching', '(GMT+0800) Asia/Kuching'), ('Asia/Kuwait', '(GMT+0300) Asia/Kuwait'), ('Asia/Macau', '(GMT+0800) Asia/Macau'), ('Asia/Magadan', '(GMT+1100) Asia/Magadan'), ('Asia/Makassar', '(GMT+0800) Asia/Makassar'), ('Asia/Manila', '(GMT+0800) Asia/Manila'), ('Asia/Muscat', '(GMT+0400) Asia/Muscat'), ('Asia/Nicosia', '(GMT+0300) Asia/Nicosia'), ('Asia/Novokuznetsk', '(GMT+0700) Asia/Novokuznetsk'), ('Asia/Novosibirsk', '(GMT+0700) Asia/Novosibirsk'), ('Asia/Omsk', '(GMT+0600) Asia/Omsk'), ('Asia/Oral', '(GMT+0500) Asia/Oral'), ('Asia/Phnom_Penh', '(GMT+0700) Asia/Phnom_Penh'), ('Asia/Pontianak', '(GMT+0700) Asia/Pontianak'), ('Asia/Pyongyang', '(GMT+0900) Asia/Pyongyang'), ('Asia/Qatar', '(GMT+0300) Asia/Qatar'), ('Asia/Qostanay', '(GMT+0600) Asia/Qostanay'), ('Asia/Qyzylorda', '(GMT+0500) Asia/Qyzylorda'), ('Asia/Riyadh', '(GMT+0300) Asia/Riyadh'), ('Asia/Sakhalin', '(GMT+1100) Asia/Sakhalin'), ('Asia/Samarkand', '(GMT+0500) Asia/Samarkand'), ('Asia/Seoul', '(GMT+0900) Asia/Seoul'), ('Asia/Shanghai', '(GMT+0800) Asia/Shanghai'), ('Asia/Singapore', '(GMT+0800) Asia/Singapore'), ('Asia/Srednekolymsk', '(GMT+1100) Asia/Srednekolymsk'), ('Asia/Taipei', '(GMT+0800) Asia/Taipei'), ('Asia/Tashkent', '(GMT+0500) Asia/Tashkent'), ('Asia/Tbilisi', '(GMT+0400) Asia/Tbilisi'), ('Asia/Tehran', '(GMT+0430) Asia/Tehran'), ('Asia/Thimphu', '(GMT+0600) Asia/Thimphu'), ('Asia/Tokyo', '(GMT+0900) Asia/Tokyo'), ('Asia/Tomsk', '(GMT+0700) Asia/Tomsk'), ('Asia/Ulaanbaatar', '(GMT+0800) Asia/Ulaanbaatar'), ('Asia/Urumqi', '(GMT+0600) Asia/Urumqi'), ('Asia/Ust-Nera', '(GMT+1000) Asia/Ust-Nera'), ('Asia/Vientiane', '(GMT+0700) Asia/Vientiane'), ('Asia/Vladivostok', '(GMT+1000) Asia/Vladivostok'), ('Asia/Yakutsk', '(GMT+0900) Asia/Yakutsk'), ('Asia/Yangon', '(GMT+0630) Asia/Yangon'), ('Asia/Yekaterinburg', '(GMT+0500) Asia/Yekaterinburg'), ('Asia/Yerevan', '(GMT+0400) Asia/Yerevan'), ('Atlantic/Azores', '(GMT+0000) Atlantic/Azores'), 
('Atlantic/Bermuda', '(GMT-0300) Atlantic/Bermuda'), ('Atlantic/Canary', '(GMT+0100) Atlantic/Canary'), ('Atlantic/Cape_Verde', '(GMT-0100) Atlantic/Cape_Verde'), ('Atlantic/Faroe', '(GMT+0100) Atlantic/Faroe'), ('Atlantic/Madeira', '(GMT+0100) Atlantic/Madeira'), ('Atlantic/Reykjavik', '(GMT+0000) Atlantic/Reykjavik'), ('Atlantic/South_Georgia', '(GMT-0200) Atlantic/South_Georgia'), ('Atlantic/St_Helena', '(GMT+0000) Atlantic/St_Helena'), ('Atlantic/Stanley', '(GMT-0300) Atlantic/Stanley'), ('Australia/Adelaide', '(GMT+0930) Australia/Adelaide'), ('Australia/Brisbane', '(GMT+1000) Australia/Brisbane'), ('Australia/Broken_Hill', '(GMT+0930) Australia/Broken_Hill'), ('Australia/Currie', '(GMT+1000) Australia/Currie'), ('Australia/Darwin', '(GMT+0930) Australia/Darwin'), ('Australia/Eucla', '(GMT+0845) Australia/Eucla'), ('Australia/Hobart', '(GMT+1000) Australia/Hobart'), ('Australia/Lindeman', '(GMT+1000) Australia/Lindeman'), ('Australia/Lord_Howe', '(GMT+1030) Australia/Lord_Howe'), ('Australia/Melbourne', '(GMT+1000) Australia/Melbourne'), ('Australia/Perth', '(GMT+0800) Australia/Perth'), ('Australia/Sydney', '(GMT+1000) Australia/Sydney'), ('Canada/Atlantic', '(GMT-0300) Canada/Atlantic'), ('Canada/Central', '(GMT-0500) Canada/Central'), ('Canada/Eastern', '(GMT-0400) Canada/Eastern'), ('Canada/Mountain', '(GMT-0600) Canada/Mountain'), ('Canada/Newfoundland', '(GMT-0230) Canada/Newfoundland'), ('Canada/Pacific', '(GMT-0700) Canada/Pacific'), ('Europe/Amsterdam', '(GMT+0200) Europe/Amsterdam'), ('Europe/Andorra', '(GMT+0200) Europe/Andorra'), ('Europe/Astrakhan', '(GMT+0400) Europe/Astrakhan'), ('Europe/Athens', '(GMT+0300) Europe/Athens'), ('Europe/Belgrade', '(GMT+0200) Europe/Belgrade'), ('Europe/Berlin', '(GMT+0200) Europe/Berlin'), ('Europe/Bratislava', '(GMT+0200) Europe/Bratislava'), ('Europe/Brussels', '(GMT+0200) Europe/Brussels'), ('Europe/Bucharest', '(GMT+0300) Europe/Bucharest'), ('Europe/Budapest', '(GMT+0200) Europe/Budapest'), ('Europe/Busingen', '(GMT+0200) Europe/Busingen'), ('Europe/Chisinau', '(GMT+0300) Europe/Chisinau'), ('Europe/Copenhagen', '(GMT+0200) Europe/Copenhagen'), ('Europe/Dublin', '(GMT+0100) Europe/Dublin'), ('Europe/Gibraltar', '(GMT+0200) Europe/Gibraltar'), ('Europe/Guernsey', '(GMT+0100) Europe/Guernsey'), ('Europe/Helsinki', '(GMT+0300) Europe/Helsinki'), ('Europe/Isle_of_Man', '(GMT+0100) Europe/Isle_of_Man'), ('Europe/Istanbul', '(GMT+0300) Europe/Istanbul'), ('Europe/Jersey', '(GMT+0100) Europe/Jersey'), ('Europe/Kaliningrad', '(GMT+0200) Europe/Kaliningrad'), ('Europe/Kiev', '(GMT+0300) Europe/Kiev'), ('Europe/Kirov', '(GMT+0300) Europe/Kirov'), ('Europe/Lisbon', '(GMT+0100) Europe/Lisbon'), ('Europe/Ljubljana', '(GMT+0200) Europe/Ljubljana'), ('Europe/London', '(GMT+0100) Europe/London'), ('Europe/Luxembourg', '(GMT+0200) Europe/Luxembourg'), ('Europe/Madrid', '(GMT+0200) Europe/Madrid'), ('Europe/Malta', '(GMT+0200) Europe/Malta'), ('Europe/Mariehamn', '(GMT+0300) Europe/Mariehamn'), ('Europe/Minsk', '(GMT+0300) Europe/Minsk'), ('Europe/Monaco', '(GMT+0200) Europe/Monaco'), ('Europe/Moscow', '(GMT+0300) Europe/Moscow'), ('Europe/Oslo', '(GMT+0200) Europe/Oslo'), ('Europe/Paris', '(GMT+0200) Europe/Paris'), ('Europe/Podgorica', '(GMT+0200) Europe/Podgorica'), ('Europe/Prague', '(GMT+0200) Europe/Prague'), ('Europe/Riga', '(GMT+0300) Europe/Riga'), ('Europe/Rome', '(GMT+0200) Europe/Rome'), ('Europe/Samara', '(GMT+0400) Europe/Samara'), ('Europe/San_Marino', '(GMT+0200) Europe/San_Marino'), ('Europe/Sarajevo', '(GMT+0200) Europe/Sarajevo'), 
('Europe/Saratov', '(GMT+0400) Europe/Saratov'), ('Europe/Simferopol', '(GMT+0300) Europe/Simferopol'), ('Europe/Skopje', '(GMT+0200) Europe/Skopje'), ('Europe/Sofia', '(GMT+0300) Europe/Sofia'), ('Europe/Stockholm', '(GMT+0200) Europe/Stockholm'), ('Europe/Tallinn', '(GMT+0300) Europe/Tallinn'), ('Europe/Tirane', '(GMT+0200) Europe/Tirane'), ('Europe/Ulyanovsk', '(GMT+0400) Europe/Ulyanovsk'), ('Europe/Uzhgorod', '(GMT+0300) Europe/Uzhgorod'), ('Europe/Vaduz', '(GMT+0200) Europe/Vaduz'), ('Europe/Vatican', '(GMT+0200) Europe/Vatican'), ('Europe/Vienna', '(GMT+0200) Europe/Vienna'), ('Europe/Vilnius', '(GMT+0300) Europe/Vilnius'), ('Europe/Volgograd', '(GMT+0400) Europe/Volgograd'), ('Europe/Warsaw', '(GMT+0200) Europe/Warsaw'), ('Europe/Zagreb', '(GMT+0200) Europe/Zagreb'), ('Europe/Zaporozhye', '(GMT+0300) Europe/Zaporozhye'), ('Europe/Zurich', '(GMT+0200) Europe/Zurich'), ('GMT', '(GMT+0000) GMT'), ('Indian/Antananarivo', '(GMT+0300) Indian/Antananarivo'), ('Indian/Chagos', '(GMT+0600) Indian/Chagos'), ('Indian/Christmas', '(GMT+0700) Indian/Christmas'), ('Indian/Cocos', '(GMT+0630) Indian/Cocos'), ('Indian/Comoro', '(GMT+0300) Indian/Comoro'), ('Indian/Kerguelen', '(GMT+0500) Indian/Kerguelen'), ('Indian/Mahe', '(GMT+0400) Indian/Mahe'), ('Indian/Maldives', '(GMT+0500) Indian/Maldives'), ('Indian/Mauritius', '(GMT+0400) Indian/Mauritius'), ('Indian/Mayotte', '(GMT+0300) Indian/Mayotte'), ('Indian/Reunion', '(GMT+0400) Indian/Reunion'), ('Pacific/Apia', '(GMT+1300) Pacific/Apia'), ('Pacific/Auckland', '(GMT+1200) Pacific/Auckland'), ('Pacific/Bougainville', '(GMT+1100) Pacific/Bougainville'), ('Pacific/Chatham', '(GMT+1245) Pacific/Chatham'), ('Pacific/Chuuk', '(GMT+1000) Pacific/Chuuk'), ('Pacific/Easter', '(GMT-0600) Pacific/Easter'), ('Pacific/Efate', '(GMT+1100) Pacific/Efate'), ('Pacific/Enderbury', '(GMT+1300) Pacific/Enderbury'), ('Pacific/Fakaofo', '(GMT+1300) Pacific/Fakaofo'), ('Pacific/Fiji', '(GMT+1200) Pacific/Fiji'), ('Pacific/Funafuti', '(GMT+1200) Pacific/Funafuti'), ('Pacific/Galapagos', '(GMT-0600) Pacific/Galapagos'), ('Pacific/Gambier', '(GMT-0900) Pacific/Gambier'), ('Pacific/Guadalcanal', '(GMT+1100) Pacific/Guadalcanal'), ('Pacific/Guam', '(GMT+1000) Pacific/Guam'), ('Pacific/Honolulu', '(GMT-1000) Pacific/Honolulu'), ('Pacific/Kiritimati', '(GMT+1400) Pacific/Kiritimati'), ('Pacific/Kosrae', '(GMT+1100) Pacific/Kosrae'), ('Pacific/Kwajalein', '(GMT+1200) Pacific/Kwajalein'), ('Pacific/Majuro', '(GMT+1200) Pacific/Majuro'), ('Pacific/Marquesas', '(GMT-0930) Pacific/Marquesas'), ('Pacific/Midway', '(GMT-1100) Pacific/Midway'), ('Pacific/Nauru', '(GMT+1200) Pacific/Nauru'), ('Pacific/Niue', '(GMT-1100) Pacific/Niue'), ('Pacific/Norfolk', '(GMT+1100) Pacific/Norfolk'), ('Pacific/Noumea', '(GMT+1100) Pacific/Noumea'), ('Pacific/Pago_Pago', '(GMT-1100) Pacific/Pago_Pago'), ('Pacific/Palau', '(GMT+0900) Pacific/Palau'), ('Pacific/Pitcairn', '(GMT-0800) Pacific/Pitcairn'), ('Pacific/Pohnpei', '(GMT+1100) Pacific/Pohnpei'), ('Pacific/Port_Moresby', '(GMT+1000) Pacific/Port_Moresby'), ('Pacific/Rarotonga', '(GMT-1000) Pacific/Rarotonga'), ('Pacific/Saipan', '(GMT+1000) Pacific/Saipan'), ('Pacific/Tahiti', '(GMT-1000) Pacific/Tahiti'), ('Pacific/Tarawa', '(GMT+1200) Pacific/Tarawa'), ('Pacific/Tongatapu', '(GMT+1300) Pacific/Tongatapu'), ('Pacific/Wake', '(GMT+1200) Pacific/Wake'), ('Pacific/Wallis', '(GMT+1200) Pacific/Wallis'), ('US/Alaska', '(GMT-0800) US/Alaska'), ('US/Arizona', '(GMT-0700) US/Arizona'), ('US/Central', '(GMT-0500) US/Central'), ('US/Eastern', 
'(GMT-0400) US/Eastern'), ('US/Hawaii', '(GMT-1000) US/Hawaii'), ('US/Mountain', '(GMT-0600) US/Mountain'), ('US/Pacific', '(GMT-0700) US/Pacific'), ('UTC', '(GMT+0000) UTC')], default='America/New_York', max_length=100), + model_name="profile", + name="timezone", + field=vendor.timezones.fields.TimeZoneField( + choices=[ + ("Africa/Abidjan", "(GMT+0000) Africa/Abidjan"), + ("Africa/Accra", "(GMT+0000) Africa/Accra"), + ("Africa/Addis_Ababa", "(GMT+0300) Africa/Addis_Ababa"), + ("Africa/Algiers", "(GMT+0100) Africa/Algiers"), + ("Africa/Asmara", "(GMT+0300) Africa/Asmara"), + ("Africa/Bamako", "(GMT+0000) Africa/Bamako"), + ("Africa/Bangui", "(GMT+0100) Africa/Bangui"), + ("Africa/Banjul", "(GMT+0000) Africa/Banjul"), + ("Africa/Bissau", "(GMT+0000) Africa/Bissau"), + ("Africa/Blantyre", "(GMT+0200) Africa/Blantyre"), + ("Africa/Brazzaville", "(GMT+0100) Africa/Brazzaville"), + ("Africa/Bujumbura", "(GMT+0200) Africa/Bujumbura"), + ("Africa/Cairo", "(GMT+0200) Africa/Cairo"), + ("Africa/Casablanca", "(GMT+0000) Africa/Casablanca"), + ("Africa/Ceuta", "(GMT+0200) Africa/Ceuta"), + ("Africa/Conakry", "(GMT+0000) Africa/Conakry"), + ("Africa/Dakar", "(GMT+0000) Africa/Dakar"), + ("Africa/Dar_es_Salaam", "(GMT+0300) Africa/Dar_es_Salaam"), + ("Africa/Djibouti", "(GMT+0300) Africa/Djibouti"), + ("Africa/Douala", "(GMT+0100) Africa/Douala"), + ("Africa/El_Aaiun", "(GMT+0000) Africa/El_Aaiun"), + ("Africa/Freetown", "(GMT+0000) Africa/Freetown"), + ("Africa/Gaborone", "(GMT+0200) Africa/Gaborone"), + ("Africa/Harare", "(GMT+0200) Africa/Harare"), + ("Africa/Johannesburg", "(GMT+0200) Africa/Johannesburg"), + ("Africa/Juba", "(GMT+0300) Africa/Juba"), + ("Africa/Kampala", "(GMT+0300) Africa/Kampala"), + ("Africa/Khartoum", "(GMT+0200) Africa/Khartoum"), + ("Africa/Kigali", "(GMT+0200) Africa/Kigali"), + ("Africa/Kinshasa", "(GMT+0100) Africa/Kinshasa"), + ("Africa/Lagos", "(GMT+0100) Africa/Lagos"), + ("Africa/Libreville", "(GMT+0100) Africa/Libreville"), + ("Africa/Lome", "(GMT+0000) Africa/Lome"), + ("Africa/Luanda", "(GMT+0100) Africa/Luanda"), + ("Africa/Lubumbashi", "(GMT+0200) Africa/Lubumbashi"), + ("Africa/Lusaka", "(GMT+0200) Africa/Lusaka"), + ("Africa/Malabo", "(GMT+0100) Africa/Malabo"), + ("Africa/Maputo", "(GMT+0200) Africa/Maputo"), + ("Africa/Maseru", "(GMT+0200) Africa/Maseru"), + ("Africa/Mbabane", "(GMT+0200) Africa/Mbabane"), + ("Africa/Mogadishu", "(GMT+0300) Africa/Mogadishu"), + ("Africa/Monrovia", "(GMT+0000) Africa/Monrovia"), + ("Africa/Nairobi", "(GMT+0300) Africa/Nairobi"), + ("Africa/Ndjamena", "(GMT+0100) Africa/Ndjamena"), + ("Africa/Niamey", "(GMT+0100) Africa/Niamey"), + ("Africa/Nouakchott", "(GMT+0000) Africa/Nouakchott"), + ("Africa/Ouagadougou", "(GMT+0000) Africa/Ouagadougou"), + ("Africa/Porto-Novo", "(GMT+0100) Africa/Porto-Novo"), + ("Africa/Sao_Tome", "(GMT+0000) Africa/Sao_Tome"), + ("Africa/Tripoli", "(GMT+0200) Africa/Tripoli"), + ("Africa/Tunis", "(GMT+0100) Africa/Tunis"), + ("Africa/Windhoek", "(GMT+0200) Africa/Windhoek"), + ("America/Adak", "(GMT-0900) America/Adak"), + ("America/Anchorage", "(GMT-0800) America/Anchorage"), + ("America/Anguilla", "(GMT-0400) America/Anguilla"), + ("America/Antigua", "(GMT-0400) America/Antigua"), + ("America/Araguaina", "(GMT-0300) America/Araguaina"), + ("America/Argentina/Buenos_Aires", "(GMT-0300) America/Argentina/Buenos_Aires"), + ("America/Argentina/Catamarca", "(GMT-0300) America/Argentina/Catamarca"), + ("America/Argentina/Cordoba", "(GMT-0300) America/Argentina/Cordoba"), + ("America/Argentina/Jujuy", 
"(GMT-0300) America/Argentina/Jujuy"), + ("America/Argentina/La_Rioja", "(GMT-0300) America/Argentina/La_Rioja"), + ("America/Argentina/Mendoza", "(GMT-0300) America/Argentina/Mendoza"), + ("America/Argentina/Rio_Gallegos", "(GMT-0300) America/Argentina/Rio_Gallegos"), + ("America/Argentina/Salta", "(GMT-0300) America/Argentina/Salta"), + ("America/Argentina/San_Juan", "(GMT-0300) America/Argentina/San_Juan"), + ("America/Argentina/San_Luis", "(GMT-0300) America/Argentina/San_Luis"), + ("America/Argentina/Tucuman", "(GMT-0300) America/Argentina/Tucuman"), + ("America/Argentina/Ushuaia", "(GMT-0300) America/Argentina/Ushuaia"), + ("America/Aruba", "(GMT-0400) America/Aruba"), + ("America/Asuncion", "(GMT-0400) America/Asuncion"), + ("America/Atikokan", "(GMT-0500) America/Atikokan"), + ("America/Bahia", "(GMT-0300) America/Bahia"), + ("America/Bahia_Banderas", "(GMT-0500) America/Bahia_Banderas"), + ("America/Barbados", "(GMT-0400) America/Barbados"), + ("America/Belem", "(GMT-0300) America/Belem"), + ("America/Belize", "(GMT-0600) America/Belize"), + ("America/Blanc-Sablon", "(GMT-0400) America/Blanc-Sablon"), + ("America/Boa_Vista", "(GMT-0400) America/Boa_Vista"), + ("America/Bogota", "(GMT-0500) America/Bogota"), + ("America/Boise", "(GMT-0600) America/Boise"), + ("America/Cambridge_Bay", "(GMT-0600) America/Cambridge_Bay"), + ("America/Campo_Grande", "(GMT-0400) America/Campo_Grande"), + ("America/Cancun", "(GMT-0500) America/Cancun"), + ("America/Caracas", "(GMT-0400) America/Caracas"), + ("America/Cayenne", "(GMT-0300) America/Cayenne"), + ("America/Cayman", "(GMT-0500) America/Cayman"), + ("America/Chicago", "(GMT-0500) America/Chicago"), + ("America/Chihuahua", "(GMT-0600) America/Chihuahua"), + ("America/Costa_Rica", "(GMT-0600) America/Costa_Rica"), + ("America/Creston", "(GMT-0700) America/Creston"), + ("America/Cuiaba", "(GMT-0400) America/Cuiaba"), + ("America/Curacao", "(GMT-0400) America/Curacao"), + ("America/Danmarkshavn", "(GMT+0000) America/Danmarkshavn"), + ("America/Dawson", "(GMT-0700) America/Dawson"), + ("America/Dawson_Creek", "(GMT-0700) America/Dawson_Creek"), + ("America/Denver", "(GMT-0600) America/Denver"), + ("America/Detroit", "(GMT-0400) America/Detroit"), + ("America/Dominica", "(GMT-0400) America/Dominica"), + ("America/Edmonton", "(GMT-0600) America/Edmonton"), + ("America/Eirunepe", "(GMT-0500) America/Eirunepe"), + ("America/El_Salvador", "(GMT-0600) America/El_Salvador"), + ("America/Fort_Nelson", "(GMT-0700) America/Fort_Nelson"), + ("America/Fortaleza", "(GMT-0300) America/Fortaleza"), + ("America/Glace_Bay", "(GMT-0300) America/Glace_Bay"), + ("America/Goose_Bay", "(GMT-0300) America/Goose_Bay"), + ("America/Grand_Turk", "(GMT-0400) America/Grand_Turk"), + ("America/Grenada", "(GMT-0400) America/Grenada"), + ("America/Guadeloupe", "(GMT-0400) America/Guadeloupe"), + ("America/Guatemala", "(GMT-0600) America/Guatemala"), + ("America/Guayaquil", "(GMT-0500) America/Guayaquil"), + ("America/Guyana", "(GMT-0400) America/Guyana"), + ("America/Halifax", "(GMT-0300) America/Halifax"), + ("America/Havana", "(GMT-0400) America/Havana"), + ("America/Hermosillo", "(GMT-0700) America/Hermosillo"), + ("America/Indiana/Indianapolis", "(GMT-0400) America/Indiana/Indianapolis"), + ("America/Indiana/Knox", "(GMT-0500) America/Indiana/Knox"), + ("America/Indiana/Marengo", "(GMT-0400) America/Indiana/Marengo"), + ("America/Indiana/Petersburg", "(GMT-0400) America/Indiana/Petersburg"), + ("America/Indiana/Tell_City", "(GMT-0500) America/Indiana/Tell_City"), + 
("America/Indiana/Vevay", "(GMT-0400) America/Indiana/Vevay"), + ("America/Indiana/Vincennes", "(GMT-0400) America/Indiana/Vincennes"), + ("America/Indiana/Winamac", "(GMT-0400) America/Indiana/Winamac"), + ("America/Inuvik", "(GMT-0600) America/Inuvik"), + ("America/Iqaluit", "(GMT-0400) America/Iqaluit"), + ("America/Jamaica", "(GMT-0500) America/Jamaica"), + ("America/Juneau", "(GMT-0800) America/Juneau"), + ("America/Kentucky/Louisville", "(GMT-0400) America/Kentucky/Louisville"), + ("America/Kentucky/Monticello", "(GMT-0400) America/Kentucky/Monticello"), + ("America/Kralendijk", "(GMT-0400) America/Kralendijk"), + ("America/La_Paz", "(GMT-0400) America/La_Paz"), + ("America/Lima", "(GMT-0500) America/Lima"), + ("America/Los_Angeles", "(GMT-0700) America/Los_Angeles"), + ("America/Lower_Princes", "(GMT-0400) America/Lower_Princes"), + ("America/Maceio", "(GMT-0300) America/Maceio"), + ("America/Managua", "(GMT-0600) America/Managua"), + ("America/Manaus", "(GMT-0400) America/Manaus"), + ("America/Marigot", "(GMT-0400) America/Marigot"), + ("America/Martinique", "(GMT-0400) America/Martinique"), + ("America/Matamoros", "(GMT-0500) America/Matamoros"), + ("America/Mazatlan", "(GMT-0600) America/Mazatlan"), + ("America/Menominee", "(GMT-0500) America/Menominee"), + ("America/Merida", "(GMT-0500) America/Merida"), + ("America/Metlakatla", "(GMT-0800) America/Metlakatla"), + ("America/Mexico_City", "(GMT-0500) America/Mexico_City"), + ("America/Miquelon", "(GMT-0200) America/Miquelon"), + ("America/Moncton", "(GMT-0300) America/Moncton"), + ("America/Monterrey", "(GMT-0500) America/Monterrey"), + ("America/Montevideo", "(GMT-0300) America/Montevideo"), + ("America/Montserrat", "(GMT-0400) America/Montserrat"), + ("America/Nassau", "(GMT-0400) America/Nassau"), + ("America/New_York", "(GMT-0400) America/New_York"), + ("America/Nipigon", "(GMT-0400) America/Nipigon"), + ("America/Nome", "(GMT-0800) America/Nome"), + ("America/Noronha", "(GMT-0200) America/Noronha"), + ("America/North_Dakota/Beulah", "(GMT-0500) America/North_Dakota/Beulah"), + ("America/North_Dakota/Center", "(GMT-0500) America/North_Dakota/Center"), + ("America/North_Dakota/New_Salem", "(GMT-0500) America/North_Dakota/New_Salem"), + ("America/Nuuk", "(GMT-0200) America/Nuuk"), + ("America/Ojinaga", "(GMT-0600) America/Ojinaga"), + ("America/Panama", "(GMT-0500) America/Panama"), + ("America/Pangnirtung", "(GMT-0400) America/Pangnirtung"), + ("America/Paramaribo", "(GMT-0300) America/Paramaribo"), + ("America/Phoenix", "(GMT-0700) America/Phoenix"), + ("America/Port-au-Prince", "(GMT-0400) America/Port-au-Prince"), + ("America/Port_of_Spain", "(GMT-0400) America/Port_of_Spain"), + ("America/Porto_Velho", "(GMT-0400) America/Porto_Velho"), + ("America/Puerto_Rico", "(GMT-0400) America/Puerto_Rico"), + ("America/Punta_Arenas", "(GMT-0300) America/Punta_Arenas"), + ("America/Rainy_River", "(GMT-0500) America/Rainy_River"), + ("America/Rankin_Inlet", "(GMT-0500) America/Rankin_Inlet"), + ("America/Recife", "(GMT-0300) America/Recife"), + ("America/Regina", "(GMT-0600) America/Regina"), + ("America/Resolute", "(GMT-0500) America/Resolute"), + ("America/Rio_Branco", "(GMT-0500) America/Rio_Branco"), + ("America/Santarem", "(GMT-0300) America/Santarem"), + ("America/Santiago", "(GMT-0400) America/Santiago"), + ("America/Santo_Domingo", "(GMT-0400) America/Santo_Domingo"), + ("America/Sao_Paulo", "(GMT-0300) America/Sao_Paulo"), + ("America/Scoresbysund", "(GMT+0000) America/Scoresbysund"), + ("America/Sitka", "(GMT-0800) 
America/Sitka"), + ("America/St_Barthelemy", "(GMT-0400) America/St_Barthelemy"), + ("America/St_Johns", "(GMT-0230) America/St_Johns"), + ("America/St_Kitts", "(GMT-0400) America/St_Kitts"), + ("America/St_Lucia", "(GMT-0400) America/St_Lucia"), + ("America/St_Thomas", "(GMT-0400) America/St_Thomas"), + ("America/St_Vincent", "(GMT-0400) America/St_Vincent"), + ("America/Swift_Current", "(GMT-0600) America/Swift_Current"), + ("America/Tegucigalpa", "(GMT-0600) America/Tegucigalpa"), + ("America/Thule", "(GMT-0300) America/Thule"), + ("America/Thunder_Bay", "(GMT-0400) America/Thunder_Bay"), + ("America/Tijuana", "(GMT-0700) America/Tijuana"), + ("America/Toronto", "(GMT-0400) America/Toronto"), + ("America/Tortola", "(GMT-0400) America/Tortola"), + ("America/Vancouver", "(GMT-0700) America/Vancouver"), + ("America/Whitehorse", "(GMT-0700) America/Whitehorse"), + ("America/Winnipeg", "(GMT-0500) America/Winnipeg"), + ("America/Yakutat", "(GMT-0800) America/Yakutat"), + ("America/Yellowknife", "(GMT-0600) America/Yellowknife"), + ("Antarctica/Casey", "(GMT+1100) Antarctica/Casey"), + ("Antarctica/Davis", "(GMT+0700) Antarctica/Davis"), + ("Antarctica/DumontDUrville", "(GMT+1000) Antarctica/DumontDUrville"), + ("Antarctica/Macquarie", "(GMT+1000) Antarctica/Macquarie"), + ("Antarctica/Mawson", "(GMT+0500) Antarctica/Mawson"), + ("Antarctica/McMurdo", "(GMT+1200) Antarctica/McMurdo"), + ("Antarctica/Palmer", "(GMT-0300) Antarctica/Palmer"), + ("Antarctica/Rothera", "(GMT-0300) Antarctica/Rothera"), + ("Antarctica/Syowa", "(GMT+0300) Antarctica/Syowa"), + ("Antarctica/Troll", "(GMT+0200) Antarctica/Troll"), + ("Antarctica/Vostok", "(GMT+0600) Antarctica/Vostok"), + ("Arctic/Longyearbyen", "(GMT+0200) Arctic/Longyearbyen"), + ("Asia/Aden", "(GMT+0300) Asia/Aden"), + ("Asia/Almaty", "(GMT+0600) Asia/Almaty"), + ("Asia/Amman", "(GMT+0300) Asia/Amman"), + ("Asia/Anadyr", "(GMT+1200) Asia/Anadyr"), + ("Asia/Aqtau", "(GMT+0500) Asia/Aqtau"), + ("Asia/Aqtobe", "(GMT+0500) Asia/Aqtobe"), + ("Asia/Ashgabat", "(GMT+0500) Asia/Ashgabat"), + ("Asia/Atyrau", "(GMT+0500) Asia/Atyrau"), + ("Asia/Baghdad", "(GMT+0300) Asia/Baghdad"), + ("Asia/Bahrain", "(GMT+0300) Asia/Bahrain"), + ("Asia/Baku", "(GMT+0400) Asia/Baku"), + ("Asia/Bangkok", "(GMT+0700) Asia/Bangkok"), + ("Asia/Barnaul", "(GMT+0700) Asia/Barnaul"), + ("Asia/Beirut", "(GMT+0300) Asia/Beirut"), + ("Asia/Bishkek", "(GMT+0600) Asia/Bishkek"), + ("Asia/Brunei", "(GMT+0800) Asia/Brunei"), + ("Asia/Chita", "(GMT+0900) Asia/Chita"), + ("Asia/Choibalsan", "(GMT+0800) Asia/Choibalsan"), + ("Asia/Colombo", "(GMT+0530) Asia/Colombo"), + ("Asia/Damascus", "(GMT+0300) Asia/Damascus"), + ("Asia/Dhaka", "(GMT+0600) Asia/Dhaka"), + ("Asia/Dili", "(GMT+0900) Asia/Dili"), + ("Asia/Dubai", "(GMT+0400) Asia/Dubai"), + ("Asia/Dushanbe", "(GMT+0500) Asia/Dushanbe"), + ("Asia/Famagusta", "(GMT+0300) Asia/Famagusta"), + ("Asia/Gaza", "(GMT+0300) Asia/Gaza"), + ("Asia/Hebron", "(GMT+0300) Asia/Hebron"), + ("Asia/Ho_Chi_Minh", "(GMT+0700) Asia/Ho_Chi_Minh"), + ("Asia/Hong_Kong", "(GMT+0800) Asia/Hong_Kong"), + ("Asia/Hovd", "(GMT+0700) Asia/Hovd"), + ("Asia/Irkutsk", "(GMT+0800) Asia/Irkutsk"), + ("Asia/Jakarta", "(GMT+0700) Asia/Jakarta"), + ("Asia/Jayapura", "(GMT+0900) Asia/Jayapura"), + ("Asia/Jerusalem", "(GMT+0300) Asia/Jerusalem"), + ("Asia/Kabul", "(GMT+0430) Asia/Kabul"), + ("Asia/Kamchatka", "(GMT+1200) Asia/Kamchatka"), + ("Asia/Karachi", "(GMT+0500) Asia/Karachi"), + ("Asia/Kathmandu", "(GMT+0545) Asia/Kathmandu"), + ("Asia/Khandyga", "(GMT+0900) 
Asia/Khandyga"), + ("Asia/Kolkata", "(GMT+0530) Asia/Kolkata"), + ("Asia/Krasnoyarsk", "(GMT+0700) Asia/Krasnoyarsk"), + ("Asia/Kuala_Lumpur", "(GMT+0800) Asia/Kuala_Lumpur"), + ("Asia/Kuching", "(GMT+0800) Asia/Kuching"), + ("Asia/Kuwait", "(GMT+0300) Asia/Kuwait"), + ("Asia/Macau", "(GMT+0800) Asia/Macau"), + ("Asia/Magadan", "(GMT+1100) Asia/Magadan"), + ("Asia/Makassar", "(GMT+0800) Asia/Makassar"), + ("Asia/Manila", "(GMT+0800) Asia/Manila"), + ("Asia/Muscat", "(GMT+0400) Asia/Muscat"), + ("Asia/Nicosia", "(GMT+0300) Asia/Nicosia"), + ("Asia/Novokuznetsk", "(GMT+0700) Asia/Novokuznetsk"), + ("Asia/Novosibirsk", "(GMT+0700) Asia/Novosibirsk"), + ("Asia/Omsk", "(GMT+0600) Asia/Omsk"), + ("Asia/Oral", "(GMT+0500) Asia/Oral"), + ("Asia/Phnom_Penh", "(GMT+0700) Asia/Phnom_Penh"), + ("Asia/Pontianak", "(GMT+0700) Asia/Pontianak"), + ("Asia/Pyongyang", "(GMT+0900) Asia/Pyongyang"), + ("Asia/Qatar", "(GMT+0300) Asia/Qatar"), + ("Asia/Qostanay", "(GMT+0600) Asia/Qostanay"), + ("Asia/Qyzylorda", "(GMT+0500) Asia/Qyzylorda"), + ("Asia/Riyadh", "(GMT+0300) Asia/Riyadh"), + ("Asia/Sakhalin", "(GMT+1100) Asia/Sakhalin"), + ("Asia/Samarkand", "(GMT+0500) Asia/Samarkand"), + ("Asia/Seoul", "(GMT+0900) Asia/Seoul"), + ("Asia/Shanghai", "(GMT+0800) Asia/Shanghai"), + ("Asia/Singapore", "(GMT+0800) Asia/Singapore"), + ("Asia/Srednekolymsk", "(GMT+1100) Asia/Srednekolymsk"), + ("Asia/Taipei", "(GMT+0800) Asia/Taipei"), + ("Asia/Tashkent", "(GMT+0500) Asia/Tashkent"), + ("Asia/Tbilisi", "(GMT+0400) Asia/Tbilisi"), + ("Asia/Tehran", "(GMT+0430) Asia/Tehran"), + ("Asia/Thimphu", "(GMT+0600) Asia/Thimphu"), + ("Asia/Tokyo", "(GMT+0900) Asia/Tokyo"), + ("Asia/Tomsk", "(GMT+0700) Asia/Tomsk"), + ("Asia/Ulaanbaatar", "(GMT+0800) Asia/Ulaanbaatar"), + ("Asia/Urumqi", "(GMT+0600) Asia/Urumqi"), + ("Asia/Ust-Nera", "(GMT+1000) Asia/Ust-Nera"), + ("Asia/Vientiane", "(GMT+0700) Asia/Vientiane"), + ("Asia/Vladivostok", "(GMT+1000) Asia/Vladivostok"), + ("Asia/Yakutsk", "(GMT+0900) Asia/Yakutsk"), + ("Asia/Yangon", "(GMT+0630) Asia/Yangon"), + ("Asia/Yekaterinburg", "(GMT+0500) Asia/Yekaterinburg"), + ("Asia/Yerevan", "(GMT+0400) Asia/Yerevan"), + ("Atlantic/Azores", "(GMT+0000) Atlantic/Azores"), + ("Atlantic/Bermuda", "(GMT-0300) Atlantic/Bermuda"), + ("Atlantic/Canary", "(GMT+0100) Atlantic/Canary"), + ("Atlantic/Cape_Verde", "(GMT-0100) Atlantic/Cape_Verde"), + ("Atlantic/Faroe", "(GMT+0100) Atlantic/Faroe"), + ("Atlantic/Madeira", "(GMT+0100) Atlantic/Madeira"), + ("Atlantic/Reykjavik", "(GMT+0000) Atlantic/Reykjavik"), + ("Atlantic/South_Georgia", "(GMT-0200) Atlantic/South_Georgia"), + ("Atlantic/St_Helena", "(GMT+0000) Atlantic/St_Helena"), + ("Atlantic/Stanley", "(GMT-0300) Atlantic/Stanley"), + ("Australia/Adelaide", "(GMT+0930) Australia/Adelaide"), + ("Australia/Brisbane", "(GMT+1000) Australia/Brisbane"), + ("Australia/Broken_Hill", "(GMT+0930) Australia/Broken_Hill"), + ("Australia/Currie", "(GMT+1000) Australia/Currie"), + ("Australia/Darwin", "(GMT+0930) Australia/Darwin"), + ("Australia/Eucla", "(GMT+0845) Australia/Eucla"), + ("Australia/Hobart", "(GMT+1000) Australia/Hobart"), + ("Australia/Lindeman", "(GMT+1000) Australia/Lindeman"), + ("Australia/Lord_Howe", "(GMT+1030) Australia/Lord_Howe"), + ("Australia/Melbourne", "(GMT+1000) Australia/Melbourne"), + ("Australia/Perth", "(GMT+0800) Australia/Perth"), + ("Australia/Sydney", "(GMT+1000) Australia/Sydney"), + ("Canada/Atlantic", "(GMT-0300) Canada/Atlantic"), + ("Canada/Central", "(GMT-0500) Canada/Central"), + ("Canada/Eastern", "(GMT-0400) 
Canada/Eastern"), + ("Canada/Mountain", "(GMT-0600) Canada/Mountain"), + ("Canada/Newfoundland", "(GMT-0230) Canada/Newfoundland"), + ("Canada/Pacific", "(GMT-0700) Canada/Pacific"), + ("Europe/Amsterdam", "(GMT+0200) Europe/Amsterdam"), + ("Europe/Andorra", "(GMT+0200) Europe/Andorra"), + ("Europe/Astrakhan", "(GMT+0400) Europe/Astrakhan"), + ("Europe/Athens", "(GMT+0300) Europe/Athens"), + ("Europe/Belgrade", "(GMT+0200) Europe/Belgrade"), + ("Europe/Berlin", "(GMT+0200) Europe/Berlin"), + ("Europe/Bratislava", "(GMT+0200) Europe/Bratislava"), + ("Europe/Brussels", "(GMT+0200) Europe/Brussels"), + ("Europe/Bucharest", "(GMT+0300) Europe/Bucharest"), + ("Europe/Budapest", "(GMT+0200) Europe/Budapest"), + ("Europe/Busingen", "(GMT+0200) Europe/Busingen"), + ("Europe/Chisinau", "(GMT+0300) Europe/Chisinau"), + ("Europe/Copenhagen", "(GMT+0200) Europe/Copenhagen"), + ("Europe/Dublin", "(GMT+0100) Europe/Dublin"), + ("Europe/Gibraltar", "(GMT+0200) Europe/Gibraltar"), + ("Europe/Guernsey", "(GMT+0100) Europe/Guernsey"), + ("Europe/Helsinki", "(GMT+0300) Europe/Helsinki"), + ("Europe/Isle_of_Man", "(GMT+0100) Europe/Isle_of_Man"), + ("Europe/Istanbul", "(GMT+0300) Europe/Istanbul"), + ("Europe/Jersey", "(GMT+0100) Europe/Jersey"), + ("Europe/Kaliningrad", "(GMT+0200) Europe/Kaliningrad"), + ("Europe/Kiev", "(GMT+0300) Europe/Kiev"), + ("Europe/Kirov", "(GMT+0300) Europe/Kirov"), + ("Europe/Lisbon", "(GMT+0100) Europe/Lisbon"), + ("Europe/Ljubljana", "(GMT+0200) Europe/Ljubljana"), + ("Europe/London", "(GMT+0100) Europe/London"), + ("Europe/Luxembourg", "(GMT+0200) Europe/Luxembourg"), + ("Europe/Madrid", "(GMT+0200) Europe/Madrid"), + ("Europe/Malta", "(GMT+0200) Europe/Malta"), + ("Europe/Mariehamn", "(GMT+0300) Europe/Mariehamn"), + ("Europe/Minsk", "(GMT+0300) Europe/Minsk"), + ("Europe/Monaco", "(GMT+0200) Europe/Monaco"), + ("Europe/Moscow", "(GMT+0300) Europe/Moscow"), + ("Europe/Oslo", "(GMT+0200) Europe/Oslo"), + ("Europe/Paris", "(GMT+0200) Europe/Paris"), + ("Europe/Podgorica", "(GMT+0200) Europe/Podgorica"), + ("Europe/Prague", "(GMT+0200) Europe/Prague"), + ("Europe/Riga", "(GMT+0300) Europe/Riga"), + ("Europe/Rome", "(GMT+0200) Europe/Rome"), + ("Europe/Samara", "(GMT+0400) Europe/Samara"), + ("Europe/San_Marino", "(GMT+0200) Europe/San_Marino"), + ("Europe/Sarajevo", "(GMT+0200) Europe/Sarajevo"), + ("Europe/Saratov", "(GMT+0400) Europe/Saratov"), + ("Europe/Simferopol", "(GMT+0300) Europe/Simferopol"), + ("Europe/Skopje", "(GMT+0200) Europe/Skopje"), + ("Europe/Sofia", "(GMT+0300) Europe/Sofia"), + ("Europe/Stockholm", "(GMT+0200) Europe/Stockholm"), + ("Europe/Tallinn", "(GMT+0300) Europe/Tallinn"), + ("Europe/Tirane", "(GMT+0200) Europe/Tirane"), + ("Europe/Ulyanovsk", "(GMT+0400) Europe/Ulyanovsk"), + ("Europe/Uzhgorod", "(GMT+0300) Europe/Uzhgorod"), + ("Europe/Vaduz", "(GMT+0200) Europe/Vaduz"), + ("Europe/Vatican", "(GMT+0200) Europe/Vatican"), + ("Europe/Vienna", "(GMT+0200) Europe/Vienna"), + ("Europe/Vilnius", "(GMT+0300) Europe/Vilnius"), + ("Europe/Volgograd", "(GMT+0400) Europe/Volgograd"), + ("Europe/Warsaw", "(GMT+0200) Europe/Warsaw"), + ("Europe/Zagreb", "(GMT+0200) Europe/Zagreb"), + ("Europe/Zaporozhye", "(GMT+0300) Europe/Zaporozhye"), + ("Europe/Zurich", "(GMT+0200) Europe/Zurich"), + ("GMT", "(GMT+0000) GMT"), + ("Indian/Antananarivo", "(GMT+0300) Indian/Antananarivo"), + ("Indian/Chagos", "(GMT+0600) Indian/Chagos"), + ("Indian/Christmas", "(GMT+0700) Indian/Christmas"), + ("Indian/Cocos", "(GMT+0630) Indian/Cocos"), + ("Indian/Comoro", "(GMT+0300) 
Indian/Comoro"), + ("Indian/Kerguelen", "(GMT+0500) Indian/Kerguelen"), + ("Indian/Mahe", "(GMT+0400) Indian/Mahe"), + ("Indian/Maldives", "(GMT+0500) Indian/Maldives"), + ("Indian/Mauritius", "(GMT+0400) Indian/Mauritius"), + ("Indian/Mayotte", "(GMT+0300) Indian/Mayotte"), + ("Indian/Reunion", "(GMT+0400) Indian/Reunion"), + ("Pacific/Apia", "(GMT+1300) Pacific/Apia"), + ("Pacific/Auckland", "(GMT+1200) Pacific/Auckland"), + ("Pacific/Bougainville", "(GMT+1100) Pacific/Bougainville"), + ("Pacific/Chatham", "(GMT+1245) Pacific/Chatham"), + ("Pacific/Chuuk", "(GMT+1000) Pacific/Chuuk"), + ("Pacific/Easter", "(GMT-0600) Pacific/Easter"), + ("Pacific/Efate", "(GMT+1100) Pacific/Efate"), + ("Pacific/Enderbury", "(GMT+1300) Pacific/Enderbury"), + ("Pacific/Fakaofo", "(GMT+1300) Pacific/Fakaofo"), + ("Pacific/Fiji", "(GMT+1200) Pacific/Fiji"), + ("Pacific/Funafuti", "(GMT+1200) Pacific/Funafuti"), + ("Pacific/Galapagos", "(GMT-0600) Pacific/Galapagos"), + ("Pacific/Gambier", "(GMT-0900) Pacific/Gambier"), + ("Pacific/Guadalcanal", "(GMT+1100) Pacific/Guadalcanal"), + ("Pacific/Guam", "(GMT+1000) Pacific/Guam"), + ("Pacific/Honolulu", "(GMT-1000) Pacific/Honolulu"), + ("Pacific/Kiritimati", "(GMT+1400) Pacific/Kiritimati"), + ("Pacific/Kosrae", "(GMT+1100) Pacific/Kosrae"), + ("Pacific/Kwajalein", "(GMT+1200) Pacific/Kwajalein"), + ("Pacific/Majuro", "(GMT+1200) Pacific/Majuro"), + ("Pacific/Marquesas", "(GMT-0930) Pacific/Marquesas"), + ("Pacific/Midway", "(GMT-1100) Pacific/Midway"), + ("Pacific/Nauru", "(GMT+1200) Pacific/Nauru"), + ("Pacific/Niue", "(GMT-1100) Pacific/Niue"), + ("Pacific/Norfolk", "(GMT+1100) Pacific/Norfolk"), + ("Pacific/Noumea", "(GMT+1100) Pacific/Noumea"), + ("Pacific/Pago_Pago", "(GMT-1100) Pacific/Pago_Pago"), + ("Pacific/Palau", "(GMT+0900) Pacific/Palau"), + ("Pacific/Pitcairn", "(GMT-0800) Pacific/Pitcairn"), + ("Pacific/Pohnpei", "(GMT+1100) Pacific/Pohnpei"), + ("Pacific/Port_Moresby", "(GMT+1000) Pacific/Port_Moresby"), + ("Pacific/Rarotonga", "(GMT-1000) Pacific/Rarotonga"), + ("Pacific/Saipan", "(GMT+1000) Pacific/Saipan"), + ("Pacific/Tahiti", "(GMT-1000) Pacific/Tahiti"), + ("Pacific/Tarawa", "(GMT+1200) Pacific/Tarawa"), + ("Pacific/Tongatapu", "(GMT+1300) Pacific/Tongatapu"), + ("Pacific/Wake", "(GMT+1200) Pacific/Wake"), + ("Pacific/Wallis", "(GMT+1200) Pacific/Wallis"), + ("US/Alaska", "(GMT-0800) US/Alaska"), + ("US/Arizona", "(GMT-0700) US/Arizona"), + ("US/Central", "(GMT-0500) US/Central"), + ("US/Eastern", "(GMT-0400) US/Eastern"), + ("US/Hawaii", "(GMT-1000) US/Hawaii"), + ("US/Mountain", "(GMT-0600) US/Mountain"), + ("US/Pacific", "(GMT-0700) US/Pacific"), + ("UTC", "(GMT+0000) UTC"), + ], + default="America/New_York", + max_length=100, + ), ), ] diff --git a/apps/profile/migrations/0012_auto_20220511_1710.py b/apps/profile/migrations/0012_auto_20220511_1710.py index a915e7c80..582db6063 100644 --- a/apps/profile/migrations/0012_auto_20220511_1710.py +++ b/apps/profile/migrations/0012_auto_20220511_1710.py @@ -5,15 +5,459 @@ import vendor.timezones.fields class Migration(migrations.Migration): - dependencies = [ - ('profile', '0011_auto_20220408_1908'), + ("profile", "0011_auto_20220408_1908"), ] operations = [ migrations.AlterField( - model_name='profile', - name='timezone', - field=vendor.timezones.fields.TimeZoneField(choices=[('Africa/Abidjan', '(GMT+0000) Africa/Abidjan'), ('Africa/Accra', '(GMT+0000) Africa/Accra'), ('Africa/Addis_Ababa', '(GMT+0300) Africa/Addis_Ababa'), ('Africa/Algiers', '(GMT+0100) Africa/Algiers'), ('Africa/Asmara', 
'(GMT+0300) Africa/Asmara'), ('Africa/Bamako', '(GMT+0000) Africa/Bamako'), ('Africa/Bangui', '(GMT+0100) Africa/Bangui'), ('Africa/Banjul', '(GMT+0000) Africa/Banjul'), ('Africa/Bissau', '(GMT+0000) Africa/Bissau'), ('Africa/Blantyre', '(GMT+0200) Africa/Blantyre'), ('Africa/Brazzaville', '(GMT+0100) Africa/Brazzaville'), ('Africa/Bujumbura', '(GMT+0200) Africa/Bujumbura'), ('Africa/Cairo', '(GMT+0200) Africa/Cairo'), ('Africa/Casablanca', '(GMT+0100) Africa/Casablanca'), ('Africa/Ceuta', '(GMT+0200) Africa/Ceuta'), ('Africa/Conakry', '(GMT+0000) Africa/Conakry'), ('Africa/Dakar', '(GMT+0000) Africa/Dakar'), ('Africa/Dar_es_Salaam', '(GMT+0300) Africa/Dar_es_Salaam'), ('Africa/Djibouti', '(GMT+0300) Africa/Djibouti'), ('Africa/Douala', '(GMT+0100) Africa/Douala'), ('Africa/El_Aaiun', '(GMT+0100) Africa/El_Aaiun'), ('Africa/Freetown', '(GMT+0000) Africa/Freetown'), ('Africa/Gaborone', '(GMT+0200) Africa/Gaborone'), ('Africa/Harare', '(GMT+0200) Africa/Harare'), ('Africa/Johannesburg', '(GMT+0200) Africa/Johannesburg'), ('Africa/Juba', '(GMT+0300) Africa/Juba'), ('Africa/Kampala', '(GMT+0300) Africa/Kampala'), ('Africa/Khartoum', '(GMT+0200) Africa/Khartoum'), ('Africa/Kigali', '(GMT+0200) Africa/Kigali'), ('Africa/Kinshasa', '(GMT+0100) Africa/Kinshasa'), ('Africa/Lagos', '(GMT+0100) Africa/Lagos'), ('Africa/Libreville', '(GMT+0100) Africa/Libreville'), ('Africa/Lome', '(GMT+0000) Africa/Lome'), ('Africa/Luanda', '(GMT+0100) Africa/Luanda'), ('Africa/Lubumbashi', '(GMT+0200) Africa/Lubumbashi'), ('Africa/Lusaka', '(GMT+0200) Africa/Lusaka'), ('Africa/Malabo', '(GMT+0100) Africa/Malabo'), ('Africa/Maputo', '(GMT+0200) Africa/Maputo'), ('Africa/Maseru', '(GMT+0200) Africa/Maseru'), ('Africa/Mbabane', '(GMT+0200) Africa/Mbabane'), ('Africa/Mogadishu', '(GMT+0300) Africa/Mogadishu'), ('Africa/Monrovia', '(GMT+0000) Africa/Monrovia'), ('Africa/Nairobi', '(GMT+0300) Africa/Nairobi'), ('Africa/Ndjamena', '(GMT+0100) Africa/Ndjamena'), ('Africa/Niamey', '(GMT+0100) Africa/Niamey'), ('Africa/Nouakchott', '(GMT+0000) Africa/Nouakchott'), ('Africa/Ouagadougou', '(GMT+0000) Africa/Ouagadougou'), ('Africa/Porto-Novo', '(GMT+0100) Africa/Porto-Novo'), ('Africa/Sao_Tome', '(GMT+0000) Africa/Sao_Tome'), ('Africa/Tripoli', '(GMT+0200) Africa/Tripoli'), ('Africa/Tunis', '(GMT+0100) Africa/Tunis'), ('Africa/Windhoek', '(GMT+0200) Africa/Windhoek'), ('America/Adak', '(GMT-0900) America/Adak'), ('America/Anchorage', '(GMT-0800) America/Anchorage'), ('America/Anguilla', '(GMT-0400) America/Anguilla'), ('America/Antigua', '(GMT-0400) America/Antigua'), ('America/Araguaina', '(GMT-0300) America/Araguaina'), ('America/Argentina/Buenos_Aires', '(GMT-0300) America/Argentina/Buenos_Aires'), ('America/Argentina/Catamarca', '(GMT-0300) America/Argentina/Catamarca'), ('America/Argentina/Cordoba', '(GMT-0300) America/Argentina/Cordoba'), ('America/Argentina/Jujuy', '(GMT-0300) America/Argentina/Jujuy'), ('America/Argentina/La_Rioja', '(GMT-0300) America/Argentina/La_Rioja'), ('America/Argentina/Mendoza', '(GMT-0300) America/Argentina/Mendoza'), ('America/Argentina/Rio_Gallegos', '(GMT-0300) America/Argentina/Rio_Gallegos'), ('America/Argentina/Salta', '(GMT-0300) America/Argentina/Salta'), ('America/Argentina/San_Juan', '(GMT-0300) America/Argentina/San_Juan'), ('America/Argentina/San_Luis', '(GMT-0300) America/Argentina/San_Luis'), ('America/Argentina/Tucuman', '(GMT-0300) America/Argentina/Tucuman'), ('America/Argentina/Ushuaia', '(GMT-0300) America/Argentina/Ushuaia'), ('America/Aruba', '(GMT-0400) America/Aruba'), 
('America/Asuncion', '(GMT-0400) America/Asuncion'), ('America/Atikokan', '(GMT-0500) America/Atikokan'), ('America/Bahia', '(GMT-0300) America/Bahia'), ('America/Bahia_Banderas', '(GMT-0500) America/Bahia_Banderas'), ('America/Barbados', '(GMT-0400) America/Barbados'), ('America/Belem', '(GMT-0300) America/Belem'), ('America/Belize', '(GMT-0600) America/Belize'), ('America/Blanc-Sablon', '(GMT-0400) America/Blanc-Sablon'), ('America/Boa_Vista', '(GMT-0400) America/Boa_Vista'), ('America/Bogota', '(GMT-0500) America/Bogota'), ('America/Boise', '(GMT-0600) America/Boise'), ('America/Cambridge_Bay', '(GMT-0600) America/Cambridge_Bay'), ('America/Campo_Grande', '(GMT-0400) America/Campo_Grande'), ('America/Cancun', '(GMT-0500) America/Cancun'), ('America/Caracas', '(GMT-0400) America/Caracas'), ('America/Cayenne', '(GMT-0300) America/Cayenne'), ('America/Cayman', '(GMT-0500) America/Cayman'), ('America/Chicago', '(GMT-0500) America/Chicago'), ('America/Chihuahua', '(GMT-0600) America/Chihuahua'), ('America/Costa_Rica', '(GMT-0600) America/Costa_Rica'), ('America/Creston', '(GMT-0700) America/Creston'), ('America/Cuiaba', '(GMT-0400) America/Cuiaba'), ('America/Curacao', '(GMT-0400) America/Curacao'), ('America/Danmarkshavn', '(GMT+0000) America/Danmarkshavn'), ('America/Dawson', '(GMT-0700) America/Dawson'), ('America/Dawson_Creek', '(GMT-0700) America/Dawson_Creek'), ('America/Denver', '(GMT-0600) America/Denver'), ('America/Detroit', '(GMT-0400) America/Detroit'), ('America/Dominica', '(GMT-0400) America/Dominica'), ('America/Edmonton', '(GMT-0600) America/Edmonton'), ('America/Eirunepe', '(GMT-0500) America/Eirunepe'), ('America/El_Salvador', '(GMT-0600) America/El_Salvador'), ('America/Fort_Nelson', '(GMT-0700) America/Fort_Nelson'), ('America/Fortaleza', '(GMT-0300) America/Fortaleza'), ('America/Glace_Bay', '(GMT-0300) America/Glace_Bay'), ('America/Goose_Bay', '(GMT-0300) America/Goose_Bay'), ('America/Grand_Turk', '(GMT-0400) America/Grand_Turk'), ('America/Grenada', '(GMT-0400) America/Grenada'), ('America/Guadeloupe', '(GMT-0400) America/Guadeloupe'), ('America/Guatemala', '(GMT-0600) America/Guatemala'), ('America/Guayaquil', '(GMT-0500) America/Guayaquil'), ('America/Guyana', '(GMT-0400) America/Guyana'), ('America/Halifax', '(GMT-0300) America/Halifax'), ('America/Havana', '(GMT-0400) America/Havana'), ('America/Hermosillo', '(GMT-0700) America/Hermosillo'), ('America/Indiana/Indianapolis', '(GMT-0400) America/Indiana/Indianapolis'), ('America/Indiana/Knox', '(GMT-0500) America/Indiana/Knox'), ('America/Indiana/Marengo', '(GMT-0400) America/Indiana/Marengo'), ('America/Indiana/Petersburg', '(GMT-0400) America/Indiana/Petersburg'), ('America/Indiana/Tell_City', '(GMT-0500) America/Indiana/Tell_City'), ('America/Indiana/Vevay', '(GMT-0400) America/Indiana/Vevay'), ('America/Indiana/Vincennes', '(GMT-0400) America/Indiana/Vincennes'), ('America/Indiana/Winamac', '(GMT-0400) America/Indiana/Winamac'), ('America/Inuvik', '(GMT-0600) America/Inuvik'), ('America/Iqaluit', '(GMT-0400) America/Iqaluit'), ('America/Jamaica', '(GMT-0500) America/Jamaica'), ('America/Juneau', '(GMT-0800) America/Juneau'), ('America/Kentucky/Louisville', '(GMT-0400) America/Kentucky/Louisville'), ('America/Kentucky/Monticello', '(GMT-0400) America/Kentucky/Monticello'), ('America/Kralendijk', '(GMT-0400) America/Kralendijk'), ('America/La_Paz', '(GMT-0400) America/La_Paz'), ('America/Lima', '(GMT-0500) America/Lima'), ('America/Los_Angeles', '(GMT-0700) America/Los_Angeles'), ('America/Lower_Princes', 
'(GMT-0400) America/Lower_Princes'), ('America/Maceio', '(GMT-0300) America/Maceio'), ('America/Managua', '(GMT-0600) America/Managua'), ('America/Manaus', '(GMT-0400) America/Manaus'), ('America/Marigot', '(GMT-0400) America/Marigot'), ('America/Martinique', '(GMT-0400) America/Martinique'), ('America/Matamoros', '(GMT-0500) America/Matamoros'), ('America/Mazatlan', '(GMT-0600) America/Mazatlan'), ('America/Menominee', '(GMT-0500) America/Menominee'), ('America/Merida', '(GMT-0500) America/Merida'), ('America/Metlakatla', '(GMT-0800) America/Metlakatla'), ('America/Mexico_City', '(GMT-0500) America/Mexico_City'), ('America/Miquelon', '(GMT-0200) America/Miquelon'), ('America/Moncton', '(GMT-0300) America/Moncton'), ('America/Monterrey', '(GMT-0500) America/Monterrey'), ('America/Montevideo', '(GMT-0300) America/Montevideo'), ('America/Montserrat', '(GMT-0400) America/Montserrat'), ('America/Nassau', '(GMT-0400) America/Nassau'), ('America/New_York', '(GMT-0400) America/New_York'), ('America/Nipigon', '(GMT-0400) America/Nipigon'), ('America/Nome', '(GMT-0800) America/Nome'), ('America/Noronha', '(GMT-0200) America/Noronha'), ('America/North_Dakota/Beulah', '(GMT-0500) America/North_Dakota/Beulah'), ('America/North_Dakota/Center', '(GMT-0500) America/North_Dakota/Center'), ('America/North_Dakota/New_Salem', '(GMT-0500) America/North_Dakota/New_Salem'), ('America/Nuuk', '(GMT-0200) America/Nuuk'), ('America/Ojinaga', '(GMT-0600) America/Ojinaga'), ('America/Panama', '(GMT-0500) America/Panama'), ('America/Pangnirtung', '(GMT-0400) America/Pangnirtung'), ('America/Paramaribo', '(GMT-0300) America/Paramaribo'), ('America/Phoenix', '(GMT-0700) America/Phoenix'), ('America/Port-au-Prince', '(GMT-0400) America/Port-au-Prince'), ('America/Port_of_Spain', '(GMT-0400) America/Port_of_Spain'), ('America/Porto_Velho', '(GMT-0400) America/Porto_Velho'), ('America/Puerto_Rico', '(GMT-0400) America/Puerto_Rico'), ('America/Punta_Arenas', '(GMT-0300) America/Punta_Arenas'), ('America/Rainy_River', '(GMT-0500) America/Rainy_River'), ('America/Rankin_Inlet', '(GMT-0500) America/Rankin_Inlet'), ('America/Recife', '(GMT-0300) America/Recife'), ('America/Regina', '(GMT-0600) America/Regina'), ('America/Resolute', '(GMT-0500) America/Resolute'), ('America/Rio_Branco', '(GMT-0500) America/Rio_Branco'), ('America/Santarem', '(GMT-0300) America/Santarem'), ('America/Santiago', '(GMT-0400) America/Santiago'), ('America/Santo_Domingo', '(GMT-0400) America/Santo_Domingo'), ('America/Sao_Paulo', '(GMT-0300) America/Sao_Paulo'), ('America/Scoresbysund', '(GMT+0000) America/Scoresbysund'), ('America/Sitka', '(GMT-0800) America/Sitka'), ('America/St_Barthelemy', '(GMT-0400) America/St_Barthelemy'), ('America/St_Johns', '(GMT-0230) America/St_Johns'), ('America/St_Kitts', '(GMT-0400) America/St_Kitts'), ('America/St_Lucia', '(GMT-0400) America/St_Lucia'), ('America/St_Thomas', '(GMT-0400) America/St_Thomas'), ('America/St_Vincent', '(GMT-0400) America/St_Vincent'), ('America/Swift_Current', '(GMT-0600) America/Swift_Current'), ('America/Tegucigalpa', '(GMT-0600) America/Tegucigalpa'), ('America/Thule', '(GMT-0300) America/Thule'), ('America/Thunder_Bay', '(GMT-0400) America/Thunder_Bay'), ('America/Tijuana', '(GMT-0700) America/Tijuana'), ('America/Toronto', '(GMT-0400) America/Toronto'), ('America/Tortola', '(GMT-0400) America/Tortola'), ('America/Vancouver', '(GMT-0700) America/Vancouver'), ('America/Whitehorse', '(GMT-0700) America/Whitehorse'), ('America/Winnipeg', '(GMT-0500) America/Winnipeg'), ('America/Yakutat', 
'(GMT-0800) America/Yakutat'), ('America/Yellowknife', '(GMT-0600) America/Yellowknife'), ('Antarctica/Casey', '(GMT+1100) Antarctica/Casey'), ('Antarctica/Davis', '(GMT+0700) Antarctica/Davis'), ('Antarctica/DumontDUrville', '(GMT+1000) Antarctica/DumontDUrville'), ('Antarctica/Macquarie', '(GMT+1000) Antarctica/Macquarie'), ('Antarctica/Mawson', '(GMT+0500) Antarctica/Mawson'), ('Antarctica/McMurdo', '(GMT+1200) Antarctica/McMurdo'), ('Antarctica/Palmer', '(GMT-0300) Antarctica/Palmer'), ('Antarctica/Rothera', '(GMT-0300) Antarctica/Rothera'), ('Antarctica/Syowa', '(GMT+0300) Antarctica/Syowa'), ('Antarctica/Troll', '(GMT+0200) Antarctica/Troll'), ('Antarctica/Vostok', '(GMT+0600) Antarctica/Vostok'), ('Arctic/Longyearbyen', '(GMT+0200) Arctic/Longyearbyen'), ('Asia/Aden', '(GMT+0300) Asia/Aden'), ('Asia/Almaty', '(GMT+0600) Asia/Almaty'), ('Asia/Amman', '(GMT+0300) Asia/Amman'), ('Asia/Anadyr', '(GMT+1200) Asia/Anadyr'), ('Asia/Aqtau', '(GMT+0500) Asia/Aqtau'), ('Asia/Aqtobe', '(GMT+0500) Asia/Aqtobe'), ('Asia/Ashgabat', '(GMT+0500) Asia/Ashgabat'), ('Asia/Atyrau', '(GMT+0500) Asia/Atyrau'), ('Asia/Baghdad', '(GMT+0300) Asia/Baghdad'), ('Asia/Bahrain', '(GMT+0300) Asia/Bahrain'), ('Asia/Baku', '(GMT+0400) Asia/Baku'), ('Asia/Bangkok', '(GMT+0700) Asia/Bangkok'), ('Asia/Barnaul', '(GMT+0700) Asia/Barnaul'), ('Asia/Beirut', '(GMT+0300) Asia/Beirut'), ('Asia/Bishkek', '(GMT+0600) Asia/Bishkek'), ('Asia/Brunei', '(GMT+0800) Asia/Brunei'), ('Asia/Chita', '(GMT+0900) Asia/Chita'), ('Asia/Choibalsan', '(GMT+0800) Asia/Choibalsan'), ('Asia/Colombo', '(GMT+0530) Asia/Colombo'), ('Asia/Damascus', '(GMT+0300) Asia/Damascus'), ('Asia/Dhaka', '(GMT+0600) Asia/Dhaka'), ('Asia/Dili', '(GMT+0900) Asia/Dili'), ('Asia/Dubai', '(GMT+0400) Asia/Dubai'), ('Asia/Dushanbe', '(GMT+0500) Asia/Dushanbe'), ('Asia/Famagusta', '(GMT+0300) Asia/Famagusta'), ('Asia/Gaza', '(GMT+0300) Asia/Gaza'), ('Asia/Hebron', '(GMT+0300) Asia/Hebron'), ('Asia/Ho_Chi_Minh', '(GMT+0700) Asia/Ho_Chi_Minh'), ('Asia/Hong_Kong', '(GMT+0800) Asia/Hong_Kong'), ('Asia/Hovd', '(GMT+0700) Asia/Hovd'), ('Asia/Irkutsk', '(GMT+0800) Asia/Irkutsk'), ('Asia/Jakarta', '(GMT+0700) Asia/Jakarta'), ('Asia/Jayapura', '(GMT+0900) Asia/Jayapura'), ('Asia/Jerusalem', '(GMT+0300) Asia/Jerusalem'), ('Asia/Kabul', '(GMT+0430) Asia/Kabul'), ('Asia/Kamchatka', '(GMT+1200) Asia/Kamchatka'), ('Asia/Karachi', '(GMT+0500) Asia/Karachi'), ('Asia/Kathmandu', '(GMT+0545) Asia/Kathmandu'), ('Asia/Khandyga', '(GMT+0900) Asia/Khandyga'), ('Asia/Kolkata', '(GMT+0530) Asia/Kolkata'), ('Asia/Krasnoyarsk', '(GMT+0700) Asia/Krasnoyarsk'), ('Asia/Kuala_Lumpur', '(GMT+0800) Asia/Kuala_Lumpur'), ('Asia/Kuching', '(GMT+0800) Asia/Kuching'), ('Asia/Kuwait', '(GMT+0300) Asia/Kuwait'), ('Asia/Macau', '(GMT+0800) Asia/Macau'), ('Asia/Magadan', '(GMT+1100) Asia/Magadan'), ('Asia/Makassar', '(GMT+0800) Asia/Makassar'), ('Asia/Manila', '(GMT+0800) Asia/Manila'), ('Asia/Muscat', '(GMT+0400) Asia/Muscat'), ('Asia/Nicosia', '(GMT+0300) Asia/Nicosia'), ('Asia/Novokuznetsk', '(GMT+0700) Asia/Novokuznetsk'), ('Asia/Novosibirsk', '(GMT+0700) Asia/Novosibirsk'), ('Asia/Omsk', '(GMT+0600) Asia/Omsk'), ('Asia/Oral', '(GMT+0500) Asia/Oral'), ('Asia/Phnom_Penh', '(GMT+0700) Asia/Phnom_Penh'), ('Asia/Pontianak', '(GMT+0700) Asia/Pontianak'), ('Asia/Pyongyang', '(GMT+0900) Asia/Pyongyang'), ('Asia/Qatar', '(GMT+0300) Asia/Qatar'), ('Asia/Qostanay', '(GMT+0600) Asia/Qostanay'), ('Asia/Qyzylorda', '(GMT+0500) Asia/Qyzylorda'), ('Asia/Riyadh', '(GMT+0300) Asia/Riyadh'), ('Asia/Sakhalin', '(GMT+1100) 
Asia/Sakhalin'), ('Asia/Samarkand', '(GMT+0500) Asia/Samarkand'), ('Asia/Seoul', '(GMT+0900) Asia/Seoul'), ('Asia/Shanghai', '(GMT+0800) Asia/Shanghai'), ('Asia/Singapore', '(GMT+0800) Asia/Singapore'), ('Asia/Srednekolymsk', '(GMT+1100) Asia/Srednekolymsk'), ('Asia/Taipei', '(GMT+0800) Asia/Taipei'), ('Asia/Tashkent', '(GMT+0500) Asia/Tashkent'), ('Asia/Tbilisi', '(GMT+0400) Asia/Tbilisi'), ('Asia/Tehran', '(GMT+0430) Asia/Tehran'), ('Asia/Thimphu', '(GMT+0600) Asia/Thimphu'), ('Asia/Tokyo', '(GMT+0900) Asia/Tokyo'), ('Asia/Tomsk', '(GMT+0700) Asia/Tomsk'), ('Asia/Ulaanbaatar', '(GMT+0800) Asia/Ulaanbaatar'), ('Asia/Urumqi', '(GMT+0600) Asia/Urumqi'), ('Asia/Ust-Nera', '(GMT+1000) Asia/Ust-Nera'), ('Asia/Vientiane', '(GMT+0700) Asia/Vientiane'), ('Asia/Vladivostok', '(GMT+1000) Asia/Vladivostok'), ('Asia/Yakutsk', '(GMT+0900) Asia/Yakutsk'), ('Asia/Yangon', '(GMT+0630) Asia/Yangon'), ('Asia/Yekaterinburg', '(GMT+0500) Asia/Yekaterinburg'), ('Asia/Yerevan', '(GMT+0400) Asia/Yerevan'), ('Atlantic/Azores', '(GMT+0000) Atlantic/Azores'), ('Atlantic/Bermuda', '(GMT-0300) Atlantic/Bermuda'), ('Atlantic/Canary', '(GMT+0100) Atlantic/Canary'), ('Atlantic/Cape_Verde', '(GMT-0100) Atlantic/Cape_Verde'), ('Atlantic/Faroe', '(GMT+0100) Atlantic/Faroe'), ('Atlantic/Madeira', '(GMT+0100) Atlantic/Madeira'), ('Atlantic/Reykjavik', '(GMT+0000) Atlantic/Reykjavik'), ('Atlantic/South_Georgia', '(GMT-0200) Atlantic/South_Georgia'), ('Atlantic/St_Helena', '(GMT+0000) Atlantic/St_Helena'), ('Atlantic/Stanley', '(GMT-0300) Atlantic/Stanley'), ('Australia/Adelaide', '(GMT+0930) Australia/Adelaide'), ('Australia/Brisbane', '(GMT+1000) Australia/Brisbane'), ('Australia/Broken_Hill', '(GMT+0930) Australia/Broken_Hill'), ('Australia/Currie', '(GMT+1000) Australia/Currie'), ('Australia/Darwin', '(GMT+0930) Australia/Darwin'), ('Australia/Eucla', '(GMT+0845) Australia/Eucla'), ('Australia/Hobart', '(GMT+1000) Australia/Hobart'), ('Australia/Lindeman', '(GMT+1000) Australia/Lindeman'), ('Australia/Lord_Howe', '(GMT+1030) Australia/Lord_Howe'), ('Australia/Melbourne', '(GMT+1000) Australia/Melbourne'), ('Australia/Perth', '(GMT+0800) Australia/Perth'), ('Australia/Sydney', '(GMT+1000) Australia/Sydney'), ('Canada/Atlantic', '(GMT-0300) Canada/Atlantic'), ('Canada/Central', '(GMT-0500) Canada/Central'), ('Canada/Eastern', '(GMT-0400) Canada/Eastern'), ('Canada/Mountain', '(GMT-0600) Canada/Mountain'), ('Canada/Newfoundland', '(GMT-0230) Canada/Newfoundland'), ('Canada/Pacific', '(GMT-0700) Canada/Pacific'), ('Europe/Amsterdam', '(GMT+0200) Europe/Amsterdam'), ('Europe/Andorra', '(GMT+0200) Europe/Andorra'), ('Europe/Astrakhan', '(GMT+0400) Europe/Astrakhan'), ('Europe/Athens', '(GMT+0300) Europe/Athens'), ('Europe/Belgrade', '(GMT+0200) Europe/Belgrade'), ('Europe/Berlin', '(GMT+0200) Europe/Berlin'), ('Europe/Bratislava', '(GMT+0200) Europe/Bratislava'), ('Europe/Brussels', '(GMT+0200) Europe/Brussels'), ('Europe/Bucharest', '(GMT+0300) Europe/Bucharest'), ('Europe/Budapest', '(GMT+0200) Europe/Budapest'), ('Europe/Busingen', '(GMT+0200) Europe/Busingen'), ('Europe/Chisinau', '(GMT+0300) Europe/Chisinau'), ('Europe/Copenhagen', '(GMT+0200) Europe/Copenhagen'), ('Europe/Dublin', '(GMT+0100) Europe/Dublin'), ('Europe/Gibraltar', '(GMT+0200) Europe/Gibraltar'), ('Europe/Guernsey', '(GMT+0100) Europe/Guernsey'), ('Europe/Helsinki', '(GMT+0300) Europe/Helsinki'), ('Europe/Isle_of_Man', '(GMT+0100) Europe/Isle_of_Man'), ('Europe/Istanbul', '(GMT+0300) Europe/Istanbul'), ('Europe/Jersey', '(GMT+0100) Europe/Jersey'), 
('Europe/Kaliningrad', '(GMT+0200) Europe/Kaliningrad'), ('Europe/Kiev', '(GMT+0300) Europe/Kiev'), ('Europe/Kirov', '(GMT+0300) Europe/Kirov'), ('Europe/Lisbon', '(GMT+0100) Europe/Lisbon'), ('Europe/Ljubljana', '(GMT+0200) Europe/Ljubljana'), ('Europe/London', '(GMT+0100) Europe/London'), ('Europe/Luxembourg', '(GMT+0200) Europe/Luxembourg'), ('Europe/Madrid', '(GMT+0200) Europe/Madrid'), ('Europe/Malta', '(GMT+0200) Europe/Malta'), ('Europe/Mariehamn', '(GMT+0300) Europe/Mariehamn'), ('Europe/Minsk', '(GMT+0300) Europe/Minsk'), ('Europe/Monaco', '(GMT+0200) Europe/Monaco'), ('Europe/Moscow', '(GMT+0300) Europe/Moscow'), ('Europe/Oslo', '(GMT+0200) Europe/Oslo'), ('Europe/Paris', '(GMT+0200) Europe/Paris'), ('Europe/Podgorica', '(GMT+0200) Europe/Podgorica'), ('Europe/Prague', '(GMT+0200) Europe/Prague'), ('Europe/Riga', '(GMT+0300) Europe/Riga'), ('Europe/Rome', '(GMT+0200) Europe/Rome'), ('Europe/Samara', '(GMT+0400) Europe/Samara'), ('Europe/San_Marino', '(GMT+0200) Europe/San_Marino'), ('Europe/Sarajevo', '(GMT+0200) Europe/Sarajevo'), ('Europe/Saratov', '(GMT+0400) Europe/Saratov'), ('Europe/Simferopol', '(GMT+0300) Europe/Simferopol'), ('Europe/Skopje', '(GMT+0200) Europe/Skopje'), ('Europe/Sofia', '(GMT+0300) Europe/Sofia'), ('Europe/Stockholm', '(GMT+0200) Europe/Stockholm'), ('Europe/Tallinn', '(GMT+0300) Europe/Tallinn'), ('Europe/Tirane', '(GMT+0200) Europe/Tirane'), ('Europe/Ulyanovsk', '(GMT+0400) Europe/Ulyanovsk'), ('Europe/Uzhgorod', '(GMT+0300) Europe/Uzhgorod'), ('Europe/Vaduz', '(GMT+0200) Europe/Vaduz'), ('Europe/Vatican', '(GMT+0200) Europe/Vatican'), ('Europe/Vienna', '(GMT+0200) Europe/Vienna'), ('Europe/Vilnius', '(GMT+0300) Europe/Vilnius'), ('Europe/Volgograd', '(GMT+0400) Europe/Volgograd'), ('Europe/Warsaw', '(GMT+0200) Europe/Warsaw'), ('Europe/Zagreb', '(GMT+0200) Europe/Zagreb'), ('Europe/Zaporozhye', '(GMT+0300) Europe/Zaporozhye'), ('Europe/Zurich', '(GMT+0200) Europe/Zurich'), ('GMT', '(GMT+0000) GMT'), ('Indian/Antananarivo', '(GMT+0300) Indian/Antananarivo'), ('Indian/Chagos', '(GMT+0600) Indian/Chagos'), ('Indian/Christmas', '(GMT+0700) Indian/Christmas'), ('Indian/Cocos', '(GMT+0630) Indian/Cocos'), ('Indian/Comoro', '(GMT+0300) Indian/Comoro'), ('Indian/Kerguelen', '(GMT+0500) Indian/Kerguelen'), ('Indian/Mahe', '(GMT+0400) Indian/Mahe'), ('Indian/Maldives', '(GMT+0500) Indian/Maldives'), ('Indian/Mauritius', '(GMT+0400) Indian/Mauritius'), ('Indian/Mayotte', '(GMT+0300) Indian/Mayotte'), ('Indian/Reunion', '(GMT+0400) Indian/Reunion'), ('Pacific/Apia', '(GMT+1300) Pacific/Apia'), ('Pacific/Auckland', '(GMT+1200) Pacific/Auckland'), ('Pacific/Bougainville', '(GMT+1100) Pacific/Bougainville'), ('Pacific/Chatham', '(GMT+1245) Pacific/Chatham'), ('Pacific/Chuuk', '(GMT+1000) Pacific/Chuuk'), ('Pacific/Easter', '(GMT-0600) Pacific/Easter'), ('Pacific/Efate', '(GMT+1100) Pacific/Efate'), ('Pacific/Enderbury', '(GMT+1300) Pacific/Enderbury'), ('Pacific/Fakaofo', '(GMT+1300) Pacific/Fakaofo'), ('Pacific/Fiji', '(GMT+1200) Pacific/Fiji'), ('Pacific/Funafuti', '(GMT+1200) Pacific/Funafuti'), ('Pacific/Galapagos', '(GMT-0600) Pacific/Galapagos'), ('Pacific/Gambier', '(GMT-0900) Pacific/Gambier'), ('Pacific/Guadalcanal', '(GMT+1100) Pacific/Guadalcanal'), ('Pacific/Guam', '(GMT+1000) Pacific/Guam'), ('Pacific/Honolulu', '(GMT-1000) Pacific/Honolulu'), ('Pacific/Kiritimati', '(GMT+1400) Pacific/Kiritimati'), ('Pacific/Kosrae', '(GMT+1100) Pacific/Kosrae'), ('Pacific/Kwajalein', '(GMT+1200) Pacific/Kwajalein'), ('Pacific/Majuro', '(GMT+1200) Pacific/Majuro'), 
('Pacific/Marquesas', '(GMT-0930) Pacific/Marquesas'), ('Pacific/Midway', '(GMT-1100) Pacific/Midway'), ('Pacific/Nauru', '(GMT+1200) Pacific/Nauru'), ('Pacific/Niue', '(GMT-1100) Pacific/Niue'), ('Pacific/Norfolk', '(GMT+1100) Pacific/Norfolk'), ('Pacific/Noumea', '(GMT+1100) Pacific/Noumea'), ('Pacific/Pago_Pago', '(GMT-1100) Pacific/Pago_Pago'), ('Pacific/Palau', '(GMT+0900) Pacific/Palau'), ('Pacific/Pitcairn', '(GMT-0800) Pacific/Pitcairn'), ('Pacific/Pohnpei', '(GMT+1100) Pacific/Pohnpei'), ('Pacific/Port_Moresby', '(GMT+1000) Pacific/Port_Moresby'), ('Pacific/Rarotonga', '(GMT-1000) Pacific/Rarotonga'), ('Pacific/Saipan', '(GMT+1000) Pacific/Saipan'), ('Pacific/Tahiti', '(GMT-1000) Pacific/Tahiti'), ('Pacific/Tarawa', '(GMT+1200) Pacific/Tarawa'), ('Pacific/Tongatapu', '(GMT+1300) Pacific/Tongatapu'), ('Pacific/Wake', '(GMT+1200) Pacific/Wake'), ('Pacific/Wallis', '(GMT+1200) Pacific/Wallis'), ('US/Alaska', '(GMT-0800) US/Alaska'), ('US/Arizona', '(GMT-0700) US/Arizona'), ('US/Central', '(GMT-0500) US/Central'), ('US/Eastern', '(GMT-0400) US/Eastern'), ('US/Hawaii', '(GMT-1000) US/Hawaii'), ('US/Mountain', '(GMT-0600) US/Mountain'), ('US/Pacific', '(GMT-0700) US/Pacific'), ('UTC', '(GMT+0000) UTC')], default='America/New_York', max_length=100), + model_name="profile", + name="timezone", + field=vendor.timezones.fields.TimeZoneField( + choices=[ + ("Africa/Abidjan", "(GMT+0000) Africa/Abidjan"), + ("Africa/Accra", "(GMT+0000) Africa/Accra"), + ("Africa/Addis_Ababa", "(GMT+0300) Africa/Addis_Ababa"), + ("Africa/Algiers", "(GMT+0100) Africa/Algiers"), + ("Africa/Asmara", "(GMT+0300) Africa/Asmara"), + ("Africa/Bamako", "(GMT+0000) Africa/Bamako"), + ("Africa/Bangui", "(GMT+0100) Africa/Bangui"), + ("Africa/Banjul", "(GMT+0000) Africa/Banjul"), + ("Africa/Bissau", "(GMT+0000) Africa/Bissau"), + ("Africa/Blantyre", "(GMT+0200) Africa/Blantyre"), + ("Africa/Brazzaville", "(GMT+0100) Africa/Brazzaville"), + ("Africa/Bujumbura", "(GMT+0200) Africa/Bujumbura"), + ("Africa/Cairo", "(GMT+0200) Africa/Cairo"), + ("Africa/Casablanca", "(GMT+0100) Africa/Casablanca"), + ("Africa/Ceuta", "(GMT+0200) Africa/Ceuta"), + ("Africa/Conakry", "(GMT+0000) Africa/Conakry"), + ("Africa/Dakar", "(GMT+0000) Africa/Dakar"), + ("Africa/Dar_es_Salaam", "(GMT+0300) Africa/Dar_es_Salaam"), + ("Africa/Djibouti", "(GMT+0300) Africa/Djibouti"), + ("Africa/Douala", "(GMT+0100) Africa/Douala"), + ("Africa/El_Aaiun", "(GMT+0100) Africa/El_Aaiun"), + ("Africa/Freetown", "(GMT+0000) Africa/Freetown"), + ("Africa/Gaborone", "(GMT+0200) Africa/Gaborone"), + ("Africa/Harare", "(GMT+0200) Africa/Harare"), + ("Africa/Johannesburg", "(GMT+0200) Africa/Johannesburg"), + ("Africa/Juba", "(GMT+0300) Africa/Juba"), + ("Africa/Kampala", "(GMT+0300) Africa/Kampala"), + ("Africa/Khartoum", "(GMT+0200) Africa/Khartoum"), + ("Africa/Kigali", "(GMT+0200) Africa/Kigali"), + ("Africa/Kinshasa", "(GMT+0100) Africa/Kinshasa"), + ("Africa/Lagos", "(GMT+0100) Africa/Lagos"), + ("Africa/Libreville", "(GMT+0100) Africa/Libreville"), + ("Africa/Lome", "(GMT+0000) Africa/Lome"), + ("Africa/Luanda", "(GMT+0100) Africa/Luanda"), + ("Africa/Lubumbashi", "(GMT+0200) Africa/Lubumbashi"), + ("Africa/Lusaka", "(GMT+0200) Africa/Lusaka"), + ("Africa/Malabo", "(GMT+0100) Africa/Malabo"), + ("Africa/Maputo", "(GMT+0200) Africa/Maputo"), + ("Africa/Maseru", "(GMT+0200) Africa/Maseru"), + ("Africa/Mbabane", "(GMT+0200) Africa/Mbabane"), + ("Africa/Mogadishu", "(GMT+0300) Africa/Mogadishu"), + ("Africa/Monrovia", "(GMT+0000) Africa/Monrovia"), + 
("Africa/Nairobi", "(GMT+0300) Africa/Nairobi"), + ("Africa/Ndjamena", "(GMT+0100) Africa/Ndjamena"), + ("Africa/Niamey", "(GMT+0100) Africa/Niamey"), + ("Africa/Nouakchott", "(GMT+0000) Africa/Nouakchott"), + ("Africa/Ouagadougou", "(GMT+0000) Africa/Ouagadougou"), + ("Africa/Porto-Novo", "(GMT+0100) Africa/Porto-Novo"), + ("Africa/Sao_Tome", "(GMT+0000) Africa/Sao_Tome"), + ("Africa/Tripoli", "(GMT+0200) Africa/Tripoli"), + ("Africa/Tunis", "(GMT+0100) Africa/Tunis"), + ("Africa/Windhoek", "(GMT+0200) Africa/Windhoek"), + ("America/Adak", "(GMT-0900) America/Adak"), + ("America/Anchorage", "(GMT-0800) America/Anchorage"), + ("America/Anguilla", "(GMT-0400) America/Anguilla"), + ("America/Antigua", "(GMT-0400) America/Antigua"), + ("America/Araguaina", "(GMT-0300) America/Araguaina"), + ("America/Argentina/Buenos_Aires", "(GMT-0300) America/Argentina/Buenos_Aires"), + ("America/Argentina/Catamarca", "(GMT-0300) America/Argentina/Catamarca"), + ("America/Argentina/Cordoba", "(GMT-0300) America/Argentina/Cordoba"), + ("America/Argentina/Jujuy", "(GMT-0300) America/Argentina/Jujuy"), + ("America/Argentina/La_Rioja", "(GMT-0300) America/Argentina/La_Rioja"), + ("America/Argentina/Mendoza", "(GMT-0300) America/Argentina/Mendoza"), + ("America/Argentina/Rio_Gallegos", "(GMT-0300) America/Argentina/Rio_Gallegos"), + ("America/Argentina/Salta", "(GMT-0300) America/Argentina/Salta"), + ("America/Argentina/San_Juan", "(GMT-0300) America/Argentina/San_Juan"), + ("America/Argentina/San_Luis", "(GMT-0300) America/Argentina/San_Luis"), + ("America/Argentina/Tucuman", "(GMT-0300) America/Argentina/Tucuman"), + ("America/Argentina/Ushuaia", "(GMT-0300) America/Argentina/Ushuaia"), + ("America/Aruba", "(GMT-0400) America/Aruba"), + ("America/Asuncion", "(GMT-0400) America/Asuncion"), + ("America/Atikokan", "(GMT-0500) America/Atikokan"), + ("America/Bahia", "(GMT-0300) America/Bahia"), + ("America/Bahia_Banderas", "(GMT-0500) America/Bahia_Banderas"), + ("America/Barbados", "(GMT-0400) America/Barbados"), + ("America/Belem", "(GMT-0300) America/Belem"), + ("America/Belize", "(GMT-0600) America/Belize"), + ("America/Blanc-Sablon", "(GMT-0400) America/Blanc-Sablon"), + ("America/Boa_Vista", "(GMT-0400) America/Boa_Vista"), + ("America/Bogota", "(GMT-0500) America/Bogota"), + ("America/Boise", "(GMT-0600) America/Boise"), + ("America/Cambridge_Bay", "(GMT-0600) America/Cambridge_Bay"), + ("America/Campo_Grande", "(GMT-0400) America/Campo_Grande"), + ("America/Cancun", "(GMT-0500) America/Cancun"), + ("America/Caracas", "(GMT-0400) America/Caracas"), + ("America/Cayenne", "(GMT-0300) America/Cayenne"), + ("America/Cayman", "(GMT-0500) America/Cayman"), + ("America/Chicago", "(GMT-0500) America/Chicago"), + ("America/Chihuahua", "(GMT-0600) America/Chihuahua"), + ("America/Costa_Rica", "(GMT-0600) America/Costa_Rica"), + ("America/Creston", "(GMT-0700) America/Creston"), + ("America/Cuiaba", "(GMT-0400) America/Cuiaba"), + ("America/Curacao", "(GMT-0400) America/Curacao"), + ("America/Danmarkshavn", "(GMT+0000) America/Danmarkshavn"), + ("America/Dawson", "(GMT-0700) America/Dawson"), + ("America/Dawson_Creek", "(GMT-0700) America/Dawson_Creek"), + ("America/Denver", "(GMT-0600) America/Denver"), + ("America/Detroit", "(GMT-0400) America/Detroit"), + ("America/Dominica", "(GMT-0400) America/Dominica"), + ("America/Edmonton", "(GMT-0600) America/Edmonton"), + ("America/Eirunepe", "(GMT-0500) America/Eirunepe"), + ("America/El_Salvador", "(GMT-0600) America/El_Salvador"), + ("America/Fort_Nelson", "(GMT-0700) 
America/Fort_Nelson"), + ("America/Fortaleza", "(GMT-0300) America/Fortaleza"), + ("America/Glace_Bay", "(GMT-0300) America/Glace_Bay"), + ("America/Goose_Bay", "(GMT-0300) America/Goose_Bay"), + ("America/Grand_Turk", "(GMT-0400) America/Grand_Turk"), + ("America/Grenada", "(GMT-0400) America/Grenada"), + ("America/Guadeloupe", "(GMT-0400) America/Guadeloupe"), + ("America/Guatemala", "(GMT-0600) America/Guatemala"), + ("America/Guayaquil", "(GMT-0500) America/Guayaquil"), + ("America/Guyana", "(GMT-0400) America/Guyana"), + ("America/Halifax", "(GMT-0300) America/Halifax"), + ("America/Havana", "(GMT-0400) America/Havana"), + ("America/Hermosillo", "(GMT-0700) America/Hermosillo"), + ("America/Indiana/Indianapolis", "(GMT-0400) America/Indiana/Indianapolis"), + ("America/Indiana/Knox", "(GMT-0500) America/Indiana/Knox"), + ("America/Indiana/Marengo", "(GMT-0400) America/Indiana/Marengo"), + ("America/Indiana/Petersburg", "(GMT-0400) America/Indiana/Petersburg"), + ("America/Indiana/Tell_City", "(GMT-0500) America/Indiana/Tell_City"), + ("America/Indiana/Vevay", "(GMT-0400) America/Indiana/Vevay"), + ("America/Indiana/Vincennes", "(GMT-0400) America/Indiana/Vincennes"), + ("America/Indiana/Winamac", "(GMT-0400) America/Indiana/Winamac"), + ("America/Inuvik", "(GMT-0600) America/Inuvik"), + ("America/Iqaluit", "(GMT-0400) America/Iqaluit"), + ("America/Jamaica", "(GMT-0500) America/Jamaica"), + ("America/Juneau", "(GMT-0800) America/Juneau"), + ("America/Kentucky/Louisville", "(GMT-0400) America/Kentucky/Louisville"), + ("America/Kentucky/Monticello", "(GMT-0400) America/Kentucky/Monticello"), + ("America/Kralendijk", "(GMT-0400) America/Kralendijk"), + ("America/La_Paz", "(GMT-0400) America/La_Paz"), + ("America/Lima", "(GMT-0500) America/Lima"), + ("America/Los_Angeles", "(GMT-0700) America/Los_Angeles"), + ("America/Lower_Princes", "(GMT-0400) America/Lower_Princes"), + ("America/Maceio", "(GMT-0300) America/Maceio"), + ("America/Managua", "(GMT-0600) America/Managua"), + ("America/Manaus", "(GMT-0400) America/Manaus"), + ("America/Marigot", "(GMT-0400) America/Marigot"), + ("America/Martinique", "(GMT-0400) America/Martinique"), + ("America/Matamoros", "(GMT-0500) America/Matamoros"), + ("America/Mazatlan", "(GMT-0600) America/Mazatlan"), + ("America/Menominee", "(GMT-0500) America/Menominee"), + ("America/Merida", "(GMT-0500) America/Merida"), + ("America/Metlakatla", "(GMT-0800) America/Metlakatla"), + ("America/Mexico_City", "(GMT-0500) America/Mexico_City"), + ("America/Miquelon", "(GMT-0200) America/Miquelon"), + ("America/Moncton", "(GMT-0300) America/Moncton"), + ("America/Monterrey", "(GMT-0500) America/Monterrey"), + ("America/Montevideo", "(GMT-0300) America/Montevideo"), + ("America/Montserrat", "(GMT-0400) America/Montserrat"), + ("America/Nassau", "(GMT-0400) America/Nassau"), + ("America/New_York", "(GMT-0400) America/New_York"), + ("America/Nipigon", "(GMT-0400) America/Nipigon"), + ("America/Nome", "(GMT-0800) America/Nome"), + ("America/Noronha", "(GMT-0200) America/Noronha"), + ("America/North_Dakota/Beulah", "(GMT-0500) America/North_Dakota/Beulah"), + ("America/North_Dakota/Center", "(GMT-0500) America/North_Dakota/Center"), + ("America/North_Dakota/New_Salem", "(GMT-0500) America/North_Dakota/New_Salem"), + ("America/Nuuk", "(GMT-0200) America/Nuuk"), + ("America/Ojinaga", "(GMT-0600) America/Ojinaga"), + ("America/Panama", "(GMT-0500) America/Panama"), + ("America/Pangnirtung", "(GMT-0400) America/Pangnirtung"), + ("America/Paramaribo", "(GMT-0300) 
America/Paramaribo"), + ("America/Phoenix", "(GMT-0700) America/Phoenix"), + ("America/Port-au-Prince", "(GMT-0400) America/Port-au-Prince"), + ("America/Port_of_Spain", "(GMT-0400) America/Port_of_Spain"), + ("America/Porto_Velho", "(GMT-0400) America/Porto_Velho"), + ("America/Puerto_Rico", "(GMT-0400) America/Puerto_Rico"), + ("America/Punta_Arenas", "(GMT-0300) America/Punta_Arenas"), + ("America/Rainy_River", "(GMT-0500) America/Rainy_River"), + ("America/Rankin_Inlet", "(GMT-0500) America/Rankin_Inlet"), + ("America/Recife", "(GMT-0300) America/Recife"), + ("America/Regina", "(GMT-0600) America/Regina"), + ("America/Resolute", "(GMT-0500) America/Resolute"), + ("America/Rio_Branco", "(GMT-0500) America/Rio_Branco"), + ("America/Santarem", "(GMT-0300) America/Santarem"), + ("America/Santiago", "(GMT-0400) America/Santiago"), + ("America/Santo_Domingo", "(GMT-0400) America/Santo_Domingo"), + ("America/Sao_Paulo", "(GMT-0300) America/Sao_Paulo"), + ("America/Scoresbysund", "(GMT+0000) America/Scoresbysund"), + ("America/Sitka", "(GMT-0800) America/Sitka"), + ("America/St_Barthelemy", "(GMT-0400) America/St_Barthelemy"), + ("America/St_Johns", "(GMT-0230) America/St_Johns"), + ("America/St_Kitts", "(GMT-0400) America/St_Kitts"), + ("America/St_Lucia", "(GMT-0400) America/St_Lucia"), + ("America/St_Thomas", "(GMT-0400) America/St_Thomas"), + ("America/St_Vincent", "(GMT-0400) America/St_Vincent"), + ("America/Swift_Current", "(GMT-0600) America/Swift_Current"), + ("America/Tegucigalpa", "(GMT-0600) America/Tegucigalpa"), + ("America/Thule", "(GMT-0300) America/Thule"), + ("America/Thunder_Bay", "(GMT-0400) America/Thunder_Bay"), + ("America/Tijuana", "(GMT-0700) America/Tijuana"), + ("America/Toronto", "(GMT-0400) America/Toronto"), + ("America/Tortola", "(GMT-0400) America/Tortola"), + ("America/Vancouver", "(GMT-0700) America/Vancouver"), + ("America/Whitehorse", "(GMT-0700) America/Whitehorse"), + ("America/Winnipeg", "(GMT-0500) America/Winnipeg"), + ("America/Yakutat", "(GMT-0800) America/Yakutat"), + ("America/Yellowknife", "(GMT-0600) America/Yellowknife"), + ("Antarctica/Casey", "(GMT+1100) Antarctica/Casey"), + ("Antarctica/Davis", "(GMT+0700) Antarctica/Davis"), + ("Antarctica/DumontDUrville", "(GMT+1000) Antarctica/DumontDUrville"), + ("Antarctica/Macquarie", "(GMT+1000) Antarctica/Macquarie"), + ("Antarctica/Mawson", "(GMT+0500) Antarctica/Mawson"), + ("Antarctica/McMurdo", "(GMT+1200) Antarctica/McMurdo"), + ("Antarctica/Palmer", "(GMT-0300) Antarctica/Palmer"), + ("Antarctica/Rothera", "(GMT-0300) Antarctica/Rothera"), + ("Antarctica/Syowa", "(GMT+0300) Antarctica/Syowa"), + ("Antarctica/Troll", "(GMT+0200) Antarctica/Troll"), + ("Antarctica/Vostok", "(GMT+0600) Antarctica/Vostok"), + ("Arctic/Longyearbyen", "(GMT+0200) Arctic/Longyearbyen"), + ("Asia/Aden", "(GMT+0300) Asia/Aden"), + ("Asia/Almaty", "(GMT+0600) Asia/Almaty"), + ("Asia/Amman", "(GMT+0300) Asia/Amman"), + ("Asia/Anadyr", "(GMT+1200) Asia/Anadyr"), + ("Asia/Aqtau", "(GMT+0500) Asia/Aqtau"), + ("Asia/Aqtobe", "(GMT+0500) Asia/Aqtobe"), + ("Asia/Ashgabat", "(GMT+0500) Asia/Ashgabat"), + ("Asia/Atyrau", "(GMT+0500) Asia/Atyrau"), + ("Asia/Baghdad", "(GMT+0300) Asia/Baghdad"), + ("Asia/Bahrain", "(GMT+0300) Asia/Bahrain"), + ("Asia/Baku", "(GMT+0400) Asia/Baku"), + ("Asia/Bangkok", "(GMT+0700) Asia/Bangkok"), + ("Asia/Barnaul", "(GMT+0700) Asia/Barnaul"), + ("Asia/Beirut", "(GMT+0300) Asia/Beirut"), + ("Asia/Bishkek", "(GMT+0600) Asia/Bishkek"), + ("Asia/Brunei", "(GMT+0800) Asia/Brunei"), + ("Asia/Chita", 
"(GMT+0900) Asia/Chita"), + ("Asia/Choibalsan", "(GMT+0800) Asia/Choibalsan"), + ("Asia/Colombo", "(GMT+0530) Asia/Colombo"), + ("Asia/Damascus", "(GMT+0300) Asia/Damascus"), + ("Asia/Dhaka", "(GMT+0600) Asia/Dhaka"), + ("Asia/Dili", "(GMT+0900) Asia/Dili"), + ("Asia/Dubai", "(GMT+0400) Asia/Dubai"), + ("Asia/Dushanbe", "(GMT+0500) Asia/Dushanbe"), + ("Asia/Famagusta", "(GMT+0300) Asia/Famagusta"), + ("Asia/Gaza", "(GMT+0300) Asia/Gaza"), + ("Asia/Hebron", "(GMT+0300) Asia/Hebron"), + ("Asia/Ho_Chi_Minh", "(GMT+0700) Asia/Ho_Chi_Minh"), + ("Asia/Hong_Kong", "(GMT+0800) Asia/Hong_Kong"), + ("Asia/Hovd", "(GMT+0700) Asia/Hovd"), + ("Asia/Irkutsk", "(GMT+0800) Asia/Irkutsk"), + ("Asia/Jakarta", "(GMT+0700) Asia/Jakarta"), + ("Asia/Jayapura", "(GMT+0900) Asia/Jayapura"), + ("Asia/Jerusalem", "(GMT+0300) Asia/Jerusalem"), + ("Asia/Kabul", "(GMT+0430) Asia/Kabul"), + ("Asia/Kamchatka", "(GMT+1200) Asia/Kamchatka"), + ("Asia/Karachi", "(GMT+0500) Asia/Karachi"), + ("Asia/Kathmandu", "(GMT+0545) Asia/Kathmandu"), + ("Asia/Khandyga", "(GMT+0900) Asia/Khandyga"), + ("Asia/Kolkata", "(GMT+0530) Asia/Kolkata"), + ("Asia/Krasnoyarsk", "(GMT+0700) Asia/Krasnoyarsk"), + ("Asia/Kuala_Lumpur", "(GMT+0800) Asia/Kuala_Lumpur"), + ("Asia/Kuching", "(GMT+0800) Asia/Kuching"), + ("Asia/Kuwait", "(GMT+0300) Asia/Kuwait"), + ("Asia/Macau", "(GMT+0800) Asia/Macau"), + ("Asia/Magadan", "(GMT+1100) Asia/Magadan"), + ("Asia/Makassar", "(GMT+0800) Asia/Makassar"), + ("Asia/Manila", "(GMT+0800) Asia/Manila"), + ("Asia/Muscat", "(GMT+0400) Asia/Muscat"), + ("Asia/Nicosia", "(GMT+0300) Asia/Nicosia"), + ("Asia/Novokuznetsk", "(GMT+0700) Asia/Novokuznetsk"), + ("Asia/Novosibirsk", "(GMT+0700) Asia/Novosibirsk"), + ("Asia/Omsk", "(GMT+0600) Asia/Omsk"), + ("Asia/Oral", "(GMT+0500) Asia/Oral"), + ("Asia/Phnom_Penh", "(GMT+0700) Asia/Phnom_Penh"), + ("Asia/Pontianak", "(GMT+0700) Asia/Pontianak"), + ("Asia/Pyongyang", "(GMT+0900) Asia/Pyongyang"), + ("Asia/Qatar", "(GMT+0300) Asia/Qatar"), + ("Asia/Qostanay", "(GMT+0600) Asia/Qostanay"), + ("Asia/Qyzylorda", "(GMT+0500) Asia/Qyzylorda"), + ("Asia/Riyadh", "(GMT+0300) Asia/Riyadh"), + ("Asia/Sakhalin", "(GMT+1100) Asia/Sakhalin"), + ("Asia/Samarkand", "(GMT+0500) Asia/Samarkand"), + ("Asia/Seoul", "(GMT+0900) Asia/Seoul"), + ("Asia/Shanghai", "(GMT+0800) Asia/Shanghai"), + ("Asia/Singapore", "(GMT+0800) Asia/Singapore"), + ("Asia/Srednekolymsk", "(GMT+1100) Asia/Srednekolymsk"), + ("Asia/Taipei", "(GMT+0800) Asia/Taipei"), + ("Asia/Tashkent", "(GMT+0500) Asia/Tashkent"), + ("Asia/Tbilisi", "(GMT+0400) Asia/Tbilisi"), + ("Asia/Tehran", "(GMT+0430) Asia/Tehran"), + ("Asia/Thimphu", "(GMT+0600) Asia/Thimphu"), + ("Asia/Tokyo", "(GMT+0900) Asia/Tokyo"), + ("Asia/Tomsk", "(GMT+0700) Asia/Tomsk"), + ("Asia/Ulaanbaatar", "(GMT+0800) Asia/Ulaanbaatar"), + ("Asia/Urumqi", "(GMT+0600) Asia/Urumqi"), + ("Asia/Ust-Nera", "(GMT+1000) Asia/Ust-Nera"), + ("Asia/Vientiane", "(GMT+0700) Asia/Vientiane"), + ("Asia/Vladivostok", "(GMT+1000) Asia/Vladivostok"), + ("Asia/Yakutsk", "(GMT+0900) Asia/Yakutsk"), + ("Asia/Yangon", "(GMT+0630) Asia/Yangon"), + ("Asia/Yekaterinburg", "(GMT+0500) Asia/Yekaterinburg"), + ("Asia/Yerevan", "(GMT+0400) Asia/Yerevan"), + ("Atlantic/Azores", "(GMT+0000) Atlantic/Azores"), + ("Atlantic/Bermuda", "(GMT-0300) Atlantic/Bermuda"), + ("Atlantic/Canary", "(GMT+0100) Atlantic/Canary"), + ("Atlantic/Cape_Verde", "(GMT-0100) Atlantic/Cape_Verde"), + ("Atlantic/Faroe", "(GMT+0100) Atlantic/Faroe"), + ("Atlantic/Madeira", "(GMT+0100) Atlantic/Madeira"), + 
("Atlantic/Reykjavik", "(GMT+0000) Atlantic/Reykjavik"), + ("Atlantic/South_Georgia", "(GMT-0200) Atlantic/South_Georgia"), + ("Atlantic/St_Helena", "(GMT+0000) Atlantic/St_Helena"), + ("Atlantic/Stanley", "(GMT-0300) Atlantic/Stanley"), + ("Australia/Adelaide", "(GMT+0930) Australia/Adelaide"), + ("Australia/Brisbane", "(GMT+1000) Australia/Brisbane"), + ("Australia/Broken_Hill", "(GMT+0930) Australia/Broken_Hill"), + ("Australia/Currie", "(GMT+1000) Australia/Currie"), + ("Australia/Darwin", "(GMT+0930) Australia/Darwin"), + ("Australia/Eucla", "(GMT+0845) Australia/Eucla"), + ("Australia/Hobart", "(GMT+1000) Australia/Hobart"), + ("Australia/Lindeman", "(GMT+1000) Australia/Lindeman"), + ("Australia/Lord_Howe", "(GMT+1030) Australia/Lord_Howe"), + ("Australia/Melbourne", "(GMT+1000) Australia/Melbourne"), + ("Australia/Perth", "(GMT+0800) Australia/Perth"), + ("Australia/Sydney", "(GMT+1000) Australia/Sydney"), + ("Canada/Atlantic", "(GMT-0300) Canada/Atlantic"), + ("Canada/Central", "(GMT-0500) Canada/Central"), + ("Canada/Eastern", "(GMT-0400) Canada/Eastern"), + ("Canada/Mountain", "(GMT-0600) Canada/Mountain"), + ("Canada/Newfoundland", "(GMT-0230) Canada/Newfoundland"), + ("Canada/Pacific", "(GMT-0700) Canada/Pacific"), + ("Europe/Amsterdam", "(GMT+0200) Europe/Amsterdam"), + ("Europe/Andorra", "(GMT+0200) Europe/Andorra"), + ("Europe/Astrakhan", "(GMT+0400) Europe/Astrakhan"), + ("Europe/Athens", "(GMT+0300) Europe/Athens"), + ("Europe/Belgrade", "(GMT+0200) Europe/Belgrade"), + ("Europe/Berlin", "(GMT+0200) Europe/Berlin"), + ("Europe/Bratislava", "(GMT+0200) Europe/Bratislava"), + ("Europe/Brussels", "(GMT+0200) Europe/Brussels"), + ("Europe/Bucharest", "(GMT+0300) Europe/Bucharest"), + ("Europe/Budapest", "(GMT+0200) Europe/Budapest"), + ("Europe/Busingen", "(GMT+0200) Europe/Busingen"), + ("Europe/Chisinau", "(GMT+0300) Europe/Chisinau"), + ("Europe/Copenhagen", "(GMT+0200) Europe/Copenhagen"), + ("Europe/Dublin", "(GMT+0100) Europe/Dublin"), + ("Europe/Gibraltar", "(GMT+0200) Europe/Gibraltar"), + ("Europe/Guernsey", "(GMT+0100) Europe/Guernsey"), + ("Europe/Helsinki", "(GMT+0300) Europe/Helsinki"), + ("Europe/Isle_of_Man", "(GMT+0100) Europe/Isle_of_Man"), + ("Europe/Istanbul", "(GMT+0300) Europe/Istanbul"), + ("Europe/Jersey", "(GMT+0100) Europe/Jersey"), + ("Europe/Kaliningrad", "(GMT+0200) Europe/Kaliningrad"), + ("Europe/Kiev", "(GMT+0300) Europe/Kiev"), + ("Europe/Kirov", "(GMT+0300) Europe/Kirov"), + ("Europe/Lisbon", "(GMT+0100) Europe/Lisbon"), + ("Europe/Ljubljana", "(GMT+0200) Europe/Ljubljana"), + ("Europe/London", "(GMT+0100) Europe/London"), + ("Europe/Luxembourg", "(GMT+0200) Europe/Luxembourg"), + ("Europe/Madrid", "(GMT+0200) Europe/Madrid"), + ("Europe/Malta", "(GMT+0200) Europe/Malta"), + ("Europe/Mariehamn", "(GMT+0300) Europe/Mariehamn"), + ("Europe/Minsk", "(GMT+0300) Europe/Minsk"), + ("Europe/Monaco", "(GMT+0200) Europe/Monaco"), + ("Europe/Moscow", "(GMT+0300) Europe/Moscow"), + ("Europe/Oslo", "(GMT+0200) Europe/Oslo"), + ("Europe/Paris", "(GMT+0200) Europe/Paris"), + ("Europe/Podgorica", "(GMT+0200) Europe/Podgorica"), + ("Europe/Prague", "(GMT+0200) Europe/Prague"), + ("Europe/Riga", "(GMT+0300) Europe/Riga"), + ("Europe/Rome", "(GMT+0200) Europe/Rome"), + ("Europe/Samara", "(GMT+0400) Europe/Samara"), + ("Europe/San_Marino", "(GMT+0200) Europe/San_Marino"), + ("Europe/Sarajevo", "(GMT+0200) Europe/Sarajevo"), + ("Europe/Saratov", "(GMT+0400) Europe/Saratov"), + ("Europe/Simferopol", "(GMT+0300) Europe/Simferopol"), + ("Europe/Skopje", "(GMT+0200) 
Europe/Skopje"), + ("Europe/Sofia", "(GMT+0300) Europe/Sofia"), + ("Europe/Stockholm", "(GMT+0200) Europe/Stockholm"), + ("Europe/Tallinn", "(GMT+0300) Europe/Tallinn"), + ("Europe/Tirane", "(GMT+0200) Europe/Tirane"), + ("Europe/Ulyanovsk", "(GMT+0400) Europe/Ulyanovsk"), + ("Europe/Uzhgorod", "(GMT+0300) Europe/Uzhgorod"), + ("Europe/Vaduz", "(GMT+0200) Europe/Vaduz"), + ("Europe/Vatican", "(GMT+0200) Europe/Vatican"), + ("Europe/Vienna", "(GMT+0200) Europe/Vienna"), + ("Europe/Vilnius", "(GMT+0300) Europe/Vilnius"), + ("Europe/Volgograd", "(GMT+0400) Europe/Volgograd"), + ("Europe/Warsaw", "(GMT+0200) Europe/Warsaw"), + ("Europe/Zagreb", "(GMT+0200) Europe/Zagreb"), + ("Europe/Zaporozhye", "(GMT+0300) Europe/Zaporozhye"), + ("Europe/Zurich", "(GMT+0200) Europe/Zurich"), + ("GMT", "(GMT+0000) GMT"), + ("Indian/Antananarivo", "(GMT+0300) Indian/Antananarivo"), + ("Indian/Chagos", "(GMT+0600) Indian/Chagos"), + ("Indian/Christmas", "(GMT+0700) Indian/Christmas"), + ("Indian/Cocos", "(GMT+0630) Indian/Cocos"), + ("Indian/Comoro", "(GMT+0300) Indian/Comoro"), + ("Indian/Kerguelen", "(GMT+0500) Indian/Kerguelen"), + ("Indian/Mahe", "(GMT+0400) Indian/Mahe"), + ("Indian/Maldives", "(GMT+0500) Indian/Maldives"), + ("Indian/Mauritius", "(GMT+0400) Indian/Mauritius"), + ("Indian/Mayotte", "(GMT+0300) Indian/Mayotte"), + ("Indian/Reunion", "(GMT+0400) Indian/Reunion"), + ("Pacific/Apia", "(GMT+1300) Pacific/Apia"), + ("Pacific/Auckland", "(GMT+1200) Pacific/Auckland"), + ("Pacific/Bougainville", "(GMT+1100) Pacific/Bougainville"), + ("Pacific/Chatham", "(GMT+1245) Pacific/Chatham"), + ("Pacific/Chuuk", "(GMT+1000) Pacific/Chuuk"), + ("Pacific/Easter", "(GMT-0600) Pacific/Easter"), + ("Pacific/Efate", "(GMT+1100) Pacific/Efate"), + ("Pacific/Enderbury", "(GMT+1300) Pacific/Enderbury"), + ("Pacific/Fakaofo", "(GMT+1300) Pacific/Fakaofo"), + ("Pacific/Fiji", "(GMT+1200) Pacific/Fiji"), + ("Pacific/Funafuti", "(GMT+1200) Pacific/Funafuti"), + ("Pacific/Galapagos", "(GMT-0600) Pacific/Galapagos"), + ("Pacific/Gambier", "(GMT-0900) Pacific/Gambier"), + ("Pacific/Guadalcanal", "(GMT+1100) Pacific/Guadalcanal"), + ("Pacific/Guam", "(GMT+1000) Pacific/Guam"), + ("Pacific/Honolulu", "(GMT-1000) Pacific/Honolulu"), + ("Pacific/Kiritimati", "(GMT+1400) Pacific/Kiritimati"), + ("Pacific/Kosrae", "(GMT+1100) Pacific/Kosrae"), + ("Pacific/Kwajalein", "(GMT+1200) Pacific/Kwajalein"), + ("Pacific/Majuro", "(GMT+1200) Pacific/Majuro"), + ("Pacific/Marquesas", "(GMT-0930) Pacific/Marquesas"), + ("Pacific/Midway", "(GMT-1100) Pacific/Midway"), + ("Pacific/Nauru", "(GMT+1200) Pacific/Nauru"), + ("Pacific/Niue", "(GMT-1100) Pacific/Niue"), + ("Pacific/Norfolk", "(GMT+1100) Pacific/Norfolk"), + ("Pacific/Noumea", "(GMT+1100) Pacific/Noumea"), + ("Pacific/Pago_Pago", "(GMT-1100) Pacific/Pago_Pago"), + ("Pacific/Palau", "(GMT+0900) Pacific/Palau"), + ("Pacific/Pitcairn", "(GMT-0800) Pacific/Pitcairn"), + ("Pacific/Pohnpei", "(GMT+1100) Pacific/Pohnpei"), + ("Pacific/Port_Moresby", "(GMT+1000) Pacific/Port_Moresby"), + ("Pacific/Rarotonga", "(GMT-1000) Pacific/Rarotonga"), + ("Pacific/Saipan", "(GMT+1000) Pacific/Saipan"), + ("Pacific/Tahiti", "(GMT-1000) Pacific/Tahiti"), + ("Pacific/Tarawa", "(GMT+1200) Pacific/Tarawa"), + ("Pacific/Tongatapu", "(GMT+1300) Pacific/Tongatapu"), + ("Pacific/Wake", "(GMT+1200) Pacific/Wake"), + ("Pacific/Wallis", "(GMT+1200) Pacific/Wallis"), + ("US/Alaska", "(GMT-0800) US/Alaska"), + ("US/Arizona", "(GMT-0700) US/Arizona"), + ("US/Central", "(GMT-0500) US/Central"), + ("US/Eastern", 
"(GMT-0400) US/Eastern"), + ("US/Hawaii", "(GMT-1000) US/Hawaii"), + ("US/Mountain", "(GMT-0600) US/Mountain"), + ("US/Pacific", "(GMT-0700) US/Pacific"), + ("UTC", "(GMT+0000) UTC"), + ], + default="America/New_York", + max_length=100, + ), ), ] diff --git a/apps/profile/models.py b/apps/profile/models.py index 2482c0166..353619038 100644 --- a/apps/profile/models.py +++ b/apps/profile/models.py @@ -41,44 +41,45 @@ from zebra.signals import zebra_webhook_charge_succeeded from zebra.signals import zebra_webhook_charge_refunded from zebra.signals import zebra_webhook_checkout_session_completed + class Profile(models.Model): - user = models.OneToOneField(User, unique=True, related_name="profile", on_delete=models.CASCADE) - is_premium = models.BooleanField(default=False) - is_archive = models.BooleanField(default=False, blank=True, null=True) - is_pro = models.BooleanField(default=False, blank=True, null=True) - premium_expire = models.DateTimeField(blank=True, null=True) - send_emails = models.BooleanField(default=True) - preferences = models.TextField(default="{}") - view_settings = models.TextField(default="{}") + user = models.OneToOneField(User, unique=True, related_name="profile", on_delete=models.CASCADE) + is_premium = models.BooleanField(default=False) + is_archive = models.BooleanField(default=False, blank=True, null=True) + is_pro = models.BooleanField(default=False, blank=True, null=True) + premium_expire = models.DateTimeField(blank=True, null=True) + send_emails = models.BooleanField(default=True) + preferences = models.TextField(default="{}") + view_settings = models.TextField(default="{}") collapsed_folders = models.TextField(default="[]") - feed_pane_size = models.IntegerField(default=282) - days_of_unread = models.IntegerField(default=settings.DAYS_OF_UNREAD, blank=True, null=True) + feed_pane_size = models.IntegerField(default=282) + days_of_unread = models.IntegerField(default=settings.DAYS_OF_UNREAD, blank=True, null=True) tutorial_finished = models.BooleanField(default=False) hide_getting_started = models.BooleanField(default=False, null=True, blank=True) - has_setup_feeds = models.BooleanField(default=False, null=True, blank=True) + has_setup_feeds = models.BooleanField(default=False, null=True, blank=True) has_found_friends = models.BooleanField(default=False, null=True, blank=True) has_trained_intelligence = models.BooleanField(default=False, null=True, blank=True) - last_seen_on = models.DateTimeField(default=datetime.datetime.now) - last_seen_ip = models.CharField(max_length=50, blank=True, null=True) - dashboard_date = models.DateTimeField(default=datetime.datetime.now) - timezone = TimeZoneField(default="America/New_York") - secret_token = models.CharField(max_length=12, blank=True, null=True) - stripe_4_digits = models.CharField(max_length=4, blank=True, null=True) - stripe_id = models.CharField(max_length=24, blank=True, null=True) - paypal_sub_id = models.CharField(max_length=24, blank=True, null=True) + last_seen_on = models.DateTimeField(default=datetime.datetime.now) + last_seen_ip = models.CharField(max_length=50, blank=True, null=True) + dashboard_date = models.DateTimeField(default=datetime.datetime.now) + timezone = TimeZoneField(default="America/New_York") + secret_token = models.CharField(max_length=12, blank=True, null=True) + stripe_4_digits = models.CharField(max_length=4, blank=True, null=True) + stripe_id = models.CharField(max_length=24, blank=True, null=True) + paypal_sub_id = models.CharField(max_length=24, blank=True, null=True) # 
paypal_payer_id = models.CharField(max_length=24, blank=True, null=True) - premium_renewal = models.BooleanField(default=False, blank=True, null=True) - active_provider = models.CharField(max_length=24, blank=True, null=True) - + premium_renewal = models.BooleanField(default=False, blank=True, null=True) + active_provider = models.CharField(max_length=24, blank=True, null=True) + def __str__(self): return "%s <%s>%s%s%s" % ( - self.user, - self.user.email, - " (Premium)" if self.is_premium and not self.is_archive and not self.is_pro else "", + self.user, + self.user.email, + " (Premium)" if self.is_premium and not self.is_archive and not self.is_pro else "", " (Premium ARCHIVE)" if self.is_archive and not self.is_pro else "", " (Premium PRO)" if self.is_pro else "", ) - + @classmethod def plan_to_stripe_price(cls, plan): price = None @@ -93,7 +94,7 @@ class Profile(models.Model): if settings.DEBUG: price = "price_0KK5twwdsmP8XBlasifbX56Z" return price - + @classmethod def plan_to_paypal_plan_id(cls, plan): price = None @@ -118,13 +119,13 @@ class Profile(models.Model): return datetime.datetime.utcnow() - datetime.timedelta(days=days_of_unread) if self.is_premium or force_premium: return datetime.datetime.utcnow() - datetime.timedelta(days=settings.DAYS_OF_UNREAD) - + return datetime.datetime.utcnow() - datetime.timedelta(days=settings.DAYS_OF_UNREAD_FREE) @property def unread_cutoff_premium(self): return datetime.datetime.utcnow() - datetime.timedelta(days=settings.DAYS_OF_UNREAD) - + @property def days_of_story_hashes(self): if self.is_archive: @@ -133,19 +134,19 @@ class Profile(models.Model): def canonical(self): return { - 'is_premium': self.is_premium, - 'is_archive': self.is_archive, - 'is_pro': self.is_pro, - 'premium_expire': int(self.premium_expire.strftime('%s')) if self.premium_expire else 0, - 'preferences': json.decode(self.preferences), - 'tutorial_finished': self.tutorial_finished, - 'hide_getting_started': self.hide_getting_started, - 'has_setup_feeds': self.has_setup_feeds, - 'has_found_friends': self.has_found_friends, - 'has_trained_intelligence': self.has_trained_intelligence, - 'dashboard_date': self.dashboard_date + "is_premium": self.is_premium, + "is_archive": self.is_archive, + "is_pro": self.is_pro, + "premium_expire": int(self.premium_expire.strftime("%s")) if self.premium_expire else 0, + "preferences": json.decode(self.preferences), + "tutorial_finished": self.tutorial_finished, + "hide_getting_started": self.hide_getting_started, + "has_setup_feeds": self.has_setup_feeds, + "has_found_friends": self.has_found_friends, + "has_trained_intelligence": self.has_trained_intelligence, + "dashboard_date": self.dashboard_date, } - + def save(self, *args, **kwargs): if not self.secret_token: self.secret_token = generate_secret_token(self.user.username, 12) @@ -153,26 +154,29 @@ class Profile(models.Model): super(Profile, self).save(*args, **kwargs) except DatabaseError as e: print(f" ---> Profile not saved: {e}") - + def delete_user(self, confirm=False, fast=False): if not confirm: print(" ---> You must pass confirm=True to delete this user.") return - + logging.user(self.user, "Deleting user: %s / %s" % (self.user.email, self.user.profile.last_seen_ip)) try: if not fast: self.cancel_premium() except: logging.user(self.user, "~BR~SK~FWError cancelling premium renewal for: %s" % self.user.username) - + from apps.social.models import MSocialProfile, MSharedStory, MSocialSubscription from apps.social.models import MActivity, MInteraction + try: social_profile = 
MSocialProfile.objects.get(user_id=self.user.pk) - logging.user(self.user, "Unfollowing %s followings and %s followers" % - (social_profile.following_count, - social_profile.follower_count)) + logging.user( + self.user, + "Unfollowing %s followings and %s followers" + % (social_profile.following_count, social_profile.follower_count), + ) for follow in social_profile.following_user_ids: social_profile.unfollow_user(follow) for follower in social_profile.follower_user_ids: @@ -182,7 +186,7 @@ class Profile(models.Model): except (MSocialProfile.DoesNotExist, IndexError): logging.user(self.user, " ***> No social profile found. S'ok, moving on.") pass - + shared_stories = MSharedStory.objects.filter(user_id=self.user.pk) logging.user(self.user, "Deleting %s shared stories" % shared_stories.count()) for story in shared_stories: @@ -193,54 +197,56 @@ class Profile(models.Model): except MStory.DoesNotExist: pass story.delete() - + subscriptions = MSocialSubscription.objects.filter(subscription_user_id=self.user.pk) logging.user(self.user, "Deleting %s social subscriptions" % subscriptions.count()) subscriptions.delete() - + interactions = MInteraction.objects.filter(user_id=self.user.pk) logging.user(self.user, "Deleting %s interactions for user." % interactions.count()) interactions.delete() - + interactions = MInteraction.objects.filter(with_user_id=self.user.pk) logging.user(self.user, "Deleting %s interactions with user." % interactions.count()) interactions.delete() - + activities = MActivity.objects.filter(user_id=self.user.pk) logging.user(self.user, "Deleting %s activities for user." % activities.count()) activities.delete() - + activities = MActivity.objects.filter(with_user_id=self.user.pk) logging.user(self.user, "Deleting %s activities with user." % activities.count()) activities.delete() - + starred_stories = MStarredStory.objects.filter(user_id=self.user.pk) logging.user(self.user, "Deleting %s starred stories." % starred_stories.count()) starred_stories.delete() - + paypal_ids = PaypalIds.objects.filter(user=self.user) logging.user(self.user, "Deleting %s PayPal IDs." % paypal_ids.count()) paypal_ids.delete() - + stripe_ids = StripeIds.objects.filter(user=self.user) logging.user(self.user, "Deleting %s Stripe IDs." % stripe_ids.count()) stripe_ids.delete() - + logging.user(self.user, "Deleting user: %s" % self.user) self.user.delete() - + def activate_premium(self, never_expire=False): from apps.profile.tasks import EmailNewPremium - + EmailNewPremium.delay(user_id=self.user.pk) subs = UserSubscription.objects.filter(user=self.user) if subs.count() > 5000: logging.user(self.user, "~FR~SK~FW~SBWARNING! ~FR%s subscriptions~SN!" % (subs.count())) - mail_admins(f"WARNING! {self.user.username} has {subs.count()} subscriptions", - f"{self.user.username} has {subs.count()} subscriptions and just upgraded to premium. They'll need a refund: {self.user.profile.paypal_sub_id} {self.user.profile.stripe_id} {self.user.email}") + mail_admins( + f"WARNING! {self.user.username} has {subs.count()} subscriptions", + f"{self.user.username} has {subs.count()} subscriptions and just upgraded to premium. 
They'll need a refund: {self.user.profile.paypal_sub_id} {self.user.profile.stripe_id} {self.user.email}", + ) return False - + was_premium = self.is_premium self.is_premium = True self.is_archive = False @@ -248,48 +254,57 @@ class Profile(models.Model): self.save() self.user.is_active = True self.user.save() - + # Only auto-enable every feed if a free user is moving to premium if not was_premium: for sub in subs: - if sub.active: continue + if sub.active: + continue sub.active = True try: sub.save() except (IntegrityError, Feed.DoesNotExist): pass - + try: scheduled_feeds = [sub.feed.pk for sub in subs] except Feed.DoesNotExist: scheduled_feeds = [] - logging.user(self.user, "~SN~FMTasking the scheduling immediate premium setup of ~SB%s~SN feeds..." % - len(scheduled_feeds)) + logging.user( + self.user, + "~SN~FMTasking the scheduling immediate premium setup of ~SB%s~SN feeds..." + % len(scheduled_feeds), + ) SchedulePremiumSetup.apply_async(kwargs=dict(feed_ids=scheduled_feeds)) - + UserSubscription.queue_new_feeds(self.user) - + # self.setup_premium_history() # Let's not call this unnecessarily - + if never_expire: self.premium_expire = None self.save() if not was_premium: - logging.user(self.user, "~BY~SK~FW~SBNEW PREMIUM ACCOUNT! WOOHOO!!! ~FR%s subscriptions~SN!" % (subs.count())) - + logging.user( + self.user, + "~BY~SK~FW~SBNEW PREMIUM ACCOUNT! WOOHOO!!! ~FR%s subscriptions~SN!" % (subs.count()), + ) + return True - + def activate_archive(self, never_expire=False): UserSubscription.schedule_fetch_archive_feeds_for_user(self.user.pk) - + subs = UserSubscription.objects.filter(user=self.user) if subs.count() > 2000: logging.user(self.user, "~FR~SK~FW~SBWARNING! ~FR%s subscriptions~SN!" % (subs.count())) - mail_admins(f"WARNING! {self.user.username} has {subs.count()} subscriptions", - f"{self.user.username} has {subs.count()} subscriptions and just upgraded to archive. They'll need a refund: {self.user.profile.paypal_sub_id} {self.user.profile.stripe_id} {self.user.email}") + mail_admins( + f"WARNING! {self.user.username} has {subs.count()} subscriptions", + f"{self.user.username} has {subs.count()} subscriptions and just upgraded to archive. They'll need a refund: {self.user.profile.paypal_sub_id} {self.user.profile.stripe_id} {self.user.email}", + ) return False - + was_premium = self.is_premium was_archive = self.is_archive was_pro = self.is_pro @@ -298,52 +313,62 @@ class Profile(models.Model): self.save() self.user.is_active = True self.user.save() - + # Only auto-enable every feed if a free user is moving to premium if not was_premium: for sub in subs: - if sub.active: continue + if sub.active: + continue sub.active = True try: sub.save() except (IntegrityError, Feed.DoesNotExist): pass - + # Count subscribers to turn on archive_subscribers counts, then show that count to users # on the paypal_archive_return page. try: scheduled_feeds = [sub.feed.pk for sub in subs] except Feed.DoesNotExist: scheduled_feeds = [] - logging.user(self.user, "~SN~FMTasking the scheduling immediate premium setup of ~SB%s~SN feeds..." % - len(scheduled_feeds)) + logging.user( + self.user, + "~SN~FMTasking the scheduling immediate premium setup of ~SB%s~SN feeds..." + % len(scheduled_feeds), + ) SchedulePremiumSetup.apply_async(kwargs=dict(feed_ids=scheduled_feeds)) UserSubscription.queue_new_feeds(self.user) - + self.setup_premium_history() - + if never_expire: self.premium_expire = None self.save() if not was_archive: - logging.user(self.user, "~BY~SK~FW~SBNEW PREMIUM ~BBARCHIVE~BY ACCOUNT! 
WOOHOO!!! ~FR%s subscriptions~SN!" % (subs.count())) - + logging.user( + self.user, + "~BY~SK~FW~SBNEW PREMIUM ~BBARCHIVE~BY ACCOUNT! WOOHOO!!! ~FR%s subscriptions~SN!" + % (subs.count()), + ) + return True - + def activate_pro(self, never_expire=False): from apps.profile.tasks import EmailNewPremiumPro - + EmailNewPremiumPro.delay(user_id=self.user.pk) - + subs = UserSubscription.objects.filter(user=self.user) if subs.count() > 1000: logging.user(self.user, "~FR~SK~FW~SBWARNING! ~FR%s subscriptions~SN!" % (subs.count())) - mail_admins(f"WARNING! {self.user.username} has {subs.count()} subscriptions", - f"{self.user.username} has {subs.count()} subscriptions and just upgraded to pro. They'll need a refund: {self.user.profile.paypal_sub_id} {self.user.profile.stripe_id} {self.user.email}") + mail_admins( + f"WARNING! {self.user.username} has {subs.count()} subscriptions", + f"{self.user.username} has {subs.count()} subscriptions and just upgraded to pro. They'll need a refund: {self.user.profile.paypal_sub_id} {self.user.profile.stripe_id} {self.user.email}", + ) return False - + was_premium = self.is_premium was_archive = self.is_archive was_pro = self.is_pro @@ -353,44 +378,52 @@ class Profile(models.Model): self.save() self.user.is_active = True self.user.save() - + # Only auto-enable every feed if a free user is moving to premium if not was_premium: for sub in subs: - if sub.active: continue + if sub.active: + continue sub.active = True try: sub.save() except (IntegrityError, Feed.DoesNotExist): pass - + try: scheduled_feeds = [sub.feed.pk for sub in subs] except Feed.DoesNotExist: scheduled_feeds = [] - logging.user(self.user, "~SN~FMTasking the scheduling immediate premium setup of ~SB%s~SN feeds..." % - len(scheduled_feeds)) + logging.user( + self.user, + "~SN~FMTasking the scheduling immediate premium setup of ~SB%s~SN feeds..." + % len(scheduled_feeds), + ) SchedulePremiumSetup.apply_async(kwargs=dict(feed_ids=scheduled_feeds)) - + UserSubscription.queue_new_feeds(self.user) - + self.setup_premium_history() - + if never_expire: self.premium_expire = None self.save() if not was_pro: - logging.user(self.user, "~BY~SK~FW~SBNEW PREMIUM ~BGPRO~BY ACCOUNT! WOOHOO!!! ~FR%s subscriptions~SN!" % (subs.count())) - + logging.user( + self.user, + "~BY~SK~FW~SBNEW PREMIUM ~BGPRO~BY ACCOUNT! WOOHOO!!! ~FR%s subscriptions~SN!" + % (subs.count()), + ) + return True - + def deactivate_premium(self): self.is_premium = False self.is_pro = False self.is_archive = False self.save() - + subs = UserSubscription.objects.filter(user=self.user) for sub in subs: sub.active = False @@ -400,57 +433,61 @@ class Profile(models.Model): # sub.feed.setup_feed_for_premium_subscribers() except (IntegrityError, Feed.DoesNotExist): pass - - logging.user(self.user, "~BY~FW~SBBOO! Deactivating premium account: ~FR%s subscriptions~SN!" % (subs.count())) - + + logging.user( + self.user, "~BY~FW~SBBOO! Deactivating premium account: ~FR%s subscriptions~SN!" 
% (subs.count()) + ) + def activate_free(self): if self.user.is_active: return - + self.user.is_active = True self.user.save() self.send_new_user_queue_email() - + def paypal_change_billing_details_url(self): return "https://paypal.com" - + def switch_stripe_subscription(self, plan): stripe_customer = self.stripe_customer() if not stripe_customer: return - + stripe_subscriptions = stripe.Subscription.list(customer=stripe_customer.id).data existing_subscription = None for subscription in stripe_subscriptions: if subscription.plan.active: existing_subscription = subscription break - if not existing_subscription: + if not existing_subscription: return try: stripe.Subscription.modify( existing_subscription.id, cancel_at_period_end=False, - proration_behavior='always_invoice', - items=[{ - 'id': existing_subscription['items']['data'][0].id, - 'price': Profile.plan_to_stripe_price(plan) - }] + proration_behavior="always_invoice", + items=[ + { + "id": existing_subscription["items"]["data"][0].id, + "price": Profile.plan_to_stripe_price(plan), + } + ], ) except stripe.error.CardError as e: logging.user(self.user, f"~FRStripe switch subscription failed: ~SB{e}") return - + self.setup_premium_history() - + return True def cancel_and_prorate_existing_paypal_subscriptions(self, data): paypal_api = self.paypal_api() if not paypal_api: return - + canceled_paypal_sub_id = self.cancel_premium_paypal(cancel_older_subscriptions_only=True) if not canceled_paypal_sub_id: logging.user(self.user, f"~FRCould not cancel and prorate older paypal premium: {data}") @@ -463,36 +500,43 @@ class Profile(models.Model): paypal_api = self.paypal_api() if not paypal_api: return - paypal_return = reverse('paypal-return') + paypal_return = reverse("paypal-return") if plan == "archive": - paypal_return = reverse('paypal-archive-return') + paypal_return = reverse("paypal-archive-return") try: application_context = { - 'shipping_preference': 'NO_SHIPPING', - 'user_action': 'SUBSCRIBE_NOW', + "shipping_preference": "NO_SHIPPING", + "user_action": "SUBSCRIBE_NOW", } if settings.DEBUG: - application_context['return_url'] = f"https://a6d3-161-77-224-226.ngrok.io{paypal_return}" + application_context["return_url"] = f"https://a6d3-161-77-224-226.ngrok.io{paypal_return}" else: - application_context['return_url'] = f"https://{Site.objects.get_current().domain}{paypal_return}" - paypal_subscription = paypal_api.post(f'/v1/billing/subscriptions', { - 'plan_id': Profile.plan_to_paypal_plan_id(plan), - 'custom_id': self.user.pk, - 'application_context': application_context, - }) + application_context[ + "return_url" + ] = f"https://{Site.objects.get_current().domain}{paypal_return}" + paypal_subscription = paypal_api.post( + f"/v1/billing/subscriptions", + { + "plan_id": Profile.plan_to_paypal_plan_id(plan), + "custom_id": self.user.pk, + "application_context": application_context, + }, + ) except paypalrestsdk.ResourceNotFound as e: - logging.user(self.user, f"~FRCouldn't create paypal subscription: {self.paypal_sub_id} {plan}: {e}") + logging.user( + self.user, f"~FRCouldn't create paypal subscription: {self.paypal_sub_id} {plan}: {e}" + ) paypal_subscription = None if not paypal_subscription: return logging.user(self.user, paypal_subscription) - - for link in paypal_subscription.get('links', []): - if link['rel'] == 'approve': - return link['href'] - + + for link in paypal_subscription.get("links", []): + if link["rel"] == "approve": + return link["href"] + logging.user(self.user, f"~FRFailed to switch paypal subscription: 
~FC{paypal_subscription}") def store_paypal_sub_id(self, paypal_sub_id, skip_save_primary=False): @@ -503,12 +547,12 @@ class Profile(models.Model): if not skip_save_primary or not self.paypal_sub_id: self.paypal_sub_id = paypal_sub_id self.save() - + seen_paypal_ids = set(p.paypal_sub_id for p in self.user.paypal_ids.all()) if paypal_sub_id in seen_paypal_ids: logging.user(self.user, f"~FBPaypal sub seen before, ignoring: {paypal_sub_id}") return - + self.user.paypal_ids.create(paypal_sub_id=paypal_sub_id) logging.user(self.user, f"~FBPaypal sub ~SBadded~SN: ~SB{paypal_sub_id}") @@ -519,7 +563,7 @@ class Profile(models.Model): active_plan = None premium_renewal = False active_provider = None - + # Find modern Paypal payments self.retrieve_paypal_ids() if self.paypal_sub_id: @@ -534,76 +578,92 @@ class Profile(models.Model): seen_payments.add(payment.payment_date.date()) total_paypal_payments += 1 if deleted_paypal_payments > 0: - logging.user(self.user, f"~BY~SN~FRDeleting~FW duplicate paypal history: ~SB{deleted_paypal_payments} payments") + logging.user( + self.user, + f"~BY~SN~FRDeleting~FW duplicate paypal history: ~SB{deleted_paypal_payments} payments", + ) paypal_api = self.paypal_api() for paypal_id_model in self.user.paypal_ids.all(): paypal_id = paypal_id_model.paypal_sub_id try: - paypal_subscription = paypal_api.get(f'/v1/billing/subscriptions/{paypal_id}?fields=plan') + paypal_subscription = paypal_api.get(f"/v1/billing/subscriptions/{paypal_id}?fields=plan") except paypalrestsdk.ResourceNotFound: logging.user(self.user, f"~FRCouldn't find paypal payments: {paypal_id}") paypal_subscription = None if paypal_subscription: - if paypal_subscription['status'] in ["APPROVAL_PENDING", "APPROVED", "ACTIVE"]: - active_plan = paypal_subscription.get('plan_id', None) + if paypal_subscription["status"] in ["APPROVAL_PENDING", "APPROVED", "ACTIVE"]: + active_plan = paypal_subscription.get("plan_id", None) if not active_plan: - active_plan = paypal_subscription['plan']['name'] + active_plan = paypal_subscription["plan"]["name"] active_provider = "paypal" premium_renewal = True start_date = datetime.datetime(2009, 1, 1).strftime("%Y-%m-%dT%H:%M:%S.000Z") end_date = datetime.datetime.now().strftime("%Y-%m-%dT%H:%M:%S.000Z") try: - transactions = paypal_api.get(f"/v1/billing/subscriptions/{paypal_id}/transactions?start_time={start_date}&end_time={end_date}") + transactions = paypal_api.get( + f"/v1/billing/subscriptions/{paypal_id}/transactions?start_time={start_date}&end_time={end_date}" + ) except paypalrestsdk.exceptions.ResourceNotFound: transactions = None - if not transactions or 'transactions' not in transactions: + if not transactions or "transactions" not in transactions: logging.user(self.user, f"~FRCouldn't find paypal transactions: ~SB{paypal_id}") continue - for transaction in transactions['transactions']: - created = dateutil.parser.parse(transaction['time']).date() - if transaction['status'] not in ['COMPLETED', 'PARTIALLY_REFUNDED', 'REFUNDED']: continue - if created in seen_payments: continue + for transaction in transactions["transactions"]: + created = dateutil.parser.parse(transaction["time"]).date() + if transaction["status"] not in ["COMPLETED", "PARTIALLY_REFUNDED", "REFUNDED"]: + continue + if created in seen_payments: + continue seen_payments.add(created) total_paypal_payments += 1 refunded = None - if transaction['status'] in ['PARTIALLY_REFUNDED', 'REFUNDED']: + if transaction["status"] in ["PARTIALLY_REFUNDED", "REFUNDED"]: refunded = True - 
PaymentHistory.objects.get_or_create(user=self.user, - payment_date=created, - payment_amount=int(float(transaction['amount_with_breakdown']['gross_amount']['value'])), - payment_provider='paypal', - refunded=refunded) + PaymentHistory.objects.get_or_create( + user=self.user, + payment_date=created, + payment_amount=int( + float(transaction["amount_with_breakdown"]["gross_amount"]["value"]) + ), + payment_provider="paypal", + refunded=refunded, + ) - ipns = PayPalIPN.objects.filter(Q(custom=self.user.username) | - Q(payer_email=self.user.email) | - Q(custom=self.user.pk)).order_by('-payment_date') + ipns = PayPalIPN.objects.filter( + Q(custom=self.user.username) | Q(payer_email=self.user.email) | Q(custom=self.user.pk) + ).order_by("-payment_date") for transaction in ipns: if transaction.txn_type != "subscr_payment": continue created = transaction.payment_date.date() - if created in seen_payments: + if created in seen_payments: continue seen_payments.add(created) total_paypal_payments += 1 - PaymentHistory.objects.get_or_create(user=self.user, - payment_date=created, - payment_amount=int(transaction.payment_gross), - payment_provider='paypal') + PaymentHistory.objects.get_or_create( + user=self.user, + payment_date=created, + payment_amount=int(transaction.payment_gross), + payment_provider="paypal", + ) else: logging.user(self.user, "~FBNo Paypal payments") - + # Record Stripe payments - existing_stripe_history = PaymentHistory.objects.filter(user=self.user, - payment_provider="stripe") + existing_stripe_history = PaymentHistory.objects.filter(user=self.user, payment_provider="stripe") if existing_stripe_history.count(): - logging.user(self.user, "~BY~SN~FRDeleting~FW existing stripe history: ~SB%s payments" % existing_stripe_history.count()) + logging.user( + self.user, + "~BY~SN~FRDeleting~FW existing stripe history: ~SB%s payments" + % existing_stripe_history.count(), + ) existing_stripe_history.delete() - + if self.stripe_id: self.retrieve_stripe_ids() - + stripe.api_key = settings.STRIPE_SECRET seen_payments = set() for stripe_id_model in self.user.stripe_ids.all(): @@ -611,7 +671,7 @@ class Profile(models.Model): stripe_customer = stripe.Customer.retrieve(stripe_id) stripe_payments = stripe.Charge.list(customer=stripe_customer.id).data stripe_subscriptions = stripe.Subscription.list(customer=stripe_customer.id).data - + for subscription in stripe_subscriptions: if subscription.plan.active: active_plan = subscription.plan.id @@ -619,21 +679,25 @@ class Profile(models.Model): if not subscription.cancel_at: premium_renewal = True break - + for payment in stripe_payments: created = datetime.datetime.fromtimestamp(payment.created) - if payment.status == 'failed': continue - if created in seen_payments: continue + if payment.status == "failed": + continue + if created in seen_payments: + continue seen_payments.add(created) total_stripe_payments += 1 refunded = None if payment.refunded: refunded = True - PaymentHistory.objects.get_or_create(user=self.user, - payment_date=created, - payment_amount=payment.amount / 100.0, - payment_provider='stripe', - refunded=refunded) + PaymentHistory.objects.get_or_create( + user=self.user, + payment_date=created, + payment_amount=payment.amount / 100.0, + payment_provider="stripe", + refunded=refunded, + ) else: logging.user(self.user, "~FBNo Stripe payments") @@ -655,14 +719,17 @@ class Profile(models.Model): recent_payments_count += 1 if not oldest_recent_payment_date or payment.payment_date < oldest_recent_payment_date: oldest_recent_payment_date = 
payment.payment_date - + if oldest_recent_payment_date: - new_premium_expire = (oldest_recent_payment_date + - datetime.timedelta(days=365*recent_payments_count)) + new_premium_expire = oldest_recent_payment_date + datetime.timedelta( + days=365 * recent_payments_count + ) # Only move premium expire forward, never earlier. Also set expiration if not premium. - if (force_expiration or - (set_premium_expire and not self.premium_expire and not free_lifetime_premium) or - (self.premium_expire and new_premium_expire > self.premium_expire)): + if ( + force_expiration + or (set_premium_expire and not self.premium_expire and not free_lifetime_premium) + or (self.premium_expire and new_premium_expire > self.premium_expire) + ): self.premium_expire = new_premium_expire self.save() @@ -670,28 +737,43 @@ class Profile(models.Model): active_sub_id = self.stripe_id if active_provider == "paypal": active_sub_id = self.paypal_sub_id - logging.user(self.user, "~FCTurning ~SB~%s~SN~FC premium renewal (%s: %s)" % ("FRoff" if not premium_renewal else "FBon", active_provider, active_sub_id)) + logging.user( + self.user, + "~FCTurning ~SB~%s~SN~FC premium renewal (%s: %s)" + % ("FRoff" if not premium_renewal else "FBon", active_provider, active_sub_id), + ) self.premium_renewal = premium_renewal self.active_provider = active_provider self.save() - - logging.user(self.user, "~BY~SN~FWFound ~SB~FB%s paypal~FW~SN and ~SB~FC%s stripe~FW~SN payments (~SB%s payments expire: ~SN~FB%s~FW)" % ( - total_paypal_payments, total_stripe_payments, len(payment_history), self.premium_expire)) - if (set_premium_expire and not self.is_premium and - self.premium_expire > datetime.datetime.now()): + logging.user( + self.user, + "~BY~SN~FWFound ~SB~FB%s paypal~FW~SN and ~SB~FC%s stripe~FW~SN payments (~SB%s payments expire: ~SN~FB%s~FW)" + % (total_paypal_payments, total_stripe_payments, len(payment_history), self.premium_expire), + ) + + if set_premium_expire and not self.is_premium and self.premium_expire > datetime.datetime.now(): self.activate_premium() - - logging.user(self.user, "~FCActive plan: %s, stripe/paypal: %s/%s, is_archive? %s" % (active_plan, Profile.plan_to_stripe_price('archive'), Profile.plan_to_paypal_plan_id('archive'), self.is_archive)) - if (active_plan == Profile.plan_to_stripe_price('pro') and not self.is_pro): + + logging.user( + self.user, + "~FCActive plan: %s, stripe/paypal: %s/%s, is_archive? 
%s" + % ( + active_plan, + Profile.plan_to_stripe_price("archive"), + Profile.plan_to_paypal_plan_id("archive"), + self.is_archive, + ), + ) + if active_plan == Profile.plan_to_stripe_price("pro") and not self.is_pro: self.activate_pro() - elif (active_plan == Profile.plan_to_stripe_price('archive') and not self.is_archive): + elif active_plan == Profile.plan_to_stripe_price("archive") and not self.is_archive: self.activate_archive() - elif (active_plan == Profile.plan_to_paypal_plan_id('pro') and not self.is_pro): + elif active_plan == Profile.plan_to_paypal_plan_id("pro") and not self.is_pro: self.activate_pro() - elif (active_plan == Profile.plan_to_paypal_plan_id('archive') and not self.is_archive): + elif active_plan == Profile.plan_to_paypal_plan_id("archive") and not self.is_archive: self.activate_archive() - + def preference_value(self, key, default=None): preferences = json.decode(self.preferences) return preferences.get(key, default) @@ -700,8 +782,7 @@ class Profile(models.Model): def resync_stripe_and_paypal_history(cls, start_days=365, end_days=0, skip=0): start_date = datetime.datetime.now() - datetime.timedelta(days=start_days) end_date = datetime.datetime.now() - datetime.timedelta(days=end_days) - payments = PaymentHistory.objects.filter(payment_date__gte=start_date, - payment_date__lte=end_date) + payments = PaymentHistory.objects.filter(payment_date__gte=start_date, payment_date__lte=end_date) last_seen_date = None for p, payment in enumerate(payments): if p < skip: @@ -711,30 +792,30 @@ class Profile(models.Model): if payment.payment_date.date() != last_seen_date: last_seen_date = payment.payment_date.date() print(f" ---> Payment date: {last_seen_date} (#{p})") - + payment.user.profile.setup_premium_history() @classmethod def reimport_stripe_history(cls, limit=10, days=7, starting_after=None): stripe.api_key = settings.STRIPE_SECRET - week = (datetime.datetime.now() - datetime.timedelta(days=days)).strftime('%s') + week = (datetime.datetime.now() - datetime.timedelta(days=days)).strftime("%s") failed = [] i = 0 - + while True: logging.debug(" ---> At %s / %s" % (i, starting_after)) i += 1 try: - data = stripe.Charge.list(created={'gt': week}, count=limit, starting_after=starting_after) + data = stripe.Charge.list(created={"gt": week}, count=limit, starting_after=starting_after) except stripe.error.APIConnectionError: time.sleep(10) continue - charges = data['data'] + charges = data["data"] if not len(charges): logging.debug("At %s (%s), finished" % (i, starting_after)) break starting_after = charges[-1]["id"] - customers = [c['customer'] for c in charges if 'customer' in c] + customers = [c["customer"] for c in charges if "customer" in c] for customer in customers: if not customer: print(" ***> No customer!") @@ -758,8 +839,8 @@ class Profile(models.Model): time.sleep(2) continue - return ','.join(failed) - + return ",".join(failed) + def refund_premium(self, partial=False, provider=None): refunded = False if provider == "paypal": @@ -770,24 +851,27 @@ class Profile(models.Model): # self.cancel_premium_stripe() else: # Find last payment, refund that - payment_history = PaymentHistory.objects.filter(user=self.user, - payment_provider__in=['paypal', 'stripe']) + payment_history = PaymentHistory.objects.filter( + user=self.user, payment_provider__in=["paypal", "stripe"] + ) if payment_history.count(): provider = payment_history[0].payment_provider if provider == "stripe": refunded = self.refund_latest_stripe_payment(partial=partial) # self.cancel_premium_stripe() elif 
provider == "paypal": - refunded = self.refund_paypal_payment_from_subscription(self.paypal_sub_id, prorate=partial) + refunded = self.refund_paypal_payment_from_subscription( + self.paypal_sub_id, prorate=partial + ) self.cancel_premium_paypal() return refunded - + def refund_latest_stripe_payment(self, partial=False): refunded = False if not self.stripe_id: return - + stripe.api_key = settings.STRIPE_SECRET stripe_customer = stripe.Customer.retrieve(self.stripe_id) stripe_payments = stripe.Charge.list(customer=stripe_customer.id).data @@ -797,116 +881,128 @@ class Profile(models.Model): else: stripe_payments[0].refund() self.cancel_premium_stripe() - refunded = stripe_payments[0].amount/100 - + refunded = stripe_payments[0].amount / 100 + logging.user(self.user, "~FRRefunding stripe payment: $%s" % refunded) return refunded - + def refund_paypal_payment_from_subscription(self, paypal_sub_id, prorate=False): - if not paypal_sub_id: + if not paypal_sub_id: return - + paypal_api = self.paypal_api() refunded = False # Find transaction from subscription now = datetime.datetime.now() + datetime.timedelta(days=1) # 200 days captures Paypal's 180 day limit on refunds - start_date = (now-datetime.timedelta(days=200)).strftime("%Y-%m-%dT%H:%M:%SZ") + start_date = (now - datetime.timedelta(days=200)).strftime("%Y-%m-%dT%H:%M:%SZ") end_date = now.strftime("%Y-%m-%dT%H:%M:%SZ") try: - transactions = paypal_api.get(f"/v1/billing/subscriptions/{paypal_sub_id}/transactions?start_time={start_date}&end_time={end_date}") + transactions = paypal_api.get( + f"/v1/billing/subscriptions/{paypal_sub_id}/transactions?start_time={start_date}&end_time={end_date}" + ) except paypalrestsdk.ResourceNotFound: transactions = {} - if 'transactions' not in transactions or not len(transactions['transactions']): - logging.user(self.user, f"~FRCouldn't find paypal transactions for refund: {paypal_sub_id} {transactions}") + if "transactions" not in transactions or not len(transactions["transactions"]): + logging.user( + self.user, f"~FRCouldn't find paypal transactions for refund: {paypal_sub_id} {transactions}" + ) return - + # Refund the latest transaction - transaction = transactions['transactions'][0] - today = datetime.datetime.now().strftime('%B %d, %Y') + transaction = transactions["transactions"][0] + today = datetime.datetime.now().strftime("%B %d, %Y") url = f"/v2/payments/captures/{transaction['id']}/refund" - refund_amount = float(transaction['amount_with_breakdown']['gross_amount']['value']) + refund_amount = float(transaction["amount_with_breakdown"]["gross_amount"]["value"]) if prorate: - transaction_date = dateutil.parser.parse(transaction['time']) + transaction_date = dateutil.parser.parse(transaction["time"]) days_since = (datetime.datetime.now() - transaction_date.replace(tzinfo=None)).days if days_since < 365: - days_left = (365 - days_since) - pct_left = days_left/365 + days_left = 365 - days_since + pct_left = days_left / 365 refund_amount = pct_left * refund_amount else: logging.user(self.user, f"~FRCouldn't prorate paypal payment, too old: ~SB{transaction}") try: - response = paypal_api.post(url, { - 'reason': f"Refunded on {today}", - 'amount': { - 'currency_code': 'USD', - 'value': f"{refund_amount:.2f}", - } - }) + response = paypal_api.post( + url, + { + "reason": f"Refunded on {today}", + "amount": { + "currency_code": "USD", + "value": f"{refund_amount:.2f}", + }, + }, + ) except paypalrestsdk.exceptions.ResourceInvalid as e: response = e.response.json() - if len(response.get('details', [])): - 
response = response['details'][0]['description'] + if len(response.get("details", [])): + response = response["details"][0]["description"] if settings.DEBUG: logging.user(self.user, f"Paypal refund response: {response}") - if 'status' in response and response['status'] == "COMPLETED": - refunded = int(float(transaction['amount_with_breakdown']['gross_amount']['value'])) + if "status" in response and response["status"] == "COMPLETED": + refunded = int(float(transaction["amount_with_breakdown"]["gross_amount"]["value"])) logging.user(self.user, "~FRRefunding paypal payment: $%s/%s" % (refund_amount, refunded)) else: logging.user(self.user, "~FRCouldn't refund paypal payment: %s" % response) refunded = response - + return refunded - + def cancel_premium(self): paypal_cancel = self.cancel_premium_paypal() stripe_cancel = self.cancel_premium_stripe() - self.setup_premium_history() # Sure, webhooks will force new history, but they take forever + self.setup_premium_history() # Sure, webhooks will force new history, but they take forever return stripe_cancel or paypal_cancel - + def cancel_premium_paypal(self, cancel_older_subscriptions_only=False): self.retrieve_paypal_ids() if not self.paypal_sub_id: logging.user(self.user, "~FRUser doesn't have a Paypal subscription, how did we get here?") return if not self.premium_renewal and not cancel_older_subscriptions_only: - logging.user(self.user, "~FRUser ~SBalready~SN canceled Paypal subscription: %s" % self.paypal_sub_id) + logging.user( + self.user, "~FRUser ~SBalready~SN canceled Paypal subscription: %s" % self.paypal_sub_id + ) return paypal_api = self.paypal_api() - today = datetime.datetime.now().strftime('%B %d, %Y') + today = datetime.datetime.now().strftime("%B %d, %Y") for paypal_id_model in self.user.paypal_ids.all(): paypal_id = paypal_id_model.paypal_sub_id if cancel_older_subscriptions_only and paypal_id == self.paypal_sub_id: - logging.user(self.user, "~FBNot canceling active Paypal subscription: %s" % self.paypal_sub_id) + logging.user( + self.user, "~FBNot canceling active Paypal subscription: %s" % self.paypal_sub_id + ) continue try: - paypal_subscription = paypal_api.get(f'/v1/billing/subscriptions/{paypal_id}') + paypal_subscription = paypal_api.get(f"/v1/billing/subscriptions/{paypal_id}") except paypalrestsdk.ResourceNotFound: logging.user(self.user, f"~FRCouldn't find paypal payments: {paypal_id}") continue - if paypal_subscription['status'] not in ['ACTIVE', 'APPROVED', 'APPROVAL_PENDING']: + if paypal_subscription["status"] not in ["ACTIVE", "APPROVED", "APPROVAL_PENDING"]: logging.user(self.user, "~FRUser ~SBalready~SN canceled Paypal subscription: %s" % paypal_id) continue url = f"/v1/billing/subscriptions/{paypal_id}/suspend" try: - response = paypal_api.post(url, { - 'reason': f"Cancelled on {today}" - }) + response = paypal_api.post(url, {"reason": f"Cancelled on {today}"}) except paypalrestsdk.ResourceNotFound as e: - logging.user(self.user, f"~FRCouldn't find paypal response during ~FB~SB{paypal_id}~SN~FR profile suspend: ~SB~FB{e}") - + logging.user( + self.user, + f"~FRCouldn't find paypal response during ~FB~SB{paypal_id}~SN~FR profile suspend: ~SB~FB{e}", + ) + logging.user(self.user, "~FRCanceling Paypal subscription: %s" % paypal_id) return paypal_id return True - + def cancel_premium_stripe(self): if not self.stripe_id: return - + stripe.api_key = settings.STRIPE_SECRET for stripe_id_model in self.user.stripe_ids.all(): stripe_id = stripe_id_model.stripe_id @@ -914,56 +1010,57 @@ class Profile(models.Model): 
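A note on the proration logic in refund_paypal_payment_from_subscription above: the refund is scaled by the fraction of the subscription year that remains since the last captured transaction. A minimal standalone sketch of that arithmetic, with a hypothetical amount and date:

import datetime

# Hypothetical values for illustration only.
gross_amount = 36.00  # last captured payment, in USD
transaction_date = datetime.datetime.now() - datetime.timedelta(days=100)

days_since = (datetime.datetime.now() - transaction_date).days
refund_amount = gross_amount
if days_since < 365:
    pct_left = (365 - days_since) / 365  # fraction of the year still unused
    refund_amount = pct_left * gross_amount
# else: the patch logs "too old" and leaves the full amount unchanged
print(f"{refund_amount:.2f}")  # 26.14 for a $36 payment made 100 days ago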
try: subscriptions = stripe.Subscription.list(customer=stripe_customer) for subscription in subscriptions.data: - stripe.Subscription.modify(subscription['id'], cancel_at_period_end=True) - logging.user(self.user, "~FRCanceling Stripe subscription: %s" % subscription['id']) + stripe.Subscription.modify(subscription["id"], cancel_at_period_end=True) + logging.user(self.user, "~FRCanceling Stripe subscription: %s" % subscription["id"]) except stripe.error.InvalidRequestError: logging.user(self.user, "~FRFailed to cancel Stripe subscription: %s" % stripe_id) continue - + return True - + def retrieve_stripe_ids(self): if not self.stripe_id: return - + stripe.api_key = settings.STRIPE_SECRET stripe_customer = stripe.Customer.retrieve(self.stripe_id) stripe_email = stripe_customer.email - + stripe_ids = set() for email in set([stripe_email, self.user.email]): customers = stripe.Customer.list(email=email) for customer in customers: stripe_ids.add(customer.stripe_id) - + self.user.stripe_ids.all().delete() for stripe_id in stripe_ids: self.user.stripe_ids.create(stripe_id=stripe_id) - + def retrieve_paypal_ids(self, force=False): if self.paypal_sub_id and not force: return - - ipns = PayPalIPN.objects.filter(Q(custom=self.user.username) | - Q(payer_email=self.user.email) | - Q(custom=self.user.pk)).order_by('-payment_date') + + ipns = PayPalIPN.objects.filter( + Q(custom=self.user.username) | Q(payer_email=self.user.email) | Q(custom=self.user.pk) + ).order_by("-payment_date") if not len(ipns): return - + self.paypal_sub_id = ipns[0].subscr_id self.save() paypal_ids = set() for ipn in ipns: - if not ipn.subscr_id: continue + if not ipn.subscr_id: + continue paypal_ids.add(ipn.subscr_id) - + seen_paypal_ids = set(p.paypal_sub_id for p in self.user.paypal_ids.all()) for paypal_id in paypal_ids: if paypal_id in seen_paypal_ids: continue self.user.paypal_ids.create(paypal_sub_id=paypal_id) - + @property def latest_paypal_email(self): ipn = PayPalIPN.objects.filter(custom=self.user.username) @@ -971,9 +1068,9 @@ class Profile(models.Model): ipn = PayPalIPN.objects.filter(payer_email=self.user.email) if not len(ipn): return - + return ipn[0].payer_email - + def update_email(self, new_email): from apps.social.models import MSocialProfile @@ -982,14 +1079,14 @@ class Profile(models.Model): self.user.email = new_email self.user.save() - + sp = MSocialProfile.get_user(self.user.pk) sp.email = new_email sp.save() if self.stripe_id: stripe_customer = self.stripe_customer() - stripe_customer.update({'email': new_email}) + stripe_customer.update({"email": new_email}) stripe_customer.save() def stripe_customer(self): @@ -997,71 +1094,85 @@ class Profile(models.Model): stripe.api_key = settings.STRIPE_SECRET stripe_customer = stripe.Customer.retrieve(self.stripe_id) return stripe_customer - + def paypal_api(self): if self.paypal_sub_id: - api = paypalrestsdk.Api({ - "mode": "sandbox" if settings.DEBUG else "live", - "client_id": settings.PAYPAL_API_CLIENTID, - "client_secret": settings.PAYPAL_API_SECRET - }) + api = paypalrestsdk.Api( + { + "mode": "sandbox" if settings.DEBUG else "live", + "client_id": settings.PAYPAL_API_CLIENTID, + "client_secret": settings.PAYPAL_API_SECRET, + } + ) return api - + def activate_ios_premium(self, transaction_identifier=None, amount=36): - payments = PaymentHistory.objects.filter(user=self.user, - payment_identifier=transaction_identifier, - payment_date__gte=datetime.datetime.now()-datetime.timedelta(days=3)) + payments = PaymentHistory.objects.filter( + user=self.user, + 
payment_identifier=transaction_identifier, + payment_date__gte=datetime.datetime.now() - datetime.timedelta(days=3), + ) if len(payments): # Already paid - logging.user(self.user, "~FG~BBAlready paid iOS premium subscription: $%s~FW" % transaction_identifier) + logging.user( + self.user, "~FG~BBAlready paid iOS premium subscription: $%s~FW" % transaction_identifier + ) return False - PaymentHistory.objects.create(user=self.user, - payment_date=datetime.datetime.now(), - payment_amount=amount, - payment_provider='ios-subscription', - payment_identifier=transaction_identifier) - + PaymentHistory.objects.create( + user=self.user, + payment_date=datetime.datetime.now(), + payment_amount=amount, + payment_provider="ios-subscription", + payment_identifier=transaction_identifier, + ) + self.setup_premium_history() - + if not self.is_premium: self.activate_premium() - + logging.user(self.user, "~FG~BBNew iOS premium subscription: $%s~FW" % amount) return True - + def activate_android_premium(self, order_id=None, amount=36): - payments = PaymentHistory.objects.filter(user=self.user, - payment_identifier=order_id, - payment_date__gte=datetime.datetime.now()-datetime.timedelta(days=3)) + payments = PaymentHistory.objects.filter( + user=self.user, + payment_identifier=order_id, + payment_date__gte=datetime.datetime.now() - datetime.timedelta(days=3), + ) if len(payments): # Already paid logging.user(self.user, "~FG~BBAlready paid Android premium subscription: $%s~FW" % amount) return False - PaymentHistory.objects.create(user=self.user, - payment_date=datetime.datetime.now(), - payment_amount=amount, - payment_provider='android-subscription', - payment_identifier=order_id) - + PaymentHistory.objects.create( + user=self.user, + payment_date=datetime.datetime.now(), + payment_amount=amount, + payment_provider="android-subscription", + payment_identifier=order_id, + ) + self.setup_premium_history() - + if order_id == "nb.premium.archive.99": self.activate_archive() elif not self.is_premium: self.activate_premium() - + logging.user(self.user, "~FG~BBNew Android premium subscription: $%s~FW" % amount) return True - + @classmethod def clear_dead_spammers(self, days=30, confirm=False): - users = User.objects.filter(date_joined__gte=datetime.datetime.now()-datetime.timedelta(days=days)).order_by('-date_joined') + users = User.objects.filter( + date_joined__gte=datetime.datetime.now() - datetime.timedelta(days=days) + ).order_by("-date_joined") usernames = set() - numerics = re.compile(r'[0-9]+') + numerics = re.compile(r"[0-9]+") for user in users: - opens = UserSubscription.objects.filter(user=user).aggregate(sum=Sum('feed_opens'))['sum'] + opens = UserSubscription.objects.filter(user=user).aggregate(sum=Sum("feed_opens"))["sum"] reads = RUserStory.read_story_count(user.pk) has_numbers = numerics.search(user.username) @@ -1069,7 +1180,9 @@ class Profile(models.Model): has_profile = user.profile.last_seen_ip except Profile.DoesNotExist: usernames.add(user.username) - print(" ---> Missing profile: %-20s %-30s %-6s %-6s" % (user.username, user.email, opens, reads)) + print( + " ---> Missing profile: %-20s %-30s %-6s %-6s" % (user.username, user.email, opens, reads) + ) continue if opens is None and not reads and has_numbers: @@ -1078,9 +1191,10 @@ class Profile(models.Model): elif not has_profile: usernames.add(user.username) print(" ---> No IP: %-20s %-30s %-6s %-6s" % (user.username, user.email, opens, reads)) - - if not confirm: return usernames - + + if not confirm: + return usernames + for username in 
usernames: try: u = User.objects.get(username=username) @@ -1090,27 +1204,33 @@ class Profile(models.Model): RNewUserQueue.user_count() RNewUserQueue.activate_all() - + @classmethod def count_feed_subscribers(self, feed_id=None, user_id=None, verbose=True): SUBSCRIBER_EXPIRE = datetime.datetime.now() - datetime.timedelta(days=settings.SUBSCRIBER_EXPIRE) r = redis.Redis(connection_pool=settings.REDIS_FEED_SUB_POOL) entire_feed_counted = False - + if verbose: feed = Feed.get_by_id(feed_id) - logging.debug(" ---> [%-30s] ~SN~FBCounting subscribers for feed:~SB~FM%s~SN~FB user:~SB~FM%s" % (feed.log_title[:30], feed_id, user_id)) - + logging.debug( + " ---> [%-30s] ~SN~FBCounting subscribers for feed:~SB~FM%s~SN~FB user:~SB~FM%s" + % (feed.log_title[:30], feed_id, user_id) + ) + if feed_id: feed_ids = [feed_id] elif user_id: - feed_ids = [us['feed_id'] for us in UserSubscription.objects.filter(user=user_id, active=True).values('feed_id')] + feed_ids = [ + us["feed_id"] + for us in UserSubscription.objects.filter(user=user_id, active=True).values("feed_id") + ] else: assert False, "feed_id or user_id required" if feed_id and not user_id: entire_feed_counted = True - + for feed_id in feed_ids: total = 0 premium = 0 @@ -1118,20 +1238,26 @@ class Profile(models.Model): active_premium = 0 archive = 0 pro = 0 - key = 's:%s' % feed_id - premium_key = 'sp:%s' % feed_id - archive_key = 'sarchive:%s' % feed_id - pro_key = 'spro:%s' % feed_id - + key = "s:%s" % feed_id + premium_key = "sp:%s" % feed_id + archive_key = "sarchive:%s" % feed_id + pro_key = "spro:%s" % feed_id + if user_id: - active = UserSubscription.objects.get(feed_id=feed_id, user_id=user_id).only('active').active + active = UserSubscription.objects.get(feed_id=feed_id, user_id=user_id).only("active").active user_active_feeds = dict([(user_id, active)]) else: - user_active_feeds = dict([(us.user_id, us.active) - for us in UserSubscription.objects.filter(feed_id=feed_id).only('user', 'active')]) - profiles = Profile.objects.filter(user_id__in=list(user_active_feeds.keys())).values('user_id', 'last_seen_on', 'is_premium', 'is_archive', 'is_pro') + user_active_feeds = dict( + [ + (us.user_id, us.active) + for us in UserSubscription.objects.filter(feed_id=feed_id).only("user", "active") + ] + ) + profiles = Profile.objects.filter(user_id__in=list(user_active_feeds.keys())).values( + "user_id", "last_seen_on", "is_premium", "is_archive", "is_pro" + ) feed = Feed.get_by_id(feed_id) - + if entire_feed_counted: pipeline = r.pipeline() pipeline.delete(key) @@ -1139,181 +1265,167 @@ class Profile(models.Model): pipeline.delete(archive_key) pipeline.delete(pro_key) pipeline.execute() - + for profiles_group in chunks(profiles, 20): pipeline = r.pipeline() for profile in profiles_group: - last_seen_on = int(profile['last_seen_on'].strftime('%s')) - muted_feed = not bool(user_active_feeds[profile['user_id']]) + last_seen_on = int(profile["last_seen_on"].strftime("%s")) + muted_feed = not bool(user_active_feeds[profile["user_id"]]) if muted_feed: last_seen_on = 0 - pipeline.zadd(key, { profile['user_id']: last_seen_on }) + pipeline.zadd(key, {profile["user_id"]: last_seen_on}) total += 1 - if profile['is_premium']: - pipeline.zadd(premium_key, { profile['user_id']: last_seen_on }) + if profile["is_premium"]: + pipeline.zadd(premium_key, {profile["user_id"]: last_seen_on}) premium += 1 else: - pipeline.zrem(premium_key, profile['user_id']) - if profile['is_archive']: - pipeline.zadd(archive_key, { profile['user_id']: last_seen_on }) + 
pipeline.zrem(premium_key, profile["user_id"]) + if profile["is_archive"]: + pipeline.zadd(archive_key, {profile["user_id"]: last_seen_on}) archive += 1 else: - pipeline.zrem(archive_key, profile['user_id']) - if profile['is_pro']: - pipeline.zadd(pro_key, { profile['user_id']: last_seen_on }) + pipeline.zrem(archive_key, profile["user_id"]) + if profile["is_pro"]: + pipeline.zadd(pro_key, {profile["user_id"]: last_seen_on}) pro += 1 else: - pipeline.zrem(pro_key, profile['user_id']) - if profile['last_seen_on'] > SUBSCRIBER_EXPIRE and not muted_feed: + pipeline.zrem(pro_key, profile["user_id"]) + if profile["last_seen_on"] > SUBSCRIBER_EXPIRE and not muted_feed: active += 1 - if profile['is_premium']: + if profile["is_premium"]: active_premium += 1 - + pipeline.execute() - + if entire_feed_counted: - now = int(datetime.datetime.now().strftime('%s')) - r.zadd(key, { -1: now }) - r.expire(key, settings.SUBSCRIBER_EXPIRE*24*60*60) + now = int(datetime.datetime.now().strftime("%s")) + r.zadd(key, {-1: now}) + r.expire(key, settings.SUBSCRIBER_EXPIRE * 24 * 60 * 60) r.zadd(premium_key, {-1: now}) - r.expire(premium_key, settings.SUBSCRIBER_EXPIRE*24*60*60) + r.expire(premium_key, settings.SUBSCRIBER_EXPIRE * 24 * 60 * 60) r.zadd(archive_key, {-1: now}) - r.expire(archive_key, settings.SUBSCRIBER_EXPIRE*24*60*60) + r.expire(archive_key, settings.SUBSCRIBER_EXPIRE * 24 * 60 * 60) r.zadd(pro_key, {-1: now}) - r.expire(pro_key, settings.SUBSCRIBER_EXPIRE*24*60*60) - - logging.info(" ---> [%-30s] ~SN~FBCounting subscribers, storing in ~SBredis~SN: ~FMt:~SB~FM%s~SN a:~SB%s~SN p:~SB%s~SN ap:~SB%s~SN archive:~SB%s~SN pro:~SB%s" % - (feed.log_title[:30], total, active, premium, active_premium, archive, pro)) + r.expire(pro_key, settings.SUBSCRIBER_EXPIRE * 24 * 60 * 60) + + logging.info( + " ---> [%-30s] ~SN~FBCounting subscribers, storing in ~SBredis~SN: ~FMt:~SB~FM%s~SN a:~SB%s~SN p:~SB%s~SN ap:~SB%s~SN archive:~SB%s~SN pro:~SB%s" + % (feed.log_title[:30], total, active, premium, active_premium, archive, pro) + ) @classmethod def count_all_feed_subscribers_for_user(self, user): r = redis.Redis(connection_pool=settings.REDIS_FEED_SUB_POOL) if not isinstance(user, User): user = User.objects.get(pk=user) - - active_feed_ids = [us['feed_id'] for us in UserSubscription.objects.filter(user=user.pk, active=True).values('feed_id')] - muted_feed_ids = [us['feed_id'] for us in UserSubscription.objects.filter(user=user.pk, active=False).values('feed_id')] - logging.user(user, "~SN~FBRefreshing user last_login_on for ~SB%s~SN/~SB%s subscriptions~SN" % - (len(active_feed_ids), len(muted_feed_ids))) + + active_feed_ids = [ + us["feed_id"] + for us in UserSubscription.objects.filter(user=user.pk, active=True).values("feed_id") + ] + muted_feed_ids = [ + us["feed_id"] + for us in UserSubscription.objects.filter(user=user.pk, active=False).values("feed_id") + ] + logging.user( + user, + "~SN~FBRefreshing user last_login_on for ~SB%s~SN/~SB%s subscriptions~SN" + % (len(active_feed_ids), len(muted_feed_ids)), + ) for feed_ids in [active_feed_ids, muted_feed_ids]: for feeds_group in chunks(feed_ids, 20): pipeline = r.pipeline() for feed_id in feeds_group: - key = 's:%s' % feed_id - premium_key = 'sp:%s' % feed_id - archive_key = 'sarchive:%s' % feed_id - pro_key = 'spro:%s' % feed_id + key = "s:%s" % feed_id + premium_key = "sp:%s" % feed_id + archive_key = "sarchive:%s" % feed_id + pro_key = "spro:%s" % feed_id - last_seen_on = int(user.profile.last_seen_on.strftime('%s')) + last_seen_on = 
int(user.profile.last_seen_on.strftime("%s")) if feed_ids is muted_feed_ids: last_seen_on = 0 - pipeline.zadd(key, { user.pk: last_seen_on }) + pipeline.zadd(key, {user.pk: last_seen_on}) if user.profile.is_premium: - pipeline.zadd(premium_key, { user.pk: last_seen_on }) + pipeline.zadd(premium_key, {user.pk: last_seen_on}) else: pipeline.zrem(premium_key, user.pk) if user.profile.is_archive: - pipeline.zadd(archive_key, { user.pk: last_seen_on }) + pipeline.zadd(archive_key, {user.pk: last_seen_on}) else: pipeline.zrem(archive_key, user.pk) if user.profile.is_pro: - pipeline.zadd(pro_key, { user.pk: last_seen_on }) + pipeline.zadd(pro_key, {user.pk: last_seen_on}) else: pipeline.zrem(pro_key, user.pk) pipeline.execute() - + def send_new_user_email(self): if not self.user.email or not self.send_emails: return - - user = self.user - text = render_to_string('mail/email_new_account.txt', locals()) - html = render_to_string('mail/email_new_account.xhtml', locals()) + + user = self.user + text = render_to_string("mail/email_new_account.txt", locals()) + html = render_to_string("mail/email_new_account.xhtml", locals()) subject = "Welcome to NewsBlur, %s" % (self.user.username) - msg = EmailMultiAlternatives(subject, text, - from_email='NewsBlur <%s>' % settings.HELLO_EMAIL, - to=['%s <%s>' % (user, user.email)]) + msg = EmailMultiAlternatives( + subject, + text, + from_email="NewsBlur <%s>" % settings.HELLO_EMAIL, + to=["%s <%s>" % (user, user.email)], + ) msg.attach_alternative(html, "text/html") msg.send() - + logging.user(self.user, "~BB~FM~SBSending email for new user: %s" % self.user.email) - + def send_opml_export_email(self, reason=None, force=False): if not self.user.email: return - - emails_sent = MSentEmail.objects.filter(receiver_user_id=self.user.pk, - email_type='opml_export') + + emails_sent = MSentEmail.objects.filter(receiver_user_id=self.user.pk, email_type="opml_export") day_ago = datetime.datetime.now() - datetime.timedelta(days=1) for email in emails_sent: if email.date_sent > day_ago and not force: logging.user(self.user, "~SN~FMNot sending opml export email, already sent today.") return - MSentEmail.record(receiver_user_id=self.user.pk, email_type='opml_export') - + MSentEmail.record(receiver_user_id=self.user.pk, email_type="opml_export") + exporter = OPMLExporter(self.user) - opml = exporter.process() + opml = exporter.process() params = { - 'feed_count': UserSubscription.objects.filter(user=self.user).count(), - 'reason': reason, + "feed_count": UserSubscription.objects.filter(user=self.user).count(), + "reason": reason, } - user = self.user - text = render_to_string('mail/email_opml_export.txt', params) - html = render_to_string('mail/email_opml_export.xhtml', params) + user = self.user + text = render_to_string("mail/email_opml_export.txt", params) + html = render_to_string("mail/email_opml_export.xhtml", params) subject = "Backup OPML file of your NewsBlur sites" - filename= 'NewsBlur Subscriptions - %s.xml' % datetime.datetime.now().strftime('%Y-%m-%d') - msg = EmailMultiAlternatives(subject, text, - from_email='NewsBlur <%s>' % settings.HELLO_EMAIL, - to=['%s <%s>' % (user, user.email)]) + filename = "NewsBlur Subscriptions - %s.xml" % datetime.datetime.now().strftime("%Y-%m-%d") + msg = EmailMultiAlternatives( + subject, + text, + from_email="NewsBlur <%s>" % settings.HELLO_EMAIL, + to=["%s <%s>" % (user, user.email)], + ) msg.attach_alternative(html, "text/html") - msg.attach(filename, opml, 'text/xml') + msg.attach(filename, opml, "text/xml") msg.send() - + from 
apps.social.models import MActivity + MActivity.new_opml_export(user_id=self.user.pk, count=exporter.feed_count, automated=True) - + logging.user(self.user, "~BB~FM~SBSending OPML backup email to: %s" % self.user.email) - + def send_first_share_to_blurblog_email(self, force=False): from apps.social.models import MSocialProfile, MSharedStory - + if not self.user.email: return - - params = dict(receiver_user_id=self.user.pk, email_type='first_share') - try: - MSentEmail.objects.get(**params) - if not force: - # Return if email already sent - return - except MSentEmail.DoesNotExist: - MSentEmail.objects.create(**params) - - social_profile = MSocialProfile.objects.get(user_id=self.user.pk) - params = { - 'shared_stories': MSharedStory.objects.filter(user_id=self.user.pk).count(), - 'blurblog_url': social_profile.blurblog_url, - 'blurblog_rss': social_profile.blurblog_rss - } - user = self.user - text = render_to_string('mail/email_first_share_to_blurblog.txt', params) - html = render_to_string('mail/email_first_share_to_blurblog.xhtml', params) - subject = "Your shared stories on NewsBlur are available on your Blurblog" - msg = EmailMultiAlternatives(subject, text, - from_email='NewsBlur <%s>' % settings.HELLO_EMAIL, - to=['%s <%s>' % (user, user.email)]) - msg.attach_alternative(html, "text/html") - msg.send() - - logging.user(self.user, "~BB~FM~SBSending first share to blurblog email to: %s" % self.user.email) - - def send_new_premium_email(self, force=False): - if not self.user.email or not self.send_emails: - return - - params = dict(receiver_user_id=self.user.pk, email_type='new_premium') + + params = dict(receiver_user_id=self.user.pk, email_type="first_share") try: MSentEmail.objects.get(**params) if not force: @@ -1322,52 +1434,100 @@ class Profile(models.Model): except MSentEmail.DoesNotExist: MSentEmail.objects.create(**params) - user = self.user - text = render_to_string('mail/email_new_premium.txt', locals()) - html = render_to_string('mail/email_new_premium.xhtml', locals()) - subject = "Thank you for subscribing to NewsBlur Premium!" 
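The send_* helpers in this class all follow the same multipart pattern that the reformatting here touches: render a plain-text body plus an HTML alternative, then send both as one message. A self-contained sketch of that pattern, with placeholder template names and addresses rather than the real NewsBlur ones:

from django.core.mail import EmailMultiAlternatives
from django.template.loader import render_to_string

def send_example_email(user, context):
    # Placeholder templates; the real helpers render mail/email_*.txt and .xhtml pairs.
    text = render_to_string("mail/example.txt", context)
    html = render_to_string("mail/example.xhtml", context)
    msg = EmailMultiAlternatives(
        subject="Example subject",
        body=text,
        from_email="NewsBlur <hello@example.com>",  # placeholder sender
        to=["%s <%s>" % (user, user.email)],
    )
    msg.attach_alternative(html, "text/html")  # HTML part alongside the plain-text body
    # msg.attach(filename, opml, "text/xml")   # optional attachment, as in send_opml_export_email
    msg.send()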
- msg = EmailMultiAlternatives(subject, text, - from_email='NewsBlur <%s>' % settings.HELLO_EMAIL, - to=['%s <%s>' % (user, user.email)]) + social_profile = MSocialProfile.objects.get(user_id=self.user.pk) + params = { + "shared_stories": MSharedStory.objects.filter(user_id=self.user.pk).count(), + "blurblog_url": social_profile.blurblog_url, + "blurblog_rss": social_profile.blurblog_rss, + } + user = self.user + text = render_to_string("mail/email_first_share_to_blurblog.txt", params) + html = render_to_string("mail/email_first_share_to_blurblog.xhtml", params) + subject = "Your shared stories on NewsBlur are available on your Blurblog" + msg = EmailMultiAlternatives( + subject, + text, + from_email="NewsBlur <%s>" % settings.HELLO_EMAIL, + to=["%s <%s>" % (user, user.email)], + ) msg.attach_alternative(html, "text/html") msg.send() - + + logging.user(self.user, "~BB~FM~SBSending first share to blurblog email to: %s" % self.user.email) + + def send_new_premium_email(self, force=False): + if not self.user.email or not self.send_emails: + return + + params = dict(receiver_user_id=self.user.pk, email_type="new_premium") + try: + MSentEmail.objects.get(**params) + if not force: + # Return if email already sent + return + except MSentEmail.DoesNotExist: + MSentEmail.objects.create(**params) + + user = self.user + text = render_to_string("mail/email_new_premium.txt", locals()) + html = render_to_string("mail/email_new_premium.xhtml", locals()) + subject = "Thank you for subscribing to NewsBlur Premium!" + msg = EmailMultiAlternatives( + subject, + text, + from_email="NewsBlur <%s>" % settings.HELLO_EMAIL, + to=["%s <%s>" % (user, user.email)], + ) + msg.attach_alternative(html, "text/html") + msg.send() + logging.user(self.user, "~BB~FM~SBSending email for new premium: %s" % self.user.email) - + def send_new_premium_archive_email(self, total_story_count, pre_archive_count, force=False): if not self.user.email: return - - params = dict(receiver_user_id=self.user.pk, email_type='new_premium_archive') + + params = dict(receiver_user_id=self.user.pk, email_type="new_premium_archive") try: MSentEmail.objects.get(**params) if not force: # Return if email already sent - logging.user(self.user, "~BB~FMNot ~SBSending email for new premium archive: %s (%s to %s stories)" % (self.user.email, pre_archive_count, total_story_count)) + logging.user( + self.user, + "~BB~FMNot ~SBSending email for new premium archive: %s (%s to %s stories)" + % (self.user.email, pre_archive_count, total_story_count), + ) return except MSentEmail.DoesNotExist: MSentEmail.objects.create(**params) feed_count = UserSubscription.objects.filter(user=self.user).count() - user = self.user - text = render_to_string('mail/email_new_premium_archive.txt', locals()) - html = render_to_string('mail/email_new_premium_archive.xhtml', locals()) + user = self.user + text = render_to_string("mail/email_new_premium_archive.txt", locals()) + html = render_to_string("mail/email_new_premium_archive.xhtml", locals()) if total_story_count > pre_archive_count: subject = f"NewsBlur archive backfill is complete: from {pre_archive_count:,} to {total_story_count:,} stories" else: subject = f"NewsBlur archive backfill is complete: {total_story_count:,} stories" - msg = EmailMultiAlternatives(subject, text, - from_email='NewsBlur <%s>' % settings.HELLO_EMAIL, - to=['%s <%s>' % (user, user.email)]) + msg = EmailMultiAlternatives( + subject, + text, + from_email="NewsBlur <%s>" % settings.HELLO_EMAIL, + to=["%s <%s>" % (user, user.email)], + ) 
msg.attach_alternative(html, "text/html") msg.send() - - logging.user(self.user, "~BB~FM~SBSending email for new premium archive: %s (%s to %s stories)" % (self.user.email, pre_archive_count, total_story_count)) - + + logging.user( + self.user, + "~BB~FM~SBSending email for new premium archive: %s (%s to %s stories)" + % (self.user.email, pre_archive_count, total_story_count), + ) + def send_new_premium_pro_email(self, force=False): if not self.user.email or not self.send_emails: return - - params = dict(receiver_user_id=self.user.pk, email_type='new_premium_pro') + + params = dict(receiver_user_id=self.user.pk, email_type="new_premium_pro") try: MSentEmail.objects.get(**params) if not force: @@ -1376,45 +1536,51 @@ class Profile(models.Model): except MSentEmail.DoesNotExist: MSentEmail.objects.create(**params) - user = self.user - text = render_to_string('mail/email_new_premium_pro.txt', locals()) - html = render_to_string('mail/email_new_premium_pro.xhtml', locals()) + user = self.user + text = render_to_string("mail/email_new_premium_pro.txt", locals()) + html = render_to_string("mail/email_new_premium_pro.xhtml", locals()) subject = "Thanks for subscribing to NewsBlur Premium Pro!" - msg = EmailMultiAlternatives(subject, text, - from_email='NewsBlur <%s>' % settings.HELLO_EMAIL, - to=['%s <%s>' % (user, user.email)]) + msg = EmailMultiAlternatives( + subject, + text, + from_email="NewsBlur <%s>" % settings.HELLO_EMAIL, + to=["%s <%s>" % (user, user.email)], + ) msg.attach_alternative(html, "text/html") msg.send() - + logging.user(self.user, "~BB~FM~SBSending email for new premium pro: %s" % self.user.email) - + def send_forgot_password_email(self, email=None): if not self.user.email and not email: print("Please provide an email address.") return - + if not self.user.email and email: self.user.email = email self.user.save() - - user = self.user - text = render_to_string('mail/email_forgot_password.txt', locals()) - html = render_to_string('mail/email_forgot_password.xhtml', locals()) + + user = self.user + text = render_to_string("mail/email_forgot_password.txt", locals()) + html = render_to_string("mail/email_forgot_password.xhtml", locals()) subject = "Forgot your password on NewsBlur?" 
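Several of these mailers guard against double-sends with the MSentEmail get-or-create pattern visible in the surrounding hunks. Factored out on its own it amounts to this (email_type here is an arbitrary label; MSentEmail is the document defined near the end of this file):

def already_sent(user_pk, email_type, force=False):
    # Mirrors the guard used by send_new_premium_email and friends.
    params = dict(receiver_user_id=user_pk, email_type=email_type)
    try:
        MSentEmail.objects.get(**params)
        return not force  # a record exists; only resend when forced
    except MSentEmail.DoesNotExist:
        MSentEmail.objects.create(**params)
        return False  # first send: record it and proceed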
- msg = EmailMultiAlternatives(subject, text, - from_email='NewsBlur <%s>' % settings.HELLO_EMAIL, - to=['%s <%s>' % (user, user.email)]) + msg = EmailMultiAlternatives( + subject, + text, + from_email="NewsBlur <%s>" % settings.HELLO_EMAIL, + to=["%s <%s>" % (user, user.email)], + ) msg.attach_alternative(html, "text/html") msg.send() - + logging.user(self.user, "~BB~FM~SBSending email for forgotten password: %s" % self.user.email) - + def send_new_user_queue_email(self, force=False): if not self.user.email: print("Please provide an email address.") return - - params = dict(receiver_user_id=self.user.pk, email_type='new_user_queue') + + params = dict(receiver_user_id=self.user.pk, email_type="new_user_queue") try: MSentEmail.objects.get(**params) if not force: @@ -1423,238 +1589,306 @@ class Profile(models.Model): except MSentEmail.DoesNotExist: MSentEmail.objects.create(**params) - user = self.user - text = render_to_string('mail/email_new_user_queue.txt', locals()) - html = render_to_string('mail/email_new_user_queue.xhtml', locals()) + user = self.user + text = render_to_string("mail/email_new_user_queue.txt", locals()) + html = render_to_string("mail/email_new_user_queue.xhtml", locals()) subject = "Your free account is now ready to go on NewsBlur" - msg = EmailMultiAlternatives(subject, text, - from_email='NewsBlur <%s>' % settings.HELLO_EMAIL, - to=['%s <%s>' % (user, user.email)]) + msg = EmailMultiAlternatives( + subject, + text, + from_email="NewsBlur <%s>" % settings.HELLO_EMAIL, + to=["%s <%s>" % (user, user.email)], + ) msg.attach_alternative(html, "text/html") msg.send() - + logging.user(self.user, "~BB~FM~SBSending email for new user queue: %s" % self.user.email) - + def send_upload_opml_finished_email(self, feed_count): if not self.user.email: print("Please provide an email address.") return - - user = self.user - text = render_to_string('mail/email_upload_opml_finished.txt', locals()) - html = render_to_string('mail/email_upload_opml_finished.xhtml', locals()) + + user = self.user + text = render_to_string("mail/email_upload_opml_finished.txt", locals()) + html = render_to_string("mail/email_upload_opml_finished.xhtml", locals()) subject = "Your OPML upload is complete. Get going with NewsBlur!" - msg = EmailMultiAlternatives(subject, text, - from_email='NewsBlur <%s>' % settings.HELLO_EMAIL, - to=['%s <%s>' % (user, user.email)]) + msg = EmailMultiAlternatives( + subject, + text, + from_email="NewsBlur <%s>" % settings.HELLO_EMAIL, + to=["%s <%s>" % (user, user.email)], + ) msg.attach_alternative(html, "text/html") msg.send() - + logging.user(self.user, "~BB~FM~SBSending email for OPML upload: %s" % self.user.email) - + def send_import_reader_finished_email(self, feed_count): if not self.user.email: print("Please provide an email address.") return - - user = self.user - text = render_to_string('mail/email_import_reader_finished.txt', locals()) - html = render_to_string('mail/email_import_reader_finished.xhtml', locals()) + + user = self.user + text = render_to_string("mail/email_import_reader_finished.txt", locals()) + html = render_to_string("mail/email_import_reader_finished.xhtml", locals()) subject = "Your Google Reader import is complete. Get going with NewsBlur!" 
- msg = EmailMultiAlternatives(subject, text, - from_email='NewsBlur <%s>' % settings.HELLO_EMAIL, - to=['%s <%s>' % (user, user.email)]) + msg = EmailMultiAlternatives( + subject, + text, + from_email="NewsBlur <%s>" % settings.HELLO_EMAIL, + to=["%s <%s>" % (user, user.email)], + ) msg.attach_alternative(html, "text/html") msg.send() - + logging.user(self.user, "~BB~FM~SBSending email for Google Reader import: %s" % self.user.email) - + def send_import_reader_starred_finished_email(self, feed_count, starred_count): if not self.user.email: print("Please provide an email address.") return - - user = self.user - text = render_to_string('mail/email_import_reader_starred_finished.txt', locals()) - html = render_to_string('mail/email_import_reader_starred_finished.xhtml', locals()) + + user = self.user + text = render_to_string("mail/email_import_reader_starred_finished.txt", locals()) + html = render_to_string("mail/email_import_reader_starred_finished.xhtml", locals()) subject = "Your Google Reader starred stories import is complete. Get going with NewsBlur!" - msg = EmailMultiAlternatives(subject, text, - from_email='NewsBlur <%s>' % settings.HELLO_EMAIL, - to=['%s <%s>' % (user, user.email)]) + msg = EmailMultiAlternatives( + subject, + text, + from_email="NewsBlur <%s>" % settings.HELLO_EMAIL, + to=["%s <%s>" % (user, user.email)], + ) msg.attach_alternative(html, "text/html") msg.send() - - logging.user(self.user, "~BB~FM~SBSending email for Google Reader starred stories import: %s" % self.user.email) - + + logging.user( + self.user, "~BB~FM~SBSending email for Google Reader starred stories import: %s" % self.user.email + ) + def send_launch_social_email(self, force=False): if not self.user.email or not self.send_emails: - logging.user(self.user, "~FM~SB~FRNot~FM sending launch social email for user, %s: %s" % (self.user.email and 'opt-out: ' or 'blank', self.user.email)) + logging.user( + self.user, + "~FM~SB~FRNot~FM sending launch social email for user, %s: %s" + % (self.user.email and "opt-out: " or "blank", self.user.email), + ) return - - params = dict(receiver_user_id=self.user.pk, email_type='launch_social') + + params = dict(receiver_user_id=self.user.pk, email_type="launch_social") try: MSentEmail.objects.get(**params) if not force: # Return if email already sent - logging.user(self.user, "~FM~SB~FRNot~FM sending launch social email for user, sent already: %s" % self.user.email) + logging.user( + self.user, + "~FM~SB~FRNot~FM sending launch social email for user, sent already: %s" + % self.user.email, + ) return except MSentEmail.DoesNotExist: MSentEmail.objects.create(**params) - - delta = datetime.datetime.now() - self.last_seen_on + + delta = datetime.datetime.now() - self.last_seen_on months_ago = delta.days / 30 - user = self.user - data = dict(user=user, months_ago=months_ago) - text = render_to_string('mail/email_launch_social.txt', data) - html = render_to_string('mail/email_launch_social.xhtml', data) + user = self.user + data = dict(user=user, months_ago=months_ago) + text = render_to_string("mail/email_launch_social.txt", data) + html = render_to_string("mail/email_launch_social.xhtml", data) subject = "NewsBlur is now a social news reader" - msg = EmailMultiAlternatives(subject, text, - from_email='NewsBlur <%s>' % settings.HELLO_EMAIL, - to=['%s <%s>' % (user, user.email)]) + msg = EmailMultiAlternatives( + subject, + text, + from_email="NewsBlur <%s>" % settings.HELLO_EMAIL, + to=["%s <%s>" % (user, user.email)], + ) msg.attach_alternative(html, "text/html") 
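The launch emails report how long a user has been away by turning last_seen_on into approximate months, as in send_launch_social_email above; the expiration mailers later round the same quotient. For example, with hypothetical dates:

import datetime

last_seen_on = datetime.datetime.now() - datetime.timedelta(days=75)  # hypothetical
delta = datetime.datetime.now() - last_seen_on
months_ago = delta.days / 30  # 75 days -> 2.5; round(2.5) == 2 in the expiration emails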
msg.send() - - logging.user(self.user, "~BB~FM~SBSending launch social email for user: %s months, %s" % (months_ago, self.user.email)) - + + logging.user( + self.user, + "~BB~FM~SBSending launch social email for user: %s months, %s" % (months_ago, self.user.email), + ) + def send_launch_turntouch_email(self, force=False): if not self.user.email or not self.send_emails: - logging.user(self.user, "~FM~SB~FRNot~FM sending launch TT email for user, %s: %s" % (self.user.email and 'opt-out: ' or 'blank', self.user.email)) + logging.user( + self.user, + "~FM~SB~FRNot~FM sending launch TT email for user, %s: %s" + % (self.user.email and "opt-out: " or "blank", self.user.email), + ) return - - params = dict(receiver_user_id=self.user.pk, email_type='launch_turntouch') + + params = dict(receiver_user_id=self.user.pk, email_type="launch_turntouch") try: MSentEmail.objects.get(**params) if not force: # Return if email already sent - logging.user(self.user, "~FM~SB~FRNot~FM sending launch social email for user, sent already: %s" % self.user.email) + logging.user( + self.user, + "~FM~SB~FRNot~FM sending launch social email for user, sent already: %s" + % self.user.email, + ) return except MSentEmail.DoesNotExist: MSentEmail.objects.create(**params) - - delta = datetime.datetime.now() - self.last_seen_on + + delta = datetime.datetime.now() - self.last_seen_on months_ago = delta.days / 30 - user = self.user - data = dict(user=user, months_ago=months_ago) - text = render_to_string('mail/email_launch_turntouch.txt', data) - html = render_to_string('mail/email_launch_turntouch.xhtml', data) + user = self.user + data = dict(user=user, months_ago=months_ago) + text = render_to_string("mail/email_launch_turntouch.txt", data) + html = render_to_string("mail/email_launch_turntouch.xhtml", data) subject = "Introducing Turn Touch for NewsBlur" - msg = EmailMultiAlternatives(subject, text, - from_email='NewsBlur <%s>' % settings.HELLO_EMAIL, - to=['%s <%s>' % (user, user.email)]) + msg = EmailMultiAlternatives( + subject, + text, + from_email="NewsBlur <%s>" % settings.HELLO_EMAIL, + to=["%s <%s>" % (user, user.email)], + ) msg.attach_alternative(html, "text/html") msg.send() - - logging.user(self.user, "~BB~FM~SBSending launch TT email for user: %s months, %s" % (months_ago, self.user.email)) + + logging.user( + self.user, + "~BB~FM~SBSending launch TT email for user: %s months, %s" % (months_ago, self.user.email), + ) def send_launch_turntouch_end_email(self, force=False): if not self.user.email or not self.send_emails: - logging.user(self.user, "~FM~SB~FRNot~FM sending launch TT end email for user, %s: %s" % (self.user.email and 'opt-out: ' or 'blank', self.user.email)) + logging.user( + self.user, + "~FM~SB~FRNot~FM sending launch TT end email for user, %s: %s" + % (self.user.email and "opt-out: " or "blank", self.user.email), + ) return - - params = dict(receiver_user_id=self.user.pk, email_type='launch_turntouch_end') + + params = dict(receiver_user_id=self.user.pk, email_type="launch_turntouch_end") try: MSentEmail.objects.get(**params) if not force: # Return if email already sent - logging.user(self.user, "~FM~SB~FRNot~FM sending launch TT end email for user, sent already: %s" % self.user.email) + logging.user( + self.user, + "~FM~SB~FRNot~FM sending launch TT end email for user, sent already: %s" + % self.user.email, + ) return except MSentEmail.DoesNotExist: MSentEmail.objects.create(**params) - - delta = datetime.datetime.now() - self.last_seen_on + + delta = datetime.datetime.now() - self.last_seen_on 
months_ago = delta.days / 30 - user = self.user - data = dict(user=user, months_ago=months_ago) - text = render_to_string('mail/email_launch_turntouch_end.txt', data) - html = render_to_string('mail/email_launch_turntouch_end.xhtml', data) + user = self.user + data = dict(user=user, months_ago=months_ago) + text = render_to_string("mail/email_launch_turntouch_end.txt", data) + html = render_to_string("mail/email_launch_turntouch_end.xhtml", data) subject = "Last day to back Turn Touch: NewsBlur's beautiful remote" - msg = EmailMultiAlternatives(subject, text, - from_email='NewsBlur <%s>' % settings.HELLO_EMAIL, - to=['%s <%s>' % (user, user.email)]) + msg = EmailMultiAlternatives( + subject, + text, + from_email="NewsBlur <%s>" % settings.HELLO_EMAIL, + to=["%s <%s>" % (user, user.email)], + ) msg.attach_alternative(html, "text/html") msg.send() - - logging.user(self.user, "~BB~FM~SBSending launch TT end email for user: %s months, %s" % (months_ago, self.user.email)) - + + logging.user( + self.user, + "~BB~FM~SBSending launch TT end email for user: %s months, %s" % (months_ago, self.user.email), + ) + def grace_period_email_sent(self, force=False): - emails_sent = MSentEmail.objects.filter(receiver_user_id=self.user.pk, - email_type='premium_expire_grace') + emails_sent = MSentEmail.objects.filter( + receiver_user_id=self.user.pk, email_type="premium_expire_grace" + ) day_ago = datetime.datetime.now() - datetime.timedelta(days=360) for email in emails_sent: if email.date_sent > day_ago and not force: logging.user(self.user, "~SN~FMNot sending premium expire grace email, already sent before.") return True - + def send_premium_expire_grace_period_email(self, force=False): if not self.user.email: - logging.user(self.user, "~FM~SB~FRNot~FM~SN sending premium expire grace for user: %s" % (self.user)) + logging.user( + self.user, "~FM~SB~FRNot~FM~SN sending premium expire grace for user: %s" % (self.user) + ) return if self.grace_period_email_sent(force=force): return - + if self.premium_expire and self.premium_expire < datetime.datetime.now(): self.premium_expire = datetime.datetime.now() self.save() - - delta = datetime.datetime.now() - self.last_seen_on + + delta = datetime.datetime.now() - self.last_seen_on months_ago = round(delta.days / 30) - user = self.user - data = dict(user=user, months_ago=months_ago) - text = render_to_string('mail/email_premium_expire_grace.txt', data) - html = render_to_string('mail/email_premium_expire_grace.xhtml', data) + user = self.user + data = dict(user=user, months_ago=months_ago) + text = render_to_string("mail/email_premium_expire_grace.txt", data) + html = render_to_string("mail/email_premium_expire_grace.xhtml", data) subject = "Your premium account on NewsBlur has one more month!" 
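Just above, send_premium_expire_grace_period_email clamps an already-lapsed premium_expire to now before mailing, so the grace period is measured from today rather than from a date far in the past. A tiny sketch of that clamp with hypothetical dates:

import datetime

now = datetime.datetime.now()
premium_expire = now - datetime.timedelta(days=90)  # hypothetical lapsed expiration
if premium_expire and premium_expire < now:
    premium_expire = now  # reset so the one-month grace starts from today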
- msg = EmailMultiAlternatives(subject, text, - from_email='NewsBlur <%s>' % settings.HELLO_EMAIL, - to=['%s <%s>' % (user, user.email)]) + msg = EmailMultiAlternatives( + subject, + text, + from_email="NewsBlur <%s>" % settings.HELLO_EMAIL, + to=["%s <%s>" % (user, user.email)], + ) msg.attach_alternative(html, "text/html") msg.send() - - MSentEmail.record(receiver_user_id=self.user.pk, email_type='premium_expire_grace') - logging.user(self.user, "~BB~FM~SBSending premium expire grace email for user: %s months, %s" % (months_ago, self.user.email)) - + + MSentEmail.record(receiver_user_id=self.user.pk, email_type="premium_expire_grace") + logging.user( + self.user, + "~BB~FM~SBSending premium expire grace email for user: %s months, %s" + % (months_ago, self.user.email), + ) + def send_premium_expire_email(self, force=False): if not self.user.email: logging.user(self.user, "~FM~SB~FRNot~FM sending premium expire for user: %s" % (self.user)) return - emails_sent = MSentEmail.objects.filter(receiver_user_id=self.user.pk, - email_type='premium_expire') + emails_sent = MSentEmail.objects.filter(receiver_user_id=self.user.pk, email_type="premium_expire") day_ago = datetime.datetime.now() - datetime.timedelta(days=360) for email in emails_sent: if email.date_sent > day_ago and not force: logging.user(self.user, "~FM~SBNot sending premium expire email, already sent before.") return - - delta = datetime.datetime.now() - self.last_seen_on + + delta = datetime.datetime.now() - self.last_seen_on months_ago = round(delta.days / 30) - user = self.user - data = dict(user=user, months_ago=months_ago) - text = render_to_string('mail/email_premium_expire.txt', data) - html = render_to_string('mail/email_premium_expire.xhtml', data) + user = self.user + data = dict(user=user, months_ago=months_ago) + text = render_to_string("mail/email_premium_expire.txt", data) + html = render_to_string("mail/email_premium_expire.xhtml", data) subject = "Your premium account on NewsBlur has expired" - msg = EmailMultiAlternatives(subject, text, - from_email='NewsBlur <%s>' % settings.HELLO_EMAIL, - to=['%s <%s>' % (user, user.email)]) + msg = EmailMultiAlternatives( + subject, + text, + from_email="NewsBlur <%s>" % settings.HELLO_EMAIL, + to=["%s <%s>" % (user, user.email)], + ) msg.attach_alternative(html, "text/html") msg.send() - - MSentEmail.record(receiver_user_id=self.user.pk, email_type='premium_expire') - logging.user(self.user, "~BB~FM~SBSending premium expire email for user: %s months, %s" % (months_ago, self.user.email)) - + + MSentEmail.record(receiver_user_id=self.user.pk, email_type="premium_expire") + logging.user( + self.user, + "~BB~FM~SBSending premium expire email for user: %s months, %s" % (months_ago, self.user.email), + ) + def autologin_url(self, next=None): - return reverse('autologin', kwargs={ - 'username': self.user.username, - 'secret': self.secret_token - }) + ('?' + next + '=1' if next else '') - - + return reverse("autologin", kwargs={"username": self.user.username, "secret": self.secret_token}) + ( + "?" 
+ next + "=1" if next else "" + ) + @classmethod def doublecheck_paypal_payments(cls, days=14): - payments = PayPalIPN.objects.filter(txn_type='subscr_payment', - updated_at__gte=datetime.datetime.now() - - datetime.timedelta(days) - ).order_by('-created_at') + payments = PayPalIPN.objects.filter( + txn_type="subscr_payment", updated_at__gte=datetime.datetime.now() - datetime.timedelta(days) + ).order_by("-created_at") for payment in payments: try: profile = Profile.objects.get(user__username=payment.custom) @@ -1662,10 +1896,10 @@ class Profile(models.Model): logging.debug(" ---> ~FRCouldn't find user: ~SB~FC%s" % payment.custom) continue profile.setup_premium_history() - + class StripeIds(models.Model): - user = models.ForeignKey(User, related_name='stripe_ids', on_delete=models.CASCADE, null=True) + user = models.ForeignKey(User, related_name="stripe_ids", on_delete=models.CASCADE, null=True) stripe_id = models.CharField(max_length=24, blank=True, null=True) def __str__(self): @@ -1673,18 +1907,20 @@ class StripeIds(models.Model): class PaypalIds(models.Model): - user = models.ForeignKey(User, related_name='paypal_ids', on_delete=models.CASCADE, null=True) + user = models.ForeignKey(User, related_name="paypal_ids", on_delete=models.CASCADE, null=True) paypal_sub_id = models.CharField(max_length=24, blank=True, null=True) def __str__(self): return "%s: %s" % (self.user.username, self.paypal_sub_id) - + def create_profile(sender, instance, created, **kwargs): if created: Profile.objects.create(user=instance) else: Profile.objects.get_or_create(user=instance) + + post_save.connect(create_profile, sender=User) @@ -1702,7 +1938,7 @@ def paypal_signup(sender, **kwargs): user = User.objects.get(email__iexact=ipn_obj.payer_email) except User.DoesNotExist: pass - + if not user and ipn_obj.custom: try: user = User.objects.get(pk=ipn_obj.custom) @@ -1716,9 +1952,10 @@ def paypal_signup(sender, **kwargs): pass if not user: - logging.debug(" ---> Paypal subscription not found during paypal_signup: %s/%s" % ( - ipn_obj.payer_email, - ipn_obj.custom)) + logging.debug( + " ---> Paypal subscription not found during paypal_signup: %s/%s" + % (ipn_obj.payer_email, ipn_obj.custom) + ) return {"code": -1, "message": "User doesn't exist."} logging.user(user, "~BC~SB~FBPaypal subscription signup") @@ -1733,8 +1970,11 @@ def paypal_signup(sender, **kwargs): # user.profile.cancel_premium_paypal(second_most_recent_only=True) # assert False, "Shouldn't be here anymore as the new Paypal REST API uses webhooks" + + valid_ipn_received.connect(paypal_signup) + def paypal_payment_history_sync(sender, **kwargs): ipn_obj = sender try: @@ -1743,9 +1983,10 @@ def paypal_payment_history_sync(sender, **kwargs): try: user = User.objects.get(email__iexact=ipn_obj.payer_email) except User.DoesNotExist: - logging.debug(" ---> Paypal subscription not found during flagging: %s/%s" % ( - ipn_obj.payer_email, - ipn_obj.custom)) + logging.debug( + " ---> Paypal subscription not found during flagging: %s/%s" + % (ipn_obj.payer_email, ipn_obj.custom) + ) return {"code": -1, "message": "User doesn't exist."} logging.user(user, "~BC~SB~FBPaypal subscription payment") @@ -1753,8 +1994,11 @@ def paypal_payment_history_sync(sender, **kwargs): user.profile.setup_premium_history() except: return {"code": -1, "message": "User doesn't exist."} + + valid_ipn_received.connect(paypal_payment_history_sync) + def paypal_payment_was_flagged(sender, **kwargs): ipn_obj = sender try: @@ -1763,27 +2007,31 @@ def paypal_payment_was_flagged(sender, 
**kwargs): try: user = User.objects.get(email__iexact=ipn_obj.payer_email) except User.DoesNotExist: - logging.debug(" ---> Paypal subscription not found during flagging: %s/%s" % ( - ipn_obj.payer_email, - ipn_obj.custom)) + logging.debug( + " ---> Paypal subscription not found during flagging: %s/%s" + % (ipn_obj.payer_email, ipn_obj.custom) + ) return {"code": -1, "message": "User doesn't exist."} - + try: user.profile.setup_premium_history() logging.user(user, "~BC~SB~FBPaypal subscription payment flagged") except: return {"code": -1, "message": "User doesn't exist."} + + invalid_ipn_received.connect(paypal_payment_was_flagged) + def stripe_checkout_session_completed(sender, full_json, **kwargs): - newsblur_user_id = full_json['data']['object']['metadata']['newsblur_user_id'] - stripe_id = full_json['data']['object']['customer'] + newsblur_user_id = full_json["data"]["object"]["metadata"]["newsblur_user_id"] + stripe_id = full_json["data"]["object"]["customer"] profile = None try: profile = Profile.objects.get(stripe_id=stripe_id) except Profile.DoesNotExist: pass - + if not profile: try: profile = User.objects.get(pk=int(newsblur_user_id)).profile @@ -1791,46 +2039,56 @@ def stripe_checkout_session_completed(sender, full_json, **kwargs): profile.save() except User.DoesNotExist: pass - + if profile: logging.user(profile.user, "~BC~SB~FBStripe checkout subscription signup") profile.retrieve_stripe_ids() else: logging.user(profile.user, "~BR~SB~FRCouldn't find Stripe user: ~FW%s" % full_json) return {"code": -1, "message": "User doesn't exist."} + + zebra_webhook_checkout_session_completed.connect(stripe_checkout_session_completed) + def stripe_signup(sender, full_json, **kwargs): - stripe_id = full_json['data']['object']['customer'] - plan_id = full_json['data']['object']['plan']['id'] + stripe_id = full_json["data"]["object"]["customer"] + plan_id = full_json["data"]["object"]["plan"]["id"] try: profile = Profile.objects.get(stripe_id=stripe_id) logging.user(profile.user, "~BC~SB~FBStripe subscription signup") - if plan_id == Profile.plan_to_stripe_price('premium'): + if plan_id == Profile.plan_to_stripe_price("premium"): profile.activate_premium() - elif plan_id == Profile.plan_to_stripe_price('archive'): + elif plan_id == Profile.plan_to_stripe_price("archive"): profile.activate_archive() - elif plan_id == Profile.plan_to_stripe_price('pro'): + elif plan_id == Profile.plan_to_stripe_price("pro"): profile.activate_pro() profile.cancel_premium_paypal() profile.retrieve_stripe_ids() except Profile.DoesNotExist: return {"code": -1, "message": "User doesn't exist."} + + zebra_webhook_customer_subscription_created.connect(stripe_signup) + def stripe_subscription_updated(sender, full_json, **kwargs): - stripe_id = full_json['data']['object']['customer'] - plan_id = full_json['data']['object']['plan']['id'] + stripe_id = full_json["data"]["object"]["customer"] + plan_id = full_json["data"]["object"]["plan"]["id"] try: profile = Profile.objects.get(stripe_id=stripe_id) - active = not full_json['data']['object']['cancel_at'] and full_json['data']['object']['plan']['active'] - logging.user(profile.user, "~BC~SB~FBStripe subscription updated: %s" % "active" if active else "cancelled") + active = ( + not full_json["data"]["object"]["cancel_at"] and full_json["data"]["object"]["plan"]["active"] + ) + logging.user( + profile.user, "~BC~SB~FBStripe subscription updated: %s" % "active" if active else "cancelled" + ) if active: - if plan_id == Profile.plan_to_stripe_price('premium'): + if plan_id == 
Profile.plan_to_stripe_price("premium"): profile.activate_premium() - elif plan_id == Profile.plan_to_stripe_price('archive'): + elif plan_id == Profile.plan_to_stripe_price("archive"): profile.activate_archive() - elif plan_id == Profile.plan_to_stripe_price('pro'): + elif plan_id == Profile.plan_to_stripe_price("pro"): profile.activate_pro() profile.cancel_premium_paypal() profile.retrieve_stripe_ids() @@ -1838,19 +2096,25 @@ def stripe_subscription_updated(sender, full_json, **kwargs): profile.setup_premium_history() except Profile.DoesNotExist: return {"code": -1, "message": "User doesn't exist."} + + zebra_webhook_customer_subscription_updated.connect(stripe_subscription_updated) + def stripe_payment_history_sync(sender, full_json, **kwargs): - stripe_id = full_json['data']['object']['customer'] + stripe_id = full_json["data"]["object"]["customer"] try: profile = Profile.objects.get(stripe_id=stripe_id) logging.user(profile.user, "~BC~SB~FBStripe subscription payment") profile.setup_premium_history() except Profile.DoesNotExist: - return {"code": -1, "message": "User doesn't exist."} + return {"code": -1, "message": "User doesn't exist."} + + zebra_webhook_charge_succeeded.connect(stripe_payment_history_sync) zebra_webhook_charge_refunded.connect(stripe_payment_history_sync) + def change_password(user, old_password, new_password, only_check=False): user_db = authenticate(username=user.username, password=old_password) if user_db is None: @@ -1860,7 +2124,7 @@ def change_password(user, old_password, new_password, only_check=False): user.save() if user_db is None: user_db = authenticate(username=user.username, password=user.username) - + if not user_db: return -1 else: @@ -1869,48 +2133,53 @@ def change_password(user, old_password, new_password, only_check=False): user_db.save() return 1 + def blank_authenticate(username, password=""): try: user = User.objects.get(username__iexact=username) except User.DoesNotExist: return - + if user.password == "!": return user - - algorithm, salt, hash = user.password.split('$', 2) - encoded_blank = hashlib.sha1((salt + password).encode(encoding='utf-8')).hexdigest() + + algorithm, salt, hash = user.password.split("$", 2) + encoded_blank = hashlib.sha1((salt + password).encode(encoding="utf-8")).hexdigest() encoded_username = authenticate(username=username, password=username) if encoded_blank == hash or encoded_username == user: return user + # Unfinished class MEmailUnsubscribe(mongo.Document): user_id = mongo.IntField() email_type = mongo.StringField() date = mongo.DateTimeField(default=datetime.datetime.now) - - EMAIL_TYPE_FOLLOWS = 'follows' - EMAIL_TYPE_REPLIES = 'replies' - EMAIL_TYOE_PRODUCT = 'product' - + + EMAIL_TYPE_FOLLOWS = "follows" + EMAIL_TYPE_REPLIES = "replies" + EMAIL_TYOE_PRODUCT = "product" + meta = { - 'collection': 'email_unsubscribes', - 'allow_inheritance': False, - 'indexes': ['user_id', - {'fields': ['user_id', 'email_type'], - 'unique': True, - }], + "collection": "email_unsubscribes", + "allow_inheritance": False, + "indexes": [ + "user_id", + { + "fields": ["user_id", "email_type"], + "unique": True, + }, + ], } - + def __str__(self): return "%s unsubscribed from %s on %s" % (self.user_id, self.email_type, self.date) - + @classmethod def user(cls, user_id): unsubs = cls.objects(user_id=user_id) return unsubs - + @classmethod def unsubscribe(cls, user_id, email_type): cls.objects.create() @@ -1921,13 +2190,13 @@ class MSentEmail(mongo.Document): receiver_user_id = mongo.IntField() email_type = mongo.StringField() date_sent 
= mongo.DateTimeField(default=datetime.datetime.now) - + meta = { - 'collection': 'sent_emails', - 'allow_inheritance': False, - 'indexes': ['sending_user_id', 'receiver_user_id', 'email_type'], + "collection": "sent_emails", + "allow_inheritance": False, + "indexes": ["sending_user_id", "receiver_user_id", "email_type"], } - + def __str__(self): sender_user = self.sending_user_id if sender_user: @@ -1935,61 +2204,83 @@ class MSentEmail(mongo.Document): receiver_user = self.receiver_user_id if receiver_user: receiver_user = User.objects.get(pk=self.receiver_user_id) - return "%s sent %s email to %s %s" % (sender_user, self.email_type, receiver_user, receiver_user.profile if receiver_user else receiver_user) - + return "%s sent %s email to %s %s" % ( + sender_user, + self.email_type, + receiver_user, + receiver_user.profile if receiver_user else receiver_user, + ) + @classmethod def record(cls, email_type, receiver_user_id, sending_user_id=None): - cls.objects.create(email_type=email_type, - receiver_user_id=receiver_user_id, - sending_user_id=sending_user_id) + cls.objects.create( + email_type=email_type, receiver_user_id=receiver_user_id, sending_user_id=sending_user_id + ) + class PaymentHistory(models.Model): - user = models.ForeignKey(User, related_name='payments', on_delete=models.CASCADE) + user = models.ForeignKey(User, related_name="payments", on_delete=models.CASCADE) payment_date = models.DateTimeField() payment_amount = models.IntegerField() payment_provider = models.CharField(max_length=20) payment_identifier = models.CharField(max_length=100, null=True) refunded = models.BooleanField(blank=True, null=True) - + def __str__(self): - return "[%s] $%s/%s %s" % (self.payment_date.strftime("%Y-%m-%d"), self.payment_amount, - self.payment_provider, "" if self.refunded else "") + return "[%s] $%s/%s %s" % ( + self.payment_date.strftime("%Y-%m-%d"), + self.payment_amount, + self.payment_provider, + "" if self.refunded else "", + ) + class Meta: - ordering = ['-payment_date'] - + ordering = ["-payment_date"] + def canonical(self): return { - 'payment_date': self.payment_date.strftime('%Y-%m-%d'), - 'payment_amount': self.payment_amount, - 'payment_provider': self.payment_provider, - 'refunded': self.refunded, + "payment_date": self.payment_date.strftime("%Y-%m-%d"), + "payment_amount": self.payment_amount, + "payment_provider": self.payment_provider, + "refunded": self.refunded, } - + @classmethod def report(cls, months=26): output = "" - + def _counter(start_date, end_date, output, payments=None): if not payments: - payments = PaymentHistory.objects.filter(payment_date__gte=start_date, payment_date__lte=end_date) - payments = payments.aggregate(avg=Avg('payment_amount'), - sum=Sum('payment_amount'), - count=Count('user')) + payments = PaymentHistory.objects.filter( + payment_date__gte=start_date, payment_date__lte=end_date + ) + payments = payments.aggregate( + avg=Avg("payment_amount"), sum=Sum("payment_amount"), count=Count("user") + ) output += "%s-%02d-%02d - %s-%02d-%02d:\t$%.2f\t$%-6s\t%-4s\n" % ( - start_date.year, start_date.month, start_date.day, - end_date.year, end_date.month, end_date.day, - round(payments['avg'] if payments['avg'] else 0, 2), payments['sum'] if payments['sum'] else 0, payments['count']) - + start_date.year, + start_date.month, + start_date.day, + end_date.year, + end_date.month, + end_date.day, + round(payments["avg"] if payments["avg"] else 0, 2), + payments["sum"] if payments["sum"] else 0, + payments["count"], + ) + return payments, output output += 
"\nMonthly Totals:\n" for m in reversed(list(range(months))): now = datetime.datetime.now() - start_date = datetime.datetime(now.year, now.month, 1) - dateutil.relativedelta.relativedelta(months=m) + start_date = datetime.datetime(now.year, now.month, 1) - dateutil.relativedelta.relativedelta( + months=m + ) end_time = start_date + datetime.timedelta(days=31) end_date = datetime.datetime(end_time.year, end_time.month, 1) - datetime.timedelta(seconds=1) total, output = _counter(start_date, end_date, output) - total = total['sum'] + total = total["sum"] output += "\nMTD Totals:\n" years = datetime.datetime.now().year - 2009 @@ -2001,18 +2292,21 @@ class PaymentHistory(models.Model): this_mtd_count = 0 for y in reversed(list(range(years))): now = datetime.datetime.now() - start_date = datetime.datetime(now.year, now.month, 1) - dateutil.relativedelta.relativedelta(years=y) + start_date = datetime.datetime(now.year, now.month, 1) - dateutil.relativedelta.relativedelta( + years=y + ) end_date = now - dateutil.relativedelta.relativedelta(years=y) - if end_date > now: end_date = now + if end_date > now: + end_date = now count, output = _counter(start_date, end_date, output) if end_date.year != now.year: - last_mtd_avg = count['avg'] or 0 - last_mtd_sum = count['sum'] or 0 - last_mtd_count = count['count'] + last_mtd_avg = count["avg"] or 0 + last_mtd_sum = count["sum"] or 0 + last_mtd_count = count["count"] else: - this_mtd_avg = count['avg'] or 0 - this_mtd_sum = count['sum'] or 0 - this_mtd_count = count['count'] + this_mtd_avg = count["avg"] or 0 + this_mtd_sum = count["sum"] or 0 + this_mtd_count = count["count"] output += "\nCurrent Month Totals:\n" years = datetime.datetime.now().year - 2009 @@ -2021,19 +2315,25 @@ class PaymentHistory(models.Model): last_month_count = 0 for y in reversed(list(range(years))): now = datetime.datetime.now() - start_date = datetime.datetime(now.year, now.month, 1) - dateutil.relativedelta.relativedelta(years=y) + start_date = datetime.datetime(now.year, now.month, 1) - dateutil.relativedelta.relativedelta( + years=y + ) end_time = start_date + datetime.timedelta(days=31) end_date = datetime.datetime(end_time.year, end_time.month, 1) - datetime.timedelta(seconds=1) if end_date > now: - payments = {'avg': this_mtd_avg / (max(1, last_mtd_avg) / float(max(1, last_month_avg))), - 'sum': int(round(this_mtd_sum / (max(1, last_mtd_sum) / float(max(1, last_month_sum))))), - 'count': int(round(this_mtd_count / (max(1, last_mtd_count) / float(max(1, last_month_count)))))} + payments = { + "avg": this_mtd_avg / (max(1, last_mtd_avg) / float(max(1, last_month_avg))), + "sum": int(round(this_mtd_sum / (max(1, last_mtd_sum) / float(max(1, last_month_sum))))), + "count": int( + round(this_mtd_count / (max(1, last_mtd_count) / float(max(1, last_month_count)))) + ), + } _, output = _counter(start_date, end_date, output, payments=payments) else: count, output = _counter(start_date, end_date, output) - last_month_avg = count['avg'] - last_month_sum = count['sum'] - last_month_count = count['count'] + last_month_avg = count["avg"] + last_month_sum = count["sum"] + last_month_count = count["count"] output += "\nYTD Totals:\n" years = datetime.datetime.now().year - 2009 @@ -2049,13 +2349,13 @@ class PaymentHistory(models.Model): end_date = now - dateutil.relativedelta.relativedelta(years=y) count, output = _counter(start_date, end_date, output) if end_date.year != now.year: - last_ytd_avg = count['avg'] or 0 - last_ytd_sum = count['sum'] or 0 - last_ytd_count = count['count'] + 
last_ytd_avg = count["avg"] or 0 + last_ytd_sum = count["sum"] or 0 + last_ytd_count = count["count"] else: - this_ytd_avg = count['avg'] or 0 - this_ytd_sum = count['sum'] or 0 - this_ytd_count = count['count'] + this_ytd_avg = count["avg"] or 0 + this_ytd_sum = count["sum"] or 0 + this_ytd_count = count["count"] output += "\nYearly Totals:\n" years = datetime.datetime.now().year - 2009 @@ -2066,26 +2366,33 @@ class PaymentHistory(models.Model): for y in reversed(list(range(years))): now = datetime.datetime.now() start_date = datetime.datetime(now.year, 1, 1) - dateutil.relativedelta.relativedelta(years=y) - end_date = datetime.datetime(now.year, 1, 1) - dateutil.relativedelta.relativedelta(years=y-1) - datetime.timedelta(seconds=1) + end_date = ( + datetime.datetime(now.year, 1, 1) + - dateutil.relativedelta.relativedelta(years=y - 1) + - datetime.timedelta(seconds=1) + ) if end_date > now: - payments = {'avg': this_ytd_avg / (max(1, last_ytd_avg) / float(max(1, last_year_avg))), - 'sum': int(round(this_ytd_sum / (max(1, last_ytd_sum) / float(max(1, last_year_sum))))), - 'count': int(round(this_ytd_count / (max(1, last_ytd_count) / float(max(1, last_year_count)))))} + payments = { + "avg": this_ytd_avg / (max(1, last_ytd_avg) / float(max(1, last_year_avg))), + "sum": int(round(this_ytd_sum / (max(1, last_ytd_sum) / float(max(1, last_year_sum))))), + "count": int( + round(this_ytd_count / (max(1, last_ytd_count) / float(max(1, last_year_count)))) + ), + } count, output = _counter(start_date, end_date, output, payments=payments) - annual = count['sum'] + annual = count["sum"] else: count, output = _counter(start_date, end_date, output) - last_year_avg = count['avg'] or 0 - last_year_sum = count['sum'] or 0 - last_year_count = count['count'] - + last_year_avg = count["avg"] or 0 + last_year_sum = count["sum"] or 0 + last_year_count = count["count"] + + total = cls.objects.all().aggregate(sum=Sum("payment_amount")) + output += "\nTotal: $%s\n" % total["sum"] - total = cls.objects.all().aggregate(sum=Sum('payment_amount')) - output += "\nTotal: $%s\n" % total['sum'] - print(output) - - return {'annual': annual, 'output': output} + + return {"annual": annual, "output": output} class MGiftCode(mongo.Document): @@ -2095,108 +2402,124 @@ class MGiftCode(mongo.Document): duration_days = mongo.IntField() payment_amount = mongo.IntField() created_date = mongo.DateTimeField(default=datetime.datetime.now) - + meta = { - 'collection': 'gift_codes', - 'allow_inheritance': False, - 'indexes': ['gifting_user_id', 'receiving_user_id', 'created_date'], + "collection": "gift_codes", + "allow_inheritance": False, + "indexes": ["gifting_user_id", "receiving_user_id", "created_date"], } - + def __str__(self): - return "%s gifted %s on %s: %s (redeemed %s times)" % (self.gifting_user_id, self.receiving_user_id, self.created_date, self.gift_code, self.redeemed) - + return "%s gifted %s on %s: %s (redeemed %s times)" % ( + self.gifting_user_id, + self.receiving_user_id, + self.created_date, + self.gift_code, + self.redeemed, + ) + @property def redeemed(self): redeemed_code = MRedeemedCode.objects.filter(gift_code=self.gift_code) return len(redeemed_code) - + @staticmethod def create_code(gift_code=None): u = str(uuid.uuid4()) code = u[:8] + u[9:13] if gift_code: - code = gift_code + code[len(gift_code):] + code = gift_code + code[len(gift_code) :] return code - + @classmethod def add(cls, gift_code=None, duration=0, gifting_user_id=None, receiving_user_id=None, payment=0): - return 
cls.objects.create(gift_code=cls.create_code(gift_code), - gifting_user_id=gifting_user_id, - receiving_user_id=receiving_user_id, - duration_days=duration, - payment_amount=payment) + return cls.objects.create( + gift_code=cls.create_code(gift_code), + gifting_user_id=gifting_user_id, + receiving_user_id=receiving_user_id, + duration_days=duration, + payment_amount=payment, + ) class MRedeemedCode(mongo.Document): user_id = mongo.IntField() gift_code = mongo.StringField() redeemed_date = mongo.DateTimeField(default=datetime.datetime.now) - + meta = { - 'collection': 'redeemed_codes', - 'allow_inheritance': False, - 'indexes': ['user_id', 'gift_code', 'redeemed_date'], + "collection": "redeemed_codes", + "allow_inheritance": False, + "indexes": ["user_id", "gift_code", "redeemed_date"], } - + def __str__(self): return "%s redeemed %s on %s" % (self.user_id, self.gift_code, self.redeemed_date) - + @classmethod def record(cls, user_id, gift_code): - cls.objects.create(user_id=user_id, - gift_code=gift_code) + cls.objects.create(user_id=user_id, gift_code=gift_code) + @classmethod def redeem(cls, user, gift_code): newsblur_gift_code = MGiftCode.objects.filter(gift_code__iexact=gift_code) if newsblur_gift_code: newsblur_gift_code = newsblur_gift_code[0] - PaymentHistory.objects.create(user=user, - payment_date=datetime.datetime.now(), - payment_amount=newsblur_gift_code.payment_amount, - payment_provider='newsblur-gift') - + PaymentHistory.objects.create( + user=user, + payment_date=datetime.datetime.now(), + payment_amount=newsblur_gift_code.payment_amount, + payment_provider="newsblur-gift", + ) + else: # Thinkup / Good Web Bundle - PaymentHistory.objects.create(user=user, - payment_date=datetime.datetime.now(), - payment_amount=12, - payment_provider='good-web-bundle') + PaymentHistory.objects.create( + user=user, + payment_date=datetime.datetime.now(), + payment_amount=12, + payment_provider="good-web-bundle", + ) cls.record(user.pk, gift_code) user.profile.activate_premium() logging.user(user, "~FG~BBRedeeming gift code: %s~FW" % gift_code) - + class MCustomStyling(mongo.Document): user_id = mongo.IntField(unique=True) custom_css = mongo.StringField() custom_js = mongo.StringField() updated_date = mongo.DateTimeField(default=datetime.datetime.now) - + meta = { - 'collection': 'custom_styling', - 'allow_inheritance': False, - 'indexes': ['user_id'], + "collection": "custom_styling", + "allow_inheritance": False, + "indexes": ["user_id"], } - + def __str__(self): - return "%s custom style %s/%s %s" % (self.user_id, len(self.custom_css) if self.custom_css else "-", - len(self.custom_js) if self.custom_js else "-", self.updated_date) - + return "%s custom style %s/%s %s" % ( + self.user_id, + len(self.custom_css) if self.custom_css else "-", + len(self.custom_js) if self.custom_js else "-", + self.updated_date, + ) + def canonical(self): return { - 'css': self.custom_css, - 'js': self.custom_js, + "css": self.custom_css, + "js": self.custom_js, } - + @classmethod def get_user(cls, user_id): try: styling = cls.objects.get(user_id=user_id) except cls.DoesNotExist: return None - + return styling - + @classmethod def save_user(cls, user_id, css, js): styling = cls.get_user(user_id) @@ -2220,13 +2543,16 @@ class MDashboardRiver(mongo.Document): river_order = mongo.IntField() meta = { - 'collection': 'dashboard_river', - 'allow_inheritance': False, - 'indexes': ['user_id', - {'fields': ['user_id', 'river_id', 'river_side', 'river_order'], - 'unique': True, - }], - 'ordering': ['river_order'] + 
"collection": "dashboard_river", + "allow_inheritance": False, + "indexes": [ + "user_id", + { + "fields": ["user_id", "river_id", "river_side", "river_order"], + "unique": True, + }, + ], + "ordering": ["river_order"], } def __str__(self): @@ -2235,14 +2561,14 @@ class MDashboardRiver(mongo.Document): except User.DoesNotExist: u = "" return f"{u} ({self.river_side}/{self.river_order}): {self.river_id}" - + def canonical(self): return { - 'river_id': self.river_id, - 'river_side': self.river_side, - 'river_order': self.river_order, + "river_id": self.river_id, + "river_side": self.river_side, + "river_order": self.river_order, } - + @classmethod def get_user_rivers(cls, user_id): return cls.objects(user_id=user_id) @@ -2270,59 +2596,67 @@ class MDashboardRiver(mongo.Document): river = None if not river: - river = cls.objects.create(user_id=user_id, river_id=river_id, - river_side=river_side, river_order=river_order) + river = cls.objects.create( + user_id=user_id, river_id=river_id, river_side=river_side, river_order=river_order + ) river.river_id = river_id river.river_side = river_side river.river_order = river_order river.save() + class RNewUserQueue: - KEY = "new_user_queue" - + @classmethod def activate_next(cls): count = cls.user_count() if not count: return - + user_id = cls.pop_user() try: user = User.objects.get(pk=user_id) except User.DoesNotExist: - logging.debug("~FRCan't activate free account, can't find user ~SB%s~SN. ~FB%s still in queue." % (user_id, count-1)) + logging.debug( + "~FRCan't activate free account, can't find user ~SB%s~SN. ~FB%s still in queue." + % (user_id, count - 1) + ) return - - logging.user(user, "~FBActivating free account (%s / %s). %s still in queue." % (user.email, user.profile.last_seen_ip, (count-1))) + + logging.user( + user, + "~FBActivating free account (%s / %s). %s still in queue." 
+ % (user.email, user.profile.last_seen_ip, (count - 1)), + ) user.profile.activate_free() - + @classmethod def activate_all(cls): count = cls.user_count() if not count: logging.debug("~FBNo users to activate, sleeping...") return - + for i in range(count): cls.activate_next() - + @classmethod def add_user(cls, user_id): r = redis.Redis(connection_pool=settings.REDIS_FEED_UPDATE_POOL) now = time.time() - - r.zadd(cls.KEY, { user_id: now }) - + + r.zadd(cls.KEY, {user_id: now}) + @classmethod def user_count(cls): r = redis.Redis(connection_pool=settings.REDIS_FEED_UPDATE_POOL) count = r.zcard(cls.KEY) return count - + @classmethod def user_position(cls, user_id): r = redis.Redis(connection_pool=settings.REDIS_FEED_UPDATE_POOL) @@ -2331,7 +2665,7 @@ class RNewUserQueue: return -1 if position >= 0: return position + 1 - + @classmethod def pop_user(cls): r = redis.Redis(connection_pool=settings.REDIS_FEED_UPDATE_POOL) @@ -2339,4 +2673,3 @@ class RNewUserQueue: r.zrem(cls.KEY, user) return user - diff --git a/apps/profile/tasks.py b/apps/profile/tasks.py index 9b8f7fd4b..2179f0125 100644 --- a/apps/profile/tasks.py +++ b/apps/profile/tasks.py @@ -5,16 +5,19 @@ from utils import log as logging from apps.reader.models import UserSubscription, UserSubscriptionFolders from apps.social.models import MSocialServices, MActivity, MInteraction + @app.task(name="email-new-user") def EmailNewUser(user_id): user_profile = Profile.objects.get(user__pk=user_id) user_profile.send_new_user_email() + @app.task(name="email-new-premium") def EmailNewPremium(user_id): user_profile = Profile.objects.get(user__pk=user_id) user_profile.send_new_premium_email() + @app.task() def FetchArchiveFeedsForUser(user_id): # subs = UserSubscription.objects.filter(user=user_id) @@ -23,33 +26,39 @@ def FetchArchiveFeedsForUser(user_id): UserSubscription.fetch_archive_feeds_for_user(user_id) + @app.task() def FetchArchiveFeedsChunk(user_id, feed_ids): # logging.debug(" ---> Fetching archive stories: %s for %s" % (feed_ids, user_id)) UserSubscription.fetch_archive_feeds_chunk(user_id, feed_ids) + @app.task() def FinishFetchArchiveFeeds(results, user_id, start_time, starting_story_count): # logging.debug(" ---> Fetching archive stories finished for %s" % (user_id)) - ending_story_count, pre_archive_count = UserSubscription.finish_fetch_archive_feeds(user_id, start_time, starting_story_count) + ending_story_count, pre_archive_count = UserSubscription.finish_fetch_archive_feeds( + user_id, start_time, starting_story_count + ) user_profile = Profile.objects.get(user__pk=user_id) user_profile.send_new_premium_archive_email(ending_story_count, pre_archive_count) + @app.task(name="email-new-premium-pro") def EmailNewPremiumPro(user_id): user_profile = Profile.objects.get(user__pk=user_id) user_profile.send_new_premium_pro_email() + @app.task(name="premium-expire") def PremiumExpire(**kwargs): # Get expired but grace period users two_days_ago = datetime.datetime.now() - datetime.timedelta(days=2) thirty_days_ago = datetime.datetime.now() - datetime.timedelta(days=30) - expired_profiles = Profile.objects.filter(is_premium=True, - premium_expire__lte=two_days_ago, - premium_expire__gt=thirty_days_ago) + expired_profiles = Profile.objects.filter( + is_premium=True, premium_expire__lte=two_days_ago, premium_expire__gt=thirty_days_ago + ) logging.debug(" ---> %s users have expired premiums, emailing grace..." 
% expired_profiles.count()) for profile in expired_profiles: if profile.grace_period_email_sent(): @@ -57,21 +66,24 @@ def PremiumExpire(**kwargs): profile.setup_premium_history() if profile.premium_expire < two_days_ago: profile.send_premium_expire_grace_period_email() - + # Get fully expired users - expired_profiles = Profile.objects.filter(is_premium=True, - premium_expire__lte=thirty_days_ago) - logging.debug(" ---> %s users have expired premiums, deactivating and emailing..." % expired_profiles.count()) + expired_profiles = Profile.objects.filter(is_premium=True, premium_expire__lte=thirty_days_ago) + logging.debug( + " ---> %s users have expired premiums, deactivating and emailing..." % expired_profiles.count() + ) for profile in expired_profiles: profile.setup_premium_history() if profile.premium_expire < thirty_days_ago: profile.send_premium_expire_email() profile.deactivate_premium() + @app.task(name="activate-next-new-user") def ActivateNextNewUser(): RNewUserQueue.activate_next() + @app.task(name="cleanup-user") def CleanupUser(user_id): UserSubscription.trim_user_read_stories(user_id) @@ -82,7 +94,7 @@ def CleanupUser(user_id): UserSubscriptionFolders.add_missing_feeds_for_user(user_id) UserSubscriptionFolders.compact_for_user(user_id) UserSubscription.refresh_stale_feeds(user_id) - + try: ss = MSocialServices.objects.get(user_id=user_id) except MSocialServices.DoesNotExist: @@ -90,14 +102,14 @@ def CleanupUser(user_id): return ss.sync_twitter_photo() + @app.task(name="clean-spam") def CleanSpam(): logging.debug(" ---> Finding spammers...") Profile.clear_dead_spammers(confirm=True) + @app.task(name="reimport-stripe-history") def ReimportStripeHistory(): logging.debug(" ---> Reimporting Stripe history...") Profile.reimport_stripe_history(limit=10, days=1) - - diff --git a/apps/profile/test_profile.py b/apps/profile/test_profile.py index d35afcf3b..15a1a9dd2 100644 --- a/apps/profile/test_profile.py +++ b/apps/profile/test_profile.py @@ -5,33 +5,36 @@ from django.urls import reverse from django.conf import settings from mongoengine.connection import connect, disconnect + class Test_Profile(TestCase): fixtures = [ - 'subscriptions.json', - 'rss_feeds.json', + "subscriptions.json", + "rss_feeds.json", ] - + def setUp(self): disconnect() - settings.MONGODB = connect('test_newsblur') - self.client = Client(HTTP_USER_AGENT='Mozilla/5.0') + settings.MONGODB = connect("test_newsblur") + self.client = Client(HTTP_USER_AGENT="Mozilla/5.0") def tearDown(self): - settings.MONGODB.drop_database('test_newsblur') - - def test_create_account(self): - resp = self.client.get(reverse('load-feeds')) - response = json.decode(resp.content) - self.assertEquals(response['authenticated'], False) + settings.MONGODB.drop_database("test_newsblur") - response = self.client.post(reverse('welcome-signup'), { - 'signup-username': 'test', - 'signup-password': 'password', - 'signup-email': 'test@newsblur.com', - }) + def test_create_account(self): + resp = self.client.get(reverse("load-feeds")) + response = json.decode(resp.content) + self.assertEquals(response["authenticated"], False) + + response = self.client.post( + reverse("welcome-signup"), + { + "signup-username": "test", + "signup-password": "password", + "signup-email": "test@newsblur.com", + }, + ) self.assertEquals(response.status_code, 302) - resp = self.client.get(reverse('load-feeds')) + resp = self.client.get(reverse("load-feeds")) response = json.decode(resp.content) - self.assertEquals(response['authenticated'], True) - \ No newline at end of 
file + self.assertEquals(response["authenticated"], True) diff --git a/apps/profile/urls.py b/apps/profile/urls.py index cc264e4e4..57f7cc519 100644 --- a/apps/profile/urls.py +++ b/apps/profile/urls.py @@ -2,41 +2,45 @@ from django.conf.urls import * from apps.profile import views urlpatterns = [ - url(r'^get_preferences?/?', views.get_preference), - url(r'^set_preference/?', views.set_preference), - url(r'^set_account_settings/?', views.set_account_settings), - url(r'^get_view_setting/?', views.get_view_setting), - url(r'^set_view_setting/?', views.set_view_setting), - url(r'^clear_view_setting/?', views.clear_view_setting), - url(r'^set_collapsed_folders/?', views.set_collapsed_folders), - url(r'^paypal_form/?', views.paypal_form), - url(r'^paypal_return/?', views.paypal_return, name='paypal-return'), - url(r'^paypal_archive_return/?', views.paypal_archive_return, name='paypal-archive-return'), - url(r'^stripe_return/?', views.paypal_return, name='stripe-return'), - url(r'^switch_stripe_subscription/?', views.switch_stripe_subscription, name='switch-stripe-subscription'), - url(r'^switch_paypal_subscription/?', views.switch_paypal_subscription, name='switch-paypal-subscription'), - url(r'^is_premium/?', views.profile_is_premium, name='profile-is-premium'), - url(r'^is_premium_archive/?', views.profile_is_premium_archive, name='profile-is-premium-archive'), + url(r"^get_preferences?/?", views.get_preference), + url(r"^set_preference/?", views.set_preference), + url(r"^set_account_settings/?", views.set_account_settings), + url(r"^get_view_setting/?", views.get_view_setting), + url(r"^set_view_setting/?", views.set_view_setting), + url(r"^clear_view_setting/?", views.clear_view_setting), + url(r"^set_collapsed_folders/?", views.set_collapsed_folders), + url(r"^paypal_form/?", views.paypal_form), + url(r"^paypal_return/?", views.paypal_return, name="paypal-return"), + url(r"^paypal_archive_return/?", views.paypal_archive_return, name="paypal-archive-return"), + url(r"^stripe_return/?", views.paypal_return, name="stripe-return"), + url( + r"^switch_stripe_subscription/?", views.switch_stripe_subscription, name="switch-stripe-subscription" + ), + url( + r"^switch_paypal_subscription/?", views.switch_paypal_subscription, name="switch-paypal-subscription" + ), + url(r"^is_premium/?", views.profile_is_premium, name="profile-is-premium"), + url(r"^is_premium_archive/?", views.profile_is_premium_archive, name="profile-is-premium-archive"), # url(r'^paypal_ipn/?', include('paypal.standard.ipn.urls'), name='paypal-ipn'), - url(r'^paypal_ipn/?', views.paypal_ipn, name='paypal-ipn'), - url(r'^paypal_webhooks/?', views.paypal_webhooks, name='paypal-webhooks'), - url(r'^stripe_form/?', views.stripe_form, name='stripe-form'), - url(r'^stripe_checkout/?', views.stripe_checkout, name='stripe-checkout'), - url(r'^activities/?', views.load_activities, name='profile-activities'), - url(r'^payment_history/?', views.payment_history, name='profile-payment-history'), - url(r'^cancel_premium/?', views.cancel_premium, name='profile-cancel-premium'), - url(r'^refund_premium/?', views.refund_premium, name='profile-refund-premium'), - url(r'^never_expire_premium/?', views.never_expire_premium, name='profile-never-expire-premium'), - url(r'^upgrade_premium/?', views.upgrade_premium, name='profile-upgrade-premium'), - url(r'^save_ios_receipt/?', views.save_ios_receipt, name='save-ios-receipt'), - url(r'^save_android_receipt/?', views.save_android_receipt, name='save-android-receipt'), - 
url(r'^update_payment_history/?', views.update_payment_history, name='profile-update-payment-history'), - url(r'^delete_account/?', views.delete_account, name='profile-delete-account'), - url(r'^forgot_password_return/?', views.forgot_password_return, name='profile-forgot-password-return'), - url(r'^forgot_password/?', views.forgot_password, name='profile-forgot-password'), - url(r'^delete_starred_stories/?', views.delete_starred_stories, name='profile-delete-starred-stories'), - url(r'^delete_all_sites/?', views.delete_all_sites, name='profile-delete-all-sites'), - url(r'^email_optout/?', views.email_optout, name='profile-email-optout'), - url(r'^ios_subscription_status/?', views.ios_subscription_status, name='profile-ios-subscription-status'), - url(r'debug/?', views.trigger_error, name='trigger-error'), + url(r"^paypal_ipn/?", views.paypal_ipn, name="paypal-ipn"), + url(r"^paypal_webhooks/?", views.paypal_webhooks, name="paypal-webhooks"), + url(r"^stripe_form/?", views.stripe_form, name="stripe-form"), + url(r"^stripe_checkout/?", views.stripe_checkout, name="stripe-checkout"), + url(r"^activities/?", views.load_activities, name="profile-activities"), + url(r"^payment_history/?", views.payment_history, name="profile-payment-history"), + url(r"^cancel_premium/?", views.cancel_premium, name="profile-cancel-premium"), + url(r"^refund_premium/?", views.refund_premium, name="profile-refund-premium"), + url(r"^never_expire_premium/?", views.never_expire_premium, name="profile-never-expire-premium"), + url(r"^upgrade_premium/?", views.upgrade_premium, name="profile-upgrade-premium"), + url(r"^save_ios_receipt/?", views.save_ios_receipt, name="save-ios-receipt"), + url(r"^save_android_receipt/?", views.save_android_receipt, name="save-android-receipt"), + url(r"^update_payment_history/?", views.update_payment_history, name="profile-update-payment-history"), + url(r"^delete_account/?", views.delete_account, name="profile-delete-account"), + url(r"^forgot_password_return/?", views.forgot_password_return, name="profile-forgot-password-return"), + url(r"^forgot_password/?", views.forgot_password, name="profile-forgot-password"), + url(r"^delete_starred_stories/?", views.delete_starred_stories, name="profile-delete-starred-stories"), + url(r"^delete_all_sites/?", views.delete_all_sites, name="profile-delete-all-sites"), + url(r"^email_optout/?", views.email_optout, name="profile-email-optout"), + url(r"^ios_subscription_status/?", views.ios_subscription_status, name="profile-ios-subscription-status"), + url(r"debug/?", views.trigger_error, name="trigger-error"), ] diff --git a/apps/profile/views.py b/apps/profile/views.py index 4700aa659..34faa2eb7 100644 --- a/apps/profile/views.py +++ b/apps/profile/views.py @@ -36,66 +36,84 @@ from vendor.paypalapi.exceptions import PayPalAPIResponseError from paypal.standard.forms import PayPalPaymentsForm from paypal.standard.ipn.views import ipn as paypal_standard_ipn -INTEGER_FIELD_PREFS = ('feed_pane_size', 'days_of_unread') -SINGLE_FIELD_PREFS = ('timezone','hide_mobile','send_emails', - 'hide_getting_started', 'has_setup_feeds', 'has_found_friends', - 'has_trained_intelligence') -SPECIAL_PREFERENCES = ('old_password', 'new_password', 'autofollow_friends', 'dashboard_date',) +INTEGER_FIELD_PREFS = ("feed_pane_size", "days_of_unread") +SINGLE_FIELD_PREFS = ( + "timezone", + "hide_mobile", + "send_emails", + "hide_getting_started", + "has_setup_feeds", + "has_found_friends", + "has_trained_intelligence", +) +SPECIAL_PREFERENCES = ( + "old_password", + 
"new_password", + "autofollow_friends", + "dashboard_date", +) + @ajax_login_required @require_POST @json.json_view def set_preference(request): code = 1 - message = '' + message = "" new_preferences = request.POST - + preferences = json.decode(request.user.profile.preferences) for preference_name, preference_value in list(new_preferences.items()): - if preference_value in ['true','false']: preference_value = True if preference_value == 'true' else False + if preference_value in ["true", "false"]: + preference_value = True if preference_value == "true" else False if preference_name in SINGLE_FIELD_PREFS: setattr(request.user.profile, preference_name, preference_value) elif preference_name in INTEGER_FIELD_PREFS: - if preference_name == "days_of_unread" and int(preference_value) != request.user.profile.days_of_unread: + if ( + preference_name == "days_of_unread" + and int(preference_value) != request.user.profile.days_of_unread + ): UserSubscription.all_subs_needs_unread_recalc(request.user.pk) setattr(request.user.profile, preference_name, int(preference_value)) if preference_name in preferences: del preferences[preference_name] elif preference_name in SPECIAL_PREFERENCES: - if preference_name == 'autofollow_friends': + if preference_name == "autofollow_friends": social_services = MSocialServices.get_user(request.user.pk) social_services.autofollow = preference_value social_services.save() - elif preference_name == 'dashboard_date': + elif preference_name == "dashboard_date": request.user.profile.dashboard_date = datetime.datetime.utcnow() else: if preference_value in ["true", "false"]: preference_value = True if preference_value == "true" else False preferences[preference_name] = preference_value - if preference_name == 'intro_page': + if preference_name == "intro_page": logging.user(request, "~FBAdvancing intro to page ~FM~SB%s" % preference_value) - + request.user.profile.preferences = json.encode(preferences) request.user.profile.save() - + logging.user(request, "~FMSaving preference: %s" % new_preferences) response = dict(code=code, message=message, new_preferences=new_preferences) return response + @ajax_login_required @json.json_view def get_preference(request): code = 1 - preference_name = request.POST.get('preference') + preference_name = request.POST.get("preference") preferences = json.decode(request.user.profile.preferences) - + payload = preferences if preference_name: payload = preferences.get(preference_name) - + response = dict(code=code, payload=payload) return response + @csrf_protect def login(request): form = LoginForm() @@ -103,74 +121,80 @@ def login(request): if request.method == "POST": form = LoginForm(data=request.POST) if form.is_valid(): - login_user(request, form.get_user(), backend='django.contrib.auth.backends.ModelBackend') + login_user(request, form.get_user(), backend="django.contrib.auth.backends.ModelBackend") logging.user(form.get_user(), "~FG~BBOAuth Login~FW") - return HttpResponseRedirect(request.POST['next'] or reverse('index')) + return HttpResponseRedirect(request.POST["next"] or reverse("index")) + + return render( + request, + "accounts/login.html", + {"form": form, "next": request.POST.get("next", "") or request.GET.get("next", "")}, + ) + - return render(request, 'accounts/login.html', { - 'form': form, - 'next': request.POST.get('next', "") or request.GET.get('next', "") - }) - @csrf_exempt def signup(request): form = SignupForm(prefix="signup") - recaptcha = request.POST.get('g-recaptcha-response', None) + recaptcha = 
request.POST.get("g-recaptcha-response", None) recaptcha_error = None - + if settings.ENFORCE_SIGNUP_CAPTCHA: if not recaptcha: - recaptcha_error = "Please hit the \"I'm not a robot\" button." + recaptcha_error = 'Please hit the "I\'m not a robot" button.' else: - response = requests.post('https://www.google.com/recaptcha/api/siteverify', { - 'secret': settings.RECAPTCHA_SECRET_KEY, - 'response': recaptcha, - }) + response = requests.post( + "https://www.google.com/recaptcha/api/siteverify", + { + "secret": settings.RECAPTCHA_SECRET_KEY, + "response": recaptcha, + }, + ) result = response.json() - if not result['success']: - recaptcha_error = "Really, please hit the \"I'm not a robot\" button." + if not result["success"]: + recaptcha_error = 'Really, please hit the "I\'m not a robot" button.' if request.method == "POST": form = SignupForm(data=request.POST, prefix="signup") if form.is_valid() and not recaptcha_error: new_user = form.save() - login_user(request, new_user, backend='django.contrib.auth.backends.ModelBackend') + login_user(request, new_user, backend="django.contrib.auth.backends.ModelBackend") logging.user(new_user, "~FG~SB~BBNEW SIGNUP: ~FW%s" % new_user.email) new_user.profile.activate_free() - return HttpResponseRedirect(request.POST['next'] or reverse('index')) + return HttpResponseRedirect(request.POST["next"] or reverse("index")) + + return render( + request, + "accounts/signup.html", + {"form": form, "recaptcha_error": recaptcha_error, "next": request.POST.get("next", "")}, + ) - return render(request, 'accounts/signup.html', { - 'form': form, - 'recaptcha_error': recaptcha_error, - 'next': request.POST.get('next', "") - }) @login_required @csrf_protect def redeem_code(request): - code = request.GET.get('code', None) - form = RedeemCodeForm(initial={'gift_code': code}) + code = request.GET.get("code", None) + form = RedeemCodeForm(initial={"gift_code": code}) if request.method == "POST": form = RedeemCodeForm(data=request.POST) if form.is_valid(): - gift_code = request.POST['gift_code'] + gift_code = request.POST["gift_code"] MRedeemedCode.redeem(user=request.user, gift_code=gift_code) - return render(request, 'reader/paypal_return.xhtml') + return render(request, "reader/paypal_return.xhtml") + + return render( + request, + "accounts/redeem_code.html", + {"form": form, "code": request.POST.get("code", ""), "next": request.POST.get("next", "")}, + ) - return render(request, 'accounts/redeem_code.html', { - 'form': form, - 'code': request.POST.get('code', ""), - 'next': request.POST.get('next', "") - }) - @ajax_login_required @require_POST @json.json_view def set_account_settings(request): code = -1 - message = 'OK' + message = "OK" form = AccountSettingsForm(user=request.user, data=request.POST) if form.is_valid(): @@ -178,100 +202,113 @@ def set_account_settings(request): code = 1 else: message = form.errors[list(form.errors.keys())[0]][0] - + payload = { "username": request.user.username, "email": request.user.email, - "social_profile": MSocialProfile.profile(request.user.pk) + "social_profile": MSocialProfile.profile(request.user.pk), } return dict(code=code, message=message, payload=payload) - + + @ajax_login_required @require_POST @json.json_view def set_view_setting(request): code = 1 - feed_id = request.POST['feed_id'] - feed_view_setting = request.POST.get('feed_view_setting') - feed_order_setting = request.POST.get('feed_order_setting') - feed_read_filter_setting = request.POST.get('feed_read_filter_setting') - feed_layout_setting = 
request.POST.get('feed_layout_setting') - feed_dashboard_count_setting = request.POST.get('feed_dashboard_count_setting') + feed_id = request.POST["feed_id"] + feed_view_setting = request.POST.get("feed_view_setting") + feed_order_setting = request.POST.get("feed_order_setting") + feed_read_filter_setting = request.POST.get("feed_read_filter_setting") + feed_layout_setting = request.POST.get("feed_layout_setting") + feed_dashboard_count_setting = request.POST.get("feed_dashboard_count_setting") view_settings = json.decode(request.user.profile.view_settings) - + setting = view_settings.get(feed_id, {}) - if isinstance(setting, str): setting = {'v': setting} - if feed_view_setting: setting['v'] = feed_view_setting - if feed_order_setting: setting['o'] = feed_order_setting - if feed_read_filter_setting: setting['r'] = feed_read_filter_setting - if feed_dashboard_count_setting: setting['d'] = feed_dashboard_count_setting - if feed_layout_setting: setting['l'] = feed_layout_setting - + if isinstance(setting, str): + setting = {"v": setting} + if feed_view_setting: + setting["v"] = feed_view_setting + if feed_order_setting: + setting["o"] = feed_order_setting + if feed_read_filter_setting: + setting["r"] = feed_read_filter_setting + if feed_dashboard_count_setting: + setting["d"] = feed_dashboard_count_setting + if feed_layout_setting: + setting["l"] = feed_layout_setting + view_settings[feed_id] = setting request.user.profile.view_settings = json.encode(view_settings) request.user.profile.save() - - logging.user(request, "~FMView settings: %s/%s/%s/%s" % (feed_view_setting, - feed_order_setting, feed_read_filter_setting, feed_layout_setting)) + + logging.user( + request, + "~FMView settings: %s/%s/%s/%s" + % (feed_view_setting, feed_order_setting, feed_read_filter_setting, feed_layout_setting), + ) response = dict(code=code) return response + @ajax_login_required @require_POST @json.json_view def clear_view_setting(request): code = 1 - view_setting_type = request.POST.get('view_setting_type') + view_setting_type = request.POST.get("view_setting_type") view_settings = json.decode(request.user.profile.view_settings) new_view_settings = {} removed = 0 for feed_id, view_setting in list(view_settings.items()): - if view_setting_type == 'layout' and 'l' in view_setting: - del view_setting['l'] + if view_setting_type == "layout" and "l" in view_setting: + del view_setting["l"] removed += 1 - if view_setting_type == 'view' and 'v' in view_setting: - del view_setting['v'] + if view_setting_type == "view" and "v" in view_setting: + del view_setting["v"] removed += 1 - if view_setting_type == 'order' and 'o' in view_setting: - del view_setting['o'] + if view_setting_type == "order" and "o" in view_setting: + del view_setting["o"] removed += 1 - if view_setting_type == 'order' and 'r' in view_setting: - del view_setting['r'] + if view_setting_type == "order" and "r" in view_setting: + del view_setting["r"] removed += 1 new_view_settings[feed_id] = view_setting request.user.profile.view_settings = json.encode(new_view_settings) request.user.profile.save() - + logging.user(request, "~FMClearing view settings: %s (found %s)" % (view_setting_type, removed)) response = dict(code=code, view_settings=view_settings, removed=removed) return response - + + @ajax_login_required @json.json_view def get_view_setting(request): code = 1 - feed_id = request.POST['feed_id'] + feed_id = request.POST["feed_id"] view_settings = json.decode(request.user.profile.view_settings) - + response = dict(code=code, 
payload=view_settings.get(feed_id)) return response - + @ajax_login_required @require_POST @json.json_view def set_collapsed_folders(request): code = 1 - collapsed_folders = request.POST['collapsed_folders'] - + collapsed_folders = request.POST["collapsed_folders"] + request.user.profile.collapsed_folders = collapsed_folders request.user.profile.save() - + logging.user(request, "~FMCollapsing folder: %s" % collapsed_folders) response = dict(code=code) return response + def paypal_ipn(request): try: return paypal_standard_ipn(request) @@ -279,23 +316,24 @@ def paypal_ipn(request): # Paypal may have sent webhooks to ipn, so redirect logging.user(request, f" ---> Paypal IPN to webhooks redirect: {request.body}") return paypal_webhooks(request) - + + def paypal_webhooks(request): try: data = json.decode(request.body) except python_json.decoder.JSONDecodeError: # Kick it over to paypal ipn return paypal_standard_ipn(request) - + logging.user(request, f" ---> Paypal webhooks {data.get('event_type', '')} data: {data}") - - if data['event_type'] == "BILLING.SUBSCRIPTION.CREATED": + + if data["event_type"] == "BILLING.SUBSCRIPTION.CREATED": # Don't start a subscription but save it in case the payment comes before the subscription activation - user = User.objects.get(pk=int(data['resource']['custom_id'])) - user.profile.store_paypal_sub_id(data['resource']['id'], skip_save_primary=True) - elif data['event_type'] in ["BILLING.SUBSCRIPTION.ACTIVATED", "BILLING.SUBSCRIPTION.UPDATED"]: - user = User.objects.get(pk=int(data['resource']['custom_id'])) - user.profile.store_paypal_sub_id(data['resource']['id']) + user = User.objects.get(pk=int(data["resource"]["custom_id"])) + user.profile.store_paypal_sub_id(data["resource"]["id"], skip_save_primary=True) + elif data["event_type"] in ["BILLING.SUBSCRIPTION.ACTIVATED", "BILLING.SUBSCRIPTION.UPDATED"]: + user = User.objects.get(pk=int(data["resource"]["custom_id"])) + user.profile.store_paypal_sub_id(data["resource"]["id"]) # plan_id = data['resource']['plan_id'] # if plan_id == Profile.plan_to_paypal_plan_id('premium'): # user.profile.activate_premium() @@ -305,43 +343,44 @@ def paypal_webhooks(request): # user.profile.activate_pro() user.profile.cancel_premium_stripe() user.profile.setup_premium_history() - if data['event_type'] == "BILLING.SUBSCRIPTION.ACTIVATED": + if data["event_type"] == "BILLING.SUBSCRIPTION.ACTIVATED": user.profile.cancel_and_prorate_existing_paypal_subscriptions(data) - elif data['event_type'] == "PAYMENT.SALE.COMPLETED": - user = User.objects.get(pk=int(data['resource']['custom'])) + elif data["event_type"] == "PAYMENT.SALE.COMPLETED": + user = User.objects.get(pk=int(data["resource"]["custom"])) user.profile.setup_premium_history() - elif data['event_type'] == "PAYMENT.CAPTURE.REFUNDED": - user = User.objects.get(pk=int(data['resource']['custom_id'])) + elif data["event_type"] == "PAYMENT.CAPTURE.REFUNDED": + user = User.objects.get(pk=int(data["resource"]["custom_id"])) user.profile.setup_premium_history() - elif data['event_type'] in ["BILLING.SUBSCRIPTION.CANCELLED", "BILLING.SUBSCRIPTION.SUSPENDED"]: - custom_id = data['resource'].get('custom_id', None) + elif data["event_type"] in ["BILLING.SUBSCRIPTION.CANCELLED", "BILLING.SUBSCRIPTION.SUSPENDED"]: + custom_id = data["resource"].get("custom_id", None) if custom_id: user = User.objects.get(pk=int(custom_id)) else: - paypal_id = PaypalIds.objects.get(paypal_sub_id=data['resource']['id']) + paypal_id = PaypalIds.objects.get(paypal_sub_id=data["resource"]["id"]) user = 
paypal_id.user user.profile.setup_premium_history() return HttpResponse("OK") + def paypal_form(request): domain = Site.objects.get_current().domain if settings.DEBUG: domain = "73ee-71-233-245-159.ngrok.io" - + paypal_dict = { "cmd": "_xclick-subscriptions", "business": "samuel@ofbrooklyn.com", - "a3": "12.00", # price - "p3": 1, # duration of each unit (depends on unit) - "t3": "Y", # duration unit ("M for Month") - "src": "1", # make payments recur - "sra": "1", # reattempt payment on payment error - "no_note": "1", # remove extra notes (optional) + "a3": "12.00", # price + "p3": 1, # duration of each unit (depends on unit) + "t3": "Y", # duration unit ("M for Month") + "src": "1", # make payments recur + "sra": "1", # reattempt payment on payment error + "no_note": "1", # remove extra notes (optional) "item_name": "NewsBlur Premium Account", - "notify_url": "https://%s%s" % (domain, reverse('paypal-ipn')), - "return_url": "https://%s%s" % (domain, reverse('paypal-return')), - "cancel_return": "https://%s%s" % (domain, reverse('index')), + "notify_url": "https://%s%s" % (domain, reverse("paypal-ipn")), + "return_url": "https://%s%s" % (domain, reverse("paypal-return")), + "cancel_return": "https://%s%s" % (domain, reverse("index")), "custom": request.user.username, } @@ -351,303 +390,360 @@ def paypal_form(request): logging.user(request, "~FBLoading paypal/feedchooser") # Output the button. - return HttpResponse(form.render(), content_type='text/html') + return HttpResponse(form.render(), content_type="text/html") + @login_required def paypal_return(request): + return render( + request, + "reader/paypal_return.xhtml", + { + "user_profile": request.user.profile, + }, + ) - return render(request, 'reader/paypal_return.xhtml', { - 'user_profile': request.user.profile, - }) @login_required def paypal_archive_return(request): + return render( + request, + "reader/paypal_archive_return.xhtml", + { + "user_profile": request.user.profile, + }, + ) - return render(request, 'reader/paypal_archive_return.xhtml', { - 'user_profile': request.user.profile, - }) @login_required def activate_premium(request): - return HttpResponseRedirect(reverse('index')) - + return HttpResponseRedirect(reverse("index")) + + @ajax_login_required @json.json_view def profile_is_premium(request): # Check tries code = 0 - retries = int(request.GET['retries']) - + retries = int(request.GET["retries"]) + subs = UserSubscription.objects.filter(user=request.user) total_subs = subs.count() activated_subs = subs.filter(active=True).count() - + if retries >= 30: code = -1 if not request.user.profile.is_premium: subject = "Premium activation failed: %s (%s/%s)" % (request.user, activated_subs, total_subs) - message = """User: %s (%s) -- Email: %s""" % (request.user.username, request.user.pk, request.user.email) + message = """User: %s (%s) -- Email: %s""" % ( + request.user.username, + request.user.pk, + request.user.email, + ) mail_admins(subject, message) request.user.profile.activate_premium() - + profile = Profile.objects.get(user=request.user) return { - 'is_premium': profile.is_premium, - 'is_premium_archive': profile.is_archive, - 'code': code, - 'activated_subs': activated_subs, - 'total_subs': total_subs, + "is_premium": profile.is_premium, + "is_premium_archive": profile.is_archive, + "code": code, + "activated_subs": activated_subs, + "total_subs": total_subs, } + @ajax_login_required @json.json_view def profile_is_premium_archive(request): # Check tries code = 0 - retries = int(request.GET['retries']) + retries = 
int(request.GET["retries"]) subs = UserSubscription.objects.filter(user=request.user) total_subs = subs.count() activated_subs = subs.filter(feed__archive_subscribers__gte=1).count() - + if retries >= 30: code = -1 if not request.user.profile.is_premium_archive: - subject = "Premium archive activation failed: %s (%s/%s)" % (request.user, activated_subs, total_subs) - message = """User: %s (%s) -- Email: %s""" % (request.user.username, request.user.pk, request.user.email) + subject = "Premium archive activation failed: %s (%s/%s)" % ( + request.user, + activated_subs, + total_subs, + ) + message = """User: %s (%s) -- Email: %s""" % ( + request.user.username, + request.user.pk, + request.user.email, + ) mail_admins(subject, message) request.user.profile.activate_archive() profile = Profile.objects.get(user=request.user) return { - 'is_premium': profile.is_premium, - 'is_premium_archive': profile.is_archive, - 'code': code, - 'activated_subs': activated_subs, - 'total_subs': total_subs, + "is_premium": profile.is_premium, + "is_premium_archive": profile.is_archive, + "code": code, + "activated_subs": activated_subs, + "total_subs": total_subs, } + @ajax_login_required @json.json_view def save_ios_receipt(request): - receipt = request.POST.get('receipt') - product_identifier = request.POST.get('product_identifier') - transaction_identifier = request.POST.get('transaction_identifier') - + receipt = request.POST.get("receipt") + product_identifier = request.POST.get("product_identifier") + transaction_identifier = request.POST.get("transaction_identifier") + logging.user(request, "~BM~FBSaving iOS Receipt: %s %s" % (product_identifier, transaction_identifier)) - + paid = request.user.profile.activate_ios_premium(transaction_identifier) if paid: - logging.user(request, "~BM~FBSending iOS Receipt email: %s %s" % (product_identifier, transaction_identifier)) + logging.user( + request, "~BM~FBSending iOS Receipt email: %s %s" % (product_identifier, transaction_identifier) + ) subject = "iOS Premium: %s (%s)" % (request.user.profile, product_identifier) - message = """User: %s (%s) -- Email: %s, product: %s, txn: %s, receipt: %s""" % (request.user.username, request.user.pk, request.user.email, product_identifier, transaction_identifier, receipt) + message = """User: %s (%s) -- Email: %s, product: %s, txn: %s, receipt: %s""" % ( + request.user.username, + request.user.pk, + request.user.email, + product_identifier, + transaction_identifier, + receipt, + ) mail_admins(subject, message) else: - logging.user(request, "~BM~FBNot sending iOS Receipt email, already paid: %s %s" % (product_identifier, transaction_identifier)) - - + logging.user( + request, + "~BM~FBNot sending iOS Receipt email, already paid: %s %s" + % (product_identifier, transaction_identifier), + ) + return request.user.profile - + + @ajax_login_required @json.json_view def save_android_receipt(request): - order_id = request.POST.get('order_id') - product_id = request.POST.get('product_id') - + order_id = request.POST.get("order_id") + product_id = request.POST.get("product_id") + logging.user(request, "~BM~FBSaving Android Receipt: %s %s" % (product_id, order_id)) - + paid = request.user.profile.activate_android_premium(order_id) if paid: logging.user(request, "~BM~FBSending Android Receipt email: %s %s" % (product_id, order_id)) subject = "Android Premium: %s (%s)" % (request.user.profile, product_id) - message = """User: %s (%s) -- Email: %s, product: %s, order: %s""" % (request.user.username, request.user.pk, request.user.email, 
product_id, order_id) + message = """User: %s (%s) -- Email: %s, product: %s, order: %s""" % ( + request.user.username, + request.user.pk, + request.user.email, + product_id, + order_id, + ) mail_admins(subject, message) else: - logging.user(request, "~BM~FBNot sending Android Receipt email, already paid: %s %s" % (product_id, order_id)) - - + logging.user( + request, "~BM~FBNot sending Android Receipt email, already paid: %s %s" % (product_id, order_id) + ) + return request.user.profile - + + @login_required def stripe_form(request): user = request.user success_updating = False stripe.api_key = settings.STRIPE_SECRET plan = PLANS[0][0] - renew = is_true(request.GET.get('renew', False)) + renew = is_true(request.GET.get("renew", False)) error = None - - if request.method == 'POST': + + if request.method == "POST": zebra_form = StripePlusPaymentForm(request.POST, email=user.email) if zebra_form.is_valid(): - user.email = zebra_form.cleaned_data['email'] + user.email = zebra_form.cleaned_data["email"] user.save() customer = None - current_premium = (user.profile.is_premium and - user.profile.premium_expire and - user.profile.premium_expire > datetime.datetime.now()) - + current_premium = ( + user.profile.is_premium + and user.profile.premium_expire + and user.profile.premium_expire > datetime.datetime.now() + ) + # Are they changing their existing card? if user.profile.stripe_id: customer = stripe.Customer.retrieve(user.profile.stripe_id) try: - card = customer.sources.create(source=zebra_form.cleaned_data['stripe_token']) + card = customer.sources.create(source=zebra_form.cleaned_data["stripe_token"]) except stripe.error.CardError: error = "This card was declined." else: customer.default_card = card.id customer.save() - user.profile.strip_4_digits = zebra_form.cleaned_data['last_4_digits'] + user.profile.strip_4_digits = zebra_form.cleaned_data["last_4_digits"] user.profile.save() - user.profile.activate_premium() # TODO: Remove, because webhooks are slow + user.profile.activate_premium() # TODO: Remove, because webhooks are slow success_updating = True else: try: - customer = stripe.Customer.create(**{ - 'source': zebra_form.cleaned_data['stripe_token'], - 'plan': zebra_form.cleaned_data['plan'], - 'email': user.email, - 'description': user.username, - }) + customer = stripe.Customer.create( + **{ + "source": zebra_form.cleaned_data["stripe_token"], + "plan": zebra_form.cleaned_data["plan"], + "email": user.email, + "description": user.username, + } + ) except stripe.error.CardError: error = "This card was declined." 
else: - user.profile.strip_4_digits = zebra_form.cleaned_data['last_4_digits'] + user.profile.strip_4_digits = zebra_form.cleaned_data["last_4_digits"] user.profile.stripe_id = customer.id user.profile.save() - user.profile.activate_premium() # TODO: Remove, because webhooks are slow + user.profile.activate_premium() # TODO: Remove, because webhooks are slow success_updating = True - + # Check subscription to ensure latest plan, otherwise cancel it and subscribe if success_updating and customer and customer.subscriptions.total_count == 1: subscription = customer.subscriptions.data[0] - if subscription['plan']['id'] != "newsblur-premium-36": + if subscription["plan"]["id"] != "newsblur-premium-36": for sub in customer.subscriptions: sub.delete() customer = stripe.Customer.retrieve(user.profile.stripe_id) - + if success_updating and customer and customer.subscriptions.total_count == 0: params = dict( - customer=customer.id, - items=[ - { - "plan": "newsblur-premium-36", - }, - ]) + customer=customer.id, + items=[ + { + "plan": "newsblur-premium-36", + }, + ], + ) premium_expire = user.profile.premium_expire if current_premium and premium_expire: if premium_expire < (datetime.datetime.now() + datetime.timedelta(days=365)): - params['billing_cycle_anchor'] = premium_expire.strftime('%s') - params['trial_end'] = premium_expire.strftime('%s') + params["billing_cycle_anchor"] = premium_expire.strftime("%s") + params["trial_end"] = premium_expire.strftime("%s") stripe.Subscription.create(**params) else: zebra_form = StripePlusPaymentForm(email=user.email, plan=plan) - + if success_updating: - return render(request, 'reader/paypal_return.xhtml') - + return render(request, "reader/paypal_return.xhtml") + new_user_queue_count = RNewUserQueue.user_count() new_user_queue_position = RNewUserQueue.user_position(request.user.pk) new_user_queue_behind = 0 if new_user_queue_position >= 0: - new_user_queue_behind = new_user_queue_count - new_user_queue_position + new_user_queue_behind = new_user_queue_count - new_user_queue_position new_user_queue_position -= 1 - + immediate_charge = True if user.profile.premium_expire and user.profile.premium_expire > datetime.datetime.now(): immediate_charge = False - + logging.user(request, "~BM~FBLoading Stripe form") - return render(request, 'profile/stripe_form.xhtml', + return render( + request, + "profile/stripe_form.xhtml", { - 'zebra_form': zebra_form, - 'publishable': settings.STRIPE_PUBLISHABLE, - 'success_updating': success_updating, - 'new_user_queue_count': new_user_queue_count - 1, - 'new_user_queue_position': new_user_queue_position, - 'new_user_queue_behind': new_user_queue_behind, - 'renew': renew, - 'immediate_charge': immediate_charge, - 'error': error, - } + "zebra_form": zebra_form, + "publishable": settings.STRIPE_PUBLISHABLE, + "success_updating": success_updating, + "new_user_queue_count": new_user_queue_count - 1, + "new_user_queue_position": new_user_queue_position, + "new_user_queue_behind": new_user_queue_behind, + "renew": renew, + "immediate_charge": immediate_charge, + "error": error, + }, ) + @login_required def switch_stripe_subscription(request): - plan = request.POST['plan'] + plan = request.POST["plan"] if plan == "change_stripe": return stripe_checkout(request) elif plan == "change_paypal": paypal_url = request.user.profile.paypal_change_billing_details_url() return HttpResponseRedirect(paypal_url) - + switch_successful = request.user.profile.switch_stripe_subscription(plan) - - logging.user(request, "~FCSwitching subscription to 
~SB%s~SN~FC (%s)" %( - plan, - '~FGsucceeded~FC' if switch_successful else '~FRfailed~FC' - )) - + + logging.user( + request, + "~FCSwitching subscription to ~SB%s~SN~FC (%s)" + % (plan, "~FGsucceeded~FC" if switch_successful else "~FRfailed~FC"), + ) + if switch_successful: - return HttpResponseRedirect(reverse('stripe-return')) - + return HttpResponseRedirect(reverse("stripe-return")) + return stripe_checkout(request) + def switch_paypal_subscription(request): - plan = request.POST['plan'] + plan = request.POST["plan"] if plan == "change_stripe": return stripe_checkout(request) elif plan == "change_paypal": paypal_url = request.user.profile.paypal_change_billing_details_url() return HttpResponseRedirect(paypal_url) - + approve_url = request.user.profile.switch_paypal_subscription_approval_url(plan) - - logging.user(request, "~FCSwitching subscription to ~SB%s~SN~FC (%s)" %( - plan, - '~FGsucceeded~FC' if approve_url else '~FRfailed~FC' - )) - + + logging.user( + request, + "~FCSwitching subscription to ~SB%s~SN~FC (%s)" + % (plan, "~FGsucceeded~FC" if approve_url else "~FRfailed~FC"), + ) + if approve_url: return HttpResponseRedirect(approve_url) - paypal_return = reverse('paypal-return') + paypal_return = reverse("paypal-return") if plan == "archive": - paypal_return = reverse('paypal-archive-return') + paypal_return = reverse("paypal-archive-return") return HttpResponseRedirect(paypal_return) + @login_required def stripe_checkout(request): stripe.api_key = settings.STRIPE_SECRET domain = Site.objects.get_current().domain - plan = request.POST['plan'] - + plan = request.POST["plan"] + if plan == "change_stripe": checkout_session = stripe.billing_portal.Session.create( customer=request.user.profile.stripe_id, - return_url="http://%s%s?next=payments" % (domain, reverse('index')), + return_url="http://%s%s?next=payments" % (domain, reverse("index")), ) return HttpResponseRedirect(checkout_session.url, status=303) - + price = Profile.plan_to_stripe_price(plan) - + session_dict = { "line_items": [ { - 'price': price, - 'quantity': 1, + "price": price, + "quantity": 1, }, ], - "mode": 'subscription', + "mode": "subscription", "metadata": {"newsblur_user_id": request.user.pk}, - "success_url": "http://%s%s" % (domain, reverse('stripe-return')), - "cancel_url": "http://%s%s" % (domain, reverse('index')), + "success_url": "http://%s%s" % (domain, reverse("stripe-return")), + "cancel_url": "http://%s%s" % (domain, reverse("index")), } if request.user.profile.stripe_id: - session_dict['customer'] = request.user.profile.stripe_id + session_dict["customer"] = request.user.profile.stripe_id else: session_dict["customer_email"] = request.user.email @@ -657,25 +753,27 @@ def stripe_checkout(request): return HttpResponseRedirect(checkout_session.url, status=303) -@render_to('reader/activities_module.xhtml') + +@render_to("reader/activities_module.xhtml") def load_activities(request): user = get_user(request) - page = max(1, int(request.GET.get('page', 1))) + page = max(1, int(request.GET.get("page", 1))) activities, has_next_page = MActivity.user(user.pk, page=page) return { - 'activities': activities, - 'page': page, - 'has_next_page': has_next_page, - 'username': 'You', + "activities": activities, + "page": page, + "has_next_page": has_next_page, + "username": "You", } + @ajax_login_required @json.json_view def payment_history(request): user = request.user if request.user.is_staff: - user_id = request.GET.get('user_id', request.user.pk) + user_id = request.GET.get("user_id", request.user.pk) user = 
User.objects.get(pk=user_id) history = PaymentHistory.objects.filter(user=user) @@ -690,19 +788,19 @@ def payment_history(request): "feeds": UserSubscription.objects.filter(user=user).count(), "email": user.email, "read_story_count": RUserStory.read_story_count(user.pk), - "feed_opens": UserSubscription.objects.filter(user=user).aggregate(sum=Sum('feed_opens'))['sum'], + "feed_opens": UserSubscription.objects.filter(user=user).aggregate(sum=Sum("feed_opens"))["sum"], "training": { - 'title_ps': MClassifierTitle.objects.filter(user_id=user.pk, score__gt=0).count(), - 'title_ng': MClassifierTitle.objects.filter(user_id=user.pk, score__lt=0).count(), - 'tag_ps': MClassifierTag.objects.filter(user_id=user.pk, score__gt=0).count(), - 'tag_ng': MClassifierTag.objects.filter(user_id=user.pk, score__lt=0).count(), - 'author_ps': MClassifierAuthor.objects.filter(user_id=user.pk, score__gt=0).count(), - 'author_ng': MClassifierAuthor.objects.filter(user_id=user.pk, score__lt=0).count(), - 'feed_ps': MClassifierFeed.objects.filter(user_id=user.pk, score__gt=0).count(), - 'feed_ng': MClassifierFeed.objects.filter(user_id=user.pk, score__lt=0).count(), - } + "title_ps": MClassifierTitle.objects.filter(user_id=user.pk, score__gt=0).count(), + "title_ng": MClassifierTitle.objects.filter(user_id=user.pk, score__lt=0).count(), + "tag_ps": MClassifierTag.objects.filter(user_id=user.pk, score__gt=0).count(), + "tag_ng": MClassifierTag.objects.filter(user_id=user.pk, score__lt=0).count(), + "author_ps": MClassifierAuthor.objects.filter(user_id=user.pk, score__gt=0).count(), + "author_ng": MClassifierAuthor.objects.filter(user_id=user.pk, score__lt=0).count(), + "feed_ps": MClassifierFeed.objects.filter(user_id=user.pk, score__gt=0).count(), + "feed_ng": MClassifierFeed.objects.filter(user_id=user.pk, score__lt=0).count(), + }, } - + next_invoice = None stripe_customer = user.profile.stripe_customer() paypal_api = user.profile.paypal_api() @@ -710,48 +808,54 @@ def payment_history(request): try: invoice = stripe.Invoice.upcoming(customer=stripe_customer.id) for lines in invoice.lines.data: - next_invoice = dict(payment_date=datetime.datetime.fromtimestamp(lines.period.start), - payment_amount=invoice.amount_due/100.0, - payment_provider="(scheduled)", - scheduled=True) + next_invoice = dict( + payment_date=datetime.datetime.fromtimestamp(lines.period.start), + payment_amount=invoice.amount_due / 100.0, + payment_provider="(scheduled)", + scheduled=True, + ) break except stripe.error.InvalidRequestError: pass - + if paypal_api and not next_invoice and user.profile.premium_renewal and len(history): - next_invoice = dict(payment_date=history[0].payment_date+dateutil.relativedelta.relativedelta(years=1), - payment_amount=history[0].payment_amount, - payment_provider="(scheduled)", - scheduled=True) - + next_invoice = dict( + payment_date=history[0].payment_date + dateutil.relativedelta.relativedelta(years=1), + payment_amount=history[0].payment_amount, + payment_provider="(scheduled)", + scheduled=True, + ) + return { - 'is_premium': user.profile.is_premium, - 'is_archive': user.profile.is_archive, - 'is_pro': user.profile.is_pro, - 'premium_expire': user.profile.premium_expire, - 'premium_renewal': user.profile.premium_renewal, - 'active_provider': user.profile.active_provider, - 'payments': history, - 'statistics': statistics, - 'next_invoice': next_invoice, + "is_premium": user.profile.is_premium, + "is_archive": user.profile.is_archive, + "is_pro": user.profile.is_pro, + "premium_expire": 
user.profile.premium_expire, + "premium_renewal": user.profile.premium_renewal, + "active_provider": user.profile.active_provider, + "payments": history, + "statistics": statistics, + "next_invoice": next_invoice, } + @ajax_login_required @json.json_view def cancel_premium(request): canceled = request.user.profile.cancel_premium() - + return { - 'code': 1 if canceled else -1, + "code": 1 if canceled else -1, } + @staff_member_required @ajax_login_required @json.json_view def refund_premium(request): - user_id = request.POST.get('user_id') - partial = request.POST.get('partial', False) - provider = request.POST.get('provider', None) + user_id = request.POST.get("user_id") + partial = request.POST.get("partial", False) + provider = request.POST.get("provider", None) user = User.objects.get(pk=user_id) try: refunded = user.profile.refund_premium(partial=partial, provider=provider) @@ -760,179 +864,185 @@ def refund_premium(request): except PayPalAPIResponseError as e: refunded = e - return {'code': 1 if type(refunded) == int else -1, 'refunded': refunded} + return {"code": 1 if type(refunded) == int else -1, "refunded": refunded} + @staff_member_required @ajax_login_required @json.json_view def upgrade_premium(request): - user_id = request.POST.get('user_id') + user_id = request.POST.get("user_id") user = User.objects.get(pk=user_id) - - gift = MGiftCode.add(gifting_user_id=User.objects.get(username='samuel').pk, - receiving_user_id=user.pk) + + gift = MGiftCode.add(gifting_user_id=User.objects.get(username="samuel").pk, receiving_user_id=user.pk) MRedeemedCode.redeem(user, gift.gift_code) - - return {'code': user.profile.is_premium} + + return {"code": user.profile.is_premium} + @staff_member_required @ajax_login_required @json.json_view def never_expire_premium(request): - user_id = request.POST.get('user_id') - years = int(request.POST.get('years', 0)) + user_id = request.POST.get("user_id") + years = int(request.POST.get("years", 0)) user = User.objects.get(pk=user_id) if user.profile.is_premium: if years: - user.profile.premium_expire = datetime.datetime.now() + datetime.timedelta(days=365*years) + user.profile.premium_expire = datetime.datetime.now() + datetime.timedelta(days=365 * years) else: user.profile.premium_expire = None user.profile.save() - return {'code': 1} - - return {'code': -1} + return {"code": 1} + + return {"code": -1} + @staff_member_required @ajax_login_required @json.json_view def update_payment_history(request): - user_id = request.POST.get('user_id') + user_id = request.POST.get("user_id") user = User.objects.get(pk=user_id) user.profile.setup_premium_history(set_premium_expire=False) - - return {'code': 1} - + + return {"code": 1} + + @login_required -@render_to('profile/delete_account.xhtml') +@render_to("profile/delete_account.xhtml") def delete_account(request): - if request.method == 'POST': + if request.method == "POST": form = DeleteAccountForm(request.POST, user=request.user) if form.is_valid(): - logging.user(request.user, "~SK~BC~FRDeleting ~SB%s~SN's account." % - request.user.username) + logging.user(request.user, "~SK~BC~FRDeleting ~SB%s~SN's account." % request.user.username) request.user.profile.delete_user(confirm=True) logout_user(request) - return HttpResponseRedirect(reverse('index')) + return HttpResponseRedirect(reverse("index")) else: - logging.user(request.user, "~BC~FRFailed attempt to delete ~SB%s~SN's account." % - request.user.username) + logging.user( + request.user, "~BC~FRFailed attempt to delete ~SB%s~SN's account." 
% request.user.username + ) else: - logging.user(request.user, "~BC~FRAttempting to delete ~SB%s~SN's account." % - request.user.username) + logging.user(request.user, "~BC~FRAttempting to delete ~SB%s~SN's account." % request.user.username) form = DeleteAccountForm(user=request.user) return { - 'delete_form': form, + "delete_form": form, } - -@render_to('profile/forgot_password.xhtml') + +@render_to("profile/forgot_password.xhtml") def forgot_password(request): - if request.method == 'POST': + if request.method == "POST": form = ForgotPasswordForm(request.POST) if form.is_valid(): - logging.user(request.user, "~BC~FRForgot password: ~SB%s" % request.POST['email']) + logging.user(request.user, "~BC~FRForgot password: ~SB%s" % request.POST["email"]) try: - user = User.objects.get(email__iexact=request.POST['email']) + user = User.objects.get(email__iexact=request.POST["email"]) except User.MultipleObjectsReturned: - user = User.objects.filter(email__iexact=request.POST['email'])[0] + user = User.objects.filter(email__iexact=request.POST["email"])[0] user.profile.send_forgot_password_email() - return HttpResponseRedirect(reverse('index')) + return HttpResponseRedirect(reverse("index")) else: - logging.user(request.user, "~BC~FRFailed forgot password: ~SB%s~SN" % - request.POST['email']) + logging.user(request.user, "~BC~FRFailed forgot password: ~SB%s~SN" % request.POST["email"]) else: logging.user(request.user, "~BC~FRAttempting to retrieve forgotton password.") form = ForgotPasswordForm() return { - 'forgot_password_form': form, + "forgot_password_form": form, } - + + @login_required -@render_to('profile/forgot_password_return.xhtml') +@render_to("profile/forgot_password_return.xhtml") def forgot_password_return(request): - if request.method == 'POST': - logging.user(request.user, "~BC~FRReseting ~SB%s~SN's password." % - request.user.username) - new_password = request.POST.get('password', '') + if request.method == "POST": + logging.user(request.user, "~BC~FRReseting ~SB%s~SN's password." % request.user.username) + new_password = request.POST.get("password", "") request.user.set_password(new_password) request.user.save() - return HttpResponseRedirect(reverse('index')) + return HttpResponseRedirect(reverse("index")) else: - logging.user(request.user, "~BC~FRAttempting to reset ~SB%s~SN's password." % - request.user.username) + logging.user(request.user, "~BC~FRAttempting to reset ~SB%s~SN's password." 
% request.user.username) form = ForgotPasswordReturnForm() return { - 'forgot_password_return_form': form, + "forgot_password_return_form": form, } + @ajax_login_required @json.json_view def delete_starred_stories(request): - timestamp = request.POST.get('timestamp', None) + timestamp = request.POST.get("timestamp", None) if timestamp: delete_date = datetime.datetime.fromtimestamp(int(timestamp)) else: delete_date = datetime.datetime.now() - starred_stories = MStarredStory.objects.filter(user_id=request.user.pk, - starred_date__lte=delete_date) + starred_stories = MStarredStory.objects.filter(user_id=request.user.pk, starred_date__lte=delete_date) stories_deleted = starred_stories.count() starred_stories.delete() MStarredStoryCounts.count_for_user(request.user.pk, total_only=True) starred_counts, starred_count = MStarredStoryCounts.user_counts(request.user.pk, include_total=True) - - logging.user(request.user, "~BC~FRDeleting %s/%s starred stories (%s)" % (stories_deleted, - stories_deleted+starred_count, delete_date)) - return dict(code=1, stories_deleted=stories_deleted, starred_counts=starred_counts, - starred_count=starred_count) + logging.user( + request.user, + "~BC~FRDeleting %s/%s starred stories (%s)" + % (stories_deleted, stories_deleted + starred_count, delete_date), + ) + + return dict( + code=1, stories_deleted=stories_deleted, starred_counts=starred_counts, starred_count=starred_count + ) @ajax_login_required @json.json_view def delete_all_sites(request): - request.user.profile.send_opml_export_email(reason="You have deleted all of your sites, so here's a backup of all of your subscriptions just in case.") - + request.user.profile.send_opml_export_email( + reason="You have deleted all of your sites, so here's a backup of all of your subscriptions just in case." 
+ ) + subs = UserSubscription.objects.filter(user=request.user) sub_count = subs.count() subs.delete() - + usf = UserSubscriptionFolders.objects.get(user=request.user) - usf.folders = '[]' + usf.folders = "[]" usf.save() - + logging.user(request.user, "~BC~FRDeleting %s sites" % sub_count) return dict(code=1) @login_required -@render_to('profile/email_optout.xhtml') +@render_to("profile/email_optout.xhtml") def email_optout(request): user = request.user user.profile.send_emails = False user.profile.save() - + return { "user": user, } + @json.json_view def ios_subscription_status(request): logging.debug(" ---> iOS Subscription Status: %s" % request.body) data = json.decode(request.body) - subject = "iOS Subscription Status: %s" % data.get('notification_type', "[missing]") + subject = "iOS Subscription Status: %s" % data.get("notification_type", "[missing]") message = """%s""" % (request.body) mail_admins(subject, message) - - return { - "code": 1 - } + + return {"code": 1} + def trigger_error(request): logging.user(request.user, "~BR~FW~SBTriggering divison by zero") division_by_zero = 1 / 0 - return HttpResponseRedirect(reverse('index')) + return HttpResponseRedirect(reverse("index")) diff --git a/apps/push/migrations/0001_initial.py b/apps/push/migrations/0001_initial.py index 4792b60bc..9baa20c60 100644 --- a/apps/push/migrations/0001_initial.py +++ b/apps/push/migrations/0001_initial.py @@ -6,24 +6,31 @@ import django.db.models.deletion class Migration(migrations.Migration): - initial = True dependencies = [ - ('rss_feeds', '0001_initial'), + ("rss_feeds", "0001_initial"), ] operations = [ migrations.CreateModel( - name='PushSubscription', + name="PushSubscription", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('hub', models.URLField(db_index=True)), - ('topic', models.URLField(db_index=True)), - ('verified', models.BooleanField(default=False)), - ('verify_token', models.CharField(max_length=60)), - ('lease_expires', models.DateTimeField(default=datetime.datetime.now)), - ('feed', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='push', to='rss_feeds.Feed')), + ( + "id", + models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID"), + ), + ("hub", models.URLField(db_index=True)), + ("topic", models.URLField(db_index=True)), + ("verified", models.BooleanField(default=False)), + ("verify_token", models.CharField(max_length=60)), + ("lease_expires", models.DateTimeField(default=datetime.datetime.now)), + ( + "feed", + models.OneToOneField( + on_delete=django.db.models.deletion.CASCADE, related_name="push", to="rss_feeds.Feed" + ), + ), ], ), ] diff --git a/apps/push/models.py b/apps/push/models.py index d7b4c1c31..a128db5d1 100644 --- a/apps/push/models.py +++ b/apps/push/models.py @@ -15,22 +15,20 @@ from apps.rss_feeds.models import Feed from utils import log as logging from utils.feed_functions import timelimit, TimeoutError -DEFAULT_LEASE_SECONDS = (10 * 24 * 60 * 60) # 10 days +DEFAULT_LEASE_SECONDS = 10 * 24 * 60 * 60 # 10 days + class PushSubscriptionManager(models.Manager): - @timelimit(5) - def subscribe(self, topic, feed, hub=None, callback=None, - lease_seconds=None, force_retry=False): + def subscribe(self, topic, feed, hub=None, callback=None, lease_seconds=None, force_retry=False): if hub is None: hub = self._get_hub(topic) if hub is None: - raise TypeError('hub cannot be None if the feed does not provide it') + raise TypeError("hub cannot be None if 
the feed does not provide it") if lease_seconds is None: - lease_seconds = getattr(settings, 'PUBSUBHUBBUB_LEASE_SECONDS', - DEFAULT_LEASE_SECONDS) + lease_seconds = getattr(settings, "PUBSUBHUBBUB_LEASE_SECONDS", DEFAULT_LEASE_SECONDS) feed = Feed.get_by_id(feed.id) subscription, created = self.get_or_create(feed=feed) signals.pre_subscribe.send(sender=subscription, created=created) @@ -41,38 +39,44 @@ class PushSubscriptionManager(models.Manager): subscription.topic = feed.feed_link[:200] subscription.hub = hub subscription.save() - + if callback is None: - callback_path = reverse('push-callback', args=(subscription.pk,)) - callback = 'https://' + settings.PUSH_DOMAIN + callback_path + callback_path = reverse("push-callback", args=(subscription.pk,)) + callback = "https://" + settings.PUSH_DOMAIN + callback_path # callback = "https://push.newsblur.com/push/%s" % subscription.pk # + callback_path try: - response = self._send_request(hub, { - 'hub.mode' : 'subscribe', - 'hub.callback' : callback, - 'hub.topic' : topic, - 'hub.verify' : ['async', 'sync'], - 'hub.verify_token' : subscription.generate_token('subscribe'), - 'hub.lease_seconds' : lease_seconds, - }) + response = self._send_request( + hub, + { + "hub.mode": "subscribe", + "hub.callback": callback, + "hub.topic": topic, + "hub.verify": ["async", "sync"], + "hub.verify_token": subscription.generate_token("subscribe"), + "hub.lease_seconds": lease_seconds, + }, + ) except (requests.ConnectionError, requests.exceptions.MissingSchema): response = None if response and response.status_code == 204: subscription.verified = True - elif response and response.status_code == 202: # async verification + elif response and response.status_code == 202: # async verification subscription.verified = False else: error = response and response.text or "" - if not force_retry and 'You may only subscribe to' in error: + if not force_retry and "You may only subscribe to" in error: extracted_topic = re.search("You may only subscribe to (.*?) 
", error) if extracted_topic: - subscription = self.subscribe(extracted_topic.group(1), - feed=feed, hub=hub, force_retry=True) + subscription = self.subscribe( + extracted_topic.group(1), feed=feed, hub=hub, force_retry=True + ) else: - logging.debug(u' ---> [%-30s] ~FR~BKFeed failed to subscribe to push: %s (code: %s)' % ( - subscription.feed.log_title[:30], error[:100], response and response.status_code)) + logging.debug( + " ---> [%-30s] ~FR~BKFeed failed to subscribe to push: %s (code: %s)" + % (subscription.feed.log_title[:30], error[:100], response and response.status_code) + ) subscription.save() feed.setup_push() @@ -80,18 +84,18 @@ class PushSubscriptionManager(models.Manager): signals.verified.send(sender=subscription) return subscription - def _get_hub(self, topic): parsed = feedparser.parse(topic) for link in parsed.feed.links: - if link['rel'] == 'hub': - return link['href'] + if link["rel"] == "hub": + return link["href"] def _send_request(self, url, data): return requests.post(url, data=data) + class PushSubscription(models.Model): - feed = models.OneToOneField(Feed, db_index=True, related_name='push', on_delete=models.CASCADE) + feed = models.OneToOneField(Feed, db_index=True, related_name="push", on_delete=models.CASCADE) hub = models.URLField(db_index=True) topic = models.URLField(db_index=True) verified = models.BooleanField(default=False) @@ -104,43 +108,45 @@ class PushSubscription(models.Model): # unique_together = [ # ('hub', 'topic') # ] - + def unsubscribe(self): feed = self.feed self.delete() feed.setup_push() - + def set_expiration(self, lease_seconds): - self.lease_expires = datetime.now() + timedelta( - seconds=lease_seconds) + self.lease_expires = datetime.now() + timedelta(seconds=lease_seconds) self.save() def generate_token(self, mode): - assert self.pk is not None, \ - 'Subscription must be saved before generating token' - token = mode[:20] + hashlib.sha1(('%s%i%s' % ( - settings.SECRET_KEY, self.pk, mode)).encode(encoding='utf-8')).hexdigest() + assert self.pk is not None, "Subscription must be saved before generating token" + token = ( + mode[:20] + + hashlib.sha1( + ("%s%i%s" % (settings.SECRET_KEY, self.pk, mode)).encode(encoding="utf-8") + ).hexdigest() + ) self.verify_token = token self.save() return token - + def check_urls_against_pushed_data(self, parsed): - if hasattr(parsed.feed, 'links'): # single notification + if hasattr(parsed.feed, "links"): # single notification hub_url = self.hub self_url = self.topic for link in parsed.feed.links: - href = link.get('href', '') - if any(w in href for w in ['wp-admin', 'wp-cron']): + href = link.get("href", "") + if any(w in href for w in ["wp-admin", "wp-cron"]): continue - - if link['rel'] == 'hub': - hub_url = link['href'] - elif link['rel'] == 'self': - self_url = link['href'] - - if hub_url and hub_url.startswith('//'): + + if link["rel"] == "hub": + hub_url = link["href"] + elif link["rel"] == "self": + self_url = link["href"] + + if hub_url and hub_url.startswith("//"): hub_url = "http:%s" % hub_url - + needs_update = False if hub_url and self.hub != hub_url: # hub URL has changed; let's update our subscription @@ -150,23 +156,24 @@ class PushSubscription(models.Model): needs_update = True if needs_update: - logging.debug(u' ---> [%-30s] ~FR~BKUpdating PuSH hub/topic: %s / %s' % ( - self.feed, hub_url, self_url)) + logging.debug( + " ---> [%-30s] ~FR~BKUpdating PuSH hub/topic: %s / %s" % (self.feed, hub_url, self_url) + ) expiration_time = self.lease_expires - datetime.now() - seconds = 
expiration_time.days*86400 + expiration_time.seconds + seconds = expiration_time.days * 86400 + expiration_time.seconds try: PushSubscription.objects.subscribe( - self_url, feed=self.feed, hub=hub_url, - lease_seconds=seconds) + self_url, feed=self.feed, hub=hub_url, lease_seconds=seconds + ) except TimeoutError: - logging.debug(u' ---> [%-30s] ~FR~BKTimed out updating PuSH hub/topic: %s / %s' % ( - self.feed, hub_url, self_url)) - - + logging.debug( + " ---> [%-30s] ~FR~BKTimed out updating PuSH hub/topic: %s / %s" + % (self.feed, hub_url, self_url) + ) + def __str__(self): if self.verified: - verified = u'verified' + verified = "verified" else: - verified = u'unverified' - return u'to %s on %s: %s' % ( - self.topic, self.hub, verified) + verified = "unverified" + return "to %s on %s: %s" % (self.topic, self.hub, verified) diff --git a/apps/push/signals.py b/apps/push/signals.py index 2f2aa7d3d..8c915dc80 100644 --- a/apps/push/signals.py +++ b/apps/push/signals.py @@ -2,6 +2,6 @@ from django.dispatch import Signal -pre_subscribe = Signal(providing_args=['created']) +pre_subscribe = Signal(providing_args=["created"]) verified = Signal() -updated = Signal(providing_args=['update']) +updated = Signal(providing_args=["update"]) diff --git a/apps/push/test_push.py b/apps/push/test_push.py index 8aac0d882..cfbef8bfe 100644 --- a/apps/push/test_push.py +++ b/apps/push/test_push.py @@ -1,17 +1,17 @@ # Copyright 2009 - Participatory Culture Foundation -# +# # This file is part of djpubsubhubbub. -# +# # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions # are met: -# +# # 1. Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # 2. Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution. -# +# # THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR # IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES # OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. @@ -32,6 +32,7 @@ from django.test import TestCase from apps.push.models import PushSubscription, PushSubscriptionManager from apps.push.signals import pre_subscribe, verified, updated + class MockResponse(object): def __init__(self, status, data=None): self.status = status @@ -42,13 +43,13 @@ class MockResponse(object): def read(self): if self.data is None: - return '' + return "" data, self.data = self.data, None return data -class PSHBTestBase: - urls = 'apps.push.urls' +class PSHBTestBase: + urls = "apps.push.urls" def setUp(self): self._old_send_request = PushSubscriptionManager._send_request @@ -57,8 +58,10 @@ class PSHBTestBase: self.requests = [] self.signals = [] for connecter in pre_subscribe, verified, updated: + def callback(signal=None, **kwargs): self.signals.append((signal, kwargs)) + connecter.connect(callback, dispatch_uid=connecter, weak=False) def tearDown(self): @@ -71,34 +74,32 @@ class PSHBTestBase: self.requests.append((url, data)) return self.responses.pop() -class Test_PSHBSubscriptionManagerTest(PSHBTestBase, TestCase): +class Test_PSHBSubscriptionManagerTest(PSHBTestBase, TestCase): def test_sync_verify(self): """ If the hub returns a 204 response, the subscription is verified and active. 
""" self.responses.append(MockResponse(204)) - sub = PushSubscription.objects.subscribe('topic', 'hub', 'callback', 2000) + sub = PushSubscription.objects.subscribe("topic", "hub", "callback", 2000) self.assertEquals(len(self.signals), 2) - self.assertEquals(self.signals[0], (pre_subscribe, {'sender': sub, - 'created': True})) - self.assertEquals(self.signals[1], (verified, {'sender': sub})) - self.assertEquals(sub.hub, 'hub') - self.assertEquals(sub.topic, 'topic') + self.assertEquals(self.signals[0], (pre_subscribe, {"sender": sub, "created": True})) + self.assertEquals(self.signals[1], (verified, {"sender": sub})) + self.assertEquals(sub.hub, "hub") + self.assertEquals(sub.topic, "topic") self.assertEquals(sub.verified, True) rough_expires = datetime.now() + timedelta(seconds=2000) - self.assert_(abs(sub.lease_expires - rough_expires).seconds < 5, - 'lease more than 5 seconds off') + self.assert_(abs(sub.lease_expires - rough_expires).seconds < 5, "lease more than 5 seconds off") self.assertEquals(len(self.requests), 1) request = self.requests[0] - self.assertEquals(request[0], 'hub') - self.assertEquals(request[1]['mode'], 'subscribe') - self.assertEquals(request[1]['topic'], 'topic') - self.assertEquals(request[1]['callback'], 'callback') - self.assertEquals(request[1]['verify'], ('async', 'sync')) - self.assertEquals(request[1]['verify_token'], sub.verify_token) - self.assertEquals(request[1]['lease_seconds'], 2000) + self.assertEquals(request[0], "hub") + self.assertEquals(request[1]["mode"], "subscribe") + self.assertEquals(request[1]["topic"], "topic") + self.assertEquals(request[1]["callback"], "callback") + self.assertEquals(request[1]["verify"], ("async", "sync")) + self.assertEquals(request[1]["verify_token"], sub.verify_token) + self.assertEquals(request[1]["lease_seconds"], 2000) def test_async_verify(self): """ @@ -106,25 +107,23 @@ class Test_PSHBSubscriptionManagerTest(PSHBTestBase, TestCase): subscription is verified. 
""" self.responses.append(MockResponse(202)) - sub = PushSubscription.objects.subscribe('topic', 'hub', 'callback', 2000) + sub = PushSubscription.objects.subscribe("topic", "hub", "callback", 2000) self.assertEquals(len(self.signals), 1) - self.assertEquals(self.signals[0], (pre_subscribe, {'sender': sub, - 'created': True})) - self.assertEquals(sub.hub, 'hub') - self.assertEquals(sub.topic, 'topic') + self.assertEquals(self.signals[0], (pre_subscribe, {"sender": sub, "created": True})) + self.assertEquals(sub.hub, "hub") + self.assertEquals(sub.topic, "topic") self.assertEquals(sub.verified, False) rough_expires = datetime.now() + timedelta(seconds=2000) - self.assert_(abs(sub.lease_expires - rough_expires).seconds < 5, - 'lease more than 5 seconds off') + self.assert_(abs(sub.lease_expires - rough_expires).seconds < 5, "lease more than 5 seconds off") self.assertEquals(len(self.requests), 1) request = self.requests[0] - self.assertEquals(request[0], 'hub') - self.assertEquals(request[1]['mode'], 'subscribe') - self.assertEquals(request[1]['topic'], 'topic') - self.assertEquals(request[1]['callback'], 'callback') - self.assertEquals(request[1]['verify'], ('async', 'sync')) - self.assertEquals(request[1]['verify_token'], sub.verify_token) - self.assertEquals(request[1]['lease_seconds'], 2000) + self.assertEquals(request[0], "hub") + self.assertEquals(request[1]["mode"], "subscribe") + self.assertEquals(request[1]["topic"], "topic") + self.assertEquals(request[1]["callback"], "callback") + self.assertEquals(request[1]["verify"], ("async", "sync")) + self.assertEquals(request[1]["verify_token"], sub.verify_token) + self.assertEquals(request[1]["lease_seconds"], 2000) def test_least_seconds_default(self): """ @@ -132,53 +131,51 @@ class Test_PSHBSubscriptionManagerTest(PSHBTestBase, TestCase): should default to 2592000 (30 days). """ self.responses.append(MockResponse(202)) - sub = PushSubscription.objects.subscribe('topic', 'hub', 'callback') + sub = PushSubscription.objects.subscribe("topic", "hub", "callback") rough_expires = datetime.now() + timedelta(seconds=2592000) - self.assert_(abs(sub.lease_expires - rough_expires).seconds < 5, - 'lease more than 5 seconds off') + self.assert_(abs(sub.lease_expires - rough_expires).seconds < 5, "lease more than 5 seconds off") self.assertEquals(len(self.requests), 1) request = self.requests[0] - self.assertEquals(request[1]['lease_seconds'], 2592000) + self.assertEquals(request[1]["lease_seconds"], 2592000) def test_error_on_subscribe_raises_URLError(self): """ If a non-202/204 status is returned, raise a URLError. """ - self.responses.append(MockResponse(500, 'error data')) + self.responses.append(MockResponse(500, "error data")) try: - PushSubscription.objects.subscribe('topic', 'hub', 'callback') + PushSubscription.objects.subscribe("topic", "hub", "callback") except urllib.error.URLError as e: - self.assertEquals(e.reason, - 'error subscribing to topic on hub:\nerror data') + self.assertEquals(e.reason, "error subscribing to topic on hub:\nerror data") else: - self.fail('subscription did not raise URLError exception') + self.fail("subscription did not raise URLError exception") + class Test_PSHBCallbackViewCase(PSHBTestBase, TestCase): - def test_verify(self): """ Getting the callback from the server should verify the subscription. 
""" - sub = PushSubscription.objects.create( - topic='topic', - hub='hub', - verified=False) - verify_token = sub.generate_token('subscribe') + sub = PushSubscription.objects.create(topic="topic", hub="hub", verified=False) + verify_token = sub.generate_token("subscribe") - response = self.client.get(reverse('pubsubhubbub_callback', - args=(sub.pk,)), - {'hub.mode': 'subscribe', - 'hub.topic': sub.topic, - 'hub.challenge': 'challenge', - 'hub.lease_seconds': 2000, - 'hub.verify_token': verify_token}) + response = self.client.get( + reverse("pubsubhubbub_callback", args=(sub.pk,)), + { + "hub.mode": "subscribe", + "hub.topic": sub.topic, + "hub.challenge": "challenge", + "hub.lease_seconds": 2000, + "hub.verify_token": verify_token, + }, + ) self.assertEquals(response.status_code, 200) - self.assertEquals(response.content, 'challenge') + self.assertEquals(response.content, "challenge") sub = PushSubscription.objects.get(pk=sub.pk) self.assertEquals(sub.verified, True) self.assertEquals(len(self.signals), 1) - self.assertEquals(self.signals[0], (verified, {'sender': sub})) + self.assertEquals(self.signals[0], (verified, {"sender": sub})) def test_404(self): """ @@ -189,54 +186,63 @@ class Test_PSHBCallbackViewCase(PSHBTestBase, TestCase): * subscription doesn't exist * token doesn't match the subscription """ - sub = PushSubscription.objects.create( - topic='topic', - hub='hub', - verified=False) - verify_token = sub.generate_token('subscribe') + sub = PushSubscription.objects.create(topic="topic", hub="hub", verified=False) + verify_token = sub.generate_token("subscribe") - response = self.client.get(reverse('pubsubhubbub_callback', - args=(0,)), - {'hub.mode': 'subscribe', - 'hub.topic': sub.topic, - 'hub.challenge': 'challenge', - 'hub.lease_seconds': 2000, - 'hub.verify_token': verify_token[1:]}) + response = self.client.get( + reverse("pubsubhubbub_callback", args=(0,)), + { + "hub.mode": "subscribe", + "hub.topic": sub.topic, + "hub.challenge": "challenge", + "hub.lease_seconds": 2000, + "hub.verify_token": verify_token[1:], + }, + ) self.assertEquals(response.status_code, 404) self.assertEquals(len(self.signals), 0) - response = self.client.get(reverse('pubsubhubbub_callback', - args=(sub.pk,)), - {'hub.mode': 'subscribe', - 'hub.topic': sub.topic, - 'hub.challenge': 'challenge', - 'hub.lease_seconds': 2000, - 'hub.verify_token': verify_token[1:]}) + response = self.client.get( + reverse("pubsubhubbub_callback", args=(sub.pk,)), + { + "hub.mode": "subscribe", + "hub.topic": sub.topic, + "hub.challenge": "challenge", + "hub.lease_seconds": 2000, + "hub.verify_token": verify_token[1:], + }, + ) self.assertEquals(response.status_code, 404) self.assertEquals(len(self.signals), 0) - response = self.client.get(reverse('pubsubhubbub_callback', - args=(sub.pk,)), - {'hub.mode': 'subscribe', - 'hub.topic': sub.topic + 'extra', - 'hub.challenge': 'challenge', - 'hub.lease_seconds': 2000, - 'hub.verify_token': verify_token}) + response = self.client.get( + reverse("pubsubhubbub_callback", args=(sub.pk,)), + { + "hub.mode": "subscribe", + "hub.topic": sub.topic + "extra", + "hub.challenge": "challenge", + "hub.lease_seconds": 2000, + "hub.verify_token": verify_token, + }, + ) self.assertEquals(response.status_code, 404) self.assertEquals(len(self.signals), 0) - response = self.client.get(reverse('pubsubhubbub_callback', - args=(sub.pk,)), - {'hub.mode': 'subscribe', - 'hub.topic': sub.topic, - 'hub.challenge': 'challenge', - 'hub.lease_seconds': 2000, - 'hub.verify_token': verify_token[:-5]}) + 
response = self.client.get( + reverse("pubsubhubbub_callback", args=(sub.pk,)), + { + "hub.mode": "subscribe", + "hub.topic": sub.topic, + "hub.challenge": "challenge", + "hub.lease_seconds": 2000, + "hub.verify_token": verify_token[:-5], + }, + ) self.assertEquals(response.status_code, 404) self.assertEquals(len(self.signals), 0) + class Test_PSHBUpdateCase(PSHBTestBase, TestCase): - def test_update(self): # this data comes from # http://pubsubhubbub.googlecode.com/svn/trunk/pubsubhubbub-core-0.1.html#anchor3 @@ -293,32 +299,27 @@ class Test_PSHBUpdateCase(PSHBTestBase, TestCase): """ sub = PushSubscription.objects.create( - hub="http://myhub.example.com/endpoint", - topic="http://publisher.example.com/happycats.xml") + hub="http://myhub.example.com/endpoint", topic="http://publisher.example.com/happycats.xml" + ) callback_data = [] updated.connect( - lambda sender=None, update=None, **kwargs: callback_data.append( - (sender, update)), - weak=False) + lambda sender=None, update=None, **kwargs: callback_data.append((sender, update)), weak=False + ) - response = self.client.post(reverse('pubsubhubbub_callback', - args=(sub.pk,)), - update_data, 'application/atom+xml') + response = self.client.post( + reverse("pubsubhubbub_callback", args=(sub.pk,)), update_data, "application/atom+xml" + ) self.assertEquals(response.status_code, 200) self.assertEquals(len(callback_data), 1) sender, update = callback_data[0] self.assertEquals(sender, sub) self.assertEquals(len(update.entries), 4) - self.assertEquals(update.entries[0].id, - 'http://publisher.example.com/happycat25.xml') - self.assertEquals(update.entries[1].id, - 'http://publisher.example.com/happycat25.xml') - self.assertEquals(update.entries[2].id, - 'http://publisher.example.com/happycat25.xml') - self.assertEquals(update.entries[3].id, - 'http://publisher.example.com/happycat25.xml') + self.assertEquals(update.entries[0].id, "http://publisher.example.com/happycat25.xml") + self.assertEquals(update.entries[1].id, "http://publisher.example.com/happycat25.xml") + self.assertEquals(update.entries[2].id, "http://publisher.example.com/happycat25.xml") + self.assertEquals(update.entries[3].id, "http://publisher.example.com/happycat25.xml") def test_update_with_changed_hub(self): update_data = """ @@ -343,31 +344,32 @@ class Test_PSHBUpdateCase(PSHBTestBase, TestCase): sub = PushSubscription.objects.create( hub="hub", topic="http://publisher.example.com/happycats.xml", - lease_expires=datetime.now() + timedelta(days=1)) + lease_expires=datetime.now() + timedelta(days=1), + ) callback_data = [] updated.connect( - lambda sender=None, update=None, **kwargs: callback_data.append( - (sender, update)), - weak=False) + lambda sender=None, update=None, **kwargs: callback_data.append((sender, update)), weak=False + ) self.responses.append(MockResponse(204)) - response = self.client.post(reverse('pubsubhubbub_callback', - args=(sub.pk,)), - update_data, 'application/atom+xml') + response = self.client.post( + reverse("pubsubhubbub_callback", args=(sub.pk,)), update_data, "application/atom+xml" + ) self.assertEquals(response.status_code, 200) self.assertEquals( PushSubscription.objects.filter( - hub='http://myhub.example.com/endpoint', - topic='http://publisher.example.com/happycats.xml', - verified=True).count(), 1) + hub="http://myhub.example.com/endpoint", + topic="http://publisher.example.com/happycats.xml", + verified=True, + ).count(), + 1, + ) self.assertEquals(len(self.requests), 1) - self.assertEquals(self.requests[0][0], - 
'http://myhub.example.com/endpoint') - self.assertEquals(self.requests[0][1]['callback'], - 'http://test.nb.local.com/1/') - self.assert_((self.requests[0][1]['lease_seconds'] - 86400) < 5) + self.assertEquals(self.requests[0][0], "http://myhub.example.com/endpoint") + self.assertEquals(self.requests[0][1]["callback"], "http://test.nb.local.com/1/") + self.assert_((self.requests[0][1]["lease_seconds"] - 86400) < 5) def test_update_with_changed_self(self): update_data = """ @@ -392,30 +394,32 @@ class Test_PSHBUpdateCase(PSHBTestBase, TestCase): sub = PushSubscription.objects.create( hub="http://myhub.example.com/endpoint", topic="topic", - lease_expires=datetime.now() + timedelta(days=1)) + lease_expires=datetime.now() + timedelta(days=1), + ) callback_data = [] updated.connect( - lambda sender=None, update=None, **kwargs: callback_data.append( - (sender, update)), - weak=False) + lambda sender=None, update=None, **kwargs: callback_data.append((sender, update)), weak=False + ) self.responses.append(MockResponse(204)) - response = self.client.post(reverse('pubsubhubbub_callback', kwargs={'push_id': sub.pk}), - update_data, 'application/atom+xml') + response = self.client.post( + reverse("pubsubhubbub_callback", kwargs={"push_id": sub.pk}), update_data, "application/atom+xml" + ) self.assertEquals(response.status_code, 200) self.assertEquals( PushSubscription.objects.filter( - hub='http://myhub.example.com/endpoint', - topic='http://publisher.example.com/happycats.xml', - verified=True).count(), 1) + hub="http://myhub.example.com/endpoint", + topic="http://publisher.example.com/happycats.xml", + verified=True, + ).count(), + 1, + ) self.assertEquals(len(self.requests), 1) - self.assertEquals(self.requests[0][0], - 'http://myhub.example.com/endpoint') - self.assertEquals(self.requests[0][1]['callback'], - 'http://test.nb.local.com/1/') - self.assert_((self.requests[0][1]['lease_seconds'] - 86400) < 5) + self.assertEquals(self.requests[0][0], "http://myhub.example.com/endpoint") + self.assertEquals(self.requests[0][1]["callback"], "http://test.nb.local.com/1/") + self.assert_((self.requests[0][1]["lease_seconds"] - 86400) < 5) def test_update_with_changed_hub_and_self(self): update_data = """ @@ -438,30 +442,29 @@ class Test_PSHBUpdateCase(PSHBTestBase, TestCase): """ sub = PushSubscription.objects.create( - hub="hub", - topic="topic", - lease_expires=datetime.now() + timedelta(days=1)) + hub="hub", topic="topic", lease_expires=datetime.now() + timedelta(days=1) + ) callback_data = [] updated.connect( - lambda sender=None, update=None, **kwargs: callback_data.append( - (sender, update)), - weak=False) + lambda sender=None, update=None, **kwargs: callback_data.append((sender, update)), weak=False + ) self.responses.append(MockResponse(204)) - response = self.client.post(reverse('pubsubhubbub_callback', - args=(sub.pk,)), - update_data, 'application/atom+xml') + response = self.client.post( + reverse("pubsubhubbub_callback", args=(sub.pk,)), update_data, "application/atom+xml" + ) self.assertEquals(response.status_code, 200) self.assertEquals( PushSubscription.objects.filter( - hub='http://myhub.example.com/endpoint', - topic='http://publisher.example.com/happycats.xml', - verified=True).count(), 1) + hub="http://myhub.example.com/endpoint", + topic="http://publisher.example.com/happycats.xml", + verified=True, + ).count(), + 1, + ) self.assertEquals(len(self.requests), 1) - self.assertEquals(self.requests[0][0], - 'http://myhub.example.com/endpoint') - 
self.assertEquals(self.requests[0][1]['callback'], - 'http://test.nb.local.com/1/') - self.assert_((self.requests[0][1]['lease_seconds'] - 86400) < 5) + self.assertEquals(self.requests[0][0], "http://myhub.example.com/endpoint") + self.assertEquals(self.requests[0][1]["callback"], "http://test.nb.local.com/1/") + self.assert_((self.requests[0][1]["lease_seconds"] - 86400) < 5) diff --git a/apps/push/urls.py b/apps/push/urls.py index 223e40b29..08a4d2a46 100644 --- a/apps/push/urls.py +++ b/apps/push/urls.py @@ -2,5 +2,5 @@ from django.conf.urls import * from apps.push import views urlpatterns = [ - url(r'^(?P\d+)/?$', views.push_callback, name='push-callback'), + url(r"^(?P\d+)/?$", views.push_callback, name="push-callback"), ] diff --git a/apps/push/views.py b/apps/push/views.py index 78460008d..71451b80e 100644 --- a/apps/push/views.py +++ b/apps/push/views.py @@ -13,43 +13,49 @@ from apps.push.signals import verified from apps.rss_feeds.models import MFetchHistory from utils import log as logging -def push_callback(request, push_id): - if request.method == 'GET': - mode = request.GET['hub.mode'] - topic = request.GET['hub.topic'] - challenge = request.GET.get('hub.challenge', '') - lease_seconds = request.GET.get('hub.lease_seconds') - verify_token = request.GET.get('hub.verify_token', '') - if mode == 'subscribe': - if not verify_token.startswith('subscribe'): +def push_callback(request, push_id): + if request.method == "GET": + mode = request.GET["hub.mode"] + topic = request.GET["hub.topic"] + challenge = request.GET.get("hub.challenge", "") + lease_seconds = request.GET.get("hub.lease_seconds") + verify_token = request.GET.get("hub.verify_token", "") + + if mode == "subscribe": + if not verify_token.startswith("subscribe"): raise Http404 - subscription = get_object_or_404(PushSubscription, - pk=push_id, - topic=topic, - verify_token=verify_token) + subscription = get_object_or_404( + PushSubscription, pk=push_id, topic=topic, verify_token=verify_token + ) subscription.verified = True subscription.set_expiration(int(lease_seconds)) subscription.save() subscription.feed.setup_push() - logging.debug(' ---> [%-30s] [%s] ~BBVerified PuSH' % (subscription.feed, subscription.feed_id)) + logging.debug(" ---> [%-30s] [%s] ~BBVerified PuSH" % (subscription.feed, subscription.feed_id)) verified.send(sender=subscription) - return HttpResponse(challenge, content_type='text/plain') - elif request.method == 'POST': + return HttpResponse(challenge, content_type="text/plain") + elif request.method == "POST": subscription = get_object_or_404(PushSubscription, pk=push_id) fetch_history = MFetchHistory.feed(subscription.feed_id) latest_push_date_delta = None - if fetch_history and fetch_history.get('push_history'): - latest_push = fetch_history['push_history'][0]['push_date'] - latest_push_date = datetime.datetime.strptime(latest_push, '%Y-%m-%d %H:%M:%S') + if fetch_history and fetch_history.get("push_history"): + latest_push = fetch_history["push_history"][0]["push_date"] + latest_push_date = datetime.datetime.strptime(latest_push, "%Y-%m-%d %H:%M:%S") latest_push_date_delta = datetime.datetime.now() - latest_push_date if latest_push_date > datetime.datetime.now() - datetime.timedelta(minutes=1): - logging.debug(' ---> [%-30s] ~SN~FBSkipping feed fetch, pushed %s seconds ago' % (subscription.feed, latest_push_date_delta.seconds)) - return HttpResponse('Slow down, you just pushed %s seconds ago...' 
% latest_push_date_delta.seconds, status=429) - + logging.debug( + " ---> [%-30s] ~SN~FBSkipping feed fetch, pushed %s seconds ago" + % (subscription.feed, latest_push_date_delta.seconds) + ) + return HttpResponse( + "Slow down, you just pushed %s seconds ago..." % latest_push_date_delta.seconds, + status=429, + ) + # XXX TODO: Optimize this by removing feedparser. It just needs to find out # the hub_url or topic has changed. ElementTree could do it. if random.random() < 0.1: @@ -63,10 +69,12 @@ def push_callback(request, push_id): # subscription.feed.queue_pushed_feed_xml(request.body) if subscription.feed.active_subscribers >= 1: subscription.feed.queue_pushed_feed_xml("Fetch me", latest_push_date_delta=latest_push_date_delta) - MFetchHistory.add(feed_id=subscription.feed_id, - fetch_type='push') + MFetchHistory.add(feed_id=subscription.feed_id, fetch_type="push") else: - logging.debug(' ---> [%-30s] ~FBSkipping feed fetch, no actives: %s' % (subscription.feed, subscription.feed)) - - return HttpResponse('OK') + logging.debug( + " ---> [%-30s] ~FBSkipping feed fetch, no actives: %s" + % (subscription.feed, subscription.feed) + ) + + return HttpResponse("OK") return Http404 diff --git a/apps/reader/admin.py b/apps/reader/admin.py index 03daf35fc..fca4f8aca 100644 --- a/apps/reader/admin.py +++ b/apps/reader/admin.py @@ -3,4 +3,4 @@ from django.contrib import admin admin.site.register(UserSubscription) admin.site.register(UserSubscriptionFolders) -admin.site.register(Feature) \ No newline at end of file +admin.site.register(Feature) diff --git a/apps/reader/factories.py b/apps/reader/factories.py index ba7f22c22..edba6c281 100644 --- a/apps/reader/factories.py +++ b/apps/reader/factories.py @@ -8,6 +8,7 @@ from apps.profile.factories import UserFactory fake = Faker() + def generate_folder(): string = '{"' string += " ".join(fake.words(2)) @@ -18,12 +19,13 @@ def generate_folder(): string += "]}," return string + def generate_folders(): """ "folders": "[5299728, 644144, 1187026, {\"Brainiacs & Opinion\": [569, 38, 3581, 183139, 1186180, 15]}, {\"Science & Technology\": [731503, 140145, 1272495, 76, 161, 39, {\"Hacker\": [5985150, 3323431]}]}, {\"Humor\": [212379, 3530, 5994357]}, {\"Videos\": [3240, 5168]}]" """ string = '"folders":[' - + for _ in range(3): string += f"{fake.pyint()}, " for _ in range(3): @@ -32,6 +34,7 @@ def generate_folders(): string = string[:-1] + "]" return string + class UserSubscriptionFoldersFactory(DjangoModelFactory): user = factory.SubFactory(UserFactory) folders = FuzzyAttribute(generate_folders) @@ -39,18 +42,19 @@ class UserSubscriptionFoldersFactory(DjangoModelFactory): class Meta: model = UserSubscriptionFolders - + class UserSubscriptionFactory(DjangoModelFactory): user = factory.SubFactory(UserFactory) feed = FuzzyAttribute(FeedFactory) - last_read_date = factory.Faker('date_time') + last_read_date = factory.Faker("date_time") class Meta: model = UserSubscription class FeatureFactory(DjangoModelFactory): - description = factory.Faker('text') - date = factory.Faker('date_time') + description = factory.Faker("text") + date = factory.Faker("date_time") + class Meta: model = Feature diff --git a/apps/reader/forms.py b/apps/reader/forms.py index c0fe51d48..a09cc0475 100644 --- a/apps/reader/forms.py +++ b/apps/reader/forms.py @@ -15,13 +15,18 @@ from dns.resolver import NoResolverConfiguration class LoginForm(forms.Form): - username = forms.CharField(label=_("Username or Email"), max_length=30, - widget=forms.TextInput(attrs={'tabindex': 1, 'class': 
'NB-input'}), - error_messages={'required': 'Please enter a username.'}) - password = forms.CharField(label=_("Password"), - widget=forms.PasswordInput(attrs={'tabindex': 2, 'class': 'NB-input'}), - required=False) - # error_messages={'required': 'Please enter a password.'}) + username = forms.CharField( + label=_("Username or Email"), + max_length=30, + widget=forms.TextInput(attrs={"tabindex": 1, "class": "NB-input"}), + error_messages={"required": "Please enter a username."}, + ) + password = forms.CharField( + label=_("Password"), + widget=forms.PasswordInput(attrs={"tabindex": 2, "class": "NB-input"}), + required=False, + ) + # error_messages={'required': 'Please enter a password.'}) add = forms.CharField(required=False, widget=forms.HiddenInput()) def __init__(self, *args, **kwargs): @@ -29,10 +34,10 @@ class LoginForm(forms.Form): super(LoginForm, self).__init__(*args, **kwargs) def clean(self): - username = self.cleaned_data.get('username', '').lower() - password = self.cleaned_data.get('password', '') - - if '@' in username: + username = self.cleaned_data.get("username", "").lower() + password = self.cleaned_data.get("password", "") + + if "@" in username: user = User.objects.filter(email=username) if not user: user = User.objects.filter(email__iexact=username) @@ -60,13 +65,15 @@ class LoginForm(forms.Form): if blank: email_user.set_password(email_user.username) email_user.save() - self.user_cache = authenticate(username=email_user.username, password=email_user.username) + self.user_cache = authenticate( + username=email_user.username, password=email_user.username + ) if self.user_cache is None: logging.info(" ***> [%s] Bad Login" % username) raise forms.ValidationError(_("Whoopsy-daisy, wrong password. Try again.")) elif username and not user: raise forms.ValidationError(_("That username is not registered. Please try again.")) - + return self.cleaned_data def get_user_id(self): @@ -81,113 +88,135 @@ class LoginForm(forms.Form): class SignupForm(forms.Form): use_required_attribute = False - username = forms.RegexField(regex=r'^\w+$', - max_length=30, - widget=forms.TextInput(attrs={'class': 'NB-input'}), - label=_('Username'), - error_messages={ - 'required': 'Please enter a username.', - 'invalid': "Your username may only contain letters and numbers." 
- }) - email = forms.EmailField(widget=forms.TextInput(attrs={'maxlength': 75, 'class': 'NB-input'}), - label=_('Email'), - required=True, - error_messages={'required': 'Please enter an email.'}) - password = forms.CharField(widget=forms.PasswordInput(attrs={'class': 'NB-input'}, - render_value=True,), - label=_('Password'), - required=False) - # error_messages={'required': 'Please enter a password.'}) - + username = forms.RegexField( + regex=r"^\w+$", + max_length=30, + widget=forms.TextInput(attrs={"class": "NB-input"}), + label=_("Username"), + error_messages={ + "required": "Please enter a username.", + "invalid": "Your username may only contain letters and numbers.", + }, + ) + email = forms.EmailField( + widget=forms.TextInput(attrs={"maxlength": 75, "class": "NB-input"}), + label=_("Email"), + required=True, + error_messages={"required": "Please enter an email."}, + ) + password = forms.CharField( + widget=forms.PasswordInput( + attrs={"class": "NB-input"}, + render_value=True, + ), + label=_("Password"), + required=False, + ) + # error_messages={'required': 'Please enter a password.'}) + def clean_username(self): - username = self.cleaned_data['username'] + username = self.cleaned_data["username"] return username def clean_password(self): - if not self.cleaned_data['password']: + if not self.cleaned_data["password"]: return "" - return self.cleaned_data['password'] - + return self.cleaned_data["password"] + def clean_email(self): - email = self.cleaned_data.get('email', None) + email = self.cleaned_data.get("email", None) if email: email_exists = User.objects.filter(email__iexact=email).count() if email_exists: - raise forms.ValidationError(_('Someone is already using that email address.')) - if any([banned in email for banned in ['mailwire24', 'mailbox9', 'scintillamail', 'bluemailboxes', 'devmailing']]): - logging.info(" ***> [%s] Spammer signup banned: %s/%s" % (self.cleaned_data.get('username', None), self.cleaned_data.get('password', None), email)) - raise forms.ValidationError('Seriously, fuck off spammer.') + raise forms.ValidationError(_("Someone is already using that email address.")) + if any( + [ + banned in email + for banned in ["mailwire24", "mailbox9", "scintillamail", "bluemailboxes", "devmailing"] + ] + ): + logging.info( + " ***> [%s] Spammer signup banned: %s/%s" + % ( + self.cleaned_data.get("username", None), + self.cleaned_data.get("password", None), + email, + ) + ) + raise forms.ValidationError("Seriously, fuck off spammer.") try: - domain = email.rsplit('@', 1)[-1] - if not query(domain, 'MX'): - raise forms.ValidationError('Sorry, that email is invalid.') + domain = email.rsplit("@", 1)[-1] + if not query(domain, "MX"): + raise forms.ValidationError("Sorry, that email is invalid.") except (NXDOMAIN, NoNameservers, NoAnswer): - raise forms.ValidationError('Sorry, that email is invalid.') + raise forms.ValidationError("Sorry, that email is invalid.") except NoResolverConfiguration as e: logging.info(f" ***> ~FRFailed to check spamminess of domain: ~FY{domain} ~FR{e}") pass - return self.cleaned_data['email'] - + return self.cleaned_data["email"] + def clean(self): - username = self.cleaned_data.get('username', '') - password = self.cleaned_data.get('password', '') - email = self.cleaned_data.get('email', None) - - exists = User.objects.filter(username__iexact=username).count() - if exists: - user_auth = authenticate(username=username, password=password) - if not user_auth: - raise forms.ValidationError(_('Someone is already using that username.')) - - return 
self.cleaned_data - - def save(self, profile_callback=None): - username = self.cleaned_data['username'] - password = self.cleaned_data['password'] - email = self.cleaned_data['email'] + username = self.cleaned_data.get("username", "") + password = self.cleaned_data.get("password", "") + email = self.cleaned_data.get("email", None) exists = User.objects.filter(username__iexact=username).count() if exists: user_auth = authenticate(username=username, password=password) if not user_auth: - raise forms.ValidationError(_('Someone is already using that username.')) + raise forms.ValidationError(_("Someone is already using that username.")) + + return self.cleaned_data + + def save(self, profile_callback=None): + username = self.cleaned_data["username"] + password = self.cleaned_data["password"] + email = self.cleaned_data["email"] + + exists = User.objects.filter(username__iexact=username).count() + if exists: + user_auth = authenticate(username=username, password=password) + if not user_auth: + raise forms.ValidationError(_("Someone is already using that username.")) else: return user_auth - + if not password: password = username - + new_user = User(username=username) new_user.set_password(password) - if not getattr(settings, 'AUTO_ENABLE_NEW_USERS', True): + if not getattr(settings, "AUTO_ENABLE_NEW_USERS", True): new_user.is_active = False new_user.email = email new_user.last_login = datetime.datetime.now() new_user.save() - new_user = authenticate(username=username, - password=password) + new_user = authenticate(username=username, password=password) new_user = User.objects.get(username=username) MActivity.new_signup(user_id=new_user.pk) - + RNewUserQueue.add_user(new_user.pk) - + if new_user.email: EmailNewUser.delay(user_id=new_user.pk) - - if getattr(settings, 'AUTO_PREMIUM_NEW_USERS', False): + + if getattr(settings, "AUTO_PREMIUM_NEW_USERS", False): new_user.profile.activate_premium() - elif getattr(settings, 'AUTO_ENABLE_NEW_USERS', False): + elif getattr(settings, "AUTO_ENABLE_NEW_USERS", False): new_user.profile.activate_free() - + return new_user + class FeatureForm(forms.Form): use_required_attribute = False description = forms.CharField(required=True) - + def save(self): - feature = Feature(description=self.cleaned_data['description'], - date=datetime.datetime.utcnow() + datetime.timedelta(minutes=1)) + feature = Feature( + description=self.cleaned_data["description"], + date=datetime.datetime.utcnow() + datetime.timedelta(minutes=1), + ) feature.save() return feature diff --git a/apps/reader/http.py b/apps/reader/http.py index 6fee2f04b..af0edbff0 100644 --- a/apps/reader/http.py +++ b/apps/reader/http.py @@ -1,8 +1,9 @@ from django.shortcuts import render + def respond(request, template_name, context_dict, **kwargs): """ Use this function rather than render_to_response directly. The idea is to ensure that we're always using RequestContext. It's too easy to forget. 
""" - return render(request, template_name, context_dict, **kwargs) \ No newline at end of file + return render(request, template_name, context_dict, **kwargs) diff --git a/apps/reader/managers.py b/apps/reader/managers.py index 697fda287..adafc88fa 100644 --- a/apps/reader/managers.py +++ b/apps/reader/managers.py @@ -4,32 +4,36 @@ from django.contrib.auth.models import User from apps.rss_feeds.models import DuplicateFeed from utils import log as logging + class UserSubscriptionManager(models.Manager): def get(self, *args, **kwargs): try: return super(UserSubscriptionManager, self).get(*args, **kwargs) except self.model.DoesNotExist as exception: - if isinstance(kwargs.get('feed'), int): - feed_id = kwargs.get('feed') - elif 'feed' in kwargs: - feed_id = kwargs['feed'].pk - elif 'feed__pk' in kwargs: - feed_id = kwargs['feed__pk'] - elif 'feed_id' in kwargs: - feed_id = kwargs['feed_id'] + if isinstance(kwargs.get("feed"), int): + feed_id = kwargs.get("feed") + elif "feed" in kwargs: + feed_id = kwargs["feed"].pk + elif "feed__pk" in kwargs: + feed_id = kwargs["feed__pk"] + elif "feed_id" in kwargs: + feed_id = kwargs["feed_id"] dupe_feed = DuplicateFeed.objects.filter(duplicate_feed_id=feed_id) if dupe_feed: feed = dupe_feed[0].feed - if 'feed' in kwargs: - kwargs['feed'] = feed - elif 'feed__pk' in kwargs: - kwargs['feed__pk'] = feed.pk - elif 'feed_id' in kwargs: - kwargs['feed_id'] = feed.pk - user = kwargs.get('user') + if "feed" in kwargs: + kwargs["feed"] = feed + elif "feed__pk" in kwargs: + kwargs["feed__pk"] = feed.pk + elif "feed_id" in kwargs: + kwargs["feed_id"] = feed.pk + user = kwargs.get("user") if isinstance(user, int): user = User.objects.get(pk=user) - logging.debug(" ---> [%s] ~BRFound dupe UserSubscription: ~SB%s (%s)" % (user and user.username, feed, feed_id)) + logging.debug( + " ---> [%s] ~BRFound dupe UserSubscription: ~SB%s (%s)" + % (user and user.username, feed, feed_id) + ) return super(UserSubscriptionManager, self).get(*args, **kwargs) else: raise exception diff --git a/apps/reader/migrations/0001_initial.py b/apps/reader/migrations/0001_initial.py index da718c584..61e86b9e5 100644 --- a/apps/reader/migrations/0001_initial.py +++ b/apps/reader/migrations/0001_initial.py @@ -8,60 +8,87 @@ import django.db.models.deletion class Migration(migrations.Migration): - initial = True dependencies = [ - ('rss_feeds', '0001_initial'), + ("rss_feeds", "0001_initial"), migrations.swappable_dependency(settings.AUTH_USER_MODEL), ] operations = [ migrations.CreateModel( - name='Feature', + name="Feature", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('description', models.TextField(default='')), - ('date', models.DateTimeField(default=datetime.datetime.now)), + ( + "id", + models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID"), + ), + ("description", models.TextField(default="")), + ("date", models.DateTimeField(default=datetime.datetime.now)), ], options={ - 'ordering': ['-date'], + "ordering": ["-date"], }, ), migrations.CreateModel( - name='UserSubscription', + name="UserSubscription", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('user_title', models.CharField(blank=True, max_length=255, null=True)), - ('active', models.BooleanField(default=False)), - ('last_read_date', models.DateTimeField(default=apps.reader.models.unread_cutoff_default)), - ('mark_read_date', 
models.DateTimeField(default=apps.reader.models.unread_cutoff_default)), - ('unread_count_neutral', models.IntegerField(default=0)), - ('unread_count_positive', models.IntegerField(default=0)), - ('unread_count_negative', models.IntegerField(default=0)), - ('unread_count_updated', models.DateTimeField(default=datetime.datetime.now)), - ('oldest_unread_story_date', models.DateTimeField(default=datetime.datetime.now)), - ('needs_unread_recalc', models.BooleanField(default=False)), - ('feed_opens', models.IntegerField(default=0)), - ('is_trained', models.BooleanField(default=False)), - ('feed', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='subscribers', to='rss_feeds.Feed')), - ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='subscriptions', to=settings.AUTH_USER_MODEL)), + ( + "id", + models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID"), + ), + ("user_title", models.CharField(blank=True, max_length=255, null=True)), + ("active", models.BooleanField(default=False)), + ("last_read_date", models.DateTimeField(default=apps.reader.models.unread_cutoff_default)), + ("mark_read_date", models.DateTimeField(default=apps.reader.models.unread_cutoff_default)), + ("unread_count_neutral", models.IntegerField(default=0)), + ("unread_count_positive", models.IntegerField(default=0)), + ("unread_count_negative", models.IntegerField(default=0)), + ("unread_count_updated", models.DateTimeField(default=datetime.datetime.now)), + ("oldest_unread_story_date", models.DateTimeField(default=datetime.datetime.now)), + ("needs_unread_recalc", models.BooleanField(default=False)), + ("feed_opens", models.IntegerField(default=0)), + ("is_trained", models.BooleanField(default=False)), + ( + "feed", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + related_name="subscribers", + to="rss_feeds.Feed", + ), + ), + ( + "user", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + related_name="subscriptions", + to=settings.AUTH_USER_MODEL, + ), + ), ], ), migrations.CreateModel( - name='UserSubscriptionFolders', + name="UserSubscriptionFolders", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('folders', models.TextField(default='[]')), - ('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)), + ( + "id", + models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID"), + ), + ("folders", models.TextField(default="[]")), + ( + "user", + models.OneToOneField( + on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL + ), + ), ], options={ - 'verbose_name': 'folder', - 'verbose_name_plural': 'folders', + "verbose_name": "folder", + "verbose_name_plural": "folders", }, ), migrations.AlterUniqueTogether( - name='usersubscription', - unique_together={('user', 'feed')}, + name="usersubscription", + unique_together={("user", "feed")}, ), ] diff --git a/apps/reader/models.py b/apps/reader/models.py index 3ff6f0d38..ca3d3b872 100644 --- a/apps/reader/models.py +++ b/apps/reader/models.py @@ -22,24 +22,31 @@ from apps.reader.managers import UserSubscriptionManager from apps.rss_feeds.models import Feed, MStory, DuplicateFeed from apps.rss_feeds.tasks import NewFeeds from apps.analyzer.models import MClassifierFeed, MClassifierAuthor, MClassifierTag, MClassifierTitle -from apps.analyzer.models import apply_classifier_titles, 
apply_classifier_feeds, apply_classifier_authors, apply_classifier_tags +from apps.analyzer.models import ( + apply_classifier_titles, + apply_classifier_feeds, + apply_classifier_authors, + apply_classifier_tags, +) from apps.analyzer.tfidf import tfidf from utils.feed_functions import add_object_to_folder, chunks + def unread_cutoff_default(): return datetime.datetime.utcnow() - datetime.timedelta(days=settings.DAYS_OF_UNREAD) - + + class UserSubscription(models.Model): """ A feed which a user has subscribed to. Carries all of the cached information about the subscription, including unread counts of the three primary scores. - + Also has a dirty flag (needs_unread_recalc) which means that the unread counts are not accurate and need to be calculated with `self.calculate_feed_scores()`. """ - - user = models.ForeignKey(User, related_name='subscriptions', on_delete=models.CASCADE) - feed = models.ForeignKey(Feed, related_name='subscribers', on_delete=models.CASCADE) + + user = models.ForeignKey(User, related_name="subscriptions", on_delete=models.CASCADE) + feed = models.ForeignKey(Feed, related_name="subscribers", on_delete=models.CASCADE) user_title = models.CharField(max_length=255, null=True, blank=True) active = models.BooleanField(default=False) last_read_date = models.DateTimeField(default=unread_cutoff_default) @@ -52,32 +59,31 @@ class UserSubscription(models.Model): needs_unread_recalc = models.BooleanField(default=False) feed_opens = models.IntegerField(default=0) is_trained = models.BooleanField(default=False) - + objects = UserSubscriptionManager() def __str__(self): - return '[%s (%s): %s (%s)] ' % (self.user.username, self.user.pk, - self.feed.feed_title, self.feed.pk) - + return "[%s (%s): %s (%s)] " % (self.user.username, self.user.pk, self.feed.feed_title, self.feed.pk) + class Meta: unique_together = ("user", "feed") - + def canonical(self, full=False, include_favicon=True, classifiers=None): - feed = self.feed.canonical(full=full, include_favicon=include_favicon) - feed['feed_title'] = self.user_title or feed['feed_title'] - feed['ps'] = self.unread_count_positive - feed['nt'] = self.unread_count_neutral - feed['ng'] = self.unread_count_negative - feed['active'] = self.active - feed['feed_opens'] = self.feed_opens - feed['subscribed'] = True + feed = self.feed.canonical(full=full, include_favicon=include_favicon) + feed["feed_title"] = self.user_title or feed["feed_title"] + feed["ps"] = self.unread_count_positive + feed["nt"] = self.unread_count_neutral + feed["ng"] = self.unread_count_negative + feed["active"] = self.active + feed["feed_opens"] = self.feed_opens + feed["subscribed"] = True if classifiers: - feed['classifiers'] = classifiers + feed["classifiers"] = classifiers return feed - + def save(self, *args, **kwargs): - user_title_max = self._meta.get_field('user_title').max_length + user_title_max = self._meta.get_field("user_title").max_length if self.user_title and len(self.user_title) > user_title_max: self.user_title = self.user_title[:user_title_max] try: @@ -91,37 +97,50 @@ class UserSubscription(models.Model): super(UserSubscription, self).save(*args, **kwargs) break else: - if self and self.id: self.delete() - + if self and self.id: + self.delete() + @classmethod def subs_for_feeds(cls, user_id, feed_ids=None, read_filter="unread"): usersubs = cls.objects if read_filter == "unread": - usersubs = usersubs.filter(Q(unread_count_neutral__gt=0) | - Q(unread_count_positive__gt=0)) + usersubs = usersubs.filter(Q(unread_count_neutral__gt=0) | 
Q(unread_count_positive__gt=0)) if not feed_ids: - usersubs = usersubs.filter(user=user_id, - active=True).only('feed', 'mark_read_date', 'is_trained', 'needs_unread_recalc') + usersubs = usersubs.filter(user=user_id, active=True).only( + "feed", "mark_read_date", "is_trained", "needs_unread_recalc" + ) else: - usersubs = usersubs.filter(user=user_id, - active=True, - feed__in=feed_ids).only('feed', 'mark_read_date', 'is_trained', 'needs_unread_recalc') - + usersubs = usersubs.filter(user=user_id, active=True, feed__in=feed_ids).only( + "feed", "mark_read_date", "is_trained", "needs_unread_recalc" + ) + return usersubs - + @classmethod - def story_hashes(cls, user_id, feed_ids=None, usersubs=None, read_filter="unread", order="newest", - include_timestamps=False, group_by_feed=False, cutoff_date=None, - across_all_feeds=True, store_stories_key=None, offset=0, limit=500): + def story_hashes( + cls, + user_id, + feed_ids=None, + usersubs=None, + read_filter="unread", + order="newest", + include_timestamps=False, + group_by_feed=False, + cutoff_date=None, + across_all_feeds=True, + store_stories_key=None, + offset=0, + limit=500, + ): r = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL) pipeline = r.pipeline() user = User.objects.get(pk=user_id) story_hashes = {} if group_by_feed else [] is_archive = user.profile.is_archive - + if not feed_ids and not across_all_feeds: return story_hashes - + if not usersubs: usersubs = cls.subs_for_feeds(user_id, feed_ids=feed_ids, read_filter=read_filter) if not usersubs: @@ -130,12 +149,12 @@ class UserSubscription(models.Model): if not feed_ids: return story_hashes - current_time = int(time.time() + 60*60*24) + current_time = int(time.time() + 60 * 60 * 24) if not cutoff_date: cutoff_date = user.profile.unread_cutoff feed_counter = 0 unread_ranked_stories_keys = [] - + read_dates = dict() needs_unread_recalc = dict() manual_unread_pipeline = r.pipeline() @@ -143,9 +162,9 @@ class UserSubscription(models.Model): oldest_manual_unread = None # usersub_count = len(usersubs) for us in usersubs: - read_dates[us.feed_id] = int(max(us.mark_read_date, cutoff_date).strftime('%s')) + read_dates[us.feed_id] = int(max(us.mark_read_date, cutoff_date).strftime("%s")) if read_filter == "unread": - needs_unread_recalc[us.feed_id] = us.needs_unread_recalc # or usersub_count == 1 + needs_unread_recalc[us.feed_id] = us.needs_unread_recalc # or usersub_count == 1 user_manual_unread_stories_feed_key = f"uU:{user_id}:{us.feed_id}" manual_unread_pipeline.exists(user_manual_unread_stories_feed_key) user_unread_ranked_stories_key = f"zU:{user_id}:{us.feed_id}" @@ -153,25 +172,27 @@ class UserSubscription(models.Model): if read_filter == "unread": results = manual_unread_pipeline.execute() for i, us in enumerate(usersubs): - if results[i*2]: # user_manual_unread_stories_feed_key + if results[i * 2]: # user_manual_unread_stories_feed_key user_manual_unread_stories_feed_key = f"uU:{user_id}:{us.feed_id}" - oldest_manual_unread = r.zrevrange(user_manual_unread_stories_feed_key, -1, -1, withscores=True) + oldest_manual_unread = r.zrevrange( + user_manual_unread_stories_feed_key, -1, -1, withscores=True + ) manual_unread_feed_oldest_date[us.feed_id] = int(oldest_manual_unread[0][1]) - if read_filter == "unread" and not results[i*2+1]: # user_unread_ranked_stories_key + if read_filter == "unread" and not results[i * 2 + 1]: # user_unread_ranked_stories_key needs_unread_recalc[us.feed_id] = True - + for feed_id_group in chunks(feed_ids, 500): pipeline = r.pipeline() for feed_id 
in feed_id_group: - stories_key = 'F:%s' % feed_id - sorted_stories_key = 'zF:%s' % feed_id - read_stories_key = 'RS:%s:%s' % (user_id, feed_id) - unread_stories_key = 'U:%s:%s' % (user_id, feed_id) - unread_ranked_stories_key = 'zU:%s:%s' % (user_id, feed_id) + stories_key = "F:%s" % feed_id + sorted_stories_key = "zF:%s" % feed_id + read_stories_key = "RS:%s:%s" % (user_id, feed_id) + unread_stories_key = "U:%s:%s" % (user_id, feed_id) + unread_ranked_stories_key = "zU:%s:%s" % (user_id, feed_id) user_manual_unread_stories_feed_key = f"uU:{user_id}:{feed_id}" - + max_score = current_time - if read_filter == 'unread': + if read_filter == "unread": min_score = read_dates[feed_id] # if needs_unread_recalc[feed_id]: # pipeline.sdiffstore(unread_stories_key, stories_key, read_stories_key) @@ -180,53 +201,73 @@ class UserSubscription(models.Model): else: min_score = 0 - if order == 'oldest': + if order == "oldest": byscorefunc = pipeline.zrangebyscore else: byscorefunc = pipeline.zrevrangebyscore min_score, max_score = max_score, min_score ranked_stories_key = unread_ranked_stories_key - if read_filter == 'unread': + if read_filter == "unread": if needs_unread_recalc[feed_id]: pipeline.zdiffstore(unread_ranked_stories_key, [sorted_stories_key, read_stories_key]) # pipeline.expire(unread_ranked_stories_key, unread_cutoff_diff.days*24*60*60) - pipeline.expire(unread_ranked_stories_key, 1*60*60) # 1 hours - if order == 'oldest': - pipeline.zremrangebyscore(ranked_stories_key, 0, min_score-1) - pipeline.zremrangebyscore(ranked_stories_key, max_score+1, 2*max_score) + pipeline.expire(unread_ranked_stories_key, 1 * 60 * 60) # 1 hours + if order == "oldest": + pipeline.zremrangebyscore(ranked_stories_key, 0, min_score - 1) + pipeline.zremrangebyscore(ranked_stories_key, max_score + 1, 2 * max_score) else: - pipeline.zremrangebyscore(ranked_stories_key, 0, max_score-1) - pipeline.zremrangebyscore(ranked_stories_key, min_score+1, 2*min_score) + pipeline.zremrangebyscore(ranked_stories_key, 0, max_score - 1) + pipeline.zremrangebyscore(ranked_stories_key, min_score + 1, 2 * min_score) else: ranked_stories_key = sorted_stories_key - + # If archive premium user has manually marked an older story as unread if is_archive and feed_id in manual_unread_feed_oldest_date and read_filter == "unread": - if order == 'oldest': + if order == "oldest": min_score = manual_unread_feed_oldest_date[feed_id] else: max_score = manual_unread_feed_oldest_date[feed_id] - - pipeline.zunionstore(unread_ranked_stories_key, [unread_ranked_stories_key, user_manual_unread_stories_feed_key], aggregate="MAX") - + + pipeline.zunionstore( + unread_ranked_stories_key, + [unread_ranked_stories_key, user_manual_unread_stories_feed_key], + aggregate="MAX", + ) + if settings.DEBUG and False: debug_stories = r.zrevrange(unread_ranked_stories_key, 0, -1, withscores=True) - print((" ---> Story hashes (%s/%s - %s/%s) %s stories: %s" % ( - min_score, datetime.datetime.fromtimestamp(min_score).strftime('%Y-%m-%d %T'), - max_score, datetime.datetime.fromtimestamp(max_score).strftime('%Y-%m-%d %T'), - len(debug_stories), - debug_stories))) + print( + ( + " ---> Story hashes (%s/%s - %s/%s) %s stories: %s" + % ( + min_score, + datetime.datetime.fromtimestamp(min_score).strftime("%Y-%m-%d %T"), + max_score, + datetime.datetime.fromtimestamp(max_score).strftime("%Y-%m-%d %T"), + len(debug_stories), + debug_stories, + ) + ) + ) if not store_stories_key: - byscorefunc(ranked_stories_key, min_score, max_score, withscores=include_timestamps, start=offset, 
num=limit) + byscorefunc( + ranked_stories_key, + min_score, + max_score, + withscores=include_timestamps, + start=offset, + num=limit, + ) unread_ranked_stories_keys.append(ranked_stories_key) - + results = pipeline.execute() if not store_stories_key: for hashes in results: - if not isinstance(hashes, list): continue + if not isinstance(hashes, list): + continue if group_by_feed: story_hashes[feed_ids[feed_counter]] = hashes feed_counter += 1 @@ -241,10 +282,18 @@ class UserSubscription(models.Model): else: pipeline = r.pipeline() for unread_ranked_stories_keys_group in chunks(unread_ranked_stories_keys, chunk_size): - pipeline.zunionstore(f"{store_stories_key}-chunk{chunk_count}", unread_ranked_stories_keys_group, aggregate="MAX") + pipeline.zunionstore( + f"{store_stories_key}-chunk{chunk_count}", + unread_ranked_stories_keys_group, + aggregate="MAX", + ) chunk_count += 1 pipeline.execute() - r.zunionstore(store_stories_key, [f"{store_stories_key}-chunk{i}" for i in range(chunk_count)], aggregate="MAX") + r.zunionstore( + store_stories_key, + [f"{store_stories_key}-chunk{i}" for i in range(chunk_count)], + aggregate="MAX", + ) pipeline = r.pipeline() for i in range(chunk_count): pipeline.delete(f"{store_stories_key}-chunk{i}") @@ -252,39 +301,54 @@ class UserSubscription(models.Model): if not store_stories_key: return story_hashes - - def get_stories(self, offset=0, limit=6, order='newest', read_filter='all', cutoff_date=None): + + def get_stories(self, offset=0, limit=6, order="newest", read_filter="all", cutoff_date=None): r = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL) - unread_ranked_stories_key = 'zU:%s:%s' % (self.user_id, self.feed_id) + unread_ranked_stories_key = "zU:%s:%s" % (self.user_id, self.feed_id) if offset and r.exists(unread_ranked_stories_key): byscorefunc = r.zrevrange if order == "oldest": byscorefunc = r.zrange - story_hashes = byscorefunc(unread_ranked_stories_key, start=offset, end=offset+limit)[:limit] + story_hashes = byscorefunc(unread_ranked_stories_key, start=offset, end=offset + limit)[:limit] else: - story_hashes = UserSubscription.story_hashes(self.user.pk, feed_ids=[self.feed.pk], - order=order, read_filter=read_filter, - offset=offset, limit=limit, - cutoff_date=cutoff_date) - - story_date_order = "%sstory_date" % ('' if order == 'oldest' else '-') + story_hashes = UserSubscription.story_hashes( + self.user.pk, + feed_ids=[self.feed.pk], + order=order, + read_filter=read_filter, + offset=offset, + limit=limit, + cutoff_date=cutoff_date, + ) + + story_date_order = "%sstory_date" % ("" if order == "oldest" else "-") mstories = MStory.objects(story_hash__in=story_hashes).order_by(story_date_order) stories = Feed.format_stories(mstories) return stories - + @classmethod - def feed_stories(cls, user_id, feed_ids=None, offset=0, limit=6, - order='newest', read_filter='all', usersubs=None, cutoff_date=None, - all_feed_ids=None, cache_prefix=""): + def feed_stories( + cls, + user_id, + feed_ids=None, + offset=0, + limit=6, + order="newest", + read_filter="all", + usersubs=None, + cutoff_date=None, + all_feed_ids=None, + cache_prefix="", + ): rt = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL) across_all_feeds = False - - if order == 'oldest': + + if order == "oldest": range_func = rt.zrange else: range_func = rt.zrevrange - + if feed_ids is None: across_all_feeds = True feed_ids = [] @@ -292,17 +356,17 @@ class UserSubscription(models.Model): all_feed_ids = [f for f in feed_ids] # feeds_string = "" - feeds_string = ','.join(str(f) 
for f in sorted(all_feed_ids))[:30] - ranked_stories_keys = '%szU:%s:feeds:%s' % (cache_prefix, user_id, feeds_string) - unread_ranked_stories_keys = '%szhU:%s:feeds:%s' % (cache_prefix, user_id, feeds_string) + feeds_string = ",".join(str(f) for f in sorted(all_feed_ids))[:30] + ranked_stories_keys = "%szU:%s:feeds:%s" % (cache_prefix, user_id, feeds_string) + unread_ranked_stories_keys = "%szhU:%s:feeds:%s" % (cache_prefix, user_id, feeds_string) stories_cached = rt.exists(ranked_stories_keys) unreads_cached = True if read_filter == "unread" else rt.exists(unread_ranked_stories_keys) if offset and stories_cached: - story_hashes = range_func(ranked_stories_keys, offset, offset+limit) + story_hashes = range_func(ranked_stories_keys, offset, offset + limit) if read_filter == "unread": unread_story_hashes = story_hashes elif unreads_cached: - unread_story_hashes = range_func(unread_ranked_stories_keys, 0, offset+limit) + unread_story_hashes = range_func(unread_ranked_stories_keys, 0, offset + limit) else: unread_story_hashes = [] return story_hashes, unread_story_hashes @@ -310,47 +374,55 @@ class UserSubscription(models.Model): rt.delete(ranked_stories_keys) rt.delete(unread_ranked_stories_keys) - cls.story_hashes(user_id, feed_ids=feed_ids, - read_filter=read_filter, order=order, - include_timestamps=False, - usersubs=usersubs, - cutoff_date=cutoff_date, - across_all_feeds=across_all_feeds, - store_stories_key=ranked_stories_keys) + cls.story_hashes( + user_id, + feed_ids=feed_ids, + read_filter=read_filter, + order=order, + include_timestamps=False, + usersubs=usersubs, + cutoff_date=cutoff_date, + across_all_feeds=across_all_feeds, + store_stories_key=ranked_stories_keys, + ) story_hashes = range_func(ranked_stories_keys, offset, limit) if read_filter == "unread": unread_feed_story_hashes = story_hashes rt.zunionstore(unread_ranked_stories_keys, [ranked_stories_keys]) else: - cls.story_hashes(user_id, feed_ids=feed_ids, - read_filter="unread", order=order, - include_timestamps=True, - cutoff_date=cutoff_date, - store_stories_key=unread_ranked_stories_keys) + cls.story_hashes( + user_id, + feed_ids=feed_ids, + read_filter="unread", + order=order, + include_timestamps=True, + cutoff_date=cutoff_date, + store_stories_key=unread_ranked_stories_keys, + ) unread_feed_story_hashes = range_func(unread_ranked_stories_keys, offset, limit) - - rt.expire(ranked_stories_keys, 60*60) - rt.expire(unread_ranked_stories_keys, 60*60) - + + rt.expire(ranked_stories_keys, 60 * 60) + rt.expire(unread_ranked_stories_keys, 60 * 60) + return story_hashes, unread_feed_story_hashes - + def oldest_manual_unread_story_date(self, r=None): if not r: r = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL) - + user_manual_unread_stories_feed_key = f"uU:{self.user_id}:{self.feed_id}" oldest_manual_unread = r.zrevrange(user_manual_unread_stories_feed_key, -1, -1, withscores=True) - + return oldest_manual_unread - + @classmethod def truncate_river(cls, user_id, feed_ids, read_filter, cache_prefix=""): rt = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_TEMP_POOL) - - feeds_string = ','.join(str(f) for f in sorted(feed_ids))[:30] - ranked_stories_keys = '%szU:%s:feeds:%s' % (cache_prefix, user_id, feeds_string) - unread_ranked_stories_keys = '%szhU:%s:feeds:%s' % (cache_prefix, user_id, feeds_string) + + feeds_string = ",".join(str(f) for f in sorted(feed_ids))[:30] + ranked_stories_keys = "%szU:%s:feeds:%s" % (cache_prefix, user_id, feeds_string) + unread_ranked_stories_keys = "%szhU:%s:feeds:%s" % 
(cache_prefix, user_id, feeds_string) stories_cached = rt.exists(ranked_stories_keys) unreads_cached = rt.exists(unread_ranked_stories_keys) truncated = 0 @@ -359,27 +431,31 @@ class UserSubscription(models.Model): rt.delete(ranked_stories_keys) # else: # logging.debug(" ***> ~FRNo stories cached, can't truncate: %s / %s" % (User.objects.get(pk=user_id), feed_ids)) - + if unreads_cached: truncated += rt.zcard(unread_ranked_stories_keys) rt.delete(unread_ranked_stories_keys) # else: # logging.debug(" ***> ~FRNo unread stories cached, can't truncate: %s / %s" % (User.objects.get(pk=user_id), feed_ids)) - + return truncated - + @classmethod - def add_subscription(cls, user, feed_address, folder=None, bookmarklet=False, auto_active=True, - skip_fetch=False): + def add_subscription( + cls, user, feed_address, folder=None, bookmarklet=False, auto_active=True, skip_fetch=False + ): feed = None us = None - - logging.user(user, "~FRAdding URL: ~SB%s (in %s) %s" % (feed_address, folder, - "~FCAUTO-ADD" if not auto_active else "")) - + + logging.user( + user, + "~FRAdding URL: ~SB%s (in %s) %s" + % (feed_address, folder, "~FCAUTO-ADD" if not auto_active else ""), + ) + feed = Feed.get_feed_from_url(feed_address, user=user) - if not feed: + if not feed: code = -1 if bookmarklet: message = "This site does not have an RSS feed. Nothing is linked to from this page." @@ -387,20 +463,19 @@ class UserSubscription(models.Model): message = "This address does not point to an RSS feed or a website with an RSS feed." else: us, subscription_created = cls.objects.get_or_create( - feed=feed, + feed=feed, user=user, defaults={ - 'needs_unread_recalc': True, - 'active': auto_active, - } + "needs_unread_recalc": True, + "active": auto_active, + }, ) code = 1 message = "" - + if us: user_sub_folders_object, created = UserSubscriptionFolders.objects.get_or_create( - user=user, - defaults={'folders': '[]'} + user=user, defaults={"folders": "[]"} ) if created: user_sub_folders = [] @@ -409,85 +484,88 @@ class UserSubscription(models.Model): user_sub_folders = add_object_to_folder(feed.pk, folder, user_sub_folders) user_sub_folders_object.folders = json.encode(user_sub_folders) user_sub_folders_object.save() - + if auto_active or user.profile.is_premium: us.active = True us.save() - + if not skip_fetch and feed.last_update < datetime.datetime.utcnow() - datetime.timedelta(days=1): feed = feed.update(verbose=True) - + from apps.social.models import MActivity + MActivity.new_feed_subscription(user_id=user.pk, feed_id=feed.pk, feed_title=feed.title) - + feed.setup_feed_for_premium_subscribers() feed.count_subscribers() - + r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL) - r.publish(user.username, 'reload:feeds') - - + r.publish(user.username, "reload:feeds") + return code, message, us - + @classmethod def feeds_with_updated_counts(cls, user, feed_ids=None, check_fetch_status=False, force=False): feeds = {} silent = not getattr(settings, "TEST_DEBUG", False) - + # Get subscriptions for user - user_subs = cls.objects.select_related('feed').filter(user=user, active=True) - feed_ids = [f for f in feed_ids if f and not any(f.startswith(prefix) for prefix in ['river', 'saved'])] + user_subs = cls.objects.select_related("feed").filter(user=user, active=True) + feed_ids = [ + f for f in feed_ids if f and not any(f.startswith(prefix) for prefix in ["river", "saved"]) + ] if feed_ids: user_subs = user_subs.filter(feed__in=feed_ids) - + for i, sub in enumerate(user_subs): # Count unreads if subscription is stale. 
- if (force or - sub.needs_unread_recalc or - sub.unread_count_updated < user.profile.unread_cutoff or - sub.oldest_unread_story_date < user.profile.unread_cutoff): + if ( + force + or sub.needs_unread_recalc + or sub.unread_count_updated < user.profile.unread_cutoff + or sub.oldest_unread_story_date < user.profile.unread_cutoff + ): sub = sub.calculate_feed_scores(silent=silent, force=force) - if not sub: continue # TODO: Figure out the correct sub and give it a new feed_id + if not sub: + continue # TODO: Figure out the correct sub and give it a new feed_id feed_id = sub.feed_id feeds[feed_id] = { - 'ps': sub.unread_count_positive, - 'nt': sub.unread_count_neutral, - 'ng': sub.unread_count_negative, - 'id': feed_id, + "ps": sub.unread_count_positive, + "nt": sub.unread_count_neutral, + "ng": sub.unread_count_negative, + "id": feed_id, } if not sub.feed.fetched_once or check_fetch_status: - feeds[feed_id]['fetched_once'] = sub.feed.fetched_once - feeds[feed_id]['not_yet_fetched'] = not sub.feed.fetched_once # Legacy. Dammit. + feeds[feed_id]["fetched_once"] = sub.feed.fetched_once + feeds[feed_id]["not_yet_fetched"] = not sub.feed.fetched_once # Legacy. Dammit. if sub.feed.favicon_fetching: - feeds[feed_id]['favicon_fetching'] = True + feeds[feed_id]["favicon_fetching"] = True if sub.feed.has_feed_exception or sub.feed.has_page_exception: - feeds[feed_id]['has_exception'] = True - feeds[feed_id]['exception_type'] = 'feed' if sub.feed.has_feed_exception else 'page' - feeds[feed_id]['feed_address'] = sub.feed.feed_address - feeds[feed_id]['exception_code'] = sub.feed.exception_code + feeds[feed_id]["has_exception"] = True + feeds[feed_id]["exception_type"] = "feed" if sub.feed.has_feed_exception else "page" + feeds[feed_id]["feed_address"] = sub.feed.feed_address + feeds[feed_id]["exception_code"] = sub.feed.exception_code return feeds - + @classmethod def queue_new_feeds(cls, user, new_feeds=None): if not isinstance(user, User): user = User.objects.get(pk=user) - + if not new_feeds: - new_feeds = cls.objects.filter(user=user, - feed__fetched_once=False, - active=True).values('feed_id') - new_feeds = list(set([f['feed_id'] for f in new_feeds])) - + new_feeds = cls.objects.filter(user=user, feed__fetched_once=False, active=True).values("feed_id") + new_feeds = list(set([f["feed_id"] for f in new_feeds])) + if not new_feeds: return - + logging.user(user, "~BB~FW~SBQueueing NewFeeds: ~FC(%s) %s" % (len(new_feeds), new_feeds)) size = 4 - for t in (new_feeds[pos:pos + size] for pos in range(0, len(new_feeds), size)): + for t in (new_feeds[pos : pos + size] for pos in range(0, len(new_feeds), size)): NewFeeds.apply_async(args=(t,), queue="new_feeds") - + @classmethod def refresh_stale_feeds(cls, user, exclude_new=False): if not isinstance(user, User): @@ -496,18 +574,21 @@ class UserSubscription(models.Model): stale_cutoff = datetime.datetime.now() - datetime.timedelta(days=settings.SUBSCRIBER_EXPIRE) # TODO: Refactor below using last_update from REDIS_FEED_UPDATE_POOL - stale_feeds = UserSubscription.objects.filter(user=user, active=True, feed__last_update__lte=stale_cutoff) + stale_feeds = UserSubscription.objects.filter( + user=user, active=True, feed__last_update__lte=stale_cutoff + ) if exclude_new: stale_feeds = stale_feeds.filter(feed__fetched_once=True) - all_feeds = UserSubscription.objects.filter(user=user, active=True) - - logging.user(user, "~FG~BBRefreshing stale feeds: ~SB%s/%s" % ( - stale_feeds.count(), all_feeds.count())) + all_feeds = UserSubscription.objects.filter(user=user, 
active=True) + + logging.user( + user, "~FG~BBRefreshing stale feeds: ~SB%s/%s" % (stale_feeds.count(), all_feeds.count()) + ) for sub in stale_feeds: sub.feed.fetched_once = False sub.feed.save() - + if stale_feeds: stale_feeds = list(set([f.feed_id for f in stale_feeds])) cls.queue_new_feeds(user, new_feeds=stale_feeds) @@ -515,10 +596,13 @@ class UserSubscription(models.Model): @classmethod def schedule_fetch_archive_feeds_for_user(cls, user_id): from apps.profile.tasks import FetchArchiveFeedsForUser - FetchArchiveFeedsForUser.apply_async(kwargs=dict(user_id=user_id), - queue='search_indexer', - time_limit=settings.MAX_SECONDS_COMPLETE_ARCHIVE_FETCH) - + + FetchArchiveFeedsForUser.apply_async( + kwargs=dict(user_id=user_id), + queue="search_indexer", + time_limit=settings.MAX_SECONDS_COMPLETE_ARCHIVE_FETCH, + ) + # Should be run as a background task @classmethod def fetch_archive_feeds_for_user(cls, user_id): @@ -527,12 +611,11 @@ class UserSubscription(models.Model): start_time = time.time() user = User.objects.get(pk=user_id) r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL) - r.publish(user.username, 'fetch_archive:start') + r.publish(user.username, "fetch_archive:start") - subscriptions = UserSubscription.objects.filter(user=user).only('feed') + subscriptions = UserSubscription.objects.filter(user=user).only("feed") total = subscriptions.count() - feed_ids = [] starting_story_count = 0 for sub in subscriptions: @@ -541,25 +624,31 @@ class UserSubscription(models.Model): except Feed.DoesNotExist: continue starting_story_count += MStory.objects(story_feed_id=sub.feed.pk).count() - + feed_id_chunks = [c for c in chunks(feed_ids, 1)] - logging.user(user, "~FCFetching archive stories from ~SB%s feeds~SN in %s chunks..." % - (total, len(feed_id_chunks))) - - search_chunks = [FetchArchiveFeedsChunk.s(feed_ids=feed_id_chunk, - user_id=user_id - ).set(queue='search_indexer') - .set(time_limit=settings.MAX_SECONDS_ARCHIVE_FETCH_SINGLE_FEED, - soft_time_limit=settings.MAX_SECONDS_ARCHIVE_FETCH_SINGLE_FEED-30) - for feed_id_chunk in feed_id_chunks] - callback = FinishFetchArchiveFeeds.s(user_id=user_id, - start_time=start_time, - starting_story_count=starting_story_count).set(queue='search_indexer') + logging.user( + user, + "~FCFetching archive stories from ~SB%s feeds~SN in %s chunks..." 
% (total, len(feed_id_chunks)), + ) + + search_chunks = [ + FetchArchiveFeedsChunk.s(feed_ids=feed_id_chunk, user_id=user_id) + .set(queue="search_indexer") + .set( + time_limit=settings.MAX_SECONDS_ARCHIVE_FETCH_SINGLE_FEED, + soft_time_limit=settings.MAX_SECONDS_ARCHIVE_FETCH_SINGLE_FEED - 30, + ) + for feed_id_chunk in feed_id_chunks + ] + callback = FinishFetchArchiveFeeds.s( + user_id=user_id, start_time=start_time, starting_story_count=starting_story_count + ).set(queue="search_indexer") celery.chord(search_chunks)(callback) @classmethod def fetch_archive_feeds_chunk(cls, user_id, feed_ids): from apps.rss_feeds.models import Feed + r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL) user = User.objects.get(pk=user_id) @@ -567,18 +656,18 @@ class UserSubscription(models.Model): for feed_id in feed_ids: feed = Feed.get_by_id(feed_id) - if not feed: continue - + if not feed: + continue + feed.fill_out_archive_stories() - - r.publish(user.username, 'fetch_archive:feeds:%s' % - ','.join([str(f) for f in feed_ids])) + + r.publish(user.username, "fetch_archive:feeds:%s" % ",".join([str(f) for f in feed_ids])) @classmethod def finish_fetch_archive_feeds(cls, user_id, start_time, starting_story_count): r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL) user = User.objects.get(pk=user_id) - subscriptions = UserSubscription.objects.filter(user=user).only('feed') + subscriptions = UserSubscription.objects.filter(user=user).only("feed") total = subscriptions.count() duration = time.time() - start_time @@ -592,46 +681,52 @@ class UserSubscription(models.Model): continue new_story_count = ending_story_count - starting_story_count - logging.user(user, f"~FCFinished archive feed fetches for ~SB~FG{subscriptions.count()} feeds~FC~SN: ~FG~SB{new_story_count:,} new~SB~FC, ~FG{ending_story_count:,} total (pre-archive: {pre_archive_count:,} stories)") + logging.user( + user, + f"~FCFinished archive feed fetches for ~SB~FG{subscriptions.count()} feeds~FC~SN: ~FG~SB{new_story_count:,} new~SB~FC, ~FG{ending_story_count:,} total (pre-archive: {pre_archive_count:,} stories)", + ) - logging.user(user, "~FCFetched archive stories from ~SB%s feeds~SN in ~FM~SB%s~FC~SN sec." % - (total, round(duration, 2))) - r.publish(user.username, 'fetch_archive:done') + logging.user( + user, + "~FCFetched archive stories from ~SB%s feeds~SN in ~FM~SB%s~FC~SN sec." 
+ % (total, round(duration, 2)), + ) + r.publish(user.username, "fetch_archive:done") return ending_story_count, min(pre_archive_count, starting_story_count) - - + @classmethod def identify_deleted_feed_users(cls, old_feed_id): - users = UserSubscriptionFolders.objects.filter(folders__contains=old_feed_id).only('user') + users = UserSubscriptionFolders.objects.filter(folders__contains=old_feed_id).only("user") user_ids = [usf.user_id for usf in users] - f = open('utils/backups/users.txt', 'w') - f.write('\n'.join([str(u) for u in user_ids])) + f = open("utils/backups/users.txt", "w") + f.write("\n".join([str(u) for u in user_ids])) return user_ids @classmethod def recreate_deleted_feed(cls, new_feed_id, old_feed_id=None, skip=0): - user_ids = sorted([int(u) for u in open('utils/backups/users.txt').read().split('\n') if u]) - + user_ids = sorted([int(u) for u in open("utils/backups/users.txt").read().split("\n") if u]) + count = len(user_ids) - + for i, user_id in enumerate(user_ids): - if i < skip: continue + if i < skip: + continue if i % 1000 == 0: print("\n\n ------------------------------------------------") - print("\n ---> %s/%s (%s%%)" % (i, count, round(float(i)/count))) + print("\n ---> %s/%s (%s%%)" % (i, count, round(float(i) / count))) print("\n ------------------------------------------------\n") try: user = User.objects.get(pk=user_id) except User.DoesNotExist: print(" ***> %s has no account" % user_id) continue - us, created = UserSubscription.objects.get_or_create(user_id=user_id, feed_id=new_feed_id, defaults={ - 'needs_unread_recalc': True, - 'active': True, - 'is_trained': True - }) + us, created = UserSubscription.objects.get_or_create( + user_id=user_id, + feed_id=new_feed_id, + defaults={"needs_unread_recalc": True, "active": True, "is_trained": True}, + ) if not created: print(" ***> %s already subscribed" % user.username) try: @@ -639,7 +734,7 @@ class UserSubscription(models.Model): usf.add_missing_feeds() except UserSubscriptionFolders.DoesNotExist: print(" ***> %s has no USF" % user.username) - + # Move classifiers if old_feed_id: classifier_count = 0 @@ -654,26 +749,30 @@ class UserSubscription(models.Model): continue if classifier_count: print(" Moved %s classifiers for %s" % (classifier_count, user.username)) - + def trim_read_stories(self, r=None): if not r: r = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL) - + read_stories_key = "RS:%s:%s" % (self.user_id, self.feed_id) stale_story_hashes = r.sdiff(read_stories_key, "F:%s" % self.feed_id) if not stale_story_hashes: return - - logging.user(self.user, "~FBTrimming ~FR%s~FB read stories (~SB%s~SN)..." % (len(stale_story_hashes), self.feed_id)) + + logging.user( + self.user, + "~FBTrimming ~FR%s~FB read stories (~SB%s~SN)..." 
% (len(stale_story_hashes), self.feed_id), + ) r.srem(read_stories_key, *stale_story_hashes) r.srem("RS:%s" % self.feed_id, *stale_story_hashes) - + @classmethod def trim_user_read_stories(self, user_id): user = User.objects.get(pk=user_id) r = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL) - subs = UserSubscription.objects.filter(user_id=user_id).only('feed') - if not subs: return + subs = UserSubscription.objects.filter(user_id=user_id).only("feed") + if not subs: + return key = "RS:%s" % user_id feeds = [f.feed_id for f in subs] @@ -687,10 +786,10 @@ class UserSubscription(models.Model): # r.expire("%s:backup" % key, 60*60*24) r.sunionstore(key, *["%s:%s" % (key, f) for f in feeds]) new_rs = r.smembers(key) - + missing_rs = [] missing_count = 0 - feed_re = re.compile(r'(\d+):.*?') + feed_re = re.compile(r"(\d+):.*?") for i, rs in enumerate(old_rs): if i and i % 1000 == 0: if missing_rs: @@ -704,47 +803,56 @@ class UserSubscription(models.Model): rs_feed_id = found.groups()[0] if int(rs_feed_id) not in feeds: missing_rs.append(rs) - + if missing_rs: r.sadd(key, *missing_rs) - missing_count += len(missing_rs) + missing_count += len(missing_rs) new_count = len(new_rs) new_total = new_count + missing_count - logging.user(user, "~FBTrimming ~FR%s~FB/%s (~SB%s sub'ed ~SN+ ~SB%s unsub'ed~SN saved)" % - (old_count - new_total, old_count, new_count, missing_count)) - - + logging.user( + user, + "~FBTrimming ~FR%s~FB/%s (~SB%s sub'ed ~SN+ ~SB%s unsub'ed~SN saved)" + % (old_count - new_total, old_count, new_count, missing_count), + ) + def mark_feed_read(self, cutoff_date=None): - if (self.unread_count_negative == 0 + if ( + self.unread_count_negative == 0 and self.unread_count_neutral == 0 and self.unread_count_positive == 0 - and not self.needs_unread_recalc): + and not self.needs_unread_recalc + ): return - + recount = True # Use the latest story to get last read time. 
if cutoff_date: cutoff_date = cutoff_date + datetime.timedelta(seconds=1) else: now = datetime.datetime.now() - latest_story = MStory.objects(story_feed_id=self.feed.pk, - story_date__lte=now)\ - .order_by('-story_date').only('story_date').limit(1) + latest_story = ( + MStory.objects(story_feed_id=self.feed.pk, story_date__lte=now) + .order_by("-story_date") + .only("story_date") + .limit(1) + ) if latest_story and len(latest_story) >= 1: - cutoff_date = (latest_story[0]['story_date'] - + datetime.timedelta(seconds=1)) + cutoff_date = latest_story[0]["story_date"] + datetime.timedelta(seconds=1) else: cutoff_date = datetime.datetime.utcnow() recount = False - + if cutoff_date > self.mark_read_date or cutoff_date > self.oldest_unread_story_date: self.last_read_date = cutoff_date self.mark_read_date = cutoff_date self.oldest_unread_story_date = cutoff_date else: - logging.user(self.user, "Not marking %s as read: %s > %s/%s" % - (self, cutoff_date, self.mark_read_date, self.oldest_unread_story_date)) - + logging.user( + self.user, + "Not marking %s as read: %s > %s/%s" + % (self, cutoff_date, self.mark_read_date, self.oldest_unread_story_date), + ) + if not recount: self.unread_count_negative = 0 self.unread_count_positive = 0 @@ -753,58 +861,63 @@ class UserSubscription(models.Model): self.needs_unread_recalc = False else: self.needs_unread_recalc = True - + self.save() - + return True - + def mark_newer_stories_read(self, cutoff_date): - if (self.unread_count_negative == 0 + if ( + self.unread_count_negative == 0 and self.unread_count_neutral == 0 and self.unread_count_positive == 0 - and not self.needs_unread_recalc): + and not self.needs_unread_recalc + ): return - + cutoff_date = cutoff_date - datetime.timedelta(seconds=1) - story_hashes = UserSubscription.story_hashes(self.user.pk, feed_ids=[self.feed.pk], - order="newest", read_filter="unread", - cutoff_date=cutoff_date) + story_hashes = UserSubscription.story_hashes( + self.user.pk, + feed_ids=[self.feed.pk], + order="newest", + read_filter="unread", + cutoff_date=cutoff_date, + ) data = self.mark_story_ids_as_read(story_hashes, aggregated=True) return data - - + def mark_story_ids_as_read(self, story_hashes, request=None, aggregated=False): data = dict(code=0, payload=story_hashes) r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL) - + if not request: request = self.user - + if not self.needs_unread_recalc: self.needs_unread_recalc = True - self.save(update_fields=['needs_unread_recalc']) - + self.save(update_fields=["needs_unread_recalc"]) + if len(story_hashes) > 1: logging.user(request, "~FYRead %s stories in feed: %s" % (len(story_hashes), self.feed)) else: logging.user(request, "~FYRead story (%s) in feed: %s" % (story_hashes, self.feed)) RUserStory.aggregate_mark_read(self.feed_id) - - for story_hash in set(story_hashes): + + for story_hash in set(story_hashes): # logging.user(request, "~FYRead story: %s" % (story_hash)) RUserStory.mark_read(self.user_id, self.feed_id, story_hash, aggregated=aggregated) - r.publish(self.user.username, 'story:read:%s' % story_hash) + r.publish(self.user.username, "story:read:%s" % story_hash) if self.user.profile.is_archive: RUserUnreadStory.mark_read(self.user_id, story_hash) - r.publish(self.user.username, 'feed:%s' % self.feed_id) - + r.publish(self.user.username, "feed:%s" % self.feed_id) + self.last_read_date = datetime.datetime.now() - self.save(update_fields=['last_read_date']) - + self.save(update_fields=["last_read_date"]) + return data - + def 
invert_read_stories_after_unread_story(self, story, request=None): data = dict(code=1) unread_cutoff = self.user.profile.unread_cutoff @@ -820,33 +933,32 @@ class UserSubscription(models.Model): story_hash=story.story_hash, story_date=story.story_date, ) - data['story_hashes'] = [story.story_hash] + data["story_hashes"] = [story.story_hash] return data - + # Story is outside the mark as read range, so invert all stories before. - newer_stories = MStory.objects(story_feed_id=story.story_feed_id, - story_date__gte=story.story_date, - story_date__lte=unread_cutoff - ).only('story_hash') + newer_stories = MStory.objects( + story_feed_id=story.story_feed_id, story_date__gte=story.story_date, story_date__lte=unread_cutoff + ).only("story_hash") newer_stories = [s.story_hash for s in newer_stories] self.mark_read_date = story.story_date - datetime.timedelta(minutes=1) self.needs_unread_recalc = True self.save() - + # Mark stories as read only after the mark_read_date has been moved, otherwise # these would be ignored. data = self.mark_story_ids_as_read(newer_stories, request=request, aggregated=True) - + return data - + def calculate_feed_scores(self, silent=False, stories=None, force=False): # now = datetime.datetime.strptime("2009-07-06 22:30:03", "%Y-%m-%d %H:%M:%S") now = datetime.datetime.now() oldest_unread_story_date = now - + if self.user.profile.last_seen_on < self.user.profile.unread_cutoff and not force: if not silent and settings.DEBUG: - logging.info(' ---> [%s] SKIPPING Computing scores: %s (1 week+)' % (self.user, self.feed)) + logging.info(" ---> [%s] SKIPPING Computing scores: %s (1 week+)" % (self.user, self.feed)) return self ong = self.unread_count_negative ont = self.unread_count_neutral @@ -855,32 +967,35 @@ class UserSubscription(models.Model): ucu = self.unread_count_updated onur = self.needs_unread_recalc oit = self.is_trained - + # if not self.feed.fetched_once: # if not silent: # logging.info(' ---> [%s] NOT Computing scores: %s' % (self.user, self.feed)) # self.needs_unread_recalc = False # self.save() # return - + feed_scores = dict(negative=0, neutral=0, positive=0) - + # Two weeks in age. If mark_read_date is older, mark old stories as read. 
date_delta = self.user.profile.unread_cutoff if date_delta < self.mark_read_date: date_delta = self.mark_read_date else: self.mark_read_date = date_delta - + if self.is_trained: if not stories: - stories = cache.get('S:v3:%s' % self.feed_id) - - unread_story_hashes = self.story_hashes(user_id=self.user_id, feed_ids=[self.feed_id], - usersubs=[self], - read_filter='unread', - cutoff_date=self.user.profile.unread_cutoff) - + stories = cache.get("S:v3:%s" % self.feed_id) + + unread_story_hashes = self.story_hashes( + user_id=self.user_id, + feed_ids=[self.feed_id], + usersubs=[self], + read_filter="unread", + cutoff_date=self.user.profile.unread_cutoff, + ) + if not stories: try: stories_db = MStory.objects(story_hash__in=unread_story_hashes) @@ -891,112 +1006,144 @@ class UserSubscription(models.Model): except pymongo.errors.OperationFailure as e: stories_db = MStory.objects(story_hash__in=unread_story_hashes)[:25] stories = Feed.format_stories(stories_db, self.feed_id) - + unread_stories = [] for story in stories: # if story['story_date'] < date_delta: # continue - if story['story_hash'] in unread_story_hashes: + if story["story_hash"] in unread_story_hashes: unread_stories.append(story) - if story['story_date'] < oldest_unread_story_date: - oldest_unread_story_date = story['story_date'] + if story["story_date"] < oldest_unread_story_date: + oldest_unread_story_date = story["story_date"] # if not silent: # logging.info(' ---> [%s] Format stories: %s' % (self.user, datetime.datetime.now() - now)) - - classifier_feeds = list(MClassifierFeed.objects(user_id=self.user_id, feed_id=self.feed_id, social_user_id=0)) + + classifier_feeds = list( + MClassifierFeed.objects(user_id=self.user_id, feed_id=self.feed_id, social_user_id=0) + ) classifier_authors = list(MClassifierAuthor.objects(user_id=self.user_id, feed_id=self.feed_id)) - classifier_titles = list(MClassifierTitle.objects(user_id=self.user_id, feed_id=self.feed_id)) - classifier_tags = list(MClassifierTag.objects(user_id=self.user_id, feed_id=self.feed_id)) - - if (not len(classifier_feeds) and - not len(classifier_authors) and - not len(classifier_titles) and - not len(classifier_tags)): + classifier_titles = list(MClassifierTitle.objects(user_id=self.user_id, feed_id=self.feed_id)) + classifier_tags = list(MClassifierTag.objects(user_id=self.user_id, feed_id=self.feed_id)) + + if ( + not len(classifier_feeds) + and not len(classifier_authors) + and not len(classifier_titles) + and not len(classifier_tags) + ): self.is_trained = False - + # if not silent: # logging.info(' ---> [%s] Classifiers: %s (%s)' % (self.user, datetime.datetime.now() - now, classifier_feeds.count() + classifier_authors.count() + classifier_tags.count() + classifier_titles.count())) - + scores = { - 'feed': apply_classifier_feeds(classifier_feeds, self.feed), + "feed": apply_classifier_feeds(classifier_feeds, self.feed), } - + for story in unread_stories: - scores.update({ - 'author' : apply_classifier_authors(classifier_authors, story), - 'tags' : apply_classifier_tags(classifier_tags, story), - 'title' : apply_classifier_titles(classifier_titles, story), - }) - - max_score = max(scores['author'], scores['tags'], scores['title']) - min_score = min(scores['author'], scores['tags'], scores['title']) + scores.update( + { + "author": apply_classifier_authors(classifier_authors, story), + "tags": apply_classifier_tags(classifier_tags, story), + "title": apply_classifier_titles(classifier_titles, story), + } + ) + + max_score = max(scores["author"], scores["tags"], 
scores["title"]) + min_score = min(scores["author"], scores["tags"], scores["title"]) if max_score > 0: - feed_scores['positive'] += 1 + feed_scores["positive"] += 1 elif min_score < 0: - feed_scores['negative'] += 1 + feed_scores["negative"] += 1 else: - if scores['feed'] > 0: - feed_scores['positive'] += 1 - elif scores['feed'] < 0: - feed_scores['negative'] += 1 + if scores["feed"] > 0: + feed_scores["positive"] += 1 + elif scores["feed"] < 0: + feed_scores["negative"] += 1 else: - feed_scores['neutral'] += 1 + feed_scores["neutral"] += 1 else: - unread_story_hashes = self.story_hashes(user_id=self.user_id, feed_ids=[self.feed_id], - usersubs=[self], - read_filter='unread', - include_timestamps=True, - cutoff_date=date_delta) + unread_story_hashes = self.story_hashes( + user_id=self.user_id, + feed_ids=[self.feed_id], + usersubs=[self], + read_filter="unread", + include_timestamps=True, + cutoff_date=date_delta, + ) - feed_scores['neutral'] = len(unread_story_hashes) - if feed_scores['neutral']: + feed_scores["neutral"] = len(unread_story_hashes) + if feed_scores["neutral"]: oldest_unread_story_date = datetime.datetime.fromtimestamp(unread_story_hashes[-1][1]) - - if not silent or settings.DEBUG: - logging.user(self.user, '~FBUnread count (~SB%s~SN%s): ~SN(~FC%s~FB/~FC%s~FB/~FC%s~FB) ~SBto~SN (~FC%s~FB/~FC%s~FB/~FC%s~FB)' % (self.feed_id, '/~FMtrained~FB' if self.is_trained else '', ong, ont, ops, feed_scores['negative'], feed_scores['neutral'], feed_scores['positive'])) - self.unread_count_positive = feed_scores['positive'] - self.unread_count_neutral = feed_scores['neutral'] - self.unread_count_negative = feed_scores['negative'] + if not silent or settings.DEBUG: + logging.user( + self.user, + "~FBUnread count (~SB%s~SN%s): ~SN(~FC%s~FB/~FC%s~FB/~FC%s~FB) ~SBto~SN (~FC%s~FB/~FC%s~FB/~FC%s~FB)" + % ( + self.feed_id, + "/~FMtrained~FB" if self.is_trained else "", + ong, + ont, + ops, + feed_scores["negative"], + feed_scores["neutral"], + feed_scores["positive"], + ), + ) + + self.unread_count_positive = feed_scores["positive"] + self.unread_count_neutral = feed_scores["neutral"] + self.unread_count_negative = feed_scores["negative"] self.unread_count_updated = datetime.datetime.now() self.oldest_unread_story_date = oldest_unread_story_date self.needs_unread_recalc = False - + update_fields = [] - if self.unread_count_positive != ops: update_fields.append('unread_count_positive') - if self.unread_count_neutral != ont: update_fields.append('unread_count_neutral') - if self.unread_count_negative != ong: update_fields.append('unread_count_negative') - if self.unread_count_updated != ucu: update_fields.append('unread_count_updated') - if self.oldest_unread_story_date != oousd: update_fields.append('oldest_unread_story_date') - if self.needs_unread_recalc != onur: update_fields.append('needs_unread_recalc') - if self.is_trained != oit: update_fields.append('is_trained') + if self.unread_count_positive != ops: + update_fields.append("unread_count_positive") + if self.unread_count_neutral != ont: + update_fields.append("unread_count_neutral") + if self.unread_count_negative != ong: + update_fields.append("unread_count_negative") + if self.unread_count_updated != ucu: + update_fields.append("unread_count_updated") + if self.oldest_unread_story_date != oousd: + update_fields.append("oldest_unread_story_date") + if self.needs_unread_recalc != onur: + update_fields.append("needs_unread_recalc") + if self.is_trained != oit: + update_fields.append("is_trained") if len(update_fields): 
self.save(update_fields=update_fields) - - if (self.unread_count_positive == 0 and - self.unread_count_neutral == 0): + + if self.unread_count_positive == 0 and self.unread_count_neutral == 0: self.mark_feed_read() - + if not silent: - logging.user(self.user, '~FC~SNComputing scores: %s (~SB%s~SN/~SB%s~SN/~SB%s~SN)' % (self.feed, feed_scores['negative'], feed_scores['neutral'], feed_scores['positive'])) - + logging.user( + self.user, + "~FC~SNComputing scores: %s (~SB%s~SN/~SB%s~SN/~SB%s~SN)" + % (self.feed, feed_scores["negative"], feed_scores["neutral"], feed_scores["positive"]), + ) + self.trim_read_stories() - + return self - + @staticmethod def score_story(scores): - max_score = max(scores['author'], scores['tags'], scores['title']) - min_score = min(scores['author'], scores['tags'], scores['title']) + max_score = max(scores["author"], scores["tags"], scores["title"]) + min_score = min(scores["author"], scores["tags"], scores["title"]) if max_score > 0: return 1 elif min_score < 0: return -1 - return scores['feed'] - + return scores["feed"] + def switch_feed(self, new_feed, old_feed): # Rewrite feed in subscription folders try: @@ -1004,14 +1151,12 @@ class UserSubscription(models.Model): except Exception as e: logging.info(" *** ---> UserSubscriptionFolders error: %s" % e) return - + logging.info(" ===> %s " % self.user) # Switch read stories - RUserStory.switch_feed(user_id=self.user_id, old_feed_id=old_feed.pk, - new_feed_id=new_feed.pk) - RUserUnreadStory.switch_feed(user_id=self.user_id, old_feed_id=old_feed.pk, - new_feed_id=new_feed.pk) + RUserStory.switch_feed(user_id=self.user_id, old_feed_id=old_feed.pk, new_feed_id=new_feed.pk) + RUserUnreadStory.switch_feed(user_id=self.user_id, old_feed_id=old_feed.pk, new_feed_id=new_feed.pk) def switch_feed_for_classifier(model): duplicates = model.objects(feed_id=old_feed.pk, user_id=self.user_id) @@ -1027,7 +1172,7 @@ class UserSubscription(models.Model): except (IntegrityError, OperationError): logging.info(" !!!!> %s already exists" % duplicate) duplicate.delete() - + switch_feed_for_classifier(MClassifierTitle) switch_feed_for_classifier(MClassifierAuthor) switch_feed_for_classifier(MClassifierFeed) @@ -1046,7 +1191,7 @@ class UserSubscription(models.Model): logging.info(" !!!!> %s already subscribed" % self.user) self.delete() return - + @classmethod def collect_orphan_feeds(cls, user): us = cls.objects.filter(user=user) @@ -1056,7 +1201,7 @@ class UserSubscription(models.Model): return us_feed_ids = set([sub.feed_id for sub in us]) folders = json.decode(usf.folders) - + def collect_ids(folders, found_ids): for item in folders: # print ' --> %s' % item @@ -1071,10 +1216,14 @@ class UserSubscription(models.Model): found_ids.update(collect_ids(item, found_ids)) # print ' --> Returning: %s' % found_ids return found_ids + found_ids = collect_ids(folders, set()) diff = len(us_feed_ids) - len(found_ids) if diff > 0: - logging.info(" ---> Collecting orphans on %s. %s feeds with %s orphans" % (user.username, len(us_feed_ids), diff)) + logging.info( + " ---> Collecting orphans on %s. 
%s feeds with %s orphans" + % (user.username, len(us_feed_ids), diff) + ) orphan_ids = us_feed_ids - found_ids folders.extend(list(orphan_ids)) usf.folders = json.encode(folders) @@ -1092,7 +1241,7 @@ class UserSubscription(models.Model): needed_recalc += 1 logging.debug(f" ---> Relcaculated {needed_recalc} of {total} subscriptions for user_id: {user_id}") - + @classmethod def verify_feeds_scheduled(cls, user_id): r = redis.Redis(connection_pool=settings.REDIS_FEED_UPDATE_POOL) @@ -1102,13 +1251,13 @@ class UserSubscription(models.Model): p = r.pipeline() for feed_id in feed_ids: - p.zscore('scheduled_updates', feed_id) - p.zscore('error_feeds', feed_id) + p.zscore("scheduled_updates", feed_id) + p.zscore("error_feeds", feed_id) results = p.execute() - + p = r.pipeline() for feed_id in feed_ids: - p.zscore('queued_feeds', feed_id) + p.zscore("queued_feeds", feed_id) try: results_queued = p.execute() except: @@ -1116,13 +1265,14 @@ class UserSubscription(models.Model): safety_net = [] for f, feed_id in enumerate(feed_ids): - scheduled_updates = results[f*2] - error_feeds = results[f*2+1] + scheduled_updates = results[f * 2] + error_feeds = results[f * 2 + 1] queued_feeds = results_queued[f] if not scheduled_updates and not queued_feeds and not error_feeds: safety_net.append(feed_id) - if not safety_net: return + if not safety_net: + return logging.user(user, "~FBFound ~FR%s unscheduled feeds~FB, scheduling immediately..." % len(safety_net)) for feed_id in safety_net: @@ -1132,12 +1282,18 @@ class UserSubscription(models.Model): @classmethod def count_subscribers_to_other_subscriptions(cls, feed_id): # feeds = defaultdict(int) - subscribing_users = cls.objects.filter(feed=feed_id).values('user', 'feed_opens').order_by('-feed_opens')[:25] + subscribing_users = ( + cls.objects.filter(feed=feed_id).values("user", "feed_opens").order_by("-feed_opens")[:25] + ) print("Got subscribing users") - subscribing_user_ids = [sub['user'] for sub in subscribing_users] + subscribing_user_ids = [sub["user"] for sub in subscribing_users] print("Got subscribing user ids") - cofeeds = cls.objects.filter(user__in=subscribing_user_ids).values('feed').annotate( - user_count=Count('user')).order_by('-user_count')[:200] + cofeeds = ( + cls.objects.filter(user__in=subscribing_user_ids) + .values("feed") + .annotate(user_count=Count("user")) + .order_by("-user_count")[:200] + ) print("Got cofeeds: %s" % len(cofeeds)) # feed_subscribers = Feed.objects.filter(pk__in=[f['feed'] for f in cofeeds]).values('pk', 'num_subscribers') # max_local_subscribers = float(max([f['user_count'] for f in cofeeds])) @@ -1155,24 +1311,25 @@ class UserSubscription(models.Model): # pprint([(Feed.get_by_id(o[0]), o[1], o[2], o[3], o[4]) for o in orderedpctfeeds]) users_by_feeds = {} - for feed in [f['feed'] for f in cofeeds]: - users_by_feeds[feed] = [u['user'] for u in cls.objects.filter(feed=feed, user__in=subscribing_user_ids).values('user')] + for feed in [f["feed"] for f in cofeeds]: + users_by_feeds[feed] = [ + u["user"] for u in cls.objects.filter(feed=feed, user__in=subscribing_user_ids).values("user") + ] print("Got users_by_feeds") - + table = tfidf() for feed in list(users_by_feeds.keys()): table.addDocument(feed, users_by_feeds[feed]) print("Got table") - + sorted_table = sorted(table.similarities(subscribing_user_ids), key=itemgetter(1), reverse=True)[:8] pprint([(Feed.get_by_id(o[0]), o[1]) for o in sorted_table]) - + return table # return cofeeds class RUserStory: - @classmethod def mark_story_hashes_read(cls, user_id, 
story_hashes, username=None, r=None, s=None): if not r: @@ -1182,32 +1339,40 @@ class RUserStory: ps = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL) if not username: username = User.objects.get(pk=user_id).username - + p = r.pipeline() feed_ids = set() friend_ids = set() - + if not isinstance(story_hashes, list): story_hashes = [story_hashes] - + single_story = len(story_hashes) == 1 - + for story_hash in story_hashes: feed_id, _ = MStory.split_story_hash(story_hash) feed_ids.add(feed_id) - + if single_story: cls.aggregate_mark_read(feed_id) - + # Find other social feeds with this story to update their counts friend_key = "F:%s:F" % (user_id) share_key = "S:%s" % (story_hash) friends_with_shares = [int(f) for f in s.sinter(share_key, friend_key)] friend_ids.update(friends_with_shares) - cls.mark_read(user_id, feed_id, story_hash, social_user_ids=friends_with_shares, r=p, username=username, ps=ps) - + cls.mark_read( + user_id, + feed_id, + story_hash, + social_user_ids=friends_with_shares, + r=p, + username=username, + ps=ps, + ) + p.execute() - + return list(feed_ids), list(friend_ids) @classmethod @@ -1218,7 +1383,7 @@ class RUserStory: s = redis.Redis(connection_pool=settings.REDIS_POOL) if not ps: ps = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL) - + friend_ids = set() feed_id, _ = MStory.split_story_hash(story_hash) @@ -1227,52 +1392,69 @@ class RUserStory: share_key = "S:%s" % (story_hash) friends_with_shares = [int(f) for f in s.sinter(share_key, friend_key)] friend_ids.update(friends_with_shares) - cls.mark_unread(user.pk, feed_id, story_hash, social_user_ids=friends_with_shares, r=r, - username=user.username, ps=ps) - + cls.mark_unread( + user.pk, + feed_id, + story_hash, + social_user_ids=friends_with_shares, + r=r, + username=user.username, + ps=ps, + ) + return feed_id, list(friend_ids) - + @classmethod def aggregate_mark_read(cls, feed_id): if not feed_id: logging.debug(" ***> ~BR~FWNo feed_id on aggregate mark read. 
Ignoring.") return - + r = redis.Redis(connection_pool=settings.REDIS_FEED_READ_POOL) - week_of_year = datetime.datetime.now().strftime('%Y-%U') + week_of_year = datetime.datetime.now().strftime("%Y-%U") feed_read_key = "fR:%s:%s" % (feed_id, week_of_year) - + r.incr(feed_read_key) # This settings.DAYS_OF_STORY_HASHES doesn't need to consider potential pro subscribers # because the feed_read_key is really only used for statistics and not unreads - r.expire(feed_read_key, 2*settings.DAYS_OF_STORY_HASHES*24*60*60) - + r.expire(feed_read_key, 2 * settings.DAYS_OF_STORY_HASHES * 24 * 60 * 60) + @classmethod - def mark_read(cls, user_id, story_feed_id, story_hash, social_user_ids=None, - aggregated=False, r=None, username=None, ps=None): + def mark_read( + cls, + user_id, + story_feed_id, + story_hash, + social_user_ids=None, + aggregated=False, + r=None, + username=None, + ps=None, + ): if not r: r = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL) - + story_hash = MStory.ensure_story_hash(story_hash, story_feed_id=story_feed_id) - if not story_hash: return - + if not story_hash: + return + def redis_commands(key): r.sadd(key, story_hash) - r.expire(key, Feed.days_of_story_hashes_for_feed(story_feed_id)*24*60*60) + r.expire(key, Feed.days_of_story_hashes_for_feed(story_feed_id) * 24 * 60 * 60) - all_read_stories_key = 'RS:%s' % (user_id) + all_read_stories_key = "RS:%s" % (user_id) redis_commands(all_read_stories_key) - - read_story_key = 'RS:%s:%s' % (user_id, story_feed_id) + + read_story_key = "RS:%s:%s" % (user_id, story_feed_id) redis_commands(read_story_key) - + if ps and username: - ps.publish(username, 'story:read:%s' % story_hash) - + ps.publish(username, "story:read:%s" % story_hash) + if social_user_ids: for social_user_id in social_user_ids: - social_read_story_key = 'RS:%s:B:%s' % (user_id, social_user_id) + social_read_story_key = "RS:%s:B:%s" % (user_id, social_user_id) redis_commands(social_read_story_key) feed_id, _ = MStory.split_story_hash(story_hash) @@ -1282,13 +1464,13 @@ class RUserStory: # unread_ranked_stories_key = f"zU:{user_id}:{story_feed_id}" # r.srem(unread_stories_key, story_hash) # r.zrem(unread_ranked_stories_key, story_hash) - + if not aggregated: - key = 'lRS:%s' % user_id + key = "lRS:%s" % user_id r.lpush(key, story_hash) r.ltrim(key, 0, 1000) - r.expire(key, Feed.days_of_story_hashes_for_feed(story_feed_id)*24*60*60) - + r.expire(key, Feed.days_of_story_hashes_for_feed(story_feed_id) * 24 * 60 * 60) + @staticmethod def story_can_be_marked_unread_by_user(story, user): message = None @@ -1297,44 +1479,51 @@ class RUserStory: # message = "Story is more than %s days old, change your days of unreads under Preferences." % ( # user.profile.days_of_unread) if user.profile.is_premium: - message = "Story is more than %s days old. Premium Archive accounts can mark any story as unread." % ( - settings.DAYS_OF_UNREAD) + message = ( + "Story is more than %s days old. Premium Archive accounts can mark any story as unread." + % (settings.DAYS_OF_UNREAD) + ) elif story.story_date > user.profile.unread_cutoff_premium: - message = "Story is older than %s days. Premium has %s days, and Premium Archive can mark anything unread." % ( - settings.DAYS_OF_UNREAD_FREE, settings.DAYS_OF_UNREAD) + message = ( + "Story is older than %s days. Premium has %s days, and Premium Archive can mark anything unread." + % (settings.DAYS_OF_UNREAD_FREE, settings.DAYS_OF_UNREAD) + ) else: - message = "Story is more than %s days old, only Premium Archive can mark older stories unread." 
% ( - settings.DAYS_OF_UNREAD_FREE) + message = ( + "Story is more than %s days old, only Premium Archive can mark older stories unread." + % (settings.DAYS_OF_UNREAD_FREE) + ) return message - + @staticmethod def mark_unread(user_id, story_feed_id, story_hash, social_user_ids=None, r=None, username=None, ps=None): if not r: r = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL) story_hash = MStory.ensure_story_hash(story_hash, story_feed_id=story_feed_id) - - if not story_hash: return - + + if not story_hash: + return + def redis_commands(key): r.srem(key, story_hash) - r.expire(key, Feed.days_of_story_hashes_for_feed(story_feed_id)*24*60*60) + r.expire(key, Feed.days_of_story_hashes_for_feed(story_feed_id) * 24 * 60 * 60) - all_read_stories_key = 'RS:%s' % (user_id) + all_read_stories_key = "RS:%s" % (user_id) redis_commands(all_read_stories_key) - - read_story_key = 'RS:%s:%s' % (user_id, story_feed_id) + + read_story_key = "RS:%s:%s" % (user_id, story_feed_id) redis_commands(read_story_key) - - read_stories_list_key = 'lRS:%s' % user_id + + read_stories_list_key = "lRS:%s" % user_id r.lrem(read_stories_list_key, 1, story_hash) - + if ps and username: - ps.publish(username, 'story:unread:%s' % story_hash) - + ps.publish(username, "story:unread:%s" % story_hash) + if social_user_ids: for social_user_id in social_user_ids: - social_read_story_key = 'RS:%s:B:%s' % (user_id, social_user_id) + social_read_story_key = "RS:%s:B:%s" % (user_id, social_user_id) redis_commands(social_read_story_key) @staticmethod @@ -1343,51 +1532,52 @@ class RUserStory: r = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL) story_hashes = r.smembers("RS:%s:%s" % (user_id, feed_id)) return story_hashes - + @staticmethod def get_read_stories(user_id, offset=0, limit=12, order="newest"): r = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL) key = "lRS:%s" % user_id - + if order == "oldest": count = r.llen(key) - if offset >= count: return [] - offset = max(0, count - (offset+limit)) - story_hashes = r.lrange(key, offset, offset+limit) + if offset >= count: + return [] + offset = max(0, count - (offset + limit)) + story_hashes = r.lrange(key, offset, offset + limit) elif order == "newest": - story_hashes = r.lrange(key, offset, offset+limit) - + story_hashes = r.lrange(key, offset, offset + limit) + return story_hashes - + @classmethod def switch_feed(cls, user_id, old_feed_id, new_feed_id): r = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL) p = r.pipeline() - + story_hashes = UserSubscription.story_hashes(user_id, feed_ids=[old_feed_id]) # story_hashes = cls.get_stories(user_id, old_feed_id, r=r) - + for story_hash in story_hashes: _, hash_story = MStory.split_story_hash(story_hash) new_story_hash = "%s:%s" % (new_feed_id, hash_story) read_feed_key = "RS:%s:%s" % (user_id, new_feed_id) p.sadd(read_feed_key, new_story_hash) - p.expire(read_feed_key, Feed.days_of_story_hashes_for_feed(new_feed_id)*24*60*60) + p.expire(read_feed_key, Feed.days_of_story_hashes_for_feed(new_feed_id) * 24 * 60 * 60) read_user_key = "RS:%s" % (user_id) p.sadd(read_user_key, new_story_hash) - p.expire(read_user_key, Feed.days_of_story_hashes_for_feed(new_feed_id)*24*60*60) - + p.expire(read_user_key, Feed.days_of_story_hashes_for_feed(new_feed_id) * 24 * 60 * 60) + p.execute() - + if len(story_hashes) > 0: logging.info(" ---> %s read stories" % len(story_hashes)) - + @classmethod def switch_hash(cls, feed, old_hash, new_hash): r = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL) p = 
r.pipeline() - + usersubs = UserSubscription.objects.filter(feed_id=feed.pk, last_read_date__gte=feed.unread_cutoff) logging.info(" ---> ~SB%s usersubs~SN to switch read story hashes..." % len(usersubs)) for sub in usersubs: @@ -1395,14 +1585,14 @@ class RUserStory: read = r.sismember(rs_key, old_hash) if read: p.sadd(rs_key, new_hash) - p.expire(rs_key, feed.days_of_story_hashes*24*60*60) - + p.expire(rs_key, feed.days_of_story_hashes * 24 * 60 * 60) + read_user_key = "RS:%s" % sub.user.pk p.sadd(read_user_key, new_hash) - p.expire(read_user_key, feed.days_of_story_hashes*24*60*60) - + p.expire(read_user_key, feed.days_of_story_hashes * 24 * 60 * 60) + p.execute() - + @classmethod def read_story_count(cls, user_id): r = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL) @@ -1410,22 +1600,27 @@ class RUserStory: count = r.scard(key) return count + class UserSubscriptionFolders(models.Model): """ A JSON list of folders and feeds for while a user has subscribed. The list is a recursive descent of feeds and folders in folders. Used to layout the feeds and folders in the Reader's feed navigation pane. """ + user = models.OneToOneField(User, on_delete=models.CASCADE) folders = models.TextField(default="[]") - + def __str__(self): - return "[%s]: %s" % (self.user, len(self.folders),) - + return "[%s]: %s" % ( + self.user, + len(self.folders), + ) + class Meta: verbose_name_plural = "folders" verbose_name = "folder" - + @classmethod def compact_for_user(cls, user_id): user = User.objects.get(pk=user_id) @@ -1433,12 +1628,12 @@ class UserSubscriptionFolders(models.Model): usf = UserSubscriptionFolders.objects.get(user=user) except UserSubscriptionFolders.DoesNotExist: return - + usf.compact() - + def compact(self): folders = json.decode(self.folders) - + def _compact(folder): new_folder = [] for item in folder: @@ -1449,7 +1644,9 @@ class UserSubscriptionFolders(models.Model): # Check every existing folder at that level to see if it already exists for ef, existing_folder in enumerate(new_folder): if type(existing_folder) == dict and list(existing_folder.keys())[0] == f_k: - existing_folder_feed_ids = [f for f in list(existing_folder.values())[0] if type(f) == int] + existing_folder_feed_ids = [ + f for f in list(existing_folder.values())[0] if type(f) == int + ] merged = [] for merge_val in existing_folder_feed_ids: merged.append(merge_val) @@ -1460,19 +1657,23 @@ class UserSubscriptionFolders(models.Model): else: merged.append(merge_val) if f_v != existing_folder_feed_ids: - logging.info(f" ---> ~FRFound repeat folder: {f_k} \n\t" - f"~FBExisting: {f_v}\n\t" - f"~FCMerging: {list(existing_folder.values())[0]}\n\t" - f"~FYBecomes: {merged}") + logging.info( + f" ---> ~FRFound repeat folder: {f_k} \n\t" + f"~FBExisting: {f_v}\n\t" + f"~FCMerging: {list(existing_folder.values())[0]}\n\t" + f"~FYBecomes: {merged}" + ) new_folder[ef] = {f_k: _compact(merged)} else: - logging.info(f" ---> ~FRFound repeat folder ~FY{f_k}~FR, no difference in feeds") + logging.info( + f" ---> ~FRFound repeat folder ~FY{f_k}~FR, no difference in feeds" + ) break else: # If no match, then finally we can add the folder new_folder.append({f_k: _compact(f_v)}) return new_folder - + new_folders = _compact(folders) compact_msg = " ---> Compacting from %s to %s" % (folders, new_folders) new_folders = json.encode(new_folders) @@ -1481,7 +1682,7 @@ class UserSubscriptionFolders(models.Model): logging.info(" ---> Compacting from %s bytes to %s bytes" % (len(self.folders), len(new_folders))) self.folders = new_folders 
self.save() - + def add_folder(self, parent_folder, folder): if self.folders: user_sub_folders = json.decode(self.folders) @@ -1491,9 +1692,10 @@ class UserSubscriptionFolders(models.Model): user_sub_folders = add_object_to_folder(obj, parent_folder, user_sub_folders) self.folders = json.encode(user_sub_folders) self.save() - + def arranged_folders(self): user_sub_folders = json.decode(self.folders) + def _arrange_folder(folder): folder_feeds = [] folder_folders = [] @@ -1507,22 +1709,20 @@ class UserSubscriptionFolders(models.Model): arranged_folder = folder_feeds + folder_folders return arranged_folder - + return _arrange_folder(user_sub_folders) - + def flatten_folders(self, feeds=None, inactive_feeds=None): folders = json.decode(self.folders) flat_folders = {" ": []} if feeds and not inactive_feeds: inactive_feeds = [] - + def _flatten_folders(items, parent_folder="", depth=0): for item in items: - if (isinstance(item, int) and - (not feeds or - (item in feeds or item in inactive_feeds))): + if isinstance(item, int) and (not feeds or (item in feeds or item in inactive_feeds)): if not parent_folder: - parent_folder = ' ' + parent_folder = " " if parent_folder in flat_folders: flat_folders[parent_folder].append(item) else: @@ -1531,42 +1731,49 @@ class UserSubscriptionFolders(models.Model): for folder_name in item: folder = item[folder_name] flat_folder_name = "%s%s%s" % ( - parent_folder if parent_folder and parent_folder != ' ' else "", - " - " if parent_folder and parent_folder != ' ' else "", - folder_name + parent_folder if parent_folder and parent_folder != " " else "", + " - " if parent_folder and parent_folder != " " else "", + folder_name, ) flat_folders[flat_folder_name] = [] - _flatten_folders(folder, flat_folder_name, depth+1) - + _flatten_folders(folder, flat_folder_name, depth + 1) + _flatten_folders(folders) - + return flat_folders def delete_feed(self, feed_id, in_folder, commit_delete=True): feed_id = int(feed_id) - def _find_feed_in_folders(old_folders, folder_name='', multiples_found=False, deleted=False): + + def _find_feed_in_folders(old_folders, folder_name="", multiples_found=False, deleted=False): new_folders = [] for k, folder in enumerate(old_folders): if isinstance(folder, int): - if (folder == feed_id and in_folder is not None and ( - (in_folder not in folder_name) or - (in_folder in folder_name and deleted))): + if ( + folder == feed_id + and in_folder is not None + and ((in_folder not in folder_name) or (in_folder in folder_name and deleted)) + ): multiples_found = True - logging.user(self.user, "~FB~SBDeleting feed, and a multiple has been found in '%s' / '%s' %s" % (folder_name, in_folder, '(deleted)' if deleted else '')) - if (folder == feed_id and - (in_folder is None or in_folder in folder_name) and - not deleted): - logging.user(self.user, "~FBDelete feed: %s'th item: %s folders/feeds" % ( - k, len(old_folders) - )) + logging.user( + self.user, + "~FB~SBDeleting feed, and a multiple has been found in '%s' / '%s' %s" + % (folder_name, in_folder, "(deleted)" if deleted else ""), + ) + if folder == feed_id and (in_folder is None or in_folder in folder_name) and not deleted: + logging.user( + self.user, "~FBDelete feed: %s'th item: %s folders/feeds" % (k, len(old_folders)) + ) deleted = True else: new_folders.append(folder) elif isinstance(folder, dict): for f_k, f_v in list(folder.items()): - nf, multiples_found, deleted = _find_feed_in_folders(f_v, f_k, multiples_found, deleted) + nf, multiples_found, deleted = _find_feed_in_folders( + f_v, f_k, 
multiples_found, deleted + ) new_folders.append({f_k: nf}) - + return new_folders, multiples_found, deleted user_sub_folders = self.arranged_folders() @@ -1582,8 +1789,7 @@ class UserSubscriptionFolders(models.Model): duplicate_feed = DuplicateFeed.objects.filter(duplicate_feed_id=feed_id) if duplicate_feed: try: - user_sub = UserSubscription.objects.get(user=self.user, - feed=duplicate_feed[0].feed) + user_sub = UserSubscription.objects.get(user=self.user, feed=duplicate_feed[0].feed) except (Feed.DoesNotExist, UserSubscription.DoesNotExist): return if user_sub: @@ -1600,30 +1806,38 @@ class UserSubscriptionFolders(models.Model): elif isinstance(folder, dict): for f_k, f_v in list(folder.items()): if f_k == folder_to_delete and (in_folder in folder_name or in_folder is None): - logging.user(self.user, "~FBDeleting folder '~SB%s~SN' in '%s': %s" % (f_k, folder_name, folder)) + logging.user( + self.user, + "~FBDeleting folder '~SB%s~SN' in '%s': %s" % (f_k, folder_name, folder), + ) deleted_folder = folder else: - nf, feeds_to_delete, deleted_folder = _find_folder_in_folders(f_v, f_k, feeds_to_delete, deleted_folder) + nf, feeds_to_delete, deleted_folder = _find_folder_in_folders( + f_v, f_k, feeds_to_delete, deleted_folder + ) new_folders.append({f_k: nf}) - + return new_folders, feeds_to_delete, deleted_folder - + user_sub_folders = json.decode(self.folders) - user_sub_folders, feeds_to_delete, deleted_folder = _find_folder_in_folders(user_sub_folders, '', feed_ids_in_folder) + user_sub_folders, feeds_to_delete, deleted_folder = _find_folder_in_folders( + user_sub_folders, "", feed_ids_in_folder + ) self.folders = json.encode(user_sub_folders) self.save() if commit_delete: UserSubscription.objects.filter(user=self.user, feed__in=feeds_to_delete).delete() - + return deleted_folder def delete_feeds_by_folder(self, feeds_by_folder): - logging.user(self.user, "~FBDeleting ~FR~SB%s~SN feeds~FB: ~SB%s" % ( - len(feeds_by_folder), feeds_by_folder)) + logging.user( + self.user, "~FBDeleting ~FR~SB%s~SN feeds~FB: ~SB%s" % (len(feeds_by_folder), feeds_by_folder) + ) for feed_id, in_folder in feeds_by_folder: self.delete_feed(feed_id, in_folder) - + return self def rename_folder(self, folder_to_rename, new_folder_name, in_folder): @@ -1636,21 +1850,25 @@ class UserSubscriptionFolders(models.Model): for f_k, f_v in list(folder.items()): nf = _find_folder_in_folders(f_v, f_k) if f_k == folder_to_rename and in_folder in folder_name: - logging.user(self.user, "~FBRenaming folder '~SB%s~SN' in '%s' to: ~SB%s" % ( - f_k, folder_name, new_folder_name)) + logging.user( + self.user, + "~FBRenaming folder '~SB%s~SN' in '%s' to: ~SB%s" + % (f_k, folder_name, new_folder_name), + ) f_k = new_folder_name new_folders.append({f_k: nf}) - + return new_folders - + user_sub_folders = json.decode(self.folders) - user_sub_folders = _find_folder_in_folders(user_sub_folders, '') + user_sub_folders = _find_folder_in_folders(user_sub_folders, "") self.folders = json.encode(user_sub_folders) self.save() - + def move_feed_to_folders(self, feed_id, in_folders=None, to_folders=None): - logging.user(self.user, "~FBMoving feed '~SB%s~SN' in '%s' to: ~SB%s" % ( - feed_id, in_folders, to_folders)) + logging.user( + self.user, "~FBMoving feed '~SB%s~SN' in '%s' to: ~SB%s" % (feed_id, in_folders, to_folders) + ) user_sub_folders = json.decode(self.folders) for in_folder in in_folders: self.delete_feed(feed_id, in_folder, commit_delete=False) @@ -1659,46 +1877,49 @@ class UserSubscriptionFolders(models.Model): user_sub_folders = 
add_object_to_folder(int(feed_id), to_folder, user_sub_folders) self.folders = json.encode(user_sub_folders) self.save() - + return self def move_feed_to_folder(self, feed_id, in_folder=None, to_folder=None): - logging.user(self.user, "~FBMoving feed '~SB%s~SN' in '%s' to: ~SB%s" % ( - feed_id, in_folder, to_folder)) + logging.user( + self.user, "~FBMoving feed '~SB%s~SN' in '%s' to: ~SB%s" % (feed_id, in_folder, to_folder) + ) user_sub_folders = json.decode(self.folders) self.delete_feed(feed_id, in_folder, commit_delete=False) user_sub_folders = json.decode(self.folders) user_sub_folders = add_object_to_folder(int(feed_id), to_folder, user_sub_folders) self.folders = json.encode(user_sub_folders) self.save() - + return self def move_folder_to_folder(self, folder_name, in_folder=None, to_folder=None): - logging.user(self.user, "~FBMoving folder '~SB%s~SN' in '%s' to: ~SB%s" % ( - folder_name, in_folder, to_folder)) + logging.user( + self.user, "~FBMoving folder '~SB%s~SN' in '%s' to: ~SB%s" % (folder_name, in_folder, to_folder) + ) user_sub_folders = json.decode(self.folders) deleted_folder = self.delete_folder(folder_name, in_folder, [], commit_delete=False) user_sub_folders = json.decode(self.folders) user_sub_folders = add_object_to_folder(deleted_folder, to_folder, user_sub_folders) self.folders = json.encode(user_sub_folders) self.save() - + return self - + def move_feeds_by_folder_to_folder(self, feeds_by_folder, to_folder): - logging.user(self.user, "~FBMoving ~SB%s~SN feeds to folder: ~SB%s" % ( - len(feeds_by_folder), to_folder)) + logging.user( + self.user, "~FBMoving ~SB%s~SN feeds to folder: ~SB%s" % (len(feeds_by_folder), to_folder) + ) for feed_id, in_folder in feeds_by_folder: feed_id = int(feed_id) self.move_feed_to_folder(feed_id, in_folder, to_folder) - + return self - + def rewrite_feed(self, original_feed, duplicate_feed): def rewrite_folders(folders, original_feed, duplicate_feed): new_folders = [] - + for k, folder in enumerate(folders): if isinstance(folder, int): if folder == duplicate_feed.pk: @@ -1711,15 +1932,15 @@ class UserSubscriptionFolders(models.Model): new_folders.append({f_k: rewrite_folders(f_v, original_feed, duplicate_feed)}) return new_folders - + folders = json.decode(self.folders) folders = rewrite_folders(folders, original_feed, duplicate_feed) self.folders = json.encode(folders) self.save() - + def flat(self): folders = json.decode(self.folders) - + def _flat(folder, feeds=None): if not feeds: feeds = [] @@ -1732,10 +1953,10 @@ class UserSubscriptionFolders(models.Model): return feeds return _flat(folders) - + def feed_ids_under_folder_slug(self, slug): folders = json.decode(self.folders) - + def _feeds(folder, found=False, folder_title=None): feeds = [] local_found = False @@ -1756,16 +1977,16 @@ class UserSubscriptionFolders(models.Model): return feeds, folder_title return _feeds(folders) - + @classmethod def add_all_missing_feeds(cls): - usf = cls.objects.all().order_by('pk') + usf = cls.objects.all().order_by("pk") total = usf.count() - + for i, f in enumerate(usf): print("%s/%s: %s" % (i, total, f)) f.add_missing_feeds() - + @classmethod def add_missing_feeds_for_user(cls, user_id): user = User.objects.get(pk=user_id) @@ -1773,62 +1994,67 @@ class UserSubscriptionFolders(models.Model): usf = UserSubscriptionFolders.objects.get(user=user) except UserSubscriptionFolders.DoesNotExist: return - + usf.add_missing_feeds() - + def add_missing_feeds(self): all_feeds = self.flat() - subs = [us.feed_id for us in - 
UserSubscription.objects.filter(user=self.user).only('feed')] - + subs = [us.feed_id for us in UserSubscription.objects.filter(user=self.user).only("feed")] + missing_subs = set(all_feeds) - set(subs) if missing_subs: - logging.debug(" ---> %s is missing %s subs. Adding %s..." % ( - self.user, len(missing_subs), missing_subs)) + logging.debug( + " ---> %s is missing %s subs. Adding %s..." % (self.user, len(missing_subs), missing_subs) + ) for feed_id in missing_subs: feed = Feed.get_by_id(feed_id) if feed: if feed_id != feed.pk: - logging.debug(" ---> %s doesn't match %s, rewriting to remove %s..." % ( - feed_id, feed.pk, feed_id)) + logging.debug( + " ---> %s doesn't match %s, rewriting to remove %s..." + % (feed_id, feed.pk, feed_id) + ) # Clear out duplicate sub in folders before subscribing to feed duplicate_feed = Feed.get_by_id(feed_id) duplicate_feed.pk = feed_id self.rewrite_feed(feed, duplicate_feed) - us, _ = UserSubscription.objects.get_or_create(user=self.user, feed=feed, defaults={ - 'needs_unread_recalc': True - }) + us, _ = UserSubscription.objects.get_or_create( + user=self.user, feed=feed, defaults={"needs_unread_recalc": True} + ) if not us.needs_unread_recalc: us.needs_unread_recalc = True us.save() elif feed_id and not feed: # No feed found for subscription, remove subscription - logging.debug(" ---> %s: No feed found, removing subscription: %s" % ( - self.user, feed_id)) + logging.debug(" ---> %s: No feed found, removing subscription: %s" % (self.user, feed_id)) self.delete_feed(feed_id, None, commit_delete=False) - missing_folder_feeds = set(subs) - set(all_feeds) if missing_folder_feeds: user_sub_folders = json.decode(self.folders) - logging.debug(" ---> %s is missing %s folder feeds. Adding %s..." % ( - self.user, len(missing_folder_feeds), missing_folder_feeds)) + logging.debug( + " ---> %s is missing %s folder feeds. Adding %s..." + % (self.user, len(missing_folder_feeds), missing_folder_feeds) + ) for feed_id in missing_folder_feeds: feed = Feed.get_by_id(feed_id) if feed and feed.pk == feed_id: user_sub_folders = add_object_to_folder(feed_id, "", user_sub_folders) self.folders = json.encode(user_sub_folders) self.save() - + def auto_activate(self): - if self.user.profile.is_premium: return - + if self.user.profile.is_premium: + return + active_count = UserSubscription.objects.filter(user=self.user, active=True).count() - if active_count: return - + if active_count: + return + all_feeds = self.flat() - if not all_feeds: return - + if not all_feeds: + return + for feed in all_feeds[:64]: try: sub = UserSubscription.objects.get(user=self.user, feed=feed) @@ -1844,20 +2070,22 @@ class Feature(models.Model): """ Simple blog-like feature board shown to all users on the home page. """ + description = models.TextField(default="") date = models.DateTimeField(default=datetime.datetime.now) - + def __str__(self): return "[%s] %s" % (self.date, self.description[:50]) - + class Meta: ordering = ["-date"] + class RUserUnreadStory: - """Model to store manually unread stories that are older than a user's unread_cutoff + """Model to store manually unread stories that are older than a user's unread_cutoff (same as days_of_unread). This is built for Premium Archive purposes. - If a story is marked as unread but is within the unread_cutoff, no need to add a + If a story is marked as unread but is within the unread_cutoff, no need to add a UserUnreadStory instance as it will be automatically marked as read according to the user's days_of_unread preference. 
""" @@ -1884,7 +2112,7 @@ class RUserUnreadStory: story_hashes = [story_hashes] if not r: r = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL) - + pipeline = r.pipeline() for story_hash in story_hashes: feed_id, _ = MStory.split_story_hash(story_hash) @@ -1895,7 +2123,7 @@ class RUserUnreadStory: pipeline.zrem(user_manual_unread_stories_key, story_hash) pipeline.zrem(user_manual_unread_stories_feed_key, story_hash) pipeline.execute() - + @classmethod def unreads(cls, user_id, story_hash): if not isinstance(story_hash, list): @@ -1920,16 +2148,15 @@ class RUserUnreadStory: r = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL) p = r.pipeline() story_hashes = cls.get_stories_and_dates(user_id, old_feed_id, r=r) - - for (story_hash, story_timestamp) in story_hashes: + + for story_hash, story_timestamp in story_hashes: _, hash_story = MStory.split_story_hash(story_hash) new_story_hash = "%s:%s" % (new_feed_id, hash_story) # read_feed_key = "RS:%s:%s" % (user_id, new_feed_id) # user_manual_unread_stories_feed_key = f"uU:{user_id}:{new_feed_id}" cls.mark_unread(user_id, new_story_hash, story_timestamp, r=p) - + p.execute() - + if len(story_hashes) > 0: logging.info(" ---> %s archived unread stories" % len(story_hashes)) - diff --git a/apps/reader/tasks.py b/apps/reader/tasks.py index 0294bdf8d..179500b6b 100644 --- a/apps/reader/tasks.py +++ b/apps/reader/tasks.py @@ -6,13 +6,14 @@ from django.conf import settings from apps.reader.models import UserSubscription from apps.social.models import MSocialSubscription -@app.task(name='freshen-homepage') + +@app.task(name="freshen-homepage") def FreshenHomepage(): day_ago = datetime.datetime.utcnow() - datetime.timedelta(days=1) user = User.objects.get(username=settings.HOMEPAGE_USERNAME) user.profile.last_seen_on = datetime.datetime.utcnow() user.profile.save() - + usersubs = UserSubscription.objects.filter(user=user) logging.debug(" ---> %s has %s feeds, freshening..." % (user.username, usersubs.count())) for sub in usersubs: @@ -20,7 +21,7 @@ def FreshenHomepage(): sub.needs_unread_recalc = True sub.save() sub.calculate_feed_scores(silent=True) - + socialsubs = MSocialSubscription.objects.filter(user_id=user.pk) logging.debug(" ---> %s has %s socialsubs, freshening..." % (user.username, socialsubs.count())) for sub in socialsubs: @@ -29,12 +30,16 @@ def FreshenHomepage(): sub.save() sub.calculate_feed_scores(silent=True) -@app.task(name='clean-analytics', time_limit=720*10) + +@app.task(name="clean-analytics", time_limit=720 * 10) def CleanAnalytics(): - logging.debug(" ---> Cleaning analytics... %s feed fetches" % ( - settings.MONGOANALYTICSDB.nbanalytics.feed_fetches.count(), - )) + logging.debug( + " ---> Cleaning analytics... 
%s feed fetches" + % (settings.MONGOANALYTICSDB.nbanalytics.feed_fetches.count(),) + ) day_ago = datetime.datetime.utcnow() - datetime.timedelta(days=1) - settings.MONGOANALYTICSDB.nbanalytics.feed_fetches.delete_many({ - "date": {"$lt": day_ago}, - }) + settings.MONGOANALYTICSDB.nbanalytics.feed_fetches.delete_many( + { + "date": {"$lt": day_ago}, + } + ) diff --git a/apps/reader/test_reader.py b/apps/reader/test_reader.py index 7bde8b38d..cd01cc84f 100644 --- a/apps/reader/test_reader.py +++ b/apps/reader/test_reader.py @@ -5,129 +5,193 @@ from django.urls import reverse from django.conf import settings from mongoengine.connection import connect, disconnect + class Test_Reader(TestCase): fixtures = [ - 'apps/rss_feeds/fixtures/initial_data.json', - 'apps/rss_feeds/fixtures/rss_feeds.json', - 'subscriptions.json', #'stories.json', - 'apps/rss_feeds/fixtures/gawker1.json'] - + "apps/rss_feeds/fixtures/initial_data.json", + "apps/rss_feeds/fixtures/rss_feeds.json", + "subscriptions.json", #'stories.json', + "apps/rss_feeds/fixtures/gawker1.json", + ] + def setUp(self): disconnect() - settings.MONGODB = connect('test_newsblur') + settings.MONGODB = connect("test_newsblur") self.client = Client() def tearDown(self): - settings.MONGODB.drop_database('test_newsblur') - + settings.MONGODB.drop_database("test_newsblur") + def test_api_feeds(self): - self.client.login(username='conesus', password='test') - - response = self.client.get(reverse('load-feeds')) + self.client.login(username="conesus", password="test") + + response = self.client.get(reverse("load-feeds")) content = json.decode(response.content) - self.assertEqual(len(content['feeds']), 10) - self.assertEqual(content['feeds']['1']['feed_title'], 'The NewsBlur Blog') - self.assertEqual(content['folders'], [{'Tech': [1, 4, 5, {'Deep Tech': [6, 7]}]}, 2, 3, 8, 9, {'Blogs': [8, 9]}, 1]) - + self.assertEqual(len(content["feeds"]), 10) + self.assertEqual(content["feeds"]["1"]["feed_title"], "The NewsBlur Blog") + self.assertEqual( + content["folders"], [{"Tech": [1, 4, 5, {"Deep Tech": [6, 7]}]}, 2, 3, 8, 9, {"Blogs": [8, 9]}, 1] + ) + def test_delete_feed(self): - self.client.login(username='conesus', password='test') - response = self.client.get(reverse('load-feeds')) + self.client.login(username="conesus", password="test") + response = self.client.get(reverse("load-feeds")) feeds = json.decode(response.content) - self.assertEqual(feeds['folders'], [{'Tech': [1, 4, 5, {'Deep Tech': [6, 7]}]}, 2, 3, 8, 9, {'Blogs': [8, 9]}, 1]) - + self.assertEqual( + feeds["folders"], [{"Tech": [1, 4, 5, {"Deep Tech": [6, 7]}]}, 2, 3, 8, 9, {"Blogs": [8, 9]}, 1] + ) + # Delete feed - response = self.client.post(reverse('delete-feed'), {'feed_id': 1, 'in_folder': ''}) + response = self.client.post(reverse("delete-feed"), {"feed_id": 1, "in_folder": ""}) response = json.decode(response.content) - self.assertEqual(response['code'], 1) - - response = self.client.get(reverse('load-feeds')) + self.assertEqual(response["code"], 1) + + response = self.client.get(reverse("load-feeds")) feeds = json.decode(response.content) - self.assertEqual(feeds['folders'], [2, 3, 8, 9, {'Tech': [1, 4, 5, {'Deep Tech': [6, 7]}]}, {'Blogs': [8, 9]}]) - + self.assertEqual( + feeds["folders"], [2, 3, 8, 9, {"Tech": [1, 4, 5, {"Deep Tech": [6, 7]}]}, {"Blogs": [8, 9]}] + ) + # Delete feed - response = self.client.post(reverse('delete-feed'), {'feed_id': 9, 'in_folder': 'Blogs'}) + response = self.client.post(reverse("delete-feed"), {"feed_id": 9, "in_folder": "Blogs"}) response = 
json.decode(response.content) - self.assertEqual(response['code'], 1) - - response = self.client.get(reverse('load-feeds')) + self.assertEqual(response["code"], 1) + + response = self.client.get(reverse("load-feeds")) feeds = json.decode(response.content) - self.assertEqual(feeds['folders'], [2, 3, 8, 9, {'Tech': [1, 4, 5, {'Deep Tech': [6, 7]}]}, {'Blogs': [8]}]) - + self.assertEqual( + feeds["folders"], [2, 3, 8, 9, {"Tech": [1, 4, 5, {"Deep Tech": [6, 7]}]}, {"Blogs": [8]}] + ) + # Delete feed - response = self.client.post(reverse('delete-feed'), {'feed_id': 5, 'in_folder': 'Tech'}) + response = self.client.post(reverse("delete-feed"), {"feed_id": 5, "in_folder": "Tech"}) response = json.decode(response.content) - self.assertEqual(response['code'], 1) - - response = self.client.get(reverse('load-feeds')) + self.assertEqual(response["code"], 1) + + response = self.client.get(reverse("load-feeds")) feeds = json.decode(response.content) - self.assertEqual(feeds['folders'], [2, 3, 8, 9, {'Tech': [1, 4, {'Deep Tech': [6, 7]}]}, {'Blogs': [8]}]) - + self.assertEqual( + feeds["folders"], [2, 3, 8, 9, {"Tech": [1, 4, {"Deep Tech": [6, 7]}]}, {"Blogs": [8]}] + ) + # Delete feed - response = self.client.post(reverse('delete-feed'), {'feed_id': 4, 'in_folder': 'Tech'}) + response = self.client.post(reverse("delete-feed"), {"feed_id": 4, "in_folder": "Tech"}) response = json.decode(response.content) - self.assertEqual(response['code'], 1) - - response = self.client.get(reverse('load-feeds')) + self.assertEqual(response["code"], 1) + + response = self.client.get(reverse("load-feeds")) feeds = json.decode(response.content) - self.assertEqual(feeds['folders'], [2, 3, 8, 9, {'Tech': [1, {'Deep Tech': [6, 7]}]}, {'Blogs': [8]}]) - + self.assertEqual(feeds["folders"], [2, 3, 8, 9, {"Tech": [1, {"Deep Tech": [6, 7]}]}, {"Blogs": [8]}]) + # Delete feed - response = self.client.post(reverse('delete-feed'), {'feed_id': 8, 'in_folder': ''}) + response = self.client.post(reverse("delete-feed"), {"feed_id": 8, "in_folder": ""}) response = json.decode(response.content) - self.assertEqual(response['code'], 1) - - response = self.client.get(reverse('load-feeds')) + self.assertEqual(response["code"], 1) + + response = self.client.get(reverse("load-feeds")) feeds = json.decode(response.content) - self.assertEqual(feeds['folders'], [2, 3, 9, {'Tech': [1, {'Deep Tech': [6, 7]}]}, {'Blogs': [8]}]) + self.assertEqual(feeds["folders"], [2, 3, 9, {"Tech": [1, {"Deep Tech": [6, 7]}]}, {"Blogs": [8]}]) def test_delete_feed__multiple_folders(self): - self.client.login(username='conesus', password='test') - - response = self.client.get(reverse('load-feeds')) - feeds = json.decode(response.content) - self.assertEqual(feeds['folders'], [{'Tech': [1, 4, 5, {'Deep Tech': [6, 7]}]}, 2, 3, 8, 9, {'Blogs': [8, 9]}, 1]) - - # Delete feed - response = self.client.post(reverse('delete-feed'), {'feed_id': 1}) - response = json.decode(response.content) - self.assertEqual(response['code'], 1) - - response = self.client.get(reverse('load-feeds')) - feeds = json.decode(response.content) - self.assertEqual(feeds['folders'], [2, 3, 8, 9, {'Tech': [1, 4, 5, {'Deep Tech': [6, 7]}]}, {'Blogs': [8, 9]}]) - - def test_move_feeds_by_folder(self): - self.client.login(username='Dejal', password='test') + self.client.login(username="conesus", password="test") - response = self.client.get(reverse('load-feeds')) + response = self.client.get(reverse("load-feeds")) feeds = json.decode(response.content) - self.assertEqual(feeds['folders'], [5299728, 
644144, 1187026, {"Brainiacs & Opinion": [569, 38, 3581, 183139, 1186180, 15]}, {"Science & Technology": [731503, 140145, 1272495, 76, 161, 39, {"Hacker": [5985150, 3323431]}]}, {"Humor": [212379, 3530, 5994357]}, {"Videos": [3240, 5168]}]) - - # Move feeds by folder - response = self.client.post(reverse('move-feeds-by-folder-to-folder'), {'feeds_by_folder': '[\n [\n "5994357",\n "Humor"\n ],\n [\n "3530",\n "Humor"\n ]\n]', 'to_folder': 'Brainiacs & Opinion'}) + self.assertEqual( + feeds["folders"], [{"Tech": [1, 4, 5, {"Deep Tech": [6, 7]}]}, 2, 3, 8, 9, {"Blogs": [8, 9]}, 1] + ) + + # Delete feed + response = self.client.post(reverse("delete-feed"), {"feed_id": 1}) response = json.decode(response.content) - self.assertEqual(response['code'], 1) - - response = self.client.get(reverse('load-feeds')) + self.assertEqual(response["code"], 1) + + response = self.client.get(reverse("load-feeds")) feeds = json.decode(response.content) - self.assertEqual(feeds['folders'], [5299728, 644144, 1187026, {"Brainiacs & Opinion": [569, 38, 3581, 183139, 1186180, 15, 5994357, 3530]}, {"Science & Technology": [731503, 140145, 1272495, 76, 161, 39, {"Hacker": [5985150, 3323431]}]}, {"Humor": [212379]}, {"Videos": [3240, 5168]}]) - + self.assertEqual( + feeds["folders"], [2, 3, 8, 9, {"Tech": [1, 4, 5, {"Deep Tech": [6, 7]}]}, {"Blogs": [8, 9]}] + ) + + def test_move_feeds_by_folder(self): + self.client.login(username="Dejal", password="test") + + response = self.client.get(reverse("load-feeds")) + feeds = json.decode(response.content) + self.assertEqual( + feeds["folders"], + [ + 5299728, + 644144, + 1187026, + {"Brainiacs & Opinion": [569, 38, 3581, 183139, 1186180, 15]}, + { + "Science & Technology": [ + 731503, + 140145, + 1272495, + 76, + 161, + 39, + {"Hacker": [5985150, 3323431]}, + ] + }, + {"Humor": [212379, 3530, 5994357]}, + {"Videos": [3240, 5168]}, + ], + ) + + # Move feeds by folder + response = self.client.post( + reverse("move-feeds-by-folder-to-folder"), + { + "feeds_by_folder": '[\n [\n "5994357",\n "Humor"\n ],\n [\n "3530",\n "Humor"\n ]\n]', + "to_folder": "Brainiacs & Opinion", + }, + ) + response = json.decode(response.content) + self.assertEqual(response["code"], 1) + + response = self.client.get(reverse("load-feeds")) + feeds = json.decode(response.content) + self.assertEqual( + feeds["folders"], + [ + 5299728, + 644144, + 1187026, + {"Brainiacs & Opinion": [569, 38, 3581, 183139, 1186180, 15, 5994357, 3530]}, + { + "Science & Technology": [ + 731503, + 140145, + 1272495, + 76, + 161, + 39, + {"Hacker": [5985150, 3323431]}, + ] + }, + {"Humor": [212379]}, + {"Videos": [3240, 5168]}, + ], + ) + def test_load_single_feed(self): # from django.conf import settings # from django.db import connection # settings.DEBUG = True # connection.queries = [] - self.client.login(username='conesus', password='test') - url = reverse('load-single-feed', kwargs=dict(feed_id=1)) + self.client.login(username="conesus", password="test") + url = reverse("load-single-feed", kwargs=dict(feed_id=1)) response = self.client.get(url) feed = json.decode(response.content) - self.assertEqual(len(feed['feed_tags']), 0) - self.assertEqual(len(feed['classifiers']['tags']), 0) + self.assertEqual(len(feed["feed_tags"]), 0) + self.assertEqual(len(feed["classifiers"]["tags"]), 0) # self.assert_(connection.queries) - + # settings.DEBUG = False - + def test_compact_user_subscription_folders(self): usf = UserSubscriptionFolders.objects.get(user=User.objects.all()[0]) usf.folders = '[2, 3, {"Bloglets": [423, 424, 425]}, 
{"Blogs": [426, 427, 428, 429, 430, 431, 432, 433, 434, 435, 436, 437, 438, 439, 440, 441, 442, 443, 444, 445, 446, 447, 448, 449, 450, 451, 452, 453, 454, 455, 456, 457, 458, 459, 460, 461, 462, 463, 464, 465, 466, {"People": [471, 472, 473, 474, 475, 476, 477, 478, 479, 480, 481, 482, 483, 484, 485, 486, 487, 488, 489, 490, 491, 492, 493, 494, 495, 496, 497, 498, 499, 500, 501, 502, 503, 504, 505, 506, 507, 508, 509, 510, 511, 512, 513, 514, 515, 516, 517, 518, 519, 520, 521, 522, 523, 524, 525, 526, 527, 528, 867, 946, 947, 948]}, {"Tumblrs": [529, 530, 531, 532, 533, 534, 535, 536, 537, 538, 539, 540, 541, 542, 543, 544, 545, 546, 547, 548, 549]}, {"Photo Blogs": [550, 551, 552, 553, 554, 555, 556]}, {"Travel": [557, 558, 559]}, {"People": [471, 472, 473, 474, 475, 476, 477, 478, 479, 480, 481, 482, 483, 484, 485, 486, 487, 488, 489, 490, 491, 492, 493, 494, 495, 496, 497, 498, 499, 500, 501, 502, 503, 504, 505, 506, 508, 509, 510, 511, 512, 513, 514, 515, 516, 517, 518, 519, 522, 523, 524, 525, 526, 527, 528, 507, 520, 867]}, {"Tumblrs": [529, 530, 531, 532, 533, 534, 535, 536, 537, 538, 539, 540, 541, 542, 543, 544, 545, 546, 547, 548, 549]}, {"Photo Blogs": [550, 551, 552, 553, 554, 555, 556]}, {"Travel": [558, 559, 557]}, 943, {"Link Blogs": [467, 468, 469, 470]}, {"People": [471, 472, 473, 474, 475, 476, 477, 478, 479, 480, 481, 482, 483, 484, 485, 486, 487, 488, 490, 491, 492, 493, 494, 495, 496, 497, 498, 499, 500, 501, 502, 504, 505, 506, 508, 509, 510, 511, 512, 513, 514, 515, 516, 517, 518, 519, 522, 523, 525, 526, 527, 528]}, {"Tumblrs": [529, 530, 531, 532, 533, 534, 535, 536, 537, 538, 539, 540, 541, 542, 543, 544, 545, 546, 547, 548, 549]}, {"Photo Blogs": [550, 551, 552, 553, 554, 555, 556]}, {"Travel": [558, 559]}]}, {"Code": [560, 561, 562, 563, 564, 565, 566, 567, 568, 569, 570, 571, 572, 573, 574, 575, 576, 577, 578, 579, 580, 581, 582, 583]}, {"Cooking": [584, 585, 586, 587, 588, 589, 590, 591, 592, 593, 594, 595, 596, 597, 873, 953]}, {"Meta": [598, 599, 600, 601, 602, 603, 604, 605, 606, 607, 608]}, {"New York": [609, 610, 611, 612, 613, 614]}, {"San Francisco": [615, 616, 617, 618, 619, 620, 621, 622, 623, 624, 625, 626, 627, 628, 629, 630, 631, 632, 633, 634, 875]}, {"Tech": [635, 636, 637, 638, 639, 640, 641, 642, 643, 644, 645, 646, 647, 648, 649, 650, 651, 652, 653, 654, 655, 656, 657, 658, 659, 660, 184, 661, 662, 663, 664, 665, 666]}, {"Comics & Cartoons": [667, 668, 669, 670, 671, 672, 673, 63, 674, 675, 676, 677, 678, 679, 680, 681, 682, 109, 683, 684, 685, 958]}, {"Hardware": [686, 687, 688, 689, 690, 691, 692]}, {"Wood": []}, {"Newsletters": [693, 694, 695, 696, 697, 698, 699, 700, 701, 702, 703, 704, 705, 706, 707, 708, 709, 710, 711, 712, 713, 714, 715, 716, 717, 724, 719, 720, 721, 722, 723, 725, 727, 728, 729, 730, 731, 732, 733, 734, 735, 736, 737, 738, 739, 740, 741, 742, 743, 744, 745, 746, 747, 748, 749, 750, 751, 752, 753, 754, 755, 756, 757, 758, 759, 760, 761, 762, 763, 764, 765, 766, 767, 768, 769, 770, 771, 772, 773, 774, 775, 776, 777, 778, 779, 780, 781, 782, 783, 895]}, {"Woodworking": [784, 785, 786, 787, 788, 789, 790, 791, 792, 793]}, {"Twitter": [794, 795, 796, 797, 798, 799, 800, 801, 802, 803, 804, 805, 806, 807, 838, 915]}, {"News": [808, 809, 810, 811, 812, 813, 814, 815, 816, 817]}, {"Home": [818, 819, 820, 821, 822, 823]}, {"Facebook": [824, 825, 826]}, {"Art": [827, 828]}, {"Science": [403, 404, 405, 401, 402]}, {"Boston": [829, 830]}, {"mobility": [831, 832, 833, 834, 835, 836, 837, 963]}, {"Biking": []}, {"A Muted Folder": 
[1]}, 1, {"Any Broken Feeds": [916]}, {"Any Broken Feeds, Although Some of These Work Fine": [4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 840, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 841, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 842, 50, 51, 52, 53, 54, 843, 56, 57, 58, 59, 60, 61, 62, 63, 844, 917, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 918, 130, 131, 132, 846, 134, 135, 136, 919, 138, 139, 140, 141, 142, 143, 144, 145, 847, 147, 848, 149, 150, 151, 152, 153, 154, 849, 156, 157, 158, 936, 160, 850, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 1, 185, 186, 187, 188, 189, 851, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 852, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 853, 243, 854, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 256, 257, 258, 259, 260, 261, 262, 263, 264, 265, 266, 267, 856, 269, 270, 271, 272, 273, 274, 275, 276, 277, 278, 279, 939, 281, 282, 283, 284, 285, 940, 287, 288, 289, 857, 291, 292, 293, 294, 295, 296, 297, 298, 299, 300, 301, 302, 303, 304, 305, 306, 307, 308, 309, 310, 311, 312, 313, 314, 315, 316, 317, 318, 319, 320, 321, 322, 323, 324, 325, 326, 327, 328, 329, 330, 331, 332, 333, 334, 335, 336, 337, 338, 339, 340, 341, 342, 343, 344, 345, 346, 347, 348, 349, 350, 351, 352, 858, 354, 355, 859, 357, 358, 359, 360, 361, 362, 363, 364, 365, 366, 367, 368, 369, 370, 371, 372, 373, 374, 860, 376, 377, 378, 379, 380, 381, 382, 383, 384, 385, 386, 387, 388, 389, 390, 391, 392, 393, 394, 395, {"Ubuntu": [396, 397, 398, 399, 400]}, {"Science": [401, 402, 403, 404, 405]}, {"Music": [406, 407, 408, 409, 410, 411, 412]}, {"NYTimes": [413]}, {"Test": [414]}, {"Organizer": [415, 416, 417]}, {"Adult": [418, 419, 861, 421]}, {"Test": []}, 422]}]' @@ -137,7 +201,7 @@ class Test_Reader(TestCase): compact_folders = usf.folders self.assertNotEquals(dupe_folders, compact_folders) - + def test_compact_user_subscription_folders2(self): usf = UserSubscriptionFolders.objects.get(user=User.objects.all()[0]) usf.folders = '[2, 3, {"Bloglets": [423, 424, 425]}, {"Blogs": [426, 427, 428, 429, 430, {"Photo Blogs": [550, 551, 552, 553, 554, 555, 556]}, {"Photo Blogs": [551, 552, 553, 554, 555, 556]}, {"Travel": [557, 558]}, {"Travel": [557, 559]}, 943, {"Link Blogs": [467, 468, 469, 470, {"Travel": [557, 558]}, {"Travel": [557, 559]}]}, {"Link Blogs": [467, 468, 469, 470, {"Travel": [557, 558]}, {"Travel": [557, 559, 558]}]}]}]' diff --git a/apps/reader/urls.py b/apps/reader/urls.py index 9f829e59e..f81ac9873 100644 --- a/apps/reader/urls.py +++ b/apps/reader/urls.py @@ -2,67 +2,85 @@ from django.conf.urls import * from apps.reader import views urlpatterns = [ - url(r'^$', views.index), - url(r'^buster', views.iframe_buster, name='iframe-buster'), - url(r'^login_as', views.login_as, name='login_as'), - url(r'^welcome', views.welcome_req, name='welcome'), - url(r'^logout', views.logout, name='welcome-logout'), - url(r'^login', views.login, name='welcome-login'), - url(r'^autologin/(?P\w+)/(?P\w+)/?', views.autologin, name='autologin'), - url(r'^signup', views.signup, 
name='welcome-signup'), - url(r'^feeds/?$', views.load_feeds, name='load-feeds'), - url(r'^feed/(?P\d+)', views.load_single_feed, name='load-single-feed'), - url(r'^page/(?P\d+)', views.load_feed_page, name='load-feed-page'), - url(r'^refresh_feed/(?P\d+)', views.refresh_feed, name='refresh-feed'), - url(r'^favicons', views.load_feed_favicons, name='load-feed-favicons'), - url(r'^river_stories_widget', views.load_river_stories_widget, name='load-river-stories-widget'), - url(r'^river_stories', views.load_river_stories__redis, name='load-river-stories'), - url(r'^complete_river', views.complete_river, name='complete-river'), - url(r'^refresh_feeds', views.refresh_feeds, name='refresh-feeds'), - url(r'^interactions_count', views.interactions_count, name='interactions-count'), - url(r'^feed_unread_count', views.feed_unread_count, name='feed-unread-count'), - url(r'^starred_stories', views.load_starred_stories, name='load-starred-stories'), - url(r'^read_stories', views.load_read_stories, name='load-read-stories'), - url(r'^starred_story_hashes', views.starred_story_hashes, name='starred-story-hashes'), - url(r'^starred_rss/(?P\d+)/(?P\w+)/?$', views.starred_stories_rss_feed, name='starred-stories-rss-feed'), - url(r'^starred_rss/(?P\d+)/(?P\w+)/(?P[-\w]+)?/?$', views.starred_stories_rss_feed_tag, name='starred-stories-rss-feed-tag'), - url(r'^folder_rss/(?P\d+)/(?P\w+)/(?P\w+)/(?P[-\w]+)?/?$', views.folder_rss_feed, name='folder-rss-feed'), - url(r'^unread_story_hashes', views.unread_story_hashes, name='unread-story-hashes'), - url(r'^starred_counts', views.starred_counts, name='starred-counts'), - url(r'^mark_all_as_read', views.mark_all_as_read, name='mark-all-as-read'), - url(r'^mark_story_as_read', views.mark_story_as_read, name='mark-story-as-read'), - url(r'^mark_story_hashes_as_read', views.mark_story_hashes_as_read, name='mark-story-hashes-as-read'), - url(r'^mark_feed_stories_as_read', views.mark_feed_stories_as_read, name='mark-feed-stories-as-read'), - url(r'^mark_social_stories_as_read', views.mark_social_stories_as_read, name='mark-social-stories-as-read'), - url(r'^mark_story_as_unread', views.mark_story_as_unread), - url(r'^mark_story_hash_as_unread', views.mark_story_hash_as_unread, name='mark-story-hash-as-unread'), - url(r'^mark_story_as_starred', views.mark_story_as_starred), - url(r'^mark_story_hash_as_starred', views.mark_story_hash_as_starred), - url(r'^mark_story_as_unstarred', views.mark_story_as_unstarred), - url(r'^mark_story_hash_as_unstarred', views.mark_story_hash_as_unstarred), - url(r'^mark_feed_as_read', views.mark_feed_as_read), - url(r'^delete_feed_by_url', views.delete_feed_by_url, name='delete-feed-by-url'), - url(r'^delete_feeds_by_folder', views.delete_feeds_by_folder, name='delete-feeds-by-folder'), - url(r'^delete_feed', views.delete_feed, name='delete-feed'), - url(r'^delete_folder', views.delete_folder, name='delete-folder'), - url(r'^rename_feed', views.rename_feed, name='rename-feed'), - url(r'^rename_folder', views.rename_folder, name='rename-folder'), - url(r'^move_feed_to_folders', views.move_feed_to_folders, name='move-feed-to-folders'), - url(r'^move_feed_to_folder', views.move_feed_to_folder, name='move-feed-to-folder'), - url(r'^move_folder_to_folder', views.move_folder_to_folder, name='move-folder-to-folder'), - url(r'^move_feeds_by_folder_to_folder', views.move_feeds_by_folder_to_folder, name='move-feeds-by-folder-to-folder'), - url(r'^add_url', views.add_url), - url(r'^add_folder', views.add_folder), - url(r'^add_feature', 
views.add_feature, name='add-feature'), - url(r'^features', views.load_features, name='load-features'), - url(r'^save_feed_order', views.save_feed_order, name='save-feed-order'), - url(r'^feeds_trainer', views.feeds_trainer, name='feeds-trainer'), - url(r'^save_feed_chooser', views.save_feed_chooser, name='save-feed-chooser'), - url(r'^send_story_email', views.send_story_email, name='send-story-email'), - url(r'^retrain_all_sites', views.retrain_all_sites, name='retrain-all-sites'), - url(r'^load_tutorial', views.load_tutorial, name='load-tutorial'), - url(r'^save_search', views.save_search, name='save-search'), - url(r'^delete_search', views.delete_search, name='delete-search'), - url(r'^save_dashboard_river', views.save_dashboard_river, name='save-dashboard-river'), - url(r'^remove_dashboard_river', views.remove_dashboard_river, name='remove-dashboard-river'), + url(r"^$", views.index), + url(r"^buster", views.iframe_buster, name="iframe-buster"), + url(r"^login_as", views.login_as, name="login_as"), + url(r"^welcome", views.welcome_req, name="welcome"), + url(r"^logout", views.logout, name="welcome-logout"), + url(r"^login", views.login, name="welcome-login"), + url(r"^autologin/(?P\w+)/(?P\w+)/?", views.autologin, name="autologin"), + url(r"^signup", views.signup, name="welcome-signup"), + url(r"^feeds/?$", views.load_feeds, name="load-feeds"), + url(r"^feed/(?P\d+)", views.load_single_feed, name="load-single-feed"), + url(r"^page/(?P\d+)", views.load_feed_page, name="load-feed-page"), + url(r"^refresh_feed/(?P\d+)", views.refresh_feed, name="refresh-feed"), + url(r"^favicons", views.load_feed_favicons, name="load-feed-favicons"), + url(r"^river_stories_widget", views.load_river_stories_widget, name="load-river-stories-widget"), + url(r"^river_stories", views.load_river_stories__redis, name="load-river-stories"), + url(r"^complete_river", views.complete_river, name="complete-river"), + url(r"^refresh_feeds", views.refresh_feeds, name="refresh-feeds"), + url(r"^interactions_count", views.interactions_count, name="interactions-count"), + url(r"^feed_unread_count", views.feed_unread_count, name="feed-unread-count"), + url(r"^starred_stories", views.load_starred_stories, name="load-starred-stories"), + url(r"^read_stories", views.load_read_stories, name="load-read-stories"), + url(r"^starred_story_hashes", views.starred_story_hashes, name="starred-story-hashes"), + url( + r"^starred_rss/(?P\d+)/(?P\w+)/?$", + views.starred_stories_rss_feed, + name="starred-stories-rss-feed", + ), + url( + r"^starred_rss/(?P\d+)/(?P\w+)/(?P[-\w]+)?/?$", + views.starred_stories_rss_feed_tag, + name="starred-stories-rss-feed-tag", + ), + url( + r"^folder_rss/(?P\d+)/(?P\w+)/(?P\w+)/(?P[-\w]+)?/?$", + views.folder_rss_feed, + name="folder-rss-feed", + ), + url(r"^unread_story_hashes", views.unread_story_hashes, name="unread-story-hashes"), + url(r"^starred_counts", views.starred_counts, name="starred-counts"), + url(r"^mark_all_as_read", views.mark_all_as_read, name="mark-all-as-read"), + url(r"^mark_story_as_read", views.mark_story_as_read, name="mark-story-as-read"), + url(r"^mark_story_hashes_as_read", views.mark_story_hashes_as_read, name="mark-story-hashes-as-read"), + url(r"^mark_feed_stories_as_read", views.mark_feed_stories_as_read, name="mark-feed-stories-as-read"), + url( + r"^mark_social_stories_as_read", views.mark_social_stories_as_read, name="mark-social-stories-as-read" + ), + url(r"^mark_story_as_unread", views.mark_story_as_unread), + url(r"^mark_story_hash_as_unread", 
views.mark_story_hash_as_unread, name="mark-story-hash-as-unread"), + url(r"^mark_story_as_starred", views.mark_story_as_starred), + url(r"^mark_story_hash_as_starred", views.mark_story_hash_as_starred), + url(r"^mark_story_as_unstarred", views.mark_story_as_unstarred), + url(r"^mark_story_hash_as_unstarred", views.mark_story_hash_as_unstarred), + url(r"^mark_feed_as_read", views.mark_feed_as_read), + url(r"^delete_feed_by_url", views.delete_feed_by_url, name="delete-feed-by-url"), + url(r"^delete_feeds_by_folder", views.delete_feeds_by_folder, name="delete-feeds-by-folder"), + url(r"^delete_feed", views.delete_feed, name="delete-feed"), + url(r"^delete_folder", views.delete_folder, name="delete-folder"), + url(r"^rename_feed", views.rename_feed, name="rename-feed"), + url(r"^rename_folder", views.rename_folder, name="rename-folder"), + url(r"^move_feed_to_folders", views.move_feed_to_folders, name="move-feed-to-folders"), + url(r"^move_feed_to_folder", views.move_feed_to_folder, name="move-feed-to-folder"), + url(r"^move_folder_to_folder", views.move_folder_to_folder, name="move-folder-to-folder"), + url( + r"^move_feeds_by_folder_to_folder", + views.move_feeds_by_folder_to_folder, + name="move-feeds-by-folder-to-folder", + ), + url(r"^add_url", views.add_url), + url(r"^add_folder", views.add_folder), + url(r"^add_feature", views.add_feature, name="add-feature"), + url(r"^features", views.load_features, name="load-features"), + url(r"^save_feed_order", views.save_feed_order, name="save-feed-order"), + url(r"^feeds_trainer", views.feeds_trainer, name="feeds-trainer"), + url(r"^save_feed_chooser", views.save_feed_chooser, name="save-feed-chooser"), + url(r"^send_story_email", views.send_story_email, name="send-story-email"), + url(r"^retrain_all_sites", views.retrain_all_sites, name="retrain-all-sites"), + url(r"^load_tutorial", views.load_tutorial, name="load-tutorial"), + url(r"^save_search", views.save_search, name="save-search"), + url(r"^delete_search", views.delete_search, name="delete-search"), + url(r"^save_dashboard_river", views.save_dashboard_river, name="save-dashboard-river"), + url(r"^remove_dashboard_river", views.remove_dashboard_river, name="remove-dashboard-river"), ] diff --git a/apps/reader/views.py b/apps/reader/views.py index 29a316ec4..2c15b0f56 100644 --- a/apps/reader/views.py +++ b/apps/reader/views.py @@ -113,229 +113,234 @@ BANNED_URLS = [ "brentozar.com", ] ALLOWED_SUBDOMAINS = [ - 'dev', - 'www', - 'hwww', - 'dwww', + "dev", + "www", + "hwww", + "dwww", # 'beta', # Comment to redirect beta -> www, uncomment to allow beta -> staging (+ dns changes) - 'staging', - 'hstaging', - 'discovery', - 'debug', - 'debug3', - 'staging2', - 'staging3', - 'nb', + "staging", + "hstaging", + "discovery", + "debug", + "debug3", + "staging2", + "staging3", + "nb", ] + def get_subdomain(request): - host = request.META.get('HTTP_HOST') + host = request.META.get("HTTP_HOST") if host and host.count(".") >= 2: return host.split(".")[0] else: return None + @never_cache -@render_to('reader/dashboard.xhtml') +@render_to("reader/dashboard.xhtml") def index(request, **kwargs): - subdomain = get_subdomain(request) if request.method == "GET" and subdomain and subdomain not in ALLOWED_SUBDOMAINS: username = request.subdomain or subdomain - if '.' in username: - username = username.split('.')[0] + if "." 
in username: + username = username.split(".")[0] user = User.objects.filter(username=username) if not user: user = User.objects.filter(username__iexact=username) if user: user = user[0] if not user: - return HttpResponseRedirect('http://%s%s' % ( - Site.objects.get_current().domain, - reverse('index'))) + return HttpResponseRedirect("http://%s%s" % (Site.objects.get_current().domain, reverse("index"))) return load_social_page(request, user_id=user.pk, username=request.subdomain, **kwargs) if request.user.is_anonymous: return welcome(request, **kwargs) else: return dashboard(request, **kwargs) + def dashboard(request, **kwargs): - user = request.user - feed_count = UserSubscription.objects.filter(user=request.user).count() + user = request.user + feed_count = UserSubscription.objects.filter(user=request.user).count() # recommended_feeds = RecommendedFeed.objects.filter(is_public=True, # approved_date__lte=datetime.datetime.now() # ).select_related('feed')[:2] unmoderated_feeds = [] if user.is_staff: - unmoderated_feeds = RecommendedFeed.objects.filter(is_public=False, - declined_date__isnull=True - ).select_related('feed')[:2] - statistics = MStatistics.all() - social_profile = MSocialProfile.get_user(user.pk) - custom_styling = MCustomStyling.get_user(user.pk) - dashboard_rivers = MDashboardRiver.get_user_rivers(user.pk) - preferences = json.decode(user.profile.preferences) - + unmoderated_feeds = RecommendedFeed.objects.filter( + is_public=False, declined_date__isnull=True + ).select_related("feed")[:2] + statistics = MStatistics.all() + social_profile = MSocialProfile.get_user(user.pk) + custom_styling = MCustomStyling.get_user(user.pk) + dashboard_rivers = MDashboardRiver.get_user_rivers(user.pk) + preferences = json.decode(user.profile.preferences) + if not user.is_active: - url = "https://%s%s" % (Site.objects.get_current().domain, - reverse('stripe-form')) + url = "https://%s%s" % (Site.objects.get_current().domain, reverse("stripe-form")) return HttpResponseRedirect(url) logging.user(request, "~FBLoading dashboard") return { - 'user_profile' : user.profile, - 'preferences' : preferences, - 'feed_count' : feed_count, - 'custom_styling' : custom_styling, - 'dashboard_rivers' : dashboard_rivers, - 'account_images' : list(range(1, 4)), + "user_profile": user.profile, + "preferences": preferences, + "feed_count": feed_count, + "custom_styling": custom_styling, + "dashboard_rivers": dashboard_rivers, + "account_images": list(range(1, 4)), # 'recommended_feeds' : recommended_feeds, - 'unmoderated_feeds' : unmoderated_feeds, - 'statistics' : statistics, - 'social_profile' : social_profile, - 'debug' : settings.DEBUG, - 'debug_assets' : settings.DEBUG_ASSETS, + "unmoderated_feeds": unmoderated_feeds, + "statistics": statistics, + "social_profile": social_profile, + "debug": settings.DEBUG, + "debug_assets": settings.DEBUG_ASSETS, }, "reader/dashboard.xhtml" -@render_to('reader/dashboard.xhtml') + +@render_to("reader/dashboard.xhtml") def welcome_req(request, **kwargs): return welcome(request, **kwargs) + def welcome(request, **kwargs): - user = get_user(request) - statistics = MStatistics.all() - social_profile = MSocialProfile.get_user(user.pk) - + user = get_user(request) + statistics = MStatistics.all() + social_profile = MSocialProfile.get_user(user.pk) + if request.method == "POST": - if request.POST.get('submit', '').startswith('log'): - login_form = LoginForm(request.POST, prefix='login') - signup_form = SignupForm(prefix='signup') + if request.POST.get("submit", 
"").startswith("log"): + login_form = LoginForm(request.POST, prefix="login") + signup_form = SignupForm(prefix="signup") else: - signup_form = SignupForm(request.POST, prefix='signup') - return { - "form": signup_form - }, "accounts/signup.html" + signup_form = SignupForm(request.POST, prefix="signup") + return {"form": signup_form}, "accounts/signup.html" else: - login_form = LoginForm(prefix='login') - signup_form = SignupForm(prefix='signup') - + login_form = LoginForm(prefix="login") + signup_form = SignupForm(prefix="signup") + logging.user(request, "~FBLoading welcome") - + return { - 'user_profile' : hasattr(user, 'profile') and user.profile, - 'login_form' : login_form, - 'signup_form' : signup_form, - 'statistics' : statistics, - 'social_profile' : social_profile, - 'post_request' : request.method == 'POST', + "user_profile": hasattr(user, "profile") and user.profile, + "login_form": login_form, + "signup_form": signup_form, + "statistics": statistics, + "social_profile": social_profile, + "post_request": request.method == "POST", }, "reader/welcome.xhtml" + @never_cache def login(request): code = -1 message = "" if request.method == "POST": - form = LoginForm(request.POST, prefix='login') + form = LoginForm(request.POST, prefix="login") if form.is_valid(): - login_user(request, form.get_user(), backend='django.contrib.auth.backends.ModelBackend') - if request.POST.get('api'): + login_user(request, form.get_user(), backend="django.contrib.auth.backends.ModelBackend") + if request.POST.get("api"): logging.user(form.get_user(), "~FG~BB~SKiPhone Login~FW") code = 1 else: logging.user(form.get_user(), "~FG~BBLogin~FW") - next_url = request.POST.get('next', '') + next_url = request.POST.get("next", "") if next_url: return HttpResponseRedirect(next_url) - return HttpResponseRedirect(reverse('index')) + return HttpResponseRedirect(reverse("index")) else: message = list(form.errors.items())[0][1][0] - if request.POST.get('api'): - return HttpResponse(json.encode(dict(code=code, message=message)), content_type='application/json') + if request.POST.get("api"): + return HttpResponse(json.encode(dict(code=code, message=message)), content_type="application/json") else: return index(request) - + + @never_cache -@render_to('accounts/signup.html') +@render_to("accounts/signup.html") def signup(request): if request.method == "POST": if settings.ENFORCE_SIGNUP_CAPTCHA: - signup_form = SignupForm(request.POST, prefix='signup') - return { - "form": signup_form - } + signup_form = SignupForm(request.POST, prefix="signup") + return {"form": signup_form} - form = SignupForm(prefix='signup', data=request.POST) + form = SignupForm(prefix="signup", data=request.POST) if form.is_valid(): new_user = form.save() - login_user(request, new_user, backend='django.contrib.auth.backends.ModelBackend') + login_user(request, new_user, backend="django.contrib.auth.backends.ModelBackend") logging.user(new_user, "~FG~SB~BBNEW SIGNUP: ~FW%s" % new_user.email) if not new_user.is_active: - url = "https://%s%s" % (Site.objects.get_current().domain, - reverse('stripe-form')) + url = "https://%s%s" % (Site.objects.get_current().domain, reverse("stripe-form")) return HttpResponseRedirect(url) else: - return HttpResponseRedirect(reverse('index')) - + return HttpResponseRedirect(reverse("index")) + return index(request) - + + @never_cache def logout(request): logging.user(request, "~FG~BBLogout~FW") logout_user(request) - - if request.GET.get('api'): - return HttpResponse(json.encode(dict(code=1)), 
content_type='application/json') + + if request.GET.get("api"): + return HttpResponse(json.encode(dict(code=1)), content_type="application/json") else: - return HttpResponseRedirect(reverse('index')) + return HttpResponseRedirect(reverse("index")) + def autologin(request, username, secret): - next = request.GET.get('next', '') - + next = request.GET.get("next", "") + if not username or not secret: return HttpResponseForbidden() - + profile = Profile.objects.filter(user__username=username, secret_token=secret) if not profile: return HttpResponseForbidden() user = profile[0].user user.backend = settings.AUTHENTICATION_BACKENDS[0] - login_user(request, user, backend='django.contrib.auth.backends.ModelBackend') - logging.user(user, "~FG~BB~SKAuto-Login. Next stop: %s~FW" % (next if next else 'Homepage',)) - - if next and not next.startswith('/'): - next = '?next=' + next - return HttpResponseRedirect(reverse('index') + next) + login_user(request, user, backend="django.contrib.auth.backends.ModelBackend") + logging.user(user, "~FG~BB~SKAuto-Login. Next stop: %s~FW" % (next if next else "Homepage",)) + + if next and not next.startswith("/"): + next = "?next=" + next + return HttpResponseRedirect(reverse("index") + next) elif next: return HttpResponseRedirect(next) else: - return HttpResponseRedirect(reverse('index')) - + return HttpResponseRedirect(reverse("index")) + + @ratelimit(minutes=1, requests=60) @never_cache @json.json_view def load_feeds(request): - user = get_user(request) - feeds = {} - include_favicons = is_true(request.GET.get('include_favicons', False)) - flat = is_true(request.GET.get('flat', False)) - update_counts = is_true(request.GET.get('update_counts', True)) - version = int(request.GET.get('v', 1)) - - if include_favicons == 'false': include_favicons = False - if update_counts == 'false': update_counts = False - if flat == 'false': flat = False - - if flat: return load_feeds_flat(request) + user = get_user(request) + feeds = {} + include_favicons = is_true(request.GET.get("include_favicons", False)) + flat = is_true(request.GET.get("flat", False)) + update_counts = is_true(request.GET.get("update_counts", True)) + version = int(request.GET.get("v", 1)) + + if include_favicons == "false": + include_favicons = False + if update_counts == "false": + update_counts = False + if flat == "false": + flat = False + + if flat: + return load_feeds_flat(request) platform = extract_user_agent(request) - if platform in ['iPhone', 'iPad', 'Androd']: + if platform in ["iPhone", "iPad", "Androd"]: # Remove this check once the iOS and Android updates go out which have update_counts=False # and then guarantee a refresh_feeds call update_counts = False - + try: folders = UserSubscriptionFolders.objects.get(user=user) except UserSubscriptionFolders.DoesNotExist: @@ -344,10 +349,10 @@ def load_feeds(request): except UserSubscriptionFolders.MultipleObjectsReturned: UserSubscriptionFolders.objects.filter(user=user)[1:].delete() folders = UserSubscriptionFolders.objects.get(user=user) - - user_subs = UserSubscription.objects.select_related('feed').filter(user=user) + + user_subs = UserSubscription.objects.select_related("feed").filter(user=user) notifications = MUserFeedNotification.feeds_for_user(user.pk) - + day_ago = datetime.datetime.now() - datetime.timedelta(days=1) scheduled_feeds = [] for sub in user_subs: @@ -355,8 +360,9 @@ def load_feeds(request): if update_counts and sub.needs_unread_recalc: sub.calculate_feed_scores(silent=True) feeds[pk] = 
sub.canonical(include_favicon=include_favicons) - - if not sub.active: continue + + if not sub.active: + continue if pk in notifications: feeds[pk].update(notifications[pk]) if not sub.feed.active and not sub.feed.has_feed_exception: @@ -365,22 +371,24 @@ def load_feeds(request): scheduled_feeds.append(sub.feed.pk) elif sub.feed.next_scheduled_update < day_ago: scheduled_feeds.append(sub.feed.pk) - + if len(scheduled_feeds) > 0 and request.user.is_authenticated: - logging.user(request, "~SN~FMTasking the scheduling immediate fetch of ~SB%s~SN feeds..." % - len(scheduled_feeds)) + logging.user( + request, + "~SN~FMTasking the scheduling immediate fetch of ~SB%s~SN feeds..." % len(scheduled_feeds), + ) ScheduleImmediateFetches.apply_async(kwargs=dict(feed_ids=scheduled_feeds, user_id=user.pk)) starred_counts, starred_count = MStarredStoryCounts.user_counts(user.pk, include_total=True) if not starred_count and len(starred_counts): starred_count = MStarredStory.objects(user_id=user.pk).count() - + saved_searches = MSavedSearch.user_searches(user.pk) - + social_params = { - 'user_id': user.pk, - 'include_favicon': include_favicons, - 'update_counts': update_counts, + "user_id": user.pk, + "include_favicon": include_favicons, + "update_counts": update_counts, } social_feeds = MSocialSubscription.feeds(**social_params) social_profile = MSocialProfile.profile(user.pk) @@ -391,74 +399,81 @@ def load_feeds(request): if not user_subs: categories = MCategory.serialize() - logging.user(request, "~FB~SBLoading ~FY%s~FB/~FM%s~FB feeds/socials%s" % ( - len(list(feeds.keys())), len(social_feeds), '. ~FCUpdating counts.' if update_counts else '')) + logging.user( + request, + "~FB~SBLoading ~FY%s~FB/~FM%s~FB feeds/socials%s" + % (len(list(feeds.keys())), len(social_feeds), ". ~FCUpdating counts." 
if update_counts else ""), + ) data = { - 'feeds': list(feeds.values()) if version == 2 else feeds, - 'social_feeds': social_feeds, - 'social_profile': social_profile, - 'social_services': social_services, - 'user_profile': user.profile, + "feeds": list(feeds.values()) if version == 2 else feeds, + "social_feeds": social_feeds, + "social_profile": social_profile, + "social_services": social_services, + "user_profile": user.profile, "is_staff": user.is_staff, - 'user_id': user.pk, - 'folders': json.decode(folders.folders), - 'starred_count': starred_count, - 'starred_counts': starred_counts, - 'saved_searches': saved_searches, - 'dashboard_rivers': dashboard_rivers, - 'categories': categories, - 'share_ext_token': user.profile.secret_token, + "user_id": user.pk, + "folders": json.decode(folders.folders), + "starred_count": starred_count, + "starred_counts": starred_counts, + "saved_searches": saved_searches, + "dashboard_rivers": dashboard_rivers, + "categories": categories, + "share_ext_token": user.profile.secret_token, } return data + @json.json_view def load_feed_favicons(request): user = get_user(request) - feed_ids = request.GET.getlist('feed_ids') or request.GET.getlist('feed_ids[]') - + feed_ids = request.GET.getlist("feed_ids") or request.GET.getlist("feed_ids[]") + if not feed_ids: - user_subs = UserSubscription.objects.select_related('feed').filter(user=user, active=True) - feed_ids = [sub['feed__pk'] for sub in user_subs.values('feed__pk')] + user_subs = UserSubscription.objects.select_related("feed").filter(user=user, active=True) + feed_ids = [sub["feed__pk"] for sub in user_subs.values("feed__pk")] feed_icons = dict([(i.feed_id, i.data) for i in MFeedIcon.objects(feed_id__in=feed_ids)]) - + return feed_icons + def load_feeds_flat(request): user = request.user - include_favicons = is_true(request.GET.get('include_favicons', False)) - update_counts = is_true(request.GET.get('update_counts', True)) - include_inactive = is_true(request.GET.get('include_inactive', False)) - background_ios = is_true(request.GET.get('background_ios', False)) - + include_favicons = is_true(request.GET.get("include_favicons", False)) + update_counts = is_true(request.GET.get("update_counts", True)) + include_inactive = is_true(request.GET.get("include_inactive", False)) + background_ios = is_true(request.GET.get("background_ios", False)) + feeds = {} inactive_feeds = {} day_ago = datetime.datetime.now() - datetime.timedelta(days=1) scheduled_feeds = [] - iphone_version = "2.1" # Preserved forever. Don't change. + iphone_version = "2.1" # Preserved forever. Don't change. 
latest_ios_build = "52" latest_ios_version = "5.0.0b2" - - if include_favicons == 'false': include_favicons = False - if update_counts == 'false': update_counts = False - + + if include_favicons == "false": + include_favicons = False + if update_counts == "false": + update_counts = False + if not user.is_authenticated: return HttpResponseForbidden() - + try: folders = UserSubscriptionFolders.objects.get(user=user) except UserSubscriptionFolders.DoesNotExist: folders = [] - - user_subs = UserSubscription.objects.select_related('feed').filter(user=user, active=True) + + user_subs = UserSubscription.objects.select_related("feed").filter(user=user, active=True) notifications = MUserFeedNotification.feeds_for_user(user.pk) if not user_subs and folders: folders.auto_activate() - user_subs = UserSubscription.objects.select_related('feed').filter(user=user, active=True) + user_subs = UserSubscription.objects.select_related("feed").filter(user=user, active=True) if include_inactive: - inactive_subs = UserSubscription.objects.select_related('feed').filter(user=user, active=False) - + inactive_subs = UserSubscription.objects.select_related("feed").filter(user=user, active=False) + for sub in user_subs: pk = sub.feed_id if update_counts and sub.needs_unread_recalc: @@ -472,28 +487,28 @@ def load_feeds_flat(request): scheduled_feeds.append(sub.feed.pk) if pk in notifications: feeds[pk].update(notifications[pk]) - - + if include_inactive: for sub in inactive_subs: inactive_feeds[sub.feed_id] = sub.canonical(include_favicon=include_favicons) - + if len(scheduled_feeds) > 0 and request.user.is_authenticated: - logging.user(request, "~SN~FMTasking the scheduling immediate fetch of ~SB%s~SN feeds..." % - len(scheduled_feeds)) + logging.user( + request, + "~SN~FMTasking the scheduling immediate fetch of ~SB%s~SN feeds..." % len(scheduled_feeds), + ) ScheduleImmediateFetches.apply_async(kwargs=dict(feed_ids=scheduled_feeds, user_id=user.pk)) - + flat_folders = [] flat_folders_with_inactive = [] if folders: flat_folders = folders.flatten_folders(feeds=feeds) - flat_folders_with_inactive = folders.flatten_folders(feeds=feeds, - inactive_feeds=inactive_feeds) - + flat_folders_with_inactive = folders.flatten_folders(feeds=feeds, inactive_feeds=inactive_feeds) + social_params = { - 'user_id': user.pk, - 'include_favicon': include_favicons, - 'update_counts': update_counts, + "user_id": user.pk, + "include_favicon": include_favicons, + "update_counts": update_counts, } social_feeds = MSocialSubscription.feeds(**social_params) social_profile = MSocialProfile.profile(user.pk) @@ -508,13 +523,21 @@ def load_feeds_flat(request): saved_searches = MSavedSearch.user_searches(user.pk) - logging.user(request, "~FB~SBLoading ~FY%s~FB/~FM%s~FB/~FR%s~FB feeds/socials/inactive ~FMflat~FB%s%s" % ( - len(list(feeds.keys())), len(social_feeds), len(inactive_feeds), '. ~FCUpdating counts.' if update_counts else '', - ' ~BB(background fetch)' if background_ios else '')) + logging.user( + request, + "~FB~SBLoading ~FY%s~FB/~FM%s~FB/~FR%s~FB feeds/socials/inactive ~FMflat~FB%s%s" + % ( + len(list(feeds.keys())), + len(social_feeds), + len(inactive_feeds), + ". ~FCUpdating counts." 
if update_counts else "", + " ~BB(background fetch)" if background_ios else "", + ), + ) data = { - "flat_folders": flat_folders, - "flat_folders_with_inactive": flat_folders_with_inactive, + "flat_folders": flat_folders, + "flat_folders_with_inactive": flat_folders_with_inactive, "feeds": feeds, "inactive_feeds": inactive_feeds if include_inactive else {"0": "Include `include_inactive=true`"}, "social_feeds": social_feeds, @@ -528,20 +551,22 @@ def load_feeds_flat(request): "latest_ios_build": latest_ios_build, "latest_ios_version": latest_ios_version, "categories": categories, - 'starred_count': starred_count, - 'starred_counts': starred_counts, - 'saved_searches': saved_searches, - 'share_ext_token': user.profile.secret_token, + "starred_count": starred_count, + "starred_counts": starred_counts, + "saved_searches": saved_searches, + "share_ext_token": user.profile.secret_token, } return data + class ratelimit_refresh_feeds(ratelimit): def should_ratelimit(self, request): - feed_ids = request.POST.getlist('feed_id') or request.POST.getlist('feed_id[]') + feed_ids = request.POST.getlist("feed_id") or request.POST.getlist("feed_id[]") if len(feed_ids) == 1: return False return True + @ratelimit_refresh_feeds(minutes=1, requests=30) @never_cache @json.json_view @@ -550,33 +575,34 @@ def refresh_feeds(request): start = datetime.datetime.now() start_time = time.time() user = get_user(request) - feed_ids = get_post.getlist('feed_id') or get_post.getlist('feed_id[]') - check_fetch_status = get_post.get('check_fetch_status') - favicons_fetching = get_post.getlist('favicons_fetching') or get_post.getlist('favicons_fetching[]') - social_feed_ids = [feed_id for feed_id in feed_ids if 'social:' in feed_id] + feed_ids = get_post.getlist("feed_id") or get_post.getlist("feed_id[]") + check_fetch_status = get_post.get("check_fetch_status") + favicons_fetching = get_post.getlist("favicons_fetching") or get_post.getlist("favicons_fetching[]") + social_feed_ids = [feed_id for feed_id in feed_ids if "social:" in feed_id] feed_ids = list(set(feed_ids) - set(social_feed_ids)) - + feeds = {} if feed_ids or (not social_feed_ids and not feed_ids): - feeds = UserSubscription.feeds_with_updated_counts(user, feed_ids=feed_ids, - check_fetch_status=check_fetch_status) + feeds = UserSubscription.feeds_with_updated_counts( + user, feed_ids=feed_ids, check_fetch_status=check_fetch_status + ) checkpoint1 = datetime.datetime.now() social_feeds = {} if social_feed_ids or (not social_feed_ids and not feed_ids): social_feeds = MSocialSubscription.feeds_with_updated_counts(user, social_feed_ids=social_feed_ids) checkpoint2 = datetime.datetime.now() - + favicons_fetching = [int(f) for f in favicons_fetching if f] feed_icons = {} if favicons_fetching: feed_icons = dict([(i.feed_id, i) for i in MFeedIcon.objects(feed_id__in=favicons_fetching)]) for feed_id, feed in list(feeds.items()): if feed_id in favicons_fetching and feed_id in feed_icons: - feeds[feed_id]['favicon'] = feed_icons[feed_id].data - feeds[feed_id]['favicon_color'] = feed_icons[feed_id].color - feeds[feed_id]['favicon_fetching'] = feed.get('favicon_fetching') + feeds[feed_id]["favicon"] = feed_icons[feed_id].data + feeds[feed_id]["favicon_color"] = feed_icons[feed_id].color + feeds[feed_id]["favicon_fetching"] = feed.get("favicon_fetching") - user_subs = UserSubscription.objects.filter(user=user, active=True).only('feed') + user_subs = UserSubscription.objects.filter(user=user, active=True).only("feed") sub_feed_ids = [s.feed_id for s in user_subs] if 
favicons_fetching: @@ -586,15 +612,15 @@ def refresh_feeds(request): if duplicate_feeds and duplicate_feeds[0].feed.pk in feeds: feeds[moved_feed_id] = feeds[duplicate_feeds[0].feed_id] - feeds[moved_feed_id]['dupe_feed_id'] = duplicate_feeds[0].feed_id - + feeds[moved_feed_id]["dupe_feed_id"] = duplicate_feeds[0].feed_id + if check_fetch_status: missing_feed_ids = list(set(feed_ids) - set(sub_feed_ids)) if missing_feed_ids: duplicate_feeds = DuplicateFeed.objects.filter(duplicate_feed_id__in=missing_feed_ids) for duplicate_feed in duplicate_feeds: - feeds[duplicate_feed.duplicate_feed_id] = {'id': duplicate_feed.feed_id} - + feeds[duplicate_feed.duplicate_feed_id] = {"id": duplicate_feed.feed_id} + interactions_count = MInteraction.user_unread_count(user.pk) if True or settings.DEBUG or check_fetch_status: @@ -602,21 +628,28 @@ def refresh_feeds(request): extra_fetch = "" if check_fetch_status or favicons_fetching: extra_fetch = "(%s/%s)" % (check_fetch_status, len(favicons_fetching)) - logging.user(request, "~FBRefreshing %s+%s feeds %s (%.4s/%.4s/%.4s)" % ( - len(list(feeds.keys())), len(list(social_feeds.keys())), extra_fetch, - (checkpoint1-start).total_seconds(), - (checkpoint2-start).total_seconds(), - (end-start).total_seconds(), - )) - - MAnalyticsLoader.add(page_load=time.time()-start_time) - + logging.user( + request, + "~FBRefreshing %s+%s feeds %s (%.4s/%.4s/%.4s)" + % ( + len(list(feeds.keys())), + len(list(social_feeds.keys())), + extra_fetch, + (checkpoint1 - start).total_seconds(), + (checkpoint2 - start).total_seconds(), + (end - start).total_seconds(), + ), + ) + + MAnalyticsLoader.add(page_load=time.time() - start_time) + return { - 'feeds': feeds, - 'social_feeds': social_feeds, - 'interactions_count': interactions_count, + "feeds": feeds, + "social_feeds": social_feeds, + "interactions_count": interactions_count, } + @json.json_view def interactions_count(request): user = get_user(request) @@ -624,9 +657,10 @@ def interactions_count(request): interactions_count = MInteraction.user_unread_count(user.pk) return { - 'interactions_count': interactions_count, + "interactions_count": interactions_count, } - + + @never_cache @ajax_login_required @json.json_view @@ -634,12 +668,12 @@ def feed_unread_count(request): get_post = getattr(request, request.method) start = time.time() user = request.user - feed_ids = get_post.getlist('feed_id') or get_post.getlist('feed_id[]') - - force = request.GET.get('force', False) - social_feed_ids = [feed_id for feed_id in feed_ids if 'social:' in feed_id] + feed_ids = get_post.getlist("feed_id") or get_post.getlist("feed_id[]") + + force = request.GET.get("force", False) + social_feed_ids = [feed_id for feed_id in feed_ids if "social:" in feed_id] feed_ids = list(set(feed_ids) - set(social_feed_ids)) - + feeds = {} if feed_ids: feeds = UserSubscription.feeds_with_updated_counts(user, feed_ids=feed_ids, force=force) @@ -647,71 +681,74 @@ def feed_unread_count(request): social_feeds = {} if social_feed_ids: social_feeds = MSocialSubscription.feeds_with_updated_counts(user, social_feed_ids=social_feed_ids) - + if len(feed_ids) == 1: if settings.DEBUG: feed_title = Feed.get_by_id(feed_ids[0]).feed_title else: feed_title = feed_ids[0] elif len(social_feed_ids) == 1: - social_profile = MSocialProfile.objects.get(user_id=social_feed_ids[0].replace('social:', '')) + social_profile = MSocialProfile.objects.get(user_id=social_feed_ids[0].replace("social:", "")) feed_title = social_profile.user.username if social_profile.user else "[deleted]" else: 
feed_title = "%s feeds" % (len(feeds) + len(social_feeds)) logging.user(request, "~FBUpdating unread count on: %s" % feed_title) - MAnalyticsLoader.add(page_load=time.time()-start) - - return {'feeds': feeds, 'social_feeds': social_feeds} - + MAnalyticsLoader.add(page_load=time.time() - start) + + return {"feeds": feeds, "social_feeds": social_feeds} + + def refresh_feed(request, feed_id): start = time.time() user = get_user(request) feed = get_object_or_404(Feed, pk=feed_id) - + feed = feed.update(force=True, compute_scores=False) usersub = UserSubscription.objects.get(user=user, feed=feed) usersub.calculate_feed_scores(silent=False) - + logging.user(request, "~FBRefreshing feed: %s" % feed) - MAnalyticsLoader.add(page_load=time.time()-start) - + MAnalyticsLoader.add(page_load=time.time() - start) + return load_single_feed(request, feed_id) - + + @never_cache @json.json_view def load_single_feed(request, feed_id): - start = time.time() - user = get_user(request) + start = time.time() + user = get_user(request) # offset = int(request.GET.get('offset', 0)) # limit = int(request.GET.get('limit', 6)) - limit = 6 - page = int(request.GET.get('page', 1)) - delay = int(request.GET.get('delay', 0)) - offset = limit * (page-1) - order = request.GET.get('order', 'newest') - read_filter = request.GET.get('read_filter', 'all') - query = request.GET.get('query', '').strip() - include_story_content = is_true(request.GET.get('include_story_content', True)) - include_hidden = is_true(request.GET.get('include_hidden', False)) - include_feeds = is_true(request.GET.get('include_feeds', False)) - message = None - user_search = None - + limit = 6 + page = int(request.GET.get("page", 1)) + delay = int(request.GET.get("delay", 0)) + offset = limit * (page - 1) + order = request.GET.get("order", "newest") + read_filter = request.GET.get("read_filter", "all") + query = request.GET.get("query", "").strip() + include_story_content = is_true(request.GET.get("include_story_content", True)) + include_hidden = is_true(request.GET.get("include_hidden", False)) + include_feeds = is_true(request.GET.get("include_feeds", False)) + message = None + user_search = None + dupe_feed_id = None user_profiles = [] now = localtime_for_timezone(datetime.datetime.now(), user.profile.timezone) - if not feed_id: raise Http404 + if not feed_id: + raise Http404 - feed_address = request.GET.get('feed_address') + feed_address = request.GET.get("feed_address") feed = Feed.get_by_id(feed_id, feed_address=feed_address) if not feed: raise Http404 - + try: usersub = UserSubscription.objects.get(user=user, feed=feed) except UserSubscription.DoesNotExist: usersub = None - + if feed.is_newsletter and not usersub: # User must be subscribed to a newsletter in order to read it raise Http404 @@ -719,11 +756,11 @@ def load_single_feed(request, feed_id): if feed.num_subscribers == 1 and not usersub and not user.is_staff: # This feed could be private so user must be subscribed in order to read it raise Http404 - + if page > 400: logging.user(request, "~BR~FK~SBOver page 400 on single feed: %s" % page) raise Http404 - + if query: if user.profile.is_premium: user_search = MUserSearch.get_user(user.pk) @@ -732,178 +769,199 @@ def load_single_feed(request, feed_id): else: stories = [] message = "You must be a premium subscriber to search." 
- elif read_filter == 'starred': - mstories = MStarredStory.objects( - user_id=user.pk, - story_feed_id=feed_id - ).order_by('%sstarred_date' % ('-' if order == 'newest' else ''))[offset:offset+limit] - stories = Feed.format_stories(mstories) - elif usersub and read_filter == 'unread': + elif read_filter == "starred": + mstories = MStarredStory.objects(user_id=user.pk, story_feed_id=feed_id).order_by( + "%sstarred_date" % ("-" if order == "newest" else "") + )[offset : offset + limit] + stories = Feed.format_stories(mstories) + elif usersub and read_filter == "unread": stories = usersub.get_stories(order=order, read_filter=read_filter, offset=offset, limit=limit) else: stories = feed.get_stories(offset, limit, order=order) - + checkpoint1 = time.time() - + try: stories, user_profiles = MSharedStory.stories_with_comments_and_profiles(stories, user.pk) except redis.ConnectionError: logging.user(request, "~BR~FK~SBRedis is unavailable for shared stories.") checkpoint2 = time.time() - + # Get intelligence classifier for user - + if usersub and usersub.is_trained: - classifier_feeds = list(MClassifierFeed.objects(user_id=user.pk, feed_id=feed_id, social_user_id=0)) + classifier_feeds = list(MClassifierFeed.objects(user_id=user.pk, feed_id=feed_id, social_user_id=0)) classifier_authors = list(MClassifierAuthor.objects(user_id=user.pk, feed_id=feed_id)) - classifier_titles = list(MClassifierTitle.objects(user_id=user.pk, feed_id=feed_id)) - classifier_tags = list(MClassifierTag.objects(user_id=user.pk, feed_id=feed_id)) + classifier_titles = list(MClassifierTitle.objects(user_id=user.pk, feed_id=feed_id)) + classifier_tags = list(MClassifierTag.objects(user_id=user.pk, feed_id=feed_id)) else: classifier_feeds = [] classifier_authors = [] classifier_titles = [] classifier_tags = [] - classifiers = get_classifiers_for_user(user, feed_id=feed_id, - classifier_feeds=classifier_feeds, - classifier_authors=classifier_authors, - classifier_titles=classifier_titles, - classifier_tags=classifier_tags) + classifiers = get_classifiers_for_user( + user, + feed_id=feed_id, + classifier_feeds=classifier_feeds, + classifier_authors=classifier_authors, + classifier_titles=classifier_titles, + classifier_tags=classifier_tags, + ) checkpoint3 = time.time() - + unread_story_hashes = [] if stories: - if (read_filter == 'all' or query) and usersub: - unread_story_hashes = UserSubscription.story_hashes(user.pk, read_filter='unread', - feed_ids=[usersub.feed_id], - usersubs=[usersub], - cutoff_date=user.profile.unread_cutoff) - story_hashes = [story['story_hash'] for story in stories if story['story_hash']] - starred_stories = MStarredStory.objects(user_id=user.pk, - story_feed_id=feed.pk, - story_hash__in=story_hashes)\ - .hint([('user_id', 1), ('story_hash', 1)]) + if (read_filter == "all" or query) and usersub: + unread_story_hashes = UserSubscription.story_hashes( + user.pk, + read_filter="unread", + feed_ids=[usersub.feed_id], + usersubs=[usersub], + cutoff_date=user.profile.unread_cutoff, + ) + story_hashes = [story["story_hash"] for story in stories if story["story_hash"]] + starred_stories = MStarredStory.objects( + user_id=user.pk, story_feed_id=feed.pk, story_hash__in=story_hashes + ).hint([("user_id", 1), ("story_hash", 1)]) shared_story_hashes = MSharedStory.check_shared_story_hashes(user.pk, story_hashes) shared_stories = [] if shared_story_hashes: - shared_stories = MSharedStory.objects(user_id=user.pk, - story_hash__in=shared_story_hashes)\ - .hint([('story_hash', 1)])\ - .only('story_hash', 
'shared_date', 'comments') - starred_stories = dict([(story.story_hash, story) - for story in starred_stories]) - shared_stories = dict([(story.story_hash, dict(shared_date=story.shared_date, - comments=story.comments)) - for story in shared_stories]) - + shared_stories = ( + MSharedStory.objects(user_id=user.pk, story_hash__in=shared_story_hashes) + .hint([("story_hash", 1)]) + .only("story_hash", "shared_date", "comments") + ) + starred_stories = dict([(story.story_hash, story) for story in starred_stories]) + shared_stories = dict( + [ + (story.story_hash, dict(shared_date=story.shared_date, comments=story.comments)) + for story in shared_stories + ] + ) + checkpoint4 = time.time() - + for story in stories: if not include_story_content: - del story['story_content'] - story_date = localtime_for_timezone(story['story_date'], user.profile.timezone) + del story["story_content"] + story_date = localtime_for_timezone(story["story_date"], user.profile.timezone) nowtz = localtime_for_timezone(now, user.profile.timezone) - story['short_parsed_date'] = format_story_link_date__short(story_date, nowtz) - story['long_parsed_date'] = format_story_link_date__long(story_date, nowtz) + story["short_parsed_date"] = format_story_link_date__short(story_date, nowtz) + story["long_parsed_date"] = format_story_link_date__long(story_date, nowtz) if usersub: - story['read_status'] = 1 - if not user.profile.is_archive and story['story_date'] < user.profile.unread_cutoff: - story['read_status'] = 1 - elif (read_filter == 'all' or query) and usersub: - story['read_status'] = 1 if story['story_hash'] not in unread_story_hashes else 0 - elif read_filter == 'unread' and usersub: - story['read_status'] = 0 - if story['story_hash'] in starred_stories: - story['starred'] = True - starred_story = Feed.format_story(starred_stories[story['story_hash']]) - starred_date = localtime_for_timezone(starred_story['starred_date'], - user.profile.timezone) - story['starred_date'] = format_story_link_date__long(starred_date, now) - story['starred_timestamp'] = int(starred_date.timestamp()) - story['user_tags'] = starred_story['user_tags'] - story['user_notes'] = starred_story['user_notes'] - story['highlights'] = starred_story['highlights'] - if story['story_hash'] in shared_stories: - story['shared'] = True - shared_date = localtime_for_timezone(shared_stories[story['story_hash']]['shared_date'], - user.profile.timezone) - story['shared_date'] = format_story_link_date__long(shared_date, now) - story['shared_comments'] = strip_tags(shared_stories[story['story_hash']]['comments']) + story["read_status"] = 1 + if not user.profile.is_archive and story["story_date"] < user.profile.unread_cutoff: + story["read_status"] = 1 + elif (read_filter == "all" or query) and usersub: + story["read_status"] = 1 if story["story_hash"] not in unread_story_hashes else 0 + elif read_filter == "unread" and usersub: + story["read_status"] = 0 + if story["story_hash"] in starred_stories: + story["starred"] = True + starred_story = Feed.format_story(starred_stories[story["story_hash"]]) + starred_date = localtime_for_timezone(starred_story["starred_date"], user.profile.timezone) + story["starred_date"] = format_story_link_date__long(starred_date, now) + story["starred_timestamp"] = int(starred_date.timestamp()) + story["user_tags"] = starred_story["user_tags"] + story["user_notes"] = starred_story["user_notes"] + story["highlights"] = starred_story["highlights"] + if story["story_hash"] in shared_stories: + story["shared"] = True + shared_date = 
localtime_for_timezone( + shared_stories[story["story_hash"]]["shared_date"], user.profile.timezone + ) + story["shared_date"] = format_story_link_date__long(shared_date, now) + story["shared_comments"] = strip_tags(shared_stories[story["story_hash"]]["comments"]) else: - story['read_status'] = 1 - story['intelligence'] = { - 'feed': apply_classifier_feeds(classifier_feeds, feed), - 'author': apply_classifier_authors(classifier_authors, story), - 'tags': apply_classifier_tags(classifier_tags, story), - 'title': apply_classifier_titles(classifier_titles, story), + story["read_status"] = 1 + story["intelligence"] = { + "feed": apply_classifier_feeds(classifier_feeds, feed), + "author": apply_classifier_authors(classifier_authors, story), + "tags": apply_classifier_tags(classifier_tags, story), + "title": apply_classifier_titles(classifier_titles, story), } - story['score'] = UserSubscription.score_story(story['intelligence']) - + story["score"] = UserSubscription.score_story(story["intelligence"]) + # Intelligence feed_tags = json.decode(feed.data.popular_tags) if feed.data.popular_tags else [] feed_authors = json.decode(feed.data.popular_authors) if feed.data.popular_authors else [] - + if include_feeds: - feeds = Feed.objects.filter(pk__in=set([story['story_feed_id'] for story in stories])) + feeds = Feed.objects.filter(pk__in=set([story["story_feed_id"] for story in stories])) feeds = [f.canonical(include_favicon=False) for f in feeds] - + if usersub: usersub.feed_opens += 1 usersub.needs_unread_recalc = True try: - usersub.save(update_fields=['feed_opens', 'needs_unread_recalc']) + usersub.save(update_fields=["feed_opens", "needs_unread_recalc"]) except DatabaseError as e: logging.user(request, f"~BR~FK~SBNo changes in usersub, ignoring... {e}") - - diff1 = checkpoint1-start - diff2 = checkpoint2-start - diff3 = checkpoint3-start - diff4 = checkpoint4-start - timediff = time.time()-start + + diff1 = checkpoint1 - start + diff2 = checkpoint2 - start + diff3 = checkpoint3 - start + diff4 = checkpoint4 - start + timediff = time.time() - start last_update = relative_timesince(feed.last_update) time_breakdown = "" if timediff > 1 or settings.DEBUG: - time_breakdown = "~SN~FR(~SB%.4s/%.4s/%.4s/%.4s~SN)" % ( - diff1, diff2, diff3, diff4) - + time_breakdown = "~SN~FR(~SB%.4s/%.4s/%.4s/%.4s~SN)" % (diff1, diff2, diff3, diff4) + search_log = "~SN~FG(~SB%s~SN) " % query if query else "" - logging.user(request, "~FYLoading feed: ~SB%s%s (%s/%s) %s%s" % ( - feed.feed_title[:22], ('~SN/p%s' % page) if page > 1 else '', order, read_filter, search_log, time_breakdown)) - + logging.user( + request, + "~FYLoading feed: ~SB%s%s (%s/%s) %s%s" + % ( + feed.feed_title[:22], + ("~SN/p%s" % page) if page > 1 else "", + order, + read_filter, + search_log, + time_breakdown, + ), + ) + MAnalyticsLoader.add(page_load=timediff) - if hasattr(request, 'start_time'): + if hasattr(request, "start_time"): seconds = time.time() - request.start_time - RStats.add('page_load', duration=seconds) + RStats.add("page_load", duration=seconds) if not include_hidden: hidden_stories_removed = 0 new_stories = [] for story in stories: - if story['score'] >= 0: + if story["score"] >= 0: new_stories.append(story) else: hidden_stories_removed += 1 stories = new_stories - - data = dict(stories=stories, - user_profiles=user_profiles, - feed_tags=feed_tags, - feed_authors=feed_authors, - classifiers=classifiers, - updated=last_update, - user_search=user_search, - feed_id=feed.pk, - elapsed_time=round(float(timediff), 2), - message=message) 
- - if include_feeds: data['feeds'] = feeds - if not include_hidden: data['hidden_stories_removed'] = hidden_stories_removed - if dupe_feed_id: data['dupe_feed_id'] = dupe_feed_id + + data = dict( + stories=stories, + user_profiles=user_profiles, + feed_tags=feed_tags, + feed_authors=feed_authors, + classifiers=classifiers, + updated=last_update, + user_search=user_search, + feed_id=feed.pk, + elapsed_time=round(float(timediff), 2), + message=message, + ) + + if include_feeds: + data["feeds"] = feeds + if not include_hidden: + data["hidden_stories_removed"] = hidden_stories_removed + if dupe_feed_id: + data["dupe_feed_id"] = dupe_feed_id if not usersub: data.update(feed.canonical()) # if not usersub and feed.num_subscribers <= 1: # data = dict(code=-1, message="You must be subscribed to this feed.") - + # time.sleep(random.randint(1, 3)) if delay and user.is_staff: # time.sleep(random.randint(2, 7) / 10.0) @@ -917,13 +975,14 @@ def load_single_feed(request, feed_id): return data + def load_feed_page(request, feed_id): if not feed_id: raise Http404 - + feed = Feed.get_by_id(feed_id) if feed and feed.has_page and not feed.has_page_exception: - if settings.BACKED_BY_AWS.get('pages_on_node'): + if settings.BACKED_BY_AWS.get("pages_on_node"): domain = Site.objects.get_current().domain url = "https://%s/original_page/%s" % ( domain, @@ -936,180 +995,193 @@ def load_feed_page(request, feed_id): page_response = None if page_response and page_response.status_code == 200: response = HttpResponse(page_response.content, content_type="text/html; charset=utf-8") - response['Content-Encoding'] = 'deflate' - response['Last-Modified'] = page_response.headers.get('Last-modified') - response['Etag'] = page_response.headers.get('Etag') - response['Content-Length'] = str(len(page_response.content)) - logging.user(request, "~FYLoading original page (%s), proxied from node: ~SB%s bytes" % - (feed_id, len(page_response.content))) + response["Content-Encoding"] = "deflate" + response["Last-Modified"] = page_response.headers.get("Last-modified") + response["Etag"] = page_response.headers.get("Etag") + response["Content-Length"] = str(len(page_response.content)) + logging.user( + request, + "~FYLoading original page (%s), proxied from node: ~SB%s bytes" + % (feed_id, len(page_response.content)), + ) return response - - if settings.BACKED_BY_AWS['pages_on_s3'] and feed.s3_page: + + if settings.BACKED_BY_AWS["pages_on_s3"] and feed.s3_page: if settings.PROXY_S3_PAGES: key = settings.S3_CONN.Bucket(settings.S3_PAGES_BUCKET_NAME).Object(key=feed.s3_pages_key) if key: compressed_data = key.get()["Body"] response = HttpResponse(compressed_data, content_type="text/html; charset=utf-8") - response['Content-Encoding'] = 'gzip' - - logging.user(request, "~FYLoading original page, proxied: ~SB%s bytes" % - (len(compressed_data))) + response["Content-Encoding"] = "gzip" + + logging.user( + request, "~FYLoading original page, proxied: ~SB%s bytes" % (len(compressed_data)) + ) return response else: logging.user(request, "~FYLoading original page, non-proxied") - return HttpResponseRedirect('//%s/%s' % (settings.S3_PAGES_BUCKET_NAME, - feed.s3_pages_key)) - + return HttpResponseRedirect("//%s/%s" % (settings.S3_PAGES_BUCKET_NAME, feed.s3_pages_key)) + data = MFeedPage.get_data(feed_id=feed_id) - + if not data or not feed or not feed.has_page or feed.has_page_exception: logging.user(request, "~FYLoading original page, ~FRmissing") - return render(request, 'static/404_original_page.xhtml', {}, - content_type='text/html', - 
status=404) - + return render(request, "static/404_original_page.xhtml", {}, content_type="text/html", status=404) + logging.user(request, "~FYLoading original page, from the db") return HttpResponse(data, content_type="text/html; charset=utf-8") + @json.json_view def load_starred_stories(request): - user = get_user(request) - offset = int(request.GET.get('offset', 0)) - limit = int(request.GET.get('limit', 10)) - page = int(request.GET.get('page', 0)) - query = request.GET.get('query', '').strip() - order = request.GET.get('order', 'newest') - tag = request.GET.get('tag') - highlights = is_true(request.GET.get('highlights', False)) - story_hashes = request.GET.getlist('h') or request.GET.getlist('h[]') + user = get_user(request) + offset = int(request.GET.get("offset", 0)) + limit = int(request.GET.get("limit", 10)) + page = int(request.GET.get("page", 0)) + query = request.GET.get("query", "").strip() + order = request.GET.get("order", "newest") + tag = request.GET.get("tag") + highlights = is_true(request.GET.get("highlights", False)) + story_hashes = request.GET.getlist("h") or request.GET.getlist("h[]") story_hashes = story_hashes[:100] - version = int(request.GET.get('v', 1)) - now = localtime_for_timezone(datetime.datetime.now(), user.profile.timezone) - message = None - order_by = '-' if order == "newest" else "" - if page: offset = limit * (page - 1) - + version = int(request.GET.get("v", 1)) + now = localtime_for_timezone(datetime.datetime.now(), user.profile.timezone) + message = None + order_by = "-" if order == "newest" else "" + if page: + offset = limit * (page - 1) + if query: - # results = SearchStarredStory.query(user.pk, query) - # story_ids = [result.db_id for result in results] + # results = SearchStarredStory.query(user.pk, query) + # story_ids = [result.db_id for result in results] if user.profile.is_premium: - stories = MStarredStory.find_stories(query, user.pk, tag=tag, offset=offset, limit=limit, - order=order) + stories = MStarredStory.find_stories( + query, user.pk, tag=tag, offset=offset, limit=limit, order=order + ) else: stories = [] message = "You must be a premium subscriber to search." elif highlights: if user.profile.is_premium: mstories = MStarredStory.objects( - user_id=user.pk, - highlights__exists=True, - __raw__={"$where": "this.highlights.length > 0"} - ).order_by('%sstarred_date' % order_by)[offset:offset+limit] - stories = Feed.format_stories(mstories) + user_id=user.pk, highlights__exists=True, __raw__={"$where": "this.highlights.length > 0"} + ).order_by("%sstarred_date" % order_by)[offset : offset + limit] + stories = Feed.format_stories(mstories) else: stories = [] message = "You must be a premium subscriber to read through saved story highlights." elif tag: if user.profile.is_premium: - mstories = MStarredStory.objects( - user_id=user.pk, - user_tags__contains=tag - ).order_by('%sstarred_date' % order_by)[offset:offset+limit] - stories = Feed.format_stories(mstories) + mstories = MStarredStory.objects(user_id=user.pk, user_tags__contains=tag).order_by( + "%sstarred_date" % order_by + )[offset : offset + limit] + stories = Feed.format_stories(mstories) else: stories = [] message = "You must be a premium subscriber to read saved stories by tag." 
elif story_hashes: limit = 100 - mstories = MStarredStory.objects( - user_id=user.pk, - story_hash__in=story_hashes - ).order_by('%sstarred_date' % order_by)[offset:offset+limit] + mstories = MStarredStory.objects(user_id=user.pk, story_hash__in=story_hashes).order_by( + "%sstarred_date" % order_by + )[offset : offset + limit] stories = Feed.format_stories(mstories) else: - mstories = MStarredStory.objects( - user_id=user.pk - ).order_by('%sstarred_date' % order_by)[offset:offset+limit] + mstories = MStarredStory.objects(user_id=user.pk).order_by("%sstarred_date" % order_by)[ + offset : offset + limit + ] stories = Feed.format_stories(mstories) - + stories, user_profiles = MSharedStory.stories_with_comments_and_profiles(stories, user.pk, check_all=True) - - story_hashes = [story['story_hash'] for story in stories] - story_feed_ids = list(set(s['story_feed_id'] for s in stories)) - usersub_ids = UserSubscription.objects.filter(user__pk=user.pk, feed__pk__in=story_feed_ids).values('feed__pk') - usersub_ids = [us['feed__pk'] for us in usersub_ids] + + story_hashes = [story["story_hash"] for story in stories] + story_feed_ids = list(set(s["story_feed_id"] for s in stories)) + usersub_ids = UserSubscription.objects.filter(user__pk=user.pk, feed__pk__in=story_feed_ids).values( + "feed__pk" + ) + usersub_ids = [us["feed__pk"] for us in usersub_ids] unsub_feed_ids = list(set(story_feed_ids).difference(set(usersub_ids))) - unsub_feeds = Feed.objects.filter(pk__in=unsub_feed_ids) - unsub_feeds = dict((feed.pk, feed.canonical(include_favicon=False)) for feed in unsub_feeds) + unsub_feeds = Feed.objects.filter(pk__in=unsub_feed_ids) + unsub_feeds = dict((feed.pk, feed.canonical(include_favicon=False)) for feed in unsub_feeds) for story in stories: - if story['story_feed_id'] in unsub_feeds: continue - duplicate_feed = DuplicateFeed.objects.filter(duplicate_feed_id=story['story_feed_id']) - if not duplicate_feed: continue + if story["story_feed_id"] in unsub_feeds: + continue + duplicate_feed = DuplicateFeed.objects.filter(duplicate_feed_id=story["story_feed_id"]) + if not duplicate_feed: + continue feed_id = duplicate_feed[0].feed_id try: - saved_story = MStarredStory.objects.get(user_id=user.pk, story_hash=story['story_hash']) + saved_story = MStarredStory.objects.get(user_id=user.pk, story_hash=story["story_hash"]) saved_story.feed_id = feed_id - _, story_hash = MStory.split_story_hash(story['story_hash']) + _, story_hash = MStory.split_story_hash(story["story_hash"]) saved_story.story_hash = "%s:%s" % (feed_id, story_hash) saved_story.story_feed_id = feed_id - story['story_hash'] = saved_story.story_hash - story['story_feed_id'] = saved_story.story_feed_id + story["story_hash"] = saved_story.story_hash + story["story_feed_id"] = saved_story.story_feed_id saved_story.save() - logging.user(request, "~FCSaving new feed for starred story: ~SB%s -> %s" % (story['story_hash'], feed_id)) + logging.user( + request, "~FCSaving new feed for starred story: ~SB%s -> %s" % (story["story_hash"], feed_id) + ) except (MStarredStory.DoesNotExist, MStarredStory.MultipleObjectsReturned): - logging.user(request, "~FCCan't find feed for starred story: ~SB%s" % (story['story_hash'])) + logging.user(request, "~FCCan't find feed for starred story: ~SB%s" % (story["story_hash"])) continue - + shared_story_hashes = MSharedStory.check_shared_story_hashes(user.pk, story_hashes) shared_stories = [] if shared_story_hashes: - shared_stories = MSharedStory.objects(user_id=user.pk, - story_hash__in=shared_story_hashes)\ - 
.hint([('story_hash', 1)])\ - .only('story_hash', 'shared_date', 'comments') - shared_stories = dict([(story.story_hash, dict(shared_date=story.shared_date, - comments=story.comments)) - for story in shared_stories]) + shared_stories = ( + MSharedStory.objects(user_id=user.pk, story_hash__in=shared_story_hashes) + .hint([("story_hash", 1)]) + .only("story_hash", "shared_date", "comments") + ) + shared_stories = dict( + [ + (story.story_hash, dict(shared_date=story.shared_date, comments=story.comments)) + for story in shared_stories + ] + ) nowtz = localtime_for_timezone(now, user.profile.timezone) for story in stories: - story_date = localtime_for_timezone(story['story_date'], user.profile.timezone) - story['short_parsed_date'] = format_story_link_date__short(story_date, nowtz) - story['long_parsed_date'] = format_story_link_date__long(story_date, nowtz) - starred_date = localtime_for_timezone(story['starred_date'], user.profile.timezone) - story['starred_date'] = format_story_link_date__long(starred_date, nowtz) - story['starred_timestamp'] = int(starred_date.timestamp()) - story['read_status'] = 1 - story['starred'] = True - story['intelligence'] = { - 'feed': 1, - 'author': 0, - 'tags': 0, - 'title': 0, + story_date = localtime_for_timezone(story["story_date"], user.profile.timezone) + story["short_parsed_date"] = format_story_link_date__short(story_date, nowtz) + story["long_parsed_date"] = format_story_link_date__long(story_date, nowtz) + starred_date = localtime_for_timezone(story["starred_date"], user.profile.timezone) + story["starred_date"] = format_story_link_date__long(starred_date, nowtz) + story["starred_timestamp"] = int(starred_date.timestamp()) + story["read_status"] = 1 + story["starred"] = True + story["intelligence"] = { + "feed": 1, + "author": 0, + "tags": 0, + "title": 0, } - if story['story_hash'] in shared_stories: - story['shared'] = True - story['shared_comments'] = strip_tags(shared_stories[story['story_hash']]['comments']) - + if story["story_hash"] in shared_stories: + story["shared"] = True + story["shared_comments"] = strip_tags(shared_stories[story["story_hash"]]["comments"]) + search_log = "~SN~FG(~SB%s~SN)" % query if query else "" logging.user(request, "~FCLoading starred stories: ~SB%s stories %s" % (len(stories), search_log)) - + return { "stories": stories, "user_profiles": user_profiles, - 'feeds': list(unsub_feeds.values()) if version == 2 else unsub_feeds, + "feeds": list(unsub_feeds.values()) if version == 2 else unsub_feeds, "message": message, } + @json.json_view def starred_story_hashes(request): - user = get_user(request) - include_timestamps = is_true(request.GET.get('include_timestamps', False)) - - mstories = MStarredStory.objects( - user_id=user.pk - ).only('story_hash', 'starred_date', 'starred_updated').order_by('-starred_date') - + user = get_user(request) + include_timestamps = is_true(request.GET.get("include_timestamps", False)) + + mstories = ( + MStarredStory.objects(user_id=user.pk) + .only("story_hash", "starred_date", "starred_updated") + .order_by("-starred_date") + ) + if include_timestamps: story_hashes = [] for s in mstories: @@ -1119,21 +1191,22 @@ def starred_story_hashes(request): story_hashes.append((s.story_hash, date.strftime("%s"))) else: story_hashes = [s.story_hash for s in mstories] - - logging.user(request, "~FYLoading ~FCstarred story hashes~FY: %s story hashes" % - (len(story_hashes))) + + logging.user(request, "~FYLoading ~FCstarred story hashes~FY: %s story hashes" % (len(story_hashes))) return 
dict(starred_story_hashes=story_hashes) + def starred_stories_rss_feed(request, user_id, secret_token): return starred_stories_rss_feed_tag(request, user_id, secret_token, tag_slug=None) + def starred_stories_rss_feed_tag(request, user_id, secret_token, tag_slug): try: user = User.objects.get(pk=user_id) except User.DoesNotExist: raise Http404 - + if tag_slug: try: tag_counts = MStarredStoryCounts.objects.get(user_id=user_id, slug=tag_slug) @@ -1143,160 +1216,181 @@ def starred_stories_rss_feed_tag(request, user_id, secret_token, tag_slug): raise Http404 else: _, starred_count = MStarredStoryCounts.user_counts(user.pk, include_total=True) - + data = {} if tag_slug: - data['title'] = "Saved Stories - %s" % tag_counts.tag + data["title"] = "Saved Stories - %s" % tag_counts.tag else: - data['title'] = "Saved Stories" - data['link'] = "%s%s" % ( + data["title"] = "Saved Stories" + data["link"] = "%s%s" % ( settings.NEWSBLUR_URL, - reverse('saved-stories-tag', kwargs=dict(tag_name=tag_slug))) + reverse("saved-stories-tag", kwargs=dict(tag_name=tag_slug)), + ) if tag_slug: - data['description'] = "Stories saved by %s on NewsBlur with the tag \"%s\"." % (user.username, - tag_counts.tag) + data["description"] = 'Stories saved by %s on NewsBlur with the tag "%s".' % ( + user.username, + tag_counts.tag, + ) else: - data['description'] = "Stories saved by %s on NewsBlur." % (user.username) - data['lastBuildDate'] = datetime.datetime.utcnow() - data['generator'] = 'NewsBlur - %s' % settings.NEWSBLUR_URL - data['docs'] = None - data['author_name'] = user.username - data['feed_url'] = "%s%s" % ( + data["description"] = "Stories saved by %s on NewsBlur." % (user.username) + data["lastBuildDate"] = datetime.datetime.utcnow() + data["generator"] = "NewsBlur - %s" % settings.NEWSBLUR_URL + data["docs"] = None + data["author_name"] = user.username + data["feed_url"] = "%s%s" % ( settings.NEWSBLUR_URL, - reverse('starred-stories-rss-feed-tag', - kwargs=dict(user_id=user_id, secret_token=secret_token, tag_slug=tag_slug)), + reverse( + "starred-stories-rss-feed-tag", + kwargs=dict(user_id=user_id, secret_token=secret_token, tag_slug=tag_slug), + ), ) rss = feedgenerator.Atom1Feed(**data) if not tag_slug or not tag_counts.tag: - starred_stories = MStarredStory.objects( - user_id=user.pk - ).order_by('-starred_date').limit(25) + starred_stories = MStarredStory.objects(user_id=user.pk).order_by("-starred_date").limit(25) elif tag_counts.is_highlights: - starred_stories = MStarredStory.objects( - user_id=user.pk, - highlights__exists=True, - __raw__={"$where": "this.highlights.length > 0"} - ).order_by('-starred_date').limit(25) + starred_stories = ( + MStarredStory.objects( + user_id=user.pk, highlights__exists=True, __raw__={"$where": "this.highlights.length > 0"} + ) + .order_by("-starred_date") + .limit(25) + ) else: - starred_stories = MStarredStory.objects( - user_id=user.pk, - user_tags__contains=tag_counts.tag - ).order_by('-starred_date').limit(25) + starred_stories = ( + MStarredStory.objects(user_id=user.pk, user_tags__contains=tag_counts.tag) + .order_by("-starred_date") + .limit(25) + ) starred_stories = Feed.format_stories(starred_stories) for starred_story in starred_stories: story_data = { - 'title': smart_str(starred_story['story_title']), - 'link': starred_story['story_permalink'], - 'description': smart_str(starred_story['story_content']), - 'author_name': starred_story['story_authors'], - 'categories': starred_story['story_tags'], - 'unique_id': starred_story['story_permalink'], - 'pubdate': 
starred_story['starred_date'], + "title": smart_str(starred_story["story_title"]), + "link": starred_story["story_permalink"], + "description": smart_str(starred_story["story_content"]), + "author_name": starred_story["story_authors"], + "categories": starred_story["story_tags"], + "unique_id": starred_story["story_permalink"], + "pubdate": starred_story["starred_date"], } rss.add_item(**story_data) - - logging.user(request, "~FBGenerating ~SB%s~SN's saved story RSS feed (%s, %s stories): ~FM%s" % ( - user.username, - tag_counts.tag if tag_slug else "[All stories]", - tag_counts.count if tag_slug else starred_count, - request.META.get('HTTP_USER_AGENT', "")[:24] - )) - return HttpResponse(rss.writeString('utf-8'), content_type='application/rss+xml') + + logging.user( + request, + "~FBGenerating ~SB%s~SN's saved story RSS feed (%s, %s stories): ~FM%s" + % ( + user.username, + tag_counts.tag if tag_slug else "[All stories]", + tag_counts.count if tag_slug else starred_count, + request.META.get("HTTP_USER_AGENT", "")[:24], + ), + ) + return HttpResponse(rss.writeString("utf-8"), content_type="application/rss+xml") + def folder_rss_feed(request, user_id, secret_token, unread_filter, folder_slug): domain = Site.objects.get_current().domain - date_hack_2023 = (datetime.datetime.now() > datetime.datetime(2023, 7, 1)) + date_hack_2023 = datetime.datetime.now() > datetime.datetime(2023, 7, 1) try: user = User.objects.get(pk=user_id) except User.DoesNotExist: raise Http404 - + user_sub_folders = get_object_or_404(UserSubscriptionFolders, user=user) feed_ids, folder_title = user_sub_folders.feed_ids_under_folder_slug(folder_slug) - + usersubs = UserSubscription.subs_for_feeds(user.pk, feed_ids=feed_ids) if feed_ids and ((user.profile.is_archive and date_hack_2023) or (not date_hack_2023)): params = { - "user_id": user.pk, + "user_id": user.pk, "feed_ids": feed_ids, "offset": 0, "limit": 20, - "order": 'newest', - "read_filter": 'all', - "cache_prefix": "RSS:" + "order": "newest", + "read_filter": "all", + "cache_prefix": "RSS:", } story_hashes, unread_feed_story_hashes = UserSubscription.feed_stories(**params) else: story_hashes = [] - mstories = MStory.objects(story_hash__in=story_hashes).order_by('-story_date') + mstories = MStory.objects(story_hash__in=story_hashes).order_by("-story_date") stories = Feed.format_stories(mstories) - + filtered_stories = [] - found_feed_ids = list(set([story['story_feed_id'] for story in stories])) + found_feed_ids = list(set([story["story_feed_id"] for story in stories])) trained_feed_ids = [sub.feed_id for sub in usersubs if sub.is_trained] - found_trained_feed_ids = list(set(trained_feed_ids) & set(found_feed_ids)) + found_trained_feed_ids = list(set(trained_feed_ids) & set(found_feed_ids)) if found_trained_feed_ids: - classifier_feeds = list(MClassifierFeed.objects(user_id=user.pk, - feed_id__in=found_trained_feed_ids, - social_user_id=0)) - classifier_authors = list(MClassifierAuthor.objects(user_id=user.pk, - feed_id__in=found_trained_feed_ids)) - classifier_titles = list(MClassifierTitle.objects(user_id=user.pk, - feed_id__in=found_trained_feed_ids)) - classifier_tags = list(MClassifierTag.objects(user_id=user.pk, - feed_id__in=found_trained_feed_ids)) + classifier_feeds = list( + MClassifierFeed.objects(user_id=user.pk, feed_id__in=found_trained_feed_ids, social_user_id=0) + ) + classifier_authors = list( + MClassifierAuthor.objects(user_id=user.pk, feed_id__in=found_trained_feed_ids) + ) + classifier_titles = list( + MClassifierTitle.objects(user_id=user.pk, 
feed_id__in=found_trained_feed_ids) + ) + classifier_tags = list(MClassifierTag.objects(user_id=user.pk, feed_id__in=found_trained_feed_ids)) else: classifier_feeds = [] classifier_authors = [] classifier_titles = [] classifier_tags = [] - - sort_classifiers_by_feed(user=user, feed_ids=found_feed_ids, - classifier_feeds=classifier_feeds, - classifier_authors=classifier_authors, - classifier_titles=classifier_titles, - classifier_tags=classifier_tags) + + sort_classifiers_by_feed( + user=user, + feed_ids=found_feed_ids, + classifier_feeds=classifier_feeds, + classifier_authors=classifier_authors, + classifier_titles=classifier_titles, + classifier_tags=classifier_tags, + ) for story in stories: - story['intelligence'] = { - 'feed': apply_classifier_feeds(classifier_feeds, story['story_feed_id']), - 'author': apply_classifier_authors(classifier_authors, story), - 'tags': apply_classifier_tags(classifier_tags, story), - 'title': apply_classifier_titles(classifier_titles, story), + story["intelligence"] = { + "feed": apply_classifier_feeds(classifier_feeds, story["story_feed_id"]), + "author": apply_classifier_authors(classifier_authors, story), + "tags": apply_classifier_tags(classifier_tags, story), + "title": apply_classifier_titles(classifier_titles, story), } - story['score'] = UserSubscription.score_story(story['intelligence']) - if unread_filter == 'focus' and story['score'] >= 1: + story["score"] = UserSubscription.score_story(story["intelligence"]) + if unread_filter == "focus" and story["score"] >= 1: filtered_stories.append(story) - elif unread_filter == 'unread' and story['score'] >= 0: + elif unread_filter == "unread" and story["score"] >= 0: filtered_stories.append(story) stories = filtered_stories - + data = {} - data['title'] = "%s from %s (%s sites)" % (folder_title, user.username, len(feed_ids)) - data['link'] = "https://%s%s" % ( - domain, - reverse('folder', kwargs=dict(folder_name=folder_title))) - data['description'] = "Unread stories in %s on NewsBlur. From %s's account and contains %s sites." % ( + data["title"] = "%s from %s (%s sites)" % (folder_title, user.username, len(feed_ids)) + data["link"] = "https://%s%s" % (domain, reverse("folder", kwargs=dict(folder_name=folder_title))) + data["description"] = "Unread stories in %s on NewsBlur. From %s's account and contains %s sites." 
% ( folder_title, user.username, - len(feed_ids)) - data['lastBuildDate'] = datetime.datetime.utcnow() - data['generator'] = 'NewsBlur - %s' % settings.NEWSBLUR_URL - data['docs'] = None - data['author_name'] = user.username - data['feed_url'] = "https://%s%s" % ( + len(feed_ids), + ) + data["lastBuildDate"] = datetime.datetime.utcnow() + data["generator"] = "NewsBlur - %s" % settings.NEWSBLUR_URL + data["docs"] = None + data["author_name"] = user.username + data["feed_url"] = "https://%s%s" % ( domain, - reverse('folder-rss-feed', - kwargs=dict(user_id=user_id, secret_token=secret_token, unread_filter=unread_filter, folder_slug=folder_slug)), + reverse( + "folder-rss-feed", + kwargs=dict( + user_id=user_id, + secret_token=secret_token, + unread_filter=unread_filter, + folder_slug=folder_slug, + ), + ), ) rss = feedgenerator.Atom1Feed(**data) for story in stories: - feed = Feed.get_by_id(story['story_feed_id']) + feed = Feed.get_by_id(story["story_feed_id"]) feed_title = feed.feed_title if feed else "" try: usersub = UserSubscription.objects.get(user=user, feed=feed) @@ -1304,58 +1398,59 @@ def folder_rss_feed(request, user_id, secret_token, unread_filter, folder_slug): feed_title = usersub.user_title except UserSubscription.DoesNotExist: usersub = None - + story_content = """%s

%s""" % ( - smart_str(story['story_content']), + smart_str(story["story_content"]), Site.objects.get_current().domain, - story['story_feed_id'], + story["story_feed_id"], feed_title, ) - story_content = re.sub(r'[\x00-\x08\x0B-\x0C\x0E-\x1F]', '', story_content) - story_title = "%s%s" % (("%s: " % feed_title) if feed_title else "", story['story_title']) + story_content = re.sub(r"[\x00-\x08\x0B-\x0C\x0E-\x1F]", "", story_content) + story_title = "%s%s" % (("%s: " % feed_title) if feed_title else "", story["story_title"]) story_data = { - 'title': story_title, - 'link': story['story_permalink'], - 'description': story_content, - 'categories': story['story_tags'], - 'unique_id': 'https://%s/site/%s/%s/' % (domain, story['story_feed_id'], story['guid_hash']), - 'pubdate': localtime_for_timezone(story['story_date'], user.profile.timezone), + "title": story_title, + "link": story["story_permalink"], + "description": story_content, + "categories": story["story_tags"], + "unique_id": "https://%s/site/%s/%s/" % (domain, story["story_feed_id"], story["guid_hash"]), + "pubdate": localtime_for_timezone(story["story_date"], user.profile.timezone), } - if story['story_authors']: - story_data['author_name'] = story['story_authors'] + if story["story_authors"]: + story_data["author_name"] = story["story_authors"] rss.add_item(**story_data) # TODO: Remove below date hack to accomodate users who paid for premium but want folder rss if not user.profile.is_archive and date_hack_2023: story_data = { - 'title': "You must have a premium archive subscription on NewsBlur to have RSS feeds for folders.", - 'link': "https://%s/?next=premium" % domain, - 'description': "You must have a premium archive subscription on NewsBlur to have RSS feeds for folders.", - 'unique_id': "https://%s/premium_only" % domain, - 'pubdate': localtime_for_timezone(datetime.datetime.now(), user.profile.timezone), + "title": "You must have a premium archive subscription on NewsBlur to have RSS feeds for folders.", + "link": "https://%s/?next=premium" % domain, + "description": "You must have a premium archive subscription on NewsBlur to have RSS feeds for folders.", + "unique_id": "https://%s/premium_only" % domain, + "pubdate": localtime_for_timezone(datetime.datetime.now(), user.profile.timezone), } rss.add_item(**story_data) - - logging.user(request, "~FBGenerating ~SB%s~SN's folder RSS feed (%s, %s stories): ~FM%s" % ( - user.username, - folder_title, - len(stories), - request.META.get('HTTP_USER_AGENT', "")[:24] - )) - return HttpResponse(rss.writeString('utf-8'), content_type='application/rss+xml') + + logging.user( + request, + "~FBGenerating ~SB%s~SN's folder RSS feed (%s, %s stories): ~FM%s" + % (user.username, folder_title, len(stories), request.META.get("HTTP_USER_AGENT", "")[:24]), + ) + return HttpResponse(rss.writeString("utf-8"), content_type="application/rss+xml") + @json.json_view def load_read_stories(request): - user = get_user(request) - offset = int(request.GET.get('offset', 0)) - limit = int(request.GET.get('limit', 10)) - page = int(request.GET.get('page', 0)) - order = request.GET.get('order', 'newest') - query = request.GET.get('query', '').strip() - now = localtime_for_timezone(datetime.datetime.now(), user.profile.timezone) + user = get_user(request) + offset = int(request.GET.get("offset", 0)) + limit = int(request.GET.get("limit", 10)) + page = int(request.GET.get("page", 0)) + order = request.GET.get("order", "newest") + query = request.GET.get("query", "").strip() + now = 
localtime_for_timezone(datetime.datetime.now(), user.profile.timezone) message = None - if page: offset = limit * (page - 1) - + if page: + offset = limit * (page - 1) + if query: stories = [] message = "Not implemented yet." @@ -1368,58 +1463,65 @@ def load_read_stories(request): story_hashes = RUserStory.get_read_stories(user.pk, offset=offset, limit=limit, order=order) mstories = MStory.objects(story_hash__in=story_hashes) stories = Feed.format_stories(mstories) - stories = sorted(stories, key=lambda story: story_hashes.index(story['story_hash']), - reverse=bool(order=="oldest")) - - stories, user_profiles = MSharedStory.stories_with_comments_and_profiles(stories, user.pk, check_all=True) - - story_hashes = [story['story_hash'] for story in stories] - story_feed_ids = list(set(s['story_feed_id'] for s in stories)) - usersub_ids = UserSubscription.objects.filter(user__pk=user.pk, feed__pk__in=story_feed_ids).values('feed__pk') - usersub_ids = [us['feed__pk'] for us in usersub_ids] - unsub_feed_ids = list(set(story_feed_ids).difference(set(usersub_ids))) - unsub_feeds = Feed.objects.filter(pk__in=unsub_feed_ids) - unsub_feeds = [feed.canonical(include_favicon=False) for feed in unsub_feeds] + stories = sorted( + stories, + key=lambda story: story_hashes.index(story["story_hash"]), + reverse=bool(order == "oldest"), + ) + + stories, user_profiles = MSharedStory.stories_with_comments_and_profiles(stories, user.pk, check_all=True) + + story_hashes = [story["story_hash"] for story in stories] + story_feed_ids = list(set(s["story_feed_id"] for s in stories)) + usersub_ids = UserSubscription.objects.filter(user__pk=user.pk, feed__pk__in=story_feed_ids).values( + "feed__pk" + ) + usersub_ids = [us["feed__pk"] for us in usersub_ids] + unsub_feed_ids = list(set(story_feed_ids).difference(set(usersub_ids))) + unsub_feeds = Feed.objects.filter(pk__in=unsub_feed_ids) + unsub_feeds = [feed.canonical(include_favicon=False) for feed in unsub_feeds] + + shared_stories = ( + MSharedStory.objects(user_id=user.pk, story_hash__in=story_hashes) + .hint([("story_hash", 1)]) + .only("story_hash", "shared_date", "comments") + ) + shared_stories = dict( + [ + (story.story_hash, dict(shared_date=story.shared_date, comments=story.comments)) + for story in shared_stories + ] + ) + starred_stories = MStarredStory.objects(user_id=user.pk, story_hash__in=story_hashes).hint( + [("user_id", 1), ("story_hash", 1)] + ) + starred_stories = dict([(story.story_hash, story) for story in starred_stories]) - shared_stories = MSharedStory.objects(user_id=user.pk, - story_hash__in=story_hashes)\ - .hint([('story_hash', 1)])\ - .only('story_hash', 'shared_date', 'comments') - shared_stories = dict([(story.story_hash, dict(shared_date=story.shared_date, - comments=story.comments)) - for story in shared_stories]) - starred_stories = MStarredStory.objects(user_id=user.pk, - story_hash__in=story_hashes)\ - .hint([('user_id', 1), ('story_hash', 1)]) - starred_stories = dict([(story.story_hash, story) - for story in starred_stories]) - nowtz = localtime_for_timezone(now, user.profile.timezone) for story in stories: - story_date = localtime_for_timezone(story['story_date'], user.profile.timezone) - story['short_parsed_date'] = format_story_link_date__short(story_date, nowtz) - story['long_parsed_date'] = format_story_link_date__long(story_date, nowtz) - story['read_status'] = 1 - story['intelligence'] = { - 'feed': 1, - 'author': 0, - 'tags': 0, - 'title': 0, + story_date = localtime_for_timezone(story["story_date"], 
user.profile.timezone) + story["short_parsed_date"] = format_story_link_date__short(story_date, nowtz) + story["long_parsed_date"] = format_story_link_date__long(story_date, nowtz) + story["read_status"] = 1 + story["intelligence"] = { + "feed": 1, + "author": 0, + "tags": 0, + "title": 0, } - if story['story_hash'] in starred_stories: - story['starred'] = True - starred_story = Feed.format_story(starred_stories[story['story_hash']]) - starred_date = localtime_for_timezone(starred_story['starred_date'], - user.profile.timezone) - story['starred_date'] = format_story_link_date__long(starred_date, now) - story['starred_timestamp'] = int(starred_date.timestamp()) - if story['story_hash'] in shared_stories: - story['shared'] = True - story['shared_comments'] = strip_tags(shared_stories[story['story_hash']]['comments']) - + if story["story_hash"] in starred_stories: + story["starred"] = True + starred_story = Feed.format_story(starred_stories[story["story_hash"]]) + starred_date = localtime_for_timezone(starred_story["starred_date"], user.profile.timezone) + story["starred_date"] = format_story_link_date__long(starred_date, now) + story["starred_timestamp"] = int(starred_date.timestamp()) + if story["story_hash"] in shared_stories: + story["shared"] = True + story["shared_comments"] = strip_tags(shared_stories[story["story_hash"]]["comments"]) + search_log = "~SN~FG(~SB%s~SN)" % query if query else "" logging.user(request, "~FCLoading read stories: ~SB%s stories %s" % (len(stories), search_log)) - + return { "stories": stories, "user_profiles": user_profiles, @@ -1427,41 +1529,42 @@ def load_read_stories(request): "message": message, } + @json.json_view def load_river_stories__redis(request): # get_post is request.REQUEST, since this endpoint needs to handle either # GET or POST requests, since the parameters for this endpoint can be # very long, at which point the max size of a GET url request is exceeded. 
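(Note: the comment above explains why this endpoint reads parameters from whichever QueryDict matches request.method. A minimal, self-contained sketch of that idea — FakeRequest and request_params are stand-ins for illustration, not project code:

    # request.method is "GET" or "POST"; getattr picks the matching QueryDict,
    # so very long feed lists can be POSTed without hitting URL length limits.
    class FakeRequest:
        # Stand-in for django.http.HttpRequest, for illustration only.
        def __init__(self, method, params):
            self.method = method
            self.GET = params if method == "GET" else {}
            self.POST = params if method == "POST" else {}

    def request_params(request):
        return getattr(request, request.method)

    params = request_params(FakeRequest("POST", {"limit": "12"}))
    print(params.get("limit", 12))  # -> "12"
)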
- get_post = getattr(request, request.method) - limit = int(get_post.get('limit', 12)) - start = time.time() - user = get_user(request) - message = None - feed_ids = get_post.getlist('feeds') or get_post.getlist('feeds[]') - feed_ids = [int(feed_id) for feed_id in feed_ids if feed_id] + get_post = getattr(request, request.method) + limit = int(get_post.get("limit", 12)) + start = time.time() + user = get_user(request) + message = None + feed_ids = get_post.getlist("feeds") or get_post.getlist("feeds[]") + feed_ids = [int(feed_id) for feed_id in feed_ids if feed_id] if not feed_ids: - feed_ids = get_post.getlist('f') or get_post.getlist('f[]') - feed_ids = [int(feed_id) for feed_id in get_post.getlist('f') if feed_id] - story_hashes = get_post.getlist('h') or get_post.getlist('h[]') - story_hashes = story_hashes[:100] - requested_hashes = len(story_hashes) + feed_ids = get_post.getlist("f") or get_post.getlist("f[]") + feed_ids = [int(feed_id) for feed_id in get_post.getlist("f") if feed_id] + story_hashes = get_post.getlist("h") or get_post.getlist("h[]") + story_hashes = story_hashes[:100] + requested_hashes = len(story_hashes) original_feed_ids = list(feed_ids) - page = int(get_post.get('page', 1)) - order = get_post.get('order', 'newest') - read_filter = get_post.get('read_filter', 'unread') - query = get_post.get('query', '').strip() - include_hidden = is_true(get_post.get('include_hidden', False)) - include_feeds = is_true(get_post.get('include_feeds', False)) - on_dashboard = is_true(get_post.get('dashboard', False)) or is_true(get_post.get('on_dashboard', False)) - infrequent = is_true(get_post.get('infrequent', False)) + page = int(get_post.get("page", 1)) + order = get_post.get("order", "newest") + read_filter = get_post.get("read_filter", "unread") + query = get_post.get("query", "").strip() + include_hidden = is_true(get_post.get("include_hidden", False)) + include_feeds = is_true(get_post.get("include_feeds", False)) + on_dashboard = is_true(get_post.get("dashboard", False)) or is_true(get_post.get("on_dashboard", False)) + infrequent = is_true(get_post.get("infrequent", False)) if infrequent: - infrequent = get_post.get('infrequent') - now = localtime_for_timezone(datetime.datetime.now(), user.profile.timezone) - usersubs = [] - code = 1 - user_search = None - offset = (page-1) * limit - story_date_order = "%sstory_date" % ('' if order == 'oldest' else '-') + infrequent = get_post.get("infrequent") + now = localtime_for_timezone(datetime.datetime.now(), user.profile.timezone) + usersubs = [] + code = 1 + user_search = None + offset = (page - 1) * limit + story_date_order = "%sstory_date" % ("" if order == "oldest" else "-") if user.pk == 86178: # Disable Michael_Novakhov account @@ -1470,46 +1573,47 @@ def load_river_stories__redis(request): if infrequent: feed_ids = Feed.low_volume_feeds(feed_ids, stories_per_month=infrequent) - + if story_hashes: unread_feed_story_hashes = None - read_filter = 'all' + read_filter = "all" mstories = MStory.objects(story_hash__in=story_hashes).order_by(story_date_order) stories = Feed.format_stories(mstories) elif query: if user.profile.is_premium: user_search = MUserSearch.get_user(user.pk) user_search.touch_search_date() - usersubs = UserSubscription.subs_for_feeds(user.pk, feed_ids=feed_ids, - read_filter='all') + usersubs = UserSubscription.subs_for_feeds(user.pk, feed_ids=feed_ids, read_filter="all") feed_ids = [sub.feed_id for sub in usersubs] if infrequent: feed_ids = Feed.low_volume_feeds(feed_ids, stories_per_month=infrequent) stories 
= Feed.find_feed_stories(feed_ids, query, order=order, offset=offset, limit=limit) mstories = stories - unread_feed_story_hashes = UserSubscription.story_hashes(user.pk, feed_ids=feed_ids, - read_filter="unread", order=order, - cutoff_date=user.profile.unread_cutoff) + unread_feed_story_hashes = UserSubscription.story_hashes( + user.pk, + feed_ids=feed_ids, + read_filter="unread", + order=order, + cutoff_date=user.profile.unread_cutoff, + ) else: stories = [] mstories = [] message = "You must be a premium subscriber to search." - elif read_filter == 'starred': - mstories = MStarredStory.objects( - user_id=user.pk, - story_feed_id__in=feed_ids - ).order_by('%sstarred_date' % ('-' if order == 'newest' else ''))[offset:offset+limit] - stories = Feed.format_stories(mstories) + elif read_filter == "starred": + mstories = MStarredStory.objects(user_id=user.pk, story_feed_id__in=feed_ids).order_by( + "%sstarred_date" % ("-" if order == "newest" else "") + )[offset : offset + limit] + stories = Feed.format_stories(mstories) else: - usersubs = UserSubscription.subs_for_feeds(user.pk, feed_ids=feed_ids, - read_filter=read_filter) + usersubs = UserSubscription.subs_for_feeds(user.pk, feed_ids=feed_ids, read_filter=read_filter) all_feed_ids = [f for f in feed_ids] feed_ids = [sub.feed_id for sub in usersubs] if infrequent: feed_ids = Feed.low_volume_feeds(feed_ids, stories_per_month=infrequent) if feed_ids: params = { - "user_id": user.pk, + "user_id": user.pk, "feed_ids": feed_ids, "all_feed_ids": all_feed_ids, "offset": offset, @@ -1527,91 +1631,101 @@ def load_river_stories__redis(request): mstories = MStory.objects(story_hash__in=story_hashes[:limit]).order_by(story_date_order) stories = Feed.format_stories(mstories) - - found_feed_ids = list(set([story['story_feed_id'] for story in stories])) + + found_feed_ids = list(set([story["story_feed_id"] for story in stories])) stories, user_profiles = MSharedStory.stories_with_comments_and_profiles(stories, user.pk) - + if not usersubs: - usersubs = UserSubscription.subs_for_feeds(user.pk, feed_ids=found_feed_ids, - read_filter=read_filter) - + usersubs = UserSubscription.subs_for_feeds(user.pk, feed_ids=found_feed_ids, read_filter=read_filter) + trained_feed_ids = [sub.feed_id for sub in usersubs if sub.is_trained] found_trained_feed_ids = list(set(trained_feed_ids) & set(found_feed_ids)) # Find starred stories if found_feed_ids: - if read_filter == 'starred': + if read_filter == "starred": starred_stories = mstories else: - story_hashes = [s['story_hash'] for s in stories] - starred_stories = MStarredStory.objects( - user_id=user.pk, - story_hash__in=story_hashes) - starred_stories = dict([(story.story_hash, dict(starred_date=story.starred_date, - user_tags=story.user_tags, - highlights=story.highlights, - user_notes=story.user_notes)) - for story in starred_stories]) + story_hashes = [s["story_hash"] for s in stories] + starred_stories = MStarredStory.objects(user_id=user.pk, story_hash__in=story_hashes) + starred_stories = dict( + [ + ( + story.story_hash, + dict( + starred_date=story.starred_date, + user_tags=story.user_tags, + highlights=story.highlights, + user_notes=story.user_notes, + ), + ) + for story in starred_stories + ] + ) else: starred_stories = {} - + # Intelligence classifiers for all feeds involved if found_trained_feed_ids: - classifier_feeds = list(MClassifierFeed.objects(user_id=user.pk, - feed_id__in=found_trained_feed_ids, - social_user_id=0)) - classifier_authors = list(MClassifierAuthor.objects(user_id=user.pk, - 
feed_id__in=found_trained_feed_ids)) - classifier_titles = list(MClassifierTitle.objects(user_id=user.pk, - feed_id__in=found_trained_feed_ids)) - classifier_tags = list(MClassifierTag.objects(user_id=user.pk, - feed_id__in=found_trained_feed_ids)) + classifier_feeds = list( + MClassifierFeed.objects(user_id=user.pk, feed_id__in=found_trained_feed_ids, social_user_id=0) + ) + classifier_authors = list( + MClassifierAuthor.objects(user_id=user.pk, feed_id__in=found_trained_feed_ids) + ) + classifier_titles = list( + MClassifierTitle.objects(user_id=user.pk, feed_id__in=found_trained_feed_ids) + ) + classifier_tags = list(MClassifierTag.objects(user_id=user.pk, feed_id__in=found_trained_feed_ids)) else: classifier_feeds = [] classifier_authors = [] classifier_titles = [] classifier_tags = [] - classifiers = sort_classifiers_by_feed(user=user, feed_ids=found_feed_ids, - classifier_feeds=classifier_feeds, - classifier_authors=classifier_authors, - classifier_titles=classifier_titles, - classifier_tags=classifier_tags) - + classifiers = sort_classifiers_by_feed( + user=user, + feed_ids=found_feed_ids, + classifier_feeds=classifier_feeds, + classifier_authors=classifier_authors, + classifier_titles=classifier_titles, + classifier_tags=classifier_tags, + ) + # Just need to format stories nowtz = localtime_for_timezone(now, user.profile.timezone) for story in stories: - if read_filter == 'starred': - story['read_status'] = 1 + if read_filter == "starred": + story["read_status"] = 1 else: - story['read_status'] = 0 - if read_filter == 'all' or query: - if (unread_feed_story_hashes is not None and - story['story_hash'] not in unread_feed_story_hashes): - story['read_status'] = 1 - story_date = localtime_for_timezone(story['story_date'], user.profile.timezone) - story['short_parsed_date'] = format_story_link_date__short(story_date, nowtz) - story['long_parsed_date'] = format_story_link_date__long(story_date, nowtz) - if story['story_hash'] in starred_stories: - story['starred'] = True - starred_date = localtime_for_timezone(starred_stories[story['story_hash']]['starred_date'], - user.profile.timezone) - story['starred_date'] = format_story_link_date__long(starred_date, now) - story['starred_timestamp'] = int(starred_date.timestamp()) - story['user_tags'] = starred_stories[story['story_hash']]['user_tags'] - story['user_notes'] = starred_stories[story['story_hash']]['user_notes'] - story['highlights'] = starred_stories[story['story_hash']]['highlights'] - story['intelligence'] = { - 'feed': apply_classifier_feeds(classifier_feeds, story['story_feed_id']), - 'author': apply_classifier_authors(classifier_authors, story), - 'tags': apply_classifier_tags(classifier_tags, story), - 'title': apply_classifier_titles(classifier_titles, story), + story["read_status"] = 0 + if read_filter == "all" or query: + if unread_feed_story_hashes is not None and story["story_hash"] not in unread_feed_story_hashes: + story["read_status"] = 1 + story_date = localtime_for_timezone(story["story_date"], user.profile.timezone) + story["short_parsed_date"] = format_story_link_date__short(story_date, nowtz) + story["long_parsed_date"] = format_story_link_date__long(story_date, nowtz) + if story["story_hash"] in starred_stories: + story["starred"] = True + starred_date = localtime_for_timezone( + starred_stories[story["story_hash"]]["starred_date"], user.profile.timezone + ) + story["starred_date"] = format_story_link_date__long(starred_date, now) + story["starred_timestamp"] = int(starred_date.timestamp()) + story["user_tags"] = 
starred_stories[story["story_hash"]]["user_tags"] + story["user_notes"] = starred_stories[story["story_hash"]]["user_notes"] + story["highlights"] = starred_stories[story["story_hash"]]["highlights"] + story["intelligence"] = { + "feed": apply_classifier_feeds(classifier_feeds, story["story_feed_id"]), + "author": apply_classifier_authors(classifier_authors, story), + "tags": apply_classifier_tags(classifier_tags, story), + "title": apply_classifier_titles(classifier_titles, story), } - story['score'] = UserSubscription.score_story(story['intelligence']) - + story["score"] = UserSubscription.score_story(story["intelligence"]) + if include_feeds: - feeds = Feed.objects.filter(pk__in=set([story['story_feed_id'] for story in stories])) + feeds = Feed.objects.filter(pk__in=set([story["story_feed_id"] for story in stories])) feeds = [feed.canonical(include_favicon=False) for feed in feeds] - + if not user.profile.is_premium and not include_feeds: message = "The full River of News is a premium feature." code = 0 @@ -1623,57 +1737,79 @@ def load_river_stories__redis(request): hidden_stories_removed = 0 new_stories = [] for story in stories: - if story['score'] >= 0: + if story["score"] >= 0: new_stories.append(story) else: hidden_stories_removed += 1 stories = new_stories - + # if page > 1: # import random # time.sleep(random.randint(10, 16)) - + diff = time.time() - start timediff = round(float(diff), 2) if requested_hashes and story_hashes: - logging.user(request, "~FB%sLoading ~FC%s~FB stories: %s%s" % - ("~FBAuto-" if on_dashboard else "", - requested_hashes, story_hashes[:3], f"...(+{len(story_hashes)-3})" if len(story_hashes) > 3 else "")) + logging.user( + request, + "~FB%sLoading ~FC%s~FB stories: %s%s" + % ( + "~FBAuto-" if on_dashboard else "", + requested_hashes, + story_hashes[:3], + f"...(+{len(story_hashes)-3})" if len(story_hashes) > 3 else "", + ), + ) else: - logging.user(request, "~FY%sLoading ~FC%sriver stories~FY: ~SBp%s~SN (%s/%s " - "stories, ~SN%s/%s/%s feeds, %s/%s)" % - ("~FCAuto-" if on_dashboard else "", - "~FB~SBinfrequent~SN~FC " if infrequent else "", - page, len(stories), len(mstories), len(found_feed_ids), - len(feed_ids), len(original_feed_ids), order, read_filter)) + logging.user( + request, + "~FY%sLoading ~FC%sriver stories~FY: ~SBp%s~SN (%s/%s " + "stories, ~SN%s/%s/%s feeds, %s/%s)" + % ( + "~FCAuto-" if on_dashboard else "", + "~FB~SBinfrequent~SN~FC " if infrequent else "", + page, + len(stories), + len(mstories), + len(found_feed_ids), + len(feed_ids), + len(original_feed_ids), + order, + read_filter, + ), + ) - if not on_dashboard and not (requested_hashes and story_hashes): - MAnalyticsLoader.add(page_load=diff) # Only count full pages, not individual stories - if hasattr(request, 'start_time'): + if not on_dashboard and not (requested_hashes and story_hashes): + MAnalyticsLoader.add(page_load=diff) # Only count full pages, not individual stories + if hasattr(request, "start_time"): seconds = time.time() - request.start_time - RStats.add('page_load', duration=seconds) + RStats.add("page_load", duration=seconds) - data = dict(code=code, - message=message, - stories=stories, - classifiers=classifiers, - elapsed_time=timediff, - user_search=user_search, - user_profiles=user_profiles) - - if include_feeds: data['feeds'] = feeds - if not include_hidden: data['hidden_stories_removed'] = hidden_stories_removed + data = dict( + code=code, + message=message, + stories=stories, + classifiers=classifiers, + elapsed_time=timediff, + user_search=user_search, + 
user_profiles=user_profiles, + ) + if include_feeds: + data["feeds"] = feeds + if not include_hidden: + data["hidden_stories_removed"] = hidden_stories_removed return data + @json.json_view def load_river_stories_widget(request): logging.user(request, "Widget load") river_stories_data = json.decode(load_river_stories__redis(request).content) timeout = 3 start = time.time() - + def load_url(url): original_url = url url = urllib.parse.urljoin(settings.NEWSBLUR_URL, url) @@ -1686,105 +1822,122 @@ def load_river_stories_widget(request): pass if not conn: # logging.user(request.user, '"%s" wasn\'t fetched, trying again: %s' % (url, e)) - url = url.replace('localhost', 'haproxy') + url = url.replace("localhost", "haproxy") try: conn = urllib.request.urlopen(url, context=scontext, timeout=timeout) except (urllib.error.HTTPError, urllib.error.URLError, socket.timeout) as e: - logging.user(request.user, '~FB"%s" ~FRnot fetched~FB in %ss: ~SB%s' % (url, (time.time() - start), e)) + logging.user( + request.user, '~FB"%s" ~FRnot fetched~FB in %ss: ~SB%s' % (url, (time.time() - start), e) + ) return None data = conn.read() if not url.startswith("data:"): - data = base64.b64encode(data).decode('utf-8') + data = base64.b64encode(data).decode("utf-8") logging.user(request.user, '~FB"%s" ~SBfetched~SN in ~SB%ss' % (url, (time.time() - start))) return dict(url=original_url, data=data) - + # Find the image thumbnails and download in parallel thumbnail_urls = [] - for story in river_stories_data['stories']: - thumbnail_values = list(story['secure_image_thumbnails'].values()) + for story in river_stories_data["stories"]: + thumbnail_values = list(story["secure_image_thumbnails"].values()) for thumbnail_value in thumbnail_values: - if 'data:' in thumbnail_value: + if "data:" in thumbnail_value: continue thumbnail_urls.append(thumbnail_value) break with concurrent.futures.ThreadPoolExecutor(max_workers=6) as executor: pages = executor.map(load_url, thumbnail_urls) - + # Reassemble thumbnails back into stories thumbnail_data = dict() for page in pages: - if not page: continue - thumbnail_data[page['url']] = page['data'] - for story in river_stories_data['stories']: - thumbnail_values = list(story['secure_image_thumbnails'].values()) + if not page: + continue + thumbnail_data[page["url"]] = page["data"] + for story in river_stories_data["stories"]: + thumbnail_values = list(story["secure_image_thumbnails"].values()) if thumbnail_values and thumbnail_values[0] in thumbnail_data: page_url = thumbnail_values[0] - story['select_thumbnail_data'] = thumbnail_data[page_url] - + story["select_thumbnail_data"] = thumbnail_data[page_url] + logging.user(request, ("Elapsed Time: %ss" % (time.time() - start))) - + return river_stories_data - + + @json.json_view def complete_river(request): - user = get_user(request) - feed_ids = request.POST.getlist('feeds') or request.POST.getlist('feeds[]') - feed_ids = [int(feed_id) for feed_id in feed_ids if feed_id and feed_id.isnumeric()] - page = int(request.POST.get('page', 1)) - read_filter = request.POST.get('read_filter', 'unread') + user = get_user(request) + feed_ids = request.POST.getlist("feeds") or request.POST.getlist("feeds[]") + feed_ids = [int(feed_id) for feed_id in feed_ids if feed_id and feed_id.isnumeric()] + page = int(request.POST.get("page", 1)) + read_filter = request.POST.get("read_filter", "unread") stories_truncated = 0 - - usersubs = UserSubscription.subs_for_feeds(user.pk, feed_ids=feed_ids, - read_filter=read_filter) + + usersubs = 
UserSubscription.subs_for_feeds(user.pk, feed_ids=feed_ids, read_filter=read_filter) feed_ids = [sub.feed_id for sub in usersubs] if feed_ids: - stories_truncated = UserSubscription.truncate_river(user.pk, feed_ids, read_filter, cache_prefix="dashboard:") - + stories_truncated = UserSubscription.truncate_river( + user.pk, feed_ids, read_filter, cache_prefix="dashboard:" + ) + if page >= 1: - logging.user(request, "~FC~BBRiver complete on page ~SB%s~SN, truncating ~SB%s~SN stories from ~SB%s~SN feeds" % (page, stories_truncated, len(feed_ids))) - + logging.user( + request, + "~FC~BBRiver complete on page ~SB%s~SN, truncating ~SB%s~SN stories from ~SB%s~SN feeds" + % (page, stories_truncated, len(feed_ids)), + ) + return dict(code=1, message="Truncated %s stories from %s" % (stories_truncated, len(feed_ids))) + @json.json_view def unread_story_hashes(request): - user = get_user(request) - feed_ids = request.GET.getlist('feed_id') or request.GET.getlist('feed_id[]') - feed_ids = [int(feed_id) for feed_id in feed_ids if feed_id] - include_timestamps = is_true(request.GET.get('include_timestamps', False)) - order = request.GET.get('order', 'newest') - read_filter = request.GET.get('read_filter', 'unread') - - story_hashes = UserSubscription.story_hashes(user.pk, feed_ids=feed_ids, - order=order, read_filter=read_filter, - include_timestamps=include_timestamps, - group_by_feed=True, - cutoff_date=user.profile.unread_cutoff) + user = get_user(request) + feed_ids = request.GET.getlist("feed_id") or request.GET.getlist("feed_id[]") + feed_ids = [int(feed_id) for feed_id in feed_ids if feed_id] + include_timestamps = is_true(request.GET.get("include_timestamps", False)) + order = request.GET.get("order", "newest") + read_filter = request.GET.get("read_filter", "unread") - logging.user(request, "~FYLoading ~FCunread story hashes~FY: ~SB%s feeds~SN (%s story hashes)" % - (len(feed_ids), len(story_hashes))) + story_hashes = UserSubscription.story_hashes( + user.pk, + feed_ids=feed_ids, + order=order, + read_filter=read_filter, + include_timestamps=include_timestamps, + group_by_feed=True, + cutoff_date=user.profile.unread_cutoff, + ) + + logging.user( + request, + "~FYLoading ~FCunread story hashes~FY: ~SB%s feeds~SN (%s story hashes)" + % (len(feed_ids), len(story_hashes)), + ) return dict(unread_feed_story_hashes=story_hashes) + @ajax_login_required @json.json_view def mark_all_as_read(request): code = 1 try: - days = int(request.POST.get('days', 0)) + days = int(request.POST.get("days", 0)) except ValueError: - return dict(code=-1, message="Days parameter must be an integer, not: %s" % - request.POST.get('days')) + return dict(code=-1, message="Days parameter must be an integer, not: %s" % request.POST.get("days")) read_date = datetime.datetime.utcnow() - datetime.timedelta(days=days) - + feeds = UserSubscription.objects.filter(user=request.user) - infrequent = is_true(request.POST.get('infrequent', False)) + infrequent = is_true(request.POST.get("infrequent", False)) if infrequent: - infrequent = request.POST.get('infrequent') + infrequent = request.POST.get("infrequent") feed_ids = Feed.low_volume_feeds([usersub.feed.pk for usersub in feeds], stories_per_month=infrequent) feeds = UserSubscription.objects.filter(user=request.user, feed_id__in=feed_ids) - + socialsubs = MSocialSubscription.objects.filter(user_id=request.user.pk) for subtype in [feeds, socialsubs]: for sub in subtype: @@ -1795,39 +1948,45 @@ def mark_all_as_read(request): sub.needs_unread_recalc = True sub.mark_read_date = read_date 
sub.save() - + r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL) - r.publish(request.user.username, 'reload:feeds') - - logging.user(request, "~FMMarking %s as read: ~SB%s days" % (("all" if not infrequent else "infrequent stories"), days,)) + r.publish(request.user.username, "reload:feeds") + + logging.user( + request, + "~FMMarking %s as read: ~SB%s days" + % ( + ("all" if not infrequent else "infrequent stories"), + days, + ), + ) return dict(code=code) - + + @ajax_login_required @json.json_view def mark_story_as_read(request): - story_ids = request.POST.getlist('story_id') or request.POST.getlist('story_id[]') + story_ids = request.POST.getlist("story_id") or request.POST.getlist("story_id[]") try: - feed_id = int(get_argument_or_404(request, 'feed_id')) + feed_id = int(get_argument_or_404(request, "feed_id")) except ValueError: - return dict(code=-1, errors=["You must pass a valid feed_id: %s" % - request.POST.get('feed_id')]) - + return dict(code=-1, errors=["You must pass a valid feed_id: %s" % request.POST.get("feed_id")]) + try: - usersub = UserSubscription.objects.select_related('feed').get(user=request.user, feed=feed_id) + usersub = UserSubscription.objects.select_related("feed").get(user=request.user, feed=feed_id) except Feed.DoesNotExist: duplicate_feed = DuplicateFeed.objects.filter(duplicate_feed_id=feed_id) if duplicate_feed: feed_id = duplicate_feed[0].feed_id try: - usersub = UserSubscription.objects.get(user=request.user, - feed=duplicate_feed[0].feed) - except (Feed.DoesNotExist): + usersub = UserSubscription.objects.get(user=request.user, feed=duplicate_feed[0].feed) + except Feed.DoesNotExist: return dict(code=-1, errors=["No feed exists for feed_id %d." % feed_id]) else: return dict(code=-1, errors=["No feed exists for feed_id %d." 
% feed_id]) except UserSubscription.DoesNotExist: usersub = None - + if usersub: data = usersub.mark_story_ids_as_read(story_ids, request=request) else: @@ -1835,30 +1994,33 @@ def mark_story_as_read(request): return data + @ajax_login_required @json.json_view def mark_story_hashes_as_read(request): - retrying_failed = is_true(request.POST.get('retrying_failed', False)) + retrying_failed = is_true(request.POST.get("retrying_failed", False)) r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL) try: - story_hashes = request.POST.getlist('story_hash') or request.POST.getlist('story_hash[]') + story_hashes = request.POST.getlist("story_hash") or request.POST.getlist("story_hash[]") except UnreadablePostError: return dict(code=-1, message="Missing `story_hash` list parameter.") - - feed_ids, friend_ids = RUserStory.mark_story_hashes_read(request.user.pk, story_hashes, username=request.user.username) + + feed_ids, friend_ids = RUserStory.mark_story_hashes_read( + request.user.pk, story_hashes, username=request.user.username + ) if request.user.profile.is_archive: RUserUnreadStory.mark_read(request.user.pk, story_hashes) - + if friend_ids: socialsubs = MSocialSubscription.objects.filter( - user_id=request.user.pk, - subscription_user_id__in=friend_ids) + user_id=request.user.pk, subscription_user_id__in=friend_ids + ) for socialsub in socialsubs: if not socialsub.needs_unread_recalc: socialsub.needs_unread_recalc = True socialsub.save() - r.publish(request.user.username, 'social:%s' % socialsub.subscription_user_id) + r.publish(request.user.username, "social:%s" % socialsub.subscription_user_id) # Also count on original subscription for feed_id in feed_ids: @@ -1868,55 +2030,59 @@ def mark_story_hashes_as_read(request): usersub.last_read_date = datetime.datetime.now() if not usersub.needs_unread_recalc: usersub.needs_unread_recalc = True - usersub.save(update_fields=['needs_unread_recalc', 'last_read_date']) + usersub.save(update_fields=["needs_unread_recalc", "last_read_date"]) else: - usersub.save(update_fields=['last_read_date']) - r.publish(request.user.username, 'feed:%s' % feed_id) - - hash_count = len(story_hashes) - logging.user(request, "~FYRead %s %s: %s %s" % ( - hash_count, 'story' if hash_count == 1 else 'stories', - story_hashes, - '(retrying failed)' if retrying_failed else '')) + usersub.save(update_fields=["last_read_date"]) + r.publish(request.user.username, "feed:%s" % feed_id) + + hash_count = len(story_hashes) + logging.user( + request, + "~FYRead %s %s: %s %s" + % ( + hash_count, + "story" if hash_count == 1 else "stories", + story_hashes, + "(retrying failed)" if retrying_failed else "", + ), + ) + + return dict(code=1, story_hashes=story_hashes, feed_ids=feed_ids, friend_user_ids=friend_ids) - return dict(code=1, story_hashes=story_hashes, - feed_ids=feed_ids, friend_user_ids=friend_ids) @ajax_login_required @json.json_view def mark_feed_stories_as_read(request): r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL) - feeds_stories = request.POST.get('feeds_stories', "{}") + feeds_stories = request.POST.get("feeds_stories", "{}") feeds_stories = json.decode(feeds_stories) - data = { - 'code': -1, - 'message': 'Nothing was marked as read' - } - + data = {"code": -1, "message": "Nothing was marked as read"} + for feed_id, story_ids in list(feeds_stories.items()): try: feed_id = int(feed_id) except ValueError: continue try: - usersub = UserSubscription.objects.select_related('feed').get(user=request.user, feed=feed_id) + usersub = 
UserSubscription.objects.select_related("feed").get(user=request.user, feed=feed_id) data = usersub.mark_story_ids_as_read(story_ids, request=request) except UserSubscription.DoesNotExist: return dict(code=-1, error="You are not subscribed to this feed_id: %d" % feed_id) except Feed.DoesNotExist: duplicate_feed = DuplicateFeed.objects.filter(duplicate_feed_id=feed_id) try: - if not duplicate_feed: raise Feed.DoesNotExist - usersub = UserSubscription.objects.get(user=request.user, - feed=duplicate_feed[0].feed) + if not duplicate_feed: + raise Feed.DoesNotExist + usersub = UserSubscription.objects.get(user=request.user, feed=duplicate_feed[0].feed) data = usersub.mark_story_ids_as_read(story_ids, request=request) except (UserSubscription.DoesNotExist, Feed.DoesNotExist): return dict(code=-1, error="No feed exists for feed_id: %d" % feed_id) - r.publish(request.user.username, 'feed:%s' % feed_id) - + r.publish(request.user.username, "feed:%s" % feed_id) + return data - + + @ajax_login_required @json.json_view def mark_social_stories_as_read(request): @@ -1924,103 +2090,113 @@ def mark_social_stories_as_read(request): errors = [] data = {} r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL) - users_feeds_stories = request.POST.get('users_feeds_stories', "{}") + users_feeds_stories = request.POST.get("users_feeds_stories", "{}") users_feeds_stories = json.decode(users_feeds_stories) for social_user_id, feeds in list(users_feeds_stories.items()): for feed_id, story_ids in list(feeds.items()): feed_id = int(feed_id) try: - socialsub = MSocialSubscription.objects.get(user_id=request.user.pk, - subscription_user_id=social_user_id) + socialsub = MSocialSubscription.objects.get( + user_id=request.user.pk, subscription_user_id=social_user_id + ) data = socialsub.mark_story_ids_as_read(story_ids, feed_id, request=request) except OperationError as e: code = -1 errors.append("Already read story: %s" % e) except MSocialSubscription.DoesNotExist: - MSocialSubscription.mark_unsub_story_ids_as_read(request.user.pk, social_user_id, - story_ids, feed_id, - request=request) + MSocialSubscription.mark_unsub_story_ids_as_read( + request.user.pk, social_user_id, story_ids, feed_id, request=request + ) except Feed.DoesNotExist: duplicate_feed = DuplicateFeed.objects.filter(duplicate_feed_id=feed_id) if duplicate_feed: try: - socialsub = MSocialSubscription.objects.get(user_id=request.user.pk, - subscription_user_id=social_user_id) - data = socialsub.mark_story_ids_as_read(story_ids, duplicate_feed[0].feed.pk, request=request) + socialsub = MSocialSubscription.objects.get( + user_id=request.user.pk, subscription_user_id=social_user_id + ) + data = socialsub.mark_story_ids_as_read( + story_ids, duplicate_feed[0].feed.pk, request=request + ) except (UserSubscription.DoesNotExist, Feed.DoesNotExist): code = -1 errors.append("No feed exists for feed_id %d." 
% feed_id) else: continue - r.publish(request.user.username, 'feed:%s' % feed_id) - r.publish(request.user.username, 'social:%s' % social_user_id) + r.publish(request.user.username, "feed:%s" % feed_id) + r.publish(request.user.username, "social:%s" % social_user_id) data.update(code=code, errors=errors) return data - -@required_params('story_id', feed_id=int) + + +@required_params("story_id", feed_id=int) @ajax_login_required @json.json_view def mark_story_as_unread(request): - story_id = request.POST.get('story_id', None) - feed_id = int(request.POST.get('feed_id', 0)) - + story_id = request.POST.get("story_id", None) + feed_id = int(request.POST.get("feed_id", 0)) + try: - usersub = UserSubscription.objects.select_related('feed').get(user=request.user, feed=feed_id) + usersub = UserSubscription.objects.select_related("feed").get(user=request.user, feed=feed_id) feed = usersub.feed except UserSubscription.DoesNotExist: usersub = None feed = Feed.get_by_id(feed_id) - + if usersub and not usersub.needs_unread_recalc: usersub.needs_unread_recalc = True - usersub.save(update_fields=['needs_unread_recalc']) - + usersub.save(update_fields=["needs_unread_recalc"]) + data = dict(code=0, payload=dict(story_id=story_id)) - + story, found_original = MStory.find_story(feed_id, story_id) - + if not story: logging.user(request, "~FY~SBUnread~SN story in feed: %s (NOT FOUND)" % (feed)) return dict(code=-1, message="Story not found.") message = RUserStory.story_can_be_marked_unread_by_user(story, request.user) if message: - data['code'] = -1 - data['message'] = message + data["code"] = -1 + data["message"] = message return data - + if usersub: data = usersub.invert_read_stories_after_unread_story(story, request) - - social_subs = MSocialSubscription.mark_dirty_sharing_story(user_id=request.user.pk, - story_feed_id=feed_id, - story_guid_hash=story.guid_hash) + + social_subs = MSocialSubscription.mark_dirty_sharing_story( + user_id=request.user.pk, story_feed_id=feed_id, story_guid_hash=story.guid_hash + ) dirty_count = social_subs and social_subs.count() dirty_count = ("(%s social_subs)" % dirty_count) if dirty_count else "" RUserStory.mark_story_hash_unread(request.user, story_hash=story.story_hash) - + r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL) - r.publish(request.user.username, 'feed:%s' % feed_id) + r.publish(request.user.username, "feed:%s" % feed_id) logging.user(request, "~FY~SBUnread~SN story in feed: %s %s" % (feed, dirty_count)) - + return data + @ajax_login_required @json.json_view -@required_params('story_hash') +@required_params("story_hash") def mark_story_hash_as_unread(request): r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL) - story_hashes = request.POST.getlist('story_hash') or request.POST.getlist('story_hash[]') + story_hashes = request.POST.getlist("story_hash") or request.POST.getlist("story_hash[]") is_list = len(story_hashes) > 1 datas = [] for story_hash in story_hashes: feed_id, _ = MStory.split_story_hash(story_hash) story, _ = MStory.find_story(feed_id, story_hash) if not story: - data = dict(code=-1, message="That story has been removed from the feed, no need to mark it unread.", story_hash=story_hash) + data = dict( + code=-1, + message="That story has been removed from the feed, no need to mark it unread.", + story_hash=story_hash, + ) if not is_list: return data else: @@ -2032,28 +2208,28 @@ def mark_story_hash_as_unread(request): return data else: datas.append(data) - + # Also count on original subscription usersubs = 
UserSubscription.objects.filter(user=request.user.pk, feed=feed_id) if usersubs: usersub = usersubs[0] if not usersub.needs_unread_recalc: usersub.needs_unread_recalc = True - usersub.save(update_fields=['needs_unread_recalc']) + usersub.save(update_fields=["needs_unread_recalc"]) data = usersub.invert_read_stories_after_unread_story(story, request) - r.publish(request.user.username, 'feed:%s' % feed_id) + r.publish(request.user.username, "feed:%s" % feed_id) feed_id, friend_ids = RUserStory.mark_story_hash_unread(request.user, story_hash) if friend_ids: socialsubs = MSocialSubscription.objects.filter( - user_id=request.user.pk, - subscription_user_id__in=friend_ids) + user_id=request.user.pk, subscription_user_id__in=friend_ids + ) for socialsub in socialsubs: if not socialsub.needs_unread_recalc: socialsub.needs_unread_recalc = True socialsub.save() - r.publish(request.user.username, 'social:%s' % socialsub.subscription_user_id) + r.publish(request.user.username, "social:%s" % socialsub.subscription_user_id) logging.user(request, "~FYUnread story in feed/socialsubs: %s/%s" % (feed_id, friend_ids)) @@ -2065,35 +2241,38 @@ def mark_story_hash_as_unread(request): return datas + @ajax_login_required @json.json_view def mark_feed_as_read(request): r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL) - feed_ids = request.POST.getlist('feed_id') or request.POST.getlist('feed_id[]') - cutoff_timestamp = int(request.POST.get('cutoff_timestamp', 0)) - direction = request.POST.get('direction', 'older') - infrequent = is_true(request.POST.get('infrequent', False)) + feed_ids = request.POST.getlist("feed_id") or request.POST.getlist("feed_id[]") + cutoff_timestamp = int(request.POST.get("cutoff_timestamp", 0)) + direction = request.POST.get("direction", "older") + infrequent = is_true(request.POST.get("infrequent", False)) if infrequent: - infrequent = request.POST.get('infrequent') + infrequent = request.POST.get("infrequent") multiple = len(feed_ids) > 1 code = 1 errors = [] cutoff_date = datetime.datetime.fromtimestamp(cutoff_timestamp) if cutoff_timestamp else None - + if infrequent: feed_ids = Feed.low_volume_feeds(feed_ids, stories_per_month=infrequent) - feed_ids = [str(f) for f in feed_ids] # This method expects strings - + feed_ids = [str(f) for f in feed_ids] # This method expects strings + if cutoff_date: - logging.user(request, "~FMMark %s feeds read, %s - cutoff: %s/%s" % - (len(feed_ids), direction, cutoff_timestamp, cutoff_date)) - + logging.user( + request, + "~FMMark %s feeds read, %s - cutoff: %s/%s" + % (len(feed_ids), direction, cutoff_timestamp, cutoff_date), + ) + for feed_id in feed_ids: - if 'social:' in feed_id: - user_id = int(feed_id.replace('social:', '')) + if "social:" in feed_id: + user_id = int(feed_id.replace("social:", "")) try: - sub = MSocialSubscription.objects.get(user_id=request.user.pk, - subscription_user_id=user_id) + sub = MSocialSubscription.objects.get(user_id=request.user.pk, subscription_user_id=user_id) except MSocialSubscription.DoesNotExist: logging.user(request, "~FRCouldn't find socialsub: %s" % user_id) continue @@ -2109,61 +2288,63 @@ def mark_feed_as_read(request): except (Feed.DoesNotExist, UserSubscription.DoesNotExist) as e: errors.append("User not subscribed: %s" % e) continue - except (ValueError) as e: + except ValueError as e: errors.append("Invalid feed_id: %s" % e) continue if not sub: errors.append("User not subscribed: %s" % feed_id) continue - + try: if direction == "older": marked_read = 
sub.mark_feed_read(cutoff_date=cutoff_date) else: marked_read = sub.mark_newer_stories_read(cutoff_date=cutoff_date) if marked_read and not multiple: - r.publish(request.user.username, 'feed:%s' % feed_id) + r.publish(request.user.username, "feed:%s" % feed_id) except IntegrityError as e: errors.append("Could not mark feed as read: %s" % e) code = -1 - + if multiple: logging.user(request, "~FMMarking ~SB%s~SN feeds as read" % len(feed_ids)) - r.publish(request.user.username, 'refresh:%s' % ','.join(feed_ids)) - + r.publish(request.user.username, "refresh:%s" % ",".join(feed_ids)) + if errors: logging.user(request, "~FMMarking read had errors: ~FR%s" % errors) - + return dict(code=code, errors=errors, cutoff_date=cutoff_date, direction=direction) + def _parse_user_info(user): return { - 'user_info': { - 'is_anonymous': json.encode(user.is_anonymous), - 'is_authenticated': json.encode(user.is_authenticated), - 'username': json.encode(user.username if user.is_authenticated else 'Anonymous') + "user_info": { + "is_anonymous": json.encode(user.is_anonymous), + "is_authenticated": json.encode(user.is_authenticated), + "username": json.encode(user.username if user.is_authenticated else "Anonymous"), } } + @ajax_login_required @json.json_view def add_url(request): code = 0 - url = request.POST['url'] - folder = request.POST.get('folder', '').replace('river:', '') - new_folder = request.POST.get('new_folder', '').replace('river:', '') - auto_active = is_true(request.POST.get('auto_active', 1)) - skip_fetch = is_true(request.POST.get('skip_fetch', False)) + url = request.POST["url"] + folder = request.POST.get("folder", "").replace("river:", "") + new_folder = request.POST.get("new_folder", "").replace("river:", "") + auto_active = is_true(request.POST.get("auto_active", 1)) + skip_fetch = is_true(request.POST.get("skip_fetch", False)) feed = None - + if not url: code = -1 - message = 'Enter in the website address or the feed URL.' + message = "Enter in the website address or the feed URL." elif any([(banned_url in url) for banned_url in BANNED_URLS]): code = -1 message = "The publisher of this website has banned NewsBlur." - elif re.match('(https?://)?twitter.com/\w+/?$', url): + elif re.match("(https?://)?twitter.com/\w+/?$", url): if not request.user.profile.is_premium: message = "You must be a premium subscriber to add Twitter feeds." code = -1 @@ -2177,7 +2358,7 @@ def add_url(request): except tweepy.TweepError: code = -1 message = "Your Twitter connection isn't setup. Go to Manage - Friends/Followers and reconnect Twitter." 
- + if code == -1: return dict(code=code, message=message) @@ -2186,25 +2367,26 @@ def add_url(request): usf.add_folder(folder, new_folder) folder = new_folder - code, message, us = UserSubscription.add_subscription(user=request.user, feed_address=url, - folder=folder, auto_active=auto_active, - skip_fetch=skip_fetch) + code, message, us = UserSubscription.add_subscription( + user=request.user, feed_address=url, folder=folder, auto_active=auto_active, skip_fetch=skip_fetch + ) feed = us and us.feed if feed: r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL) - r.publish(request.user.username, 'reload:%s' % feed.pk) + r.publish(request.user.username, "reload:%s" % feed.pk) MUserSearch.schedule_index_feeds_for_search(feed.pk, request.user.pk) - + return dict(code=code, message=message, feed=feed) + @ajax_login_required @json.json_view def add_folder(request): - folder = request.POST['folder'].replace('river:', '') - parent_folder = request.POST.get('parent_folder', '').replace('river:', '') + folder = request.POST["folder"].replace("river:", "") + parent_folder = request.POST.get("parent_folder", "").replace("river:", "") folders = None logging.user(request, "~FRAdding Folder: ~SB%s (in %s)" % (folder, parent_folder)) - + if folder: code = 1 message = "" @@ -2212,43 +2394,45 @@ def add_folder(request): user_sub_folders_object.add_folder(parent_folder, folder) folders = json.decode(user_sub_folders_object.folders) r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL) - r.publish(request.user.username, 'reload:feeds') + r.publish(request.user.username, "reload:feeds") else: code = -1 message = "Gotta write in a folder name." - + return dict(code=code, message=message, folders=folders) + @ajax_login_required @json.json_view def delete_feed(request): - feed_id = int(request.POST['feed_id']) - in_folder = request.POST.get('in_folder', '').replace('river:', '') - if not in_folder or in_folder == ' ': + feed_id = int(request.POST["feed_id"]) + in_folder = request.POST.get("in_folder", "").replace("river:", "") + if not in_folder or in_folder == " ": in_folder = "" - + user_sub_folders = get_object_or_404(UserSubscriptionFolders, user=request.user) user_sub_folders.delete_feed(feed_id, in_folder) - + feed = Feed.objects.filter(pk=feed_id) if feed: feed[0].count_subscribers() - + r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL) - r.publish(request.user.username, 'reload:feeds') - + r.publish(request.user.username, "reload:feeds") + return dict(code=1, message="Removed %s from '%s'." % (feed, in_folder)) + @ajax_login_required @json.json_view def delete_feed_by_url(request): message = "" code = 0 - url = request.POST['url'] - in_folder = request.POST.get('in_folder', '').replace('river:', '') - if in_folder == ' ': + url = request.POST["url"] + in_folder = request.POST.get("in_folder", "").replace("river:", "") + if in_folder == " ": in_folder = "" - + logging.user(request.user, "~FBFinding feed (delete_feed_by_url): %s" % url) feed = Feed.get_feed_from_url(url, create=False) if feed: @@ -2261,19 +2445,22 @@ def delete_feed_by_url(request): else: code = -1 message = "URL not found." 
- + return dict(code=code, message=message) - + + @ajax_login_required @json.json_view def delete_folder(request): - folder_to_delete = request.POST.get('folder_name') or request.POST.get('folder_to_delete') - in_folder = request.POST.get('in_folder', None) - feed_ids_in_folder = request.POST.getlist('feed_id') or request.POST.getlist('feed_id[]') + folder_to_delete = request.POST.get("folder_name") or request.POST.get("folder_to_delete") + in_folder = request.POST.get("in_folder", None) + feed_ids_in_folder = request.POST.getlist("feed_id") or request.POST.getlist("feed_id[]") feed_ids_in_folder = [int(f) for f in feed_ids_in_folder if f] - request.user.profile.send_opml_export_email(reason="You have deleted an entire folder of feeds, so here's a backup of all of your subscriptions just in case.") - + request.user.profile.send_opml_export_email( + reason="You have deleted an entire folder of feeds, so here's a backup of all of your subscriptions just in case." + ) + # Works piss poor with duplicate folder titles, if they are both in the same folder. # Deletes all, but only in the same folder parent. But nobody should be doing that, right? user_sub_folders = get_object_or_404(UserSubscriptionFolders, user=request.user) @@ -2281,19 +2468,21 @@ def delete_folder(request): folders = json.decode(user_sub_folders.folders) r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL) - r.publish(request.user.username, 'reload:feeds') - + r.publish(request.user.username, "reload:feeds") + return dict(code=1, folders=folders) -@required_params('feeds_by_folder') +@required_params("feeds_by_folder") @ajax_login_required @json.json_view def delete_feeds_by_folder(request): - feeds_by_folder = json.decode(request.POST['feeds_by_folder']) + feeds_by_folder = json.decode(request.POST["feeds_by_folder"]) + + request.user.profile.send_opml_export_email( + reason="You have deleted a number of feeds at once, so here's a backup of all of your subscriptions just in case." + ) - request.user.profile.send_opml_export_email(reason="You have deleted a number of feeds at once, so here's a backup of all of your subscriptions just in case.") - # Works piss poor with duplicate folder titles, if they are both in the same folder. # Deletes all, but only in the same folder parent. But nobody should be doing that, right? 
user_sub_folders = get_object_or_404(UserSubscriptionFolders, user=request.user) @@ -2301,38 +2490,40 @@ def delete_feeds_by_folder(request): folders = json.decode(user_sub_folders.folders) r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL) - r.publish(request.user.username, 'reload:feeds') - + r.publish(request.user.username, "reload:feeds") + return dict(code=1, folders=folders) + @ajax_login_required @json.json_view def rename_feed(request): - feed = get_object_or_404(Feed, pk=int(request.POST['feed_id'])) + feed = get_object_or_404(Feed, pk=int(request.POST["feed_id"])) try: user_sub = UserSubscription.objects.get(user=request.user, feed=feed) except UserSubscription.DoesNotExist: return dict(code=-1, message=f"You are not subscribed to {feed.feed_title}") - - feed_title = request.POST['feed_title'] - - logging.user(request, "~FRRenaming feed '~SB%s~SN' to: ~SB%s" % ( - feed.feed_title, feed_title)) - + + feed_title = request.POST["feed_title"] + + logging.user(request, "~FRRenaming feed '~SB%s~SN' to: ~SB%s" % (feed.feed_title, feed_title)) + user_sub.user_title = feed_title user_sub.save() - + return dict(code=1) - + + @ajax_login_required @json.json_view def rename_folder(request): - folder_to_rename = request.POST.get('folder_name') or request.POST.get('folder_to_rename') - new_folder_name = request.POST['new_folder_name'] - in_folder = request.POST.get('in_folder', '').replace('river:', '') - if 'Top Level' in in_folder: in_folder = '' + folder_to_rename = request.POST.get("folder_name") or request.POST.get("folder_to_rename") + new_folder_name = request.POST["new_folder_name"] + in_folder = request.POST.get("in_folder", "").replace("river:", "") + if "Top Level" in in_folder: + in_folder = "" code = 0 - + # Works piss poor with duplicate folder titles, if they are both in the same folder. # renames all, but only in the same folder parent. But nobody should be doing that, right? 
if folder_to_rename and new_folder_name: @@ -2341,66 +2532,74 @@ def rename_folder(request): code = 1 else: code = -1 - + return dict(code=code) - + + @ajax_login_required @json.json_view def move_feed_to_folders(request): - feed_id = int(request.POST['feed_id']) - in_folders = request.POST.getlist('in_folders', '') or request.POST.getlist('in_folders[]', '') - to_folders = request.POST.getlist('to_folders', '') or request.POST.getlist('to_folders[]', '') + feed_id = int(request.POST["feed_id"]) + in_folders = request.POST.getlist("in_folders", "") or request.POST.getlist("in_folders[]", "") + to_folders = request.POST.getlist("to_folders", "") or request.POST.getlist("to_folders[]", "") user_sub_folders = get_object_or_404(UserSubscriptionFolders, user=request.user) - user_sub_folders = user_sub_folders.move_feed_to_folders(feed_id, in_folders=in_folders, - to_folders=to_folders) - + user_sub_folders = user_sub_folders.move_feed_to_folders( + feed_id, in_folders=in_folders, to_folders=to_folders + ) + r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL) - r.publish(request.user.username, 'reload:feeds') + r.publish(request.user.username, "reload:feeds") return dict(code=1, folders=json.decode(user_sub_folders.folders)) - + + @ajax_login_required @json.json_view def move_feed_to_folder(request): - feed_id = int(request.POST['feed_id']) - in_folder = request.POST.get('in_folder', '') - to_folder = request.POST.get('to_folder', '') + feed_id = int(request.POST["feed_id"]) + in_folder = request.POST.get("in_folder", "") + to_folder = request.POST.get("to_folder", "") user_sub_folders = get_object_or_404(UserSubscriptionFolders, user=request.user) - user_sub_folders = user_sub_folders.move_feed_to_folder(feed_id, in_folder=in_folder, - to_folder=to_folder) - + user_sub_folders = user_sub_folders.move_feed_to_folder(feed_id, in_folder=in_folder, to_folder=to_folder) + r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL) - r.publish(request.user.username, 'reload:feeds') + r.publish(request.user.username, "reload:feeds") return dict(code=1, folders=json.decode(user_sub_folders.folders)) - + + @ajax_login_required @json.json_view def move_folder_to_folder(request): - folder_name = request.POST['folder_name'] - in_folder = request.POST.get('in_folder', '') - to_folder = request.POST.get('to_folder', '') - + folder_name = request.POST["folder_name"] + in_folder = request.POST.get("in_folder", "") + to_folder = request.POST.get("to_folder", "") + user_sub_folders = get_object_or_404(UserSubscriptionFolders, user=request.user) - user_sub_folders = user_sub_folders.move_folder_to_folder(folder_name, in_folder=in_folder, to_folder=to_folder) - + user_sub_folders = user_sub_folders.move_folder_to_folder( + folder_name, in_folder=in_folder, to_folder=to_folder + ) + r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL) - r.publish(request.user.username, 'reload:feeds') + r.publish(request.user.username, "reload:feeds") return dict(code=1, folders=json.decode(user_sub_folders.folders)) -@required_params('feeds_by_folder', 'to_folder') + +@required_params("feeds_by_folder", "to_folder") @ajax_login_required @json.json_view def move_feeds_by_folder_to_folder(request): - feeds_by_folder = json.decode(request.POST['feeds_by_folder']) - to_folder = request.POST['to_folder'] - new_folder = request.POST.get('new_folder', None) + feeds_by_folder = json.decode(request.POST["feeds_by_folder"]) + to_folder = request.POST["to_folder"] + new_folder = request.POST.get("new_folder", None) + + 
request.user.profile.send_opml_export_email( + reason="You have moved a number of feeds at once, so here's a backup of all of your subscriptions just in case." + ) - request.user.profile.send_opml_export_email(reason="You have moved a number of feeds at once, so here's a backup of all of your subscriptions just in case.") - user_sub_folders = get_object_or_404(UserSubscriptionFolders, user=request.user) if new_folder: @@ -2408,44 +2607,50 @@ def move_feeds_by_folder_to_folder(request): to_folder = new_folder user_sub_folders = user_sub_folders.move_feeds_by_folder_to_folder(feeds_by_folder, to_folder) - + r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL) - r.publish(request.user.username, 'reload:feeds') + r.publish(request.user.username, "reload:feeds") return dict(code=1, folders=json.decode(user_sub_folders.folders)) - + + @login_required def add_feature(request): if not request.user.is_staff: return HttpResponseForbidden() - code = -1 + code = -1 form = FeatureForm(request.POST) - + if form.is_valid(): form.save() code = 1 - return HttpResponseRedirect(reverse('index')) - + return HttpResponseRedirect(reverse("index")) + return dict(code=code) - + + @json.json_view def load_features(request): user = get_user(request) - page = max(int(request.GET.get('page', 0)), 0) + page = max(int(request.GET.get("page", 0)), 0) if page > 1: - logging.user(request, "~FBBrowse features: ~SBPage #%s" % (page+1)) - features = list(Feature.objects.all()[page*3:(page+1)*3+1].values()) - features = [{ - 'description': f['description'], - 'date': localtime_for_timezone(f['date'], user.profile.timezone).strftime("%b %d, %Y") - } for f in features] + logging.user(request, "~FBBrowse features: ~SBPage #%s" % (page + 1)) + features = list(Feature.objects.all()[page * 3 : (page + 1) * 3 + 1].values()) + features = [ + { + "description": f["description"], + "date": localtime_for_timezone(f["date"], user.profile.timezone).strftime("%b %d, %Y"), + } + for f in features + ] return features + @ajax_login_required @json.json_view def save_feed_order(request): - folders = request.POST.get('folders') + folders = request.POST.get("folders") if folders: # Test that folders can be JSON decoded folders_list = json.decode(folders) @@ -2454,44 +2659,50 @@ def save_feed_order(request): user_sub_folders = UserSubscriptionFolders.objects.get(user=request.user) user_sub_folders.folders = folders user_sub_folders.save() - + return {} + @json.json_view def feeds_trainer(request): classifiers = [] - feed_id = request.GET.get('feed_id') + feed_id = request.GET.get("feed_id") user = get_user(request) usersubs = UserSubscription.objects.filter(user=user, active=True) - + if feed_id: feed = get_object_or_404(Feed, pk=feed_id) usersubs = usersubs.filter(feed=feed) - usersubs = usersubs.select_related('feed').order_by('-feed__stories_last_month') - + usersubs = usersubs.select_related("feed").order_by("-feed__stories_last_month") + for us in usersubs: if (not us.is_trained and us.feed.stories_last_month > 0) or feed_id: classifier = dict() - classifier['classifiers'] = get_classifiers_for_user(user, feed_id=us.feed.pk) - classifier['feed_id'] = us.feed_id - classifier['stories_last_month'] = us.feed.stories_last_month - classifier['num_subscribers'] = us.feed.num_subscribers - classifier['feed_tags'] = json.decode(us.feed.data.popular_tags) if us.feed.data.popular_tags else [] - classifier['feed_authors'] = json.decode(us.feed.data.popular_authors) if us.feed.data.popular_authors else [] + classifier["classifiers"] = 
get_classifiers_for_user(user, feed_id=us.feed.pk) + classifier["feed_id"] = us.feed_id + classifier["stories_last_month"] = us.feed.stories_last_month + classifier["num_subscribers"] = us.feed.num_subscribers + classifier["feed_tags"] = ( + json.decode(us.feed.data.popular_tags) if us.feed.data.popular_tags else [] + ) + classifier["feed_authors"] = ( + json.decode(us.feed.data.popular_authors) if us.feed.data.popular_authors else [] + ) classifiers.append(classifier) - + user.profile.has_trained_intelligence = True user.profile.save() - + logging.user(user, "~FGLoading Trainer: ~SB%s feeds" % (len(classifiers))) - + return classifiers + @ajax_login_required @json.json_view def save_feed_chooser(request): is_premium = request.user.profile.is_premium - approved_feeds = request.POST.getlist('approved_feeds') or request.POST.getlist('approved_feeds[]') + approved_feeds = request.POST.getlist("approved_feeds") or request.POST.getlist("approved_feeds[]") approved_feeds = [int(feed_id) for feed_id in approved_feeds if feed_id] approve_all = False if not is_premium: @@ -2500,7 +2711,7 @@ def save_feed_chooser(request): approve_all = True activated = 0 usersubs = UserSubscription.objects.filter(user=request.user) - + for sub in usersubs: try: if sub.feed_id in approved_feeds or approve_all: @@ -2515,32 +2726,31 @@ def save_feed_chooser(request): sub.save() except Feed.DoesNotExist: pass - + UserSubscription.queue_new_feeds(request.user) UserSubscription.refresh_stale_feeds(request.user, exclude_new=True) - + r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL) - r.publish(request.user.username, 'reload:feeds') - - logging.user(request, "~BB~FW~SBFeed chooser: ~FC%s~SN/~SB%s" % ( - activated, - usersubs.count() - )) - - return {'activated': activated} + r.publish(request.user.username, "reload:feeds") + + logging.user(request, "~BB~FW~SBFeed chooser: ~FC%s~SN/~SB%s" % (activated, usersubs.count())) + + return {"activated": activated} + @ajax_login_required def retrain_all_sites(request): for sub in UserSubscription.objects.filter(user=request.user): sub.is_trained = False sub.save() - + return feeds_trainer(request) - + + @login_required def activate_premium_account(request): try: - usersubs = UserSubscription.objects.select_related('feed').filter(user=request.user) + usersubs = UserSubscription.objects.select_related("feed").filter(user=request.user) for sub in usersubs: sub.active = True sub.save() @@ -2549,11 +2759,12 @@ def activate_premium_account(request): sub.feed.schedule_feed_fetch_immediately() except Exception as e: logging.user(request, "~BR~FWPremium activation failed: {e} {usersubs}") - + request.user.profile.is_premium = True request.user.profile.save() - - return HttpResponseRedirect(reverse('index')) + + return HttpResponseRedirect(reverse("index")) + @login_required def login_as(request): @@ -2561,69 +2772,74 @@ def login_as(request): logging.user(request, "~SKNON-STAFF LOGGING IN AS ANOTHER USER!") assert False return HttpResponseForbidden() - username = request.GET['user'] + username = request.GET["user"] user = get_object_or_404(User, username__iexact=username) user.backend = settings.AUTHENTICATION_BACKENDS[0] - login_user(request, user, backend='django.contrib.auth.backends.ModelBackend') - return HttpResponseRedirect(reverse('index')) - + login_user(request, user, backend="django.contrib.auth.backends.ModelBackend") + return HttpResponseRedirect(reverse("index")) + + def iframe_buster(request): logging.user(request, "~FB~SBiFrame bust!") return 
HttpResponse(status=204) -@required_params('story_id', feed_id=int) + +@required_params("story_id", feed_id=int) @ajax_login_required @json.json_view def mark_story_as_starred(request): return _mark_story_as_starred(request) - -@required_params('story_hash') + + +@required_params("story_hash") @ajax_login_required @json.json_view def mark_story_hash_as_starred(request): return _mark_story_as_starred(request) - + + def _mark_story_as_starred(request): - code = 1 - feed_id = int(request.POST.get('feed_id', 0)) - story_id = request.POST.get('story_id', None) - user_tags = request.POST.getlist('user_tags') or request.POST.getlist('user_tags[]') - user_notes = request.POST.get('user_notes', None) - highlights = request.POST.getlist('highlights') or request.POST.getlist('highlights[]') or [] - message = "" - story_hashes = request.POST.getlist('story_hash') or request.POST.getlist('story_hash[]') + code = 1 + feed_id = int(request.POST.get("feed_id", 0)) + story_id = request.POST.get("story_id", None) + user_tags = request.POST.getlist("user_tags") or request.POST.getlist("user_tags[]") + user_notes = request.POST.get("user_notes", None) + highlights = request.POST.getlist("highlights") or request.POST.getlist("highlights[]") or [] + message = "" + story_hashes = request.POST.getlist("story_hash") or request.POST.getlist("story_hash[]") is_list = len(story_hashes) > 1 datas = [] if not len(story_hashes): - story, _ = MStory.find_story(story_feed_id=feed_id, story_id=story_id) + story, _ = MStory.find_story(story_feed_id=feed_id, story_id=story_id) if story: story_hashes = [story.story_hash] - + if not len(story_hashes): - return {'code': -1, 'message': "Could not find story to save."} - + return {"code": -1, "message": "Could not find story to save."} + for story_hash in story_hashes: - story, _ = MStory.find_story(story_hash=story_hash) + story, _ = MStory.find_story(story_hash=story_hash) if not story: logging.user(request, "~FCStarring ~FRfailed~FC: %s not found" % (story_hash)) - datas.append({'code': -1, 'message': "Could not save story, not found", 'story_hash': story_hash}) + datas.append({"code": -1, "message": "Could not save story, not found", "story_hash": story_hash}) continue feed_id = story and story.story_feed_id - - story_db = dict([(k, v) for k, v in list(story._data.items()) - if k is not None and v is not None]) + + story_db = dict([(k, v) for k, v in list(story._data.items()) if k is not None and v is not None]) # Pop all existing user-specific fields because we don't want to reuse them from the found story # in case MStory.find_story uses somebody else's saved/shared story (because the original is deleted) - story_db.pop('user_id', None) - story_db.pop('starred_date', None) - story_db.pop('id', None) - story_db.pop('user_tags', None) - story_db.pop('highlights', None) - story_db.pop('user_notes', None) - + story_db.pop("user_id", None) + story_db.pop("starred_date", None) + story_db.pop("id", None) + story_db.pop("user_tags", None) + story_db.pop("highlights", None) + story_db.pop("user_notes", None) + now = datetime.datetime.now() - story_values = dict(starred_date=now, user_tags=user_tags, highlights=highlights, user_notes=user_notes, **story_db) + story_values = dict( + starred_date=now, user_tags=user_tags, highlights=highlights, user_notes=user_notes, **story_db + ) params = dict(story_guid=story.story_guid, user_id=request.user.pk) starred_story = MStarredStory.objects(**params).limit(1) created = False @@ -2632,19 +2848,25 @@ def _mark_story_as_starred(request): 
removed_highlights = [] if not starred_story: params.update(story_values) - if 'story_latest_content_z' in params: - params.pop('story_latest_content_z') + if "story_latest_content_z" in params: + params.pop("story_latest_content_z") try: starred_story = MStarredStory.objects.create(**params) except OperationError as e: - logging.user(request, "~FCStarring ~FRfailed~FC: ~SB%s (~FM~SB%s~FC~SN)" % (story.story_title[:32], e)) - datas.append({'code': -1, 'message': "Could not save story due to: %s" % e, 'story_hash': story_hash}) - + logging.user( + request, "~FCStarring ~FRfailed~FC: ~SB%s (~FM~SB%s~FC~SN)" % (story.story_title[:32], e) + ) + datas.append( + {"code": -1, "message": "Could not save story due to: %s" % e, "story_hash": story_hash} + ) + created = True - MActivity.new_starred_story(user_id=request.user.pk, - story_title=story.story_title, - story_feed_id=feed_id, - story_id=starred_story.story_guid) + MActivity.new_starred_story( + user_id=request.user.pk, + story_title=story.story_title, + story_feed_id=feed_id, + story_id=starred_story.story_guid, + ) new_user_tags = user_tags new_highlights = highlights changed_user_notes = bool(user_notes) @@ -2660,57 +2882,74 @@ def _mark_story_as_starred(request): starred_story.highlights = highlights starred_story.user_notes = user_notes starred_story.save() - + if len(highlights) == 1 and len(new_highlights) == 1: MStarredStoryCounts.adjust_count(request.user.pk, highlights=True, amount=1) elif len(highlights) == 0 and len(removed_highlights): MStarredStoryCounts.adjust_count(request.user.pk, highlights=True, amount=-1) - + for tag in new_user_tags: MStarredStoryCounts.adjust_count(request.user.pk, tag=tag, amount=1) for tag in removed_user_tags: MStarredStoryCounts.adjust_count(request.user.pk, tag=tag, amount=-1) - + if random.random() < 0.01: MStarredStoryCounts.schedule_count_tags_for_user(request.user.pk) MStarredStoryCounts.count_for_user(request.user.pk, total_only=True) starred_counts, starred_count = MStarredStoryCounts.user_counts(request.user.pk, include_total=True) if not starred_count and len(starred_counts): - starred_count = MStarredStory.objects(user_id=request.user.pk).count() - + starred_count = MStarredStory.objects(user_id=request.user.pk).count() + if not changed_user_notes: r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL) - r.publish(request.user.username, 'story:starred:%s' % story.story_hash) - + r.publish(request.user.username, "story:starred:%s" % story.story_hash) + if created: - logging.user(request, "~FCStarring: ~SB%s (~FM~SB%s~FC~SN)" % (story.story_title[:32], starred_story.user_tags)) + logging.user( + request, + "~FCStarring: ~SB%s (~FM~SB%s~FC~SN)" % (story.story_title[:32], starred_story.user_tags), + ) else: - logging.user(request, "~FCUpdating starred:~SN~FC ~SB%s~SN (~FM~SB%s~FC~SN/~FM%s~FC)" % (story.story_title[:32], starred_story.user_tags, starred_story.user_notes)) - - datas.append({'code': code, 'message': message, 'starred_count': starred_count, 'starred_counts': starred_counts}) - + logging.user( + request, + "~FCUpdating starred:~SN~FC ~SB%s~SN (~FM~SB%s~FC~SN/~FM%s~FC)" + % (story.story_title[:32], starred_story.user_tags, starred_story.user_notes), + ) + + datas.append( + { + "code": code, + "message": message, + "starred_count": starred_count, + "starred_counts": starred_counts, + } + ) + if len(datas) >= 2: return datas elif len(datas) == 1: return datas[0] return datas - -@required_params('story_id') + + +@required_params("story_id") @ajax_login_required @json.json_view def 
mark_story_as_unstarred(request): return _mark_story_as_unstarred(request) - -@required_params('story_hash') + + +@required_params("story_hash") @ajax_login_required @json.json_view def mark_story_hash_as_unstarred(request): return _mark_story_as_unstarred(request) + def _mark_story_as_unstarred(request): - code = 1 - story_id = request.POST.get('story_id', None) - story_hashes = request.POST.getlist('story_hash') or request.POST.getlist('story_hash[]') + code = 1 + story_id = request.POST.get("story_id", None) + story_hashes = request.POST.getlist("story_hash") or request.POST.getlist("story_hash[]") starred_counts = None starred_story = None if story_id: @@ -2720,28 +2959,32 @@ def _mark_story_as_unstarred(request): story_hashes = [starred_story.story_hash] else: story_hashes = [story_id] - + datas = [] for story_hash in story_hashes: starred_story = MStarredStory.objects(user_id=request.user.pk, story_hash=story_hash) if not starred_story: logging.user(request, "~FCUnstarring ~FRfailed~FC: %s not found" % (story_hash)) - datas.append({'code': -1, 'message': "Could not unsave story, not found", 'story_hash': story_hash}) + datas.append( + {"code": -1, "message": "Could not unsave story, not found", "story_hash": story_hash} + ) continue - + starred_story = starred_story[0] logging.user(request, "~FCUnstarring: ~SB%s" % (starred_story.story_title[:50])) user_tags = starred_story.user_tags feed_id = starred_story.story_feed_id - MActivity.remove_starred_story(user_id=request.user.pk, - story_feed_id=starred_story.story_feed_id, - story_id=starred_story.story_guid) + MActivity.remove_starred_story( + user_id=request.user.pk, + story_feed_id=starred_story.story_feed_id, + story_id=starred_story.story_guid, + ) starred_story.user_id = 0 try: starred_story.save() except NotUniqueError: starred_story.delete() - + MStarredStoryCounts.adjust_count(request.user.pk, feed_id=feed_id, amount=-1) for tag in user_tags: @@ -2752,27 +2995,32 @@ def _mark_story_as_unstarred(request): MStarredStoryCounts.schedule_count_tags_for_user(request.user.pk) MStarredStoryCounts.count_for_user(request.user.pk, total_only=True) starred_counts = MStarredStoryCounts.user_counts(request.user.pk) - + r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL) - r.publish(request.user.username, 'story:unstarred:%s' % starred_story.story_hash) - + r.publish(request.user.username, "story:unstarred:%s" % starred_story.story_hash) + if not story_hashes: datas.append(dict(code=-1, message=f"Failed to find {story_hashes}")) - - return {'code': code, 'starred_counts': starred_counts, 'messages': datas} - + + return {"code": code, "starred_counts": starred_counts, "messages": datas} + + @ajax_login_required @json.json_view def starred_counts(request): starred_counts, starred_count = MStarredStoryCounts.user_counts(request.user.pk, include_total=True) - logging.user(request, "~FCRequesting starred counts: ~SB%s stories (%s tags)" % (starred_count, len([s for s in starred_counts if s['tag']]))) + logging.user( + request, + "~FCRequesting starred counts: ~SB%s stories (%s tags)" + % (starred_count, len([s for s in starred_counts if s["tag"]])), + ) + + return {"starred_count": starred_count, "starred_counts": starred_counts} + - return {'starred_count': starred_count, 'starred_counts': starred_counts} - @ajax_login_required @json.json_view def send_story_email(request): - def validate_email_as_bool(email): try: validate_email(email) @@ -2780,46 +3028,49 @@ def send_story_email(request): except: return False - code = 1 - message = 
'OK' - user = get_user(request) - story_id = request.POST['story_id'] - feed_id = request.POST['feed_id'] - to_addresses = request.POST.get('to', '').replace(',', ' ').replace(' ', ' ').strip().split(' ') - from_name = request.POST['from_name'] - from_email = request.POST['from_email'] - email_cc = is_true(request.POST.get('email_cc', 'true')) - comments = request.POST['comments'] - comments = comments[:2048] # Separated due to PyLint - from_address = 'share@newsblur.com' + code = 1 + message = "OK" + user = get_user(request) + story_id = request.POST["story_id"] + feed_id = request.POST["feed_id"] + to_addresses = request.POST.get("to", "").replace(",", " ").replace(" ", " ").strip().split(" ") + from_name = request.POST["from_name"] + from_email = request.POST["from_email"] + email_cc = is_true(request.POST.get("email_cc", "true")) + comments = request.POST["comments"] + comments = comments[:2048] # Separated due to PyLint + from_address = "share@newsblur.com" share_user_profile = MSocialProfile.get_user(request.user.pk) - + quota = 32 if user.profile.is_premium else 1 if share_user_profile.over_story_email_quota(quota=quota): code = -1 if user.profile.is_premium: - message = 'You can only send %s stories per day by email.' % quota + message = "You can only send %s stories per day by email." % quota else: - message = 'Upgrade to a premium subscription to send more than one story per day by email.' - logging.user(request, '~BRNOT ~BMSharing story by email to %s recipient, over quota: %s/%s' % - (len(to_addresses), story_id, feed_id)) + message = "Upgrade to a premium subscription to send more than one story per day by email." + logging.user( + request, + "~BRNOT ~BMSharing story by email to %s recipient, over quota: %s/%s" + % (len(to_addresses), story_id, feed_id), + ) elif not to_addresses: code = -1 - message = 'Please provide at least one email address.' + message = "Please provide at least one email address." elif not all(validate_email_as_bool(to_address) for to_address in to_addresses if to_addresses): code = -1 - message = 'You need to send the email to a valid email address.' + message = "You need to send the email to a valid email address." elif not validate_email_as_bool(from_email): code = -1 - message = 'You need to provide your email address.' + message = "You need to provide your email address." elif not from_name: code = -1 - message = 'You need to provide your name.' + message = "You need to provide your name." 
else: story, _ = MStory.find_story(feed_id, story_id) - story = Feed.format_story(story, feed_id, text=True) - feed = Feed.get_by_id(story['story_feed_id']) - params = { + story = Feed.format_story(story, feed_id, text=True) + feed = Feed.get_by_id(story["story_feed_id"]) + params = { "to_addresses": to_addresses, "from_name": from_name, "from_email": from_email, @@ -2830,79 +3081,92 @@ def send_story_email(request): "feed": feed, "share_user_profile": share_user_profile, } - text = render_to_string('mail/email_story.txt', params) - html = render_to_string('mail/email_story.xhtml', params) - subject = '%s' % (story['story_title']) - cc = None + text = render_to_string("mail/email_story.txt", params) + html = render_to_string("mail/email_story.xhtml", params) + subject = "%s" % (story["story_title"]) + cc = None if email_cc: - cc = ['%s <%s>' % (from_name, from_email)] - subject = subject.replace('\n', ' ') - msg = EmailMultiAlternatives(subject, text, - from_email='NewsBlur <%s>' % from_address, - to=to_addresses, - cc=cc, - headers={'Reply-To': "%s <%s>" % (from_name, from_email)}) + cc = ["%s <%s>" % (from_name, from_email)] + subject = subject.replace("\n", " ") + msg = EmailMultiAlternatives( + subject, + text, + from_email="NewsBlur <%s>" % from_address, + to=to_addresses, + cc=cc, + headers={"Reply-To": "%s <%s>" % (from_name, from_email)}, + ) msg.attach_alternative(html, "text/html") # try: msg.send() # except boto.ses.connection.BotoServerError as e: # code = -1 # message = "Email error: %s" % str(e) - + share_user_profile.save_sent_email() - - logging.user(request, '~BMSharing story by email to %s recipient%s (%s): ~FY~SB%s~SN~BM~FY/~SB%s' % - (len(to_addresses), '' if len(to_addresses) == 1 else 's', to_addresses, - story['story_title'][:50], feed and feed.feed_title[:50])) - - return {'code': code, 'message': message} + + logging.user( + request, + "~BMSharing story by email to %s recipient%s (%s): ~FY~SB%s~SN~BM~FY/~SB%s" + % ( + len(to_addresses), + "" if len(to_addresses) == 1 else "s", + to_addresses, + story["story_title"][:50], + feed and feed.feed_title[:50], + ), + ) + + return {"code": code, "message": message} + @json.json_view def load_tutorial(request): - if request.GET.get('finished'): - logging.user(request, '~BY~FW~SBFinishing Tutorial') + if request.GET.get("finished"): + logging.user(request, "~BY~FW~SBFinishing Tutorial") return {} else: - newsblur_feed = Feed.objects.filter(feed_address__icontains='blog.newsblur.com').order_by('-pk')[0] - logging.user(request, '~BY~FW~SBLoading Tutorial') - return { - 'newsblur_feed': newsblur_feed.canonical() - } + newsblur_feed = Feed.objects.filter(feed_address__icontains="blog.newsblur.com").order_by("-pk")[0] + logging.user(request, "~BY~FW~SBLoading Tutorial") + return {"newsblur_feed": newsblur_feed.canonical()} -@required_params('query', 'feed_id') + +@required_params("query", "feed_id") @json.json_view def save_search(request): - feed_id = request.POST['feed_id'] - query = request.POST['query'] - + feed_id = request.POST["feed_id"] + query = request.POST["query"] + MSavedSearch.save_search(user_id=request.user.pk, feed_id=feed_id, query=query) - + saved_searches = MSavedSearch.user_searches(request.user.pk) - + return { - 'saved_searches': saved_searches, + "saved_searches": saved_searches, } -@required_params('query', 'feed_id') + +@required_params("query", "feed_id") @json.json_view def delete_search(request): - feed_id = request.POST['feed_id'] - query = request.POST['query'] + feed_id = request.POST["feed_id"] + 
query = request.POST["query"] MSavedSearch.delete_search(user_id=request.user.pk, feed_id=feed_id, query=query) saved_searches = MSavedSearch.user_searches(request.user.pk) return { - 'saved_searches': saved_searches, + "saved_searches": saved_searches, } -@required_params('river_id', 'river_side', 'river_order') + +@required_params("river_id", "river_side", "river_order") @json.json_view def save_dashboard_river(request): - river_id = request.POST['river_id'] - river_side = request.POST['river_side'] - river_order = int(request.POST['river_order']) + river_id = request.POST["river_id"] + river_side = request.POST["river_side"] + river_order = int(request.POST["river_order"]) logging.user(request, "~FCSaving dashboard river: ~SB%s~SN (%s %s)" % (river_id, river_side, river_order)) @@ -2910,21 +3174,24 @@ def save_dashboard_river(request): dashboard_rivers = MDashboardRiver.get_user_rivers(request.user.pk) return { - 'dashboard_rivers': dashboard_rivers, + "dashboard_rivers": dashboard_rivers, } -@required_params('river_id', 'river_side', 'river_order') + +@required_params("river_id", "river_side", "river_order") @json.json_view def remove_dashboard_river(request): - river_id = request.POST['river_id'] - river_side = request.POST['river_side'] - river_order = int(request.POST['river_order']) + river_id = request.POST["river_id"] + river_side = request.POST["river_side"] + river_order = int(request.POST["river_order"]) - logging.user(request, "~FRRemoving~FC dashboard river: ~SB%s~SN (%s %s)" % (river_id, river_side, river_order)) + logging.user( + request, "~FRRemoving~FC dashboard river: ~SB%s~SN (%s %s)" % (river_id, river_side, river_order) + ) MDashboardRiver.remove_river(request.user.pk, river_side, river_order) dashboard_rivers = MDashboardRiver.get_user_rivers(request.user.pk) return { - 'dashboard_rivers': dashboard_rivers, + "dashboard_rivers": dashboard_rivers, } diff --git a/apps/recommendations/migrations/0001_initial.py b/apps/recommendations/migrations/0001_initial.py index 5d1623cf9..24217a40d 100644 --- a/apps/recommendations/migrations/0001_initial.py +++ b/apps/recommendations/migrations/0001_initial.py @@ -6,40 +6,73 @@ import django.db.models.deletion class Migration(migrations.Migration): - initial = True dependencies = [ - ('rss_feeds', '0001_initial'), + ("rss_feeds", "0001_initial"), migrations.swappable_dependency(settings.AUTH_USER_MODEL), ] operations = [ migrations.CreateModel( - name='RecommendedFeed', + name="RecommendedFeed", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('description', models.TextField(blank=True, null=True)), - ('is_public', models.BooleanField(default=False)), - ('created_date', models.DateField(auto_now_add=True)), - ('approved_date', models.DateField(null=True)), - ('declined_date', models.DateField(null=True)), - ('twitter', models.CharField(blank=True, max_length=50, null=True)), - ('feed', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='recommendations', to='rss_feeds.Feed')), - ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='recommendations', to=settings.AUTH_USER_MODEL)), + ( + "id", + models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID"), + ), + ("description", models.TextField(blank=True, null=True)), + ("is_public", models.BooleanField(default=False)), + ("created_date", models.DateField(auto_now_add=True)), + ("approved_date", models.DateField(null=True)), + 
("declined_date", models.DateField(null=True)), + ("twitter", models.CharField(blank=True, max_length=50, null=True)), + ( + "feed", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + related_name="recommendations", + to="rss_feeds.Feed", + ), + ), + ( + "user", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + related_name="recommendations", + to=settings.AUTH_USER_MODEL, + ), + ), ], options={ - 'ordering': ['-approved_date', '-created_date'], + "ordering": ["-approved_date", "-created_date"], }, ), migrations.CreateModel( - name='RecommendedFeedUserFeedback', + name="RecommendedFeedUserFeedback", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('score', models.IntegerField(default=0)), - ('created_date', models.DateField(auto_now_add=True)), - ('recommendation', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='feedback', to='recommendations.RecommendedFeed')), - ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='feed_feedback', to=settings.AUTH_USER_MODEL)), + ( + "id", + models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID"), + ), + ("score", models.IntegerField(default=0)), + ("created_date", models.DateField(auto_now_add=True)), + ( + "recommendation", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + related_name="feedback", + to="recommendations.RecommendedFeed", + ), + ), + ( + "user", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + related_name="feed_feedback", + to=settings.AUTH_USER_MODEL, + ), + ), ], ), ] diff --git a/apps/recommendations/models.py b/apps/recommendations/models.py index 5914fed7a..43976f11b 100644 --- a/apps/recommendations/models.py +++ b/apps/recommendations/models.py @@ -6,67 +6,69 @@ from apps.reader.models import UserSubscription, UserSubscriptionFolders from utils import json_functions as json from collections import defaultdict + class RecommendedFeed(models.Model): - feed = models.ForeignKey(Feed, related_name='recommendations', on_delete=models.CASCADE) - user = models.ForeignKey(User, related_name='recommendations', on_delete=models.CASCADE) - description = models.TextField(null=True, blank=True) - is_public = models.BooleanField(default=False) - created_date = models.DateField(auto_now_add=True) + feed = models.ForeignKey(Feed, related_name="recommendations", on_delete=models.CASCADE) + user = models.ForeignKey(User, related_name="recommendations", on_delete=models.CASCADE) + description = models.TextField(null=True, blank=True) + is_public = models.BooleanField(default=False) + created_date = models.DateField(auto_now_add=True) approved_date = models.DateField(null=True) declined_date = models.DateField(null=True) - twitter = models.CharField(max_length=50, null=True, blank=True) - + twitter = models.CharField(max_length=50, null=True, blank=True) + def __str__(self): return "%s (%s)" % (self.feed, self.approved_date or self.created_date) - + class Meta: - ordering = ['-approved_date', '-created_date'] + ordering = ["-approved_date", "-created_date"] class RecommendedFeedUserFeedback(models.Model): - recommendation = models.ForeignKey(RecommendedFeed, related_name='feedback', on_delete=models.CASCADE) - user = models.ForeignKey(User, related_name='feed_feedback', on_delete=models.CASCADE) - score = models.IntegerField(default=0) - created_date = models.DateField(auto_now_add=True) + recommendation = 
models.ForeignKey(RecommendedFeed, related_name="feedback", on_delete=models.CASCADE) + user = models.ForeignKey(User, related_name="feed_feedback", on_delete=models.CASCADE) + score = models.IntegerField(default=0) + created_date = models.DateField(auto_now_add=True) + class MFeedFolder(mongo.Document): feed_id = mongo.IntField() folder = mongo.StringField() count = mongo.IntField() - + meta = { - 'collection': 'feed_folders', - 'indexes': ['feed_id', 'folder'], - 'allow_inheritance': False, + "collection": "feed_folders", + "indexes": ["feed_id", "folder"], + "allow_inheritance": False, } - + def __str__(self): feed = Feed.get_by_id(self.feed_id) return "%s - %s (%s)" % (feed, self.folder, self.count) - + @classmethod def count_feed(cls, feed_id): feed = Feed.get_by_id(feed_id) print(feed) found_folders = defaultdict(int) - user_ids = [sub['user_id'] for sub in UserSubscription.objects.filter(feed=feed).values('user_id')] + user_ids = [sub["user_id"] for sub in UserSubscription.objects.filter(feed=feed).values("user_id")] usf = UserSubscriptionFolders.objects.filter(user_id__in=user_ids) for sub in usf: user_sub_folders = json.decode(sub.folders) folder_title = cls.feed_folder_parent(user_sub_folders, feed.pk) - if not folder_title: continue + if not folder_title: + continue found_folders[folder_title.lower()] += 1 # print "%-20s - %s" % (folder_title if folder_title != '' else '[Top]', sub.user_id) print(sorted(list(found_folders.items()), key=lambda f: f[1], reverse=True)) - - + @classmethod - def feed_folder_parent(cls, folders, feed_id, folder_title=''): + def feed_folder_parent(cls, folders, feed_id, folder_title=""): for item in folders: if isinstance(item, int) and item == feed_id: return folder_title elif isinstance(item, dict): for f_k, f_v in list(item.items()): sub_folder_title = cls.feed_folder_parent(f_v, feed_id, f_k) - if sub_folder_title: + if sub_folder_title: return sub_folder_title diff --git a/apps/recommendations/templatetags/recommendations_tags.py b/apps/recommendations/templatetags/recommendations_tags.py index 3978381de..acd3d41fe 100644 --- a/apps/recommendations/templatetags/recommendations_tags.py +++ b/apps/recommendations/templatetags/recommendations_tags.py @@ -7,25 +7,25 @@ from apps.rss_feeds.models import MFeedIcon register = template.Library() -@register.inclusion_tag('recommendations/render_recommended_feed.xhtml', takes_context=True) + +@register.inclusion_tag("recommendations/render_recommended_feed.xhtml", takes_context=True) def render_recommended_feed(context, recommended_feeds, unmoderated=False): - user = get_user(context['user']) - + user = get_user(context["user"]) + usersub = None - if context['user'].is_authenticated: + if context["user"].is_authenticated: usersub = UserSubscription.objects.filter(user=user, feed=recommended_feeds[0].feed) recommended_feed = recommended_feeds and recommended_feeds[0] feed_icon = MFeedIcon.objects(feed_id=recommended_feed.feed_id) - + if recommended_feed: return { - 'recommended_feed' : recommended_feed, - 'description' : recommended_feed.description or recommended_feed.feed.data.feed_tagline, - 'usersub' : usersub, - 'feed_icon' : feed_icon and feed_icon[0], - 'user' : context['user'], - 'has_next_page' : len(recommended_feeds) > 1, - 'unmoderated' : unmoderated, - 'today' : datetime.datetime.now(), + "recommended_feed": recommended_feed, + "description": recommended_feed.description or recommended_feed.feed.data.feed_tagline, + "usersub": usersub, + "feed_icon": feed_icon and feed_icon[0], + "user": 
context["user"], + "has_next_page": len(recommended_feeds) > 1, + "unmoderated": unmoderated, + "today": datetime.datetime.now(), } - \ No newline at end of file diff --git a/apps/recommendations/tests.py b/apps/recommendations/tests.py index c7c4668e1..f51d798ff 100644 --- a/apps/recommendations/tests.py +++ b/apps/recommendations/tests.py @@ -7,6 +7,7 @@ Replace these with more appropriate tests for your application. from django.test import TestCase + class SimpleTest(TestCase): def test_basic_addition(self): """ @@ -14,10 +15,12 @@ class SimpleTest(TestCase): """ self.assertEqual(1 + 1, 2) -__test__ = {"doctest": """ + +__test__ = { + "doctest": """ Another way to test that 1 + 1 is equal to 2. >>> 1 + 1 == 2 True -"""} - +""" +} diff --git a/apps/recommendations/urls.py b/apps/recommendations/urls.py index 481b618e2..9a61b3d5d 100644 --- a/apps/recommendations/urls.py +++ b/apps/recommendations/urls.py @@ -2,9 +2,9 @@ from django.conf.urls import * from apps.recommendations import views urlpatterns = [ - url(r'^load_recommended_feed', views.load_recommended_feed, name='load-recommended-feed'), - url(r'^save_recommended_feed', views.save_recommended_feed, name='save-recommended-feed'), - url(r'^approve_feed', views.approve_feed, name='approve-recommended-feed'), - url(r'^decline_feed', views.decline_feed, name='decline-recommended-feed'), - url(r'^load_feed_info/(?P\d+)', views.load_feed_info, name='load-recommended-feed-info'), + url(r"^load_recommended_feed", views.load_recommended_feed, name="load-recommended-feed"), + url(r"^save_recommended_feed", views.save_recommended_feed, name="save-recommended-feed"), + url(r"^approve_feed", views.approve_feed, name="approve-recommended-feed"), + url(r"^decline_feed", views.decline_feed, name="decline-recommended-feed"), + url(r"^load_feed_info/(?P\d+)", views.load_feed_info, name="load-recommended-feed-info"), ] diff --git a/apps/recommendations/views.py b/apps/recommendations/views.py index ae9ac6065..dabdbd9c1 100644 --- a/apps/recommendations/views.py +++ b/apps/recommendations/views.py @@ -11,43 +11,52 @@ from utils.user_functions import get_user, ajax_login_required, admin_only def load_recommended_feed(request): - user = get_user(request) - page = max(int(request.GET.get('page', 0)), 0) - usersub = None - refresh = request.GET.get('refresh') - now = datetime.datetime.now() - unmoderated = request.GET.get('unmoderated', False) == 'true' - + user = get_user(request) + page = max(int(request.GET.get("page", 0)), 0) + usersub = None + refresh = request.GET.get("refresh") + now = datetime.datetime.now() + unmoderated = request.GET.get("unmoderated", False) == "true" + if unmoderated: - recommended_feeds = RecommendedFeed.objects.filter(is_public=False, declined_date__isnull=True)[page:page+2] + recommended_feeds = RecommendedFeed.objects.filter(is_public=False, declined_date__isnull=True)[ + page : page + 2 + ] else: - recommended_feeds = RecommendedFeed.objects.filter(is_public=True, approved_date__lte=now)[page:page+2] + recommended_feeds = RecommendedFeed.objects.filter(is_public=True, approved_date__lte=now)[ + page : page + 2 + ] if recommended_feeds and request.user.is_authenticated: usersub = UserSubscription.objects.filter(user=user, feed=recommended_feeds[0].feed) - if refresh != 'true' and page > 0: - logging.user(request, "~FBBrowse recommended feed: ~SBPage #%s" % (page+1)) - + if refresh != "true" and page > 0: + logging.user(request, "~FBBrowse recommended feed: ~SBPage #%s" % (page + 1)) + recommended_feed = recommended_feeds 
and recommended_feeds[0] if not recommended_feeds: return HttpResponse("") - + feed_icon = MFeedIcon.objects(feed_id=recommended_feed.feed_id) - + if recommended_feed: - return render(request, 'recommendations/render_recommended_feed.xhtml', { - 'recommended_feed' : recommended_feed, - 'description' : recommended_feed.description or recommended_feed.feed.data.feed_tagline, - 'usersub' : usersub, - 'feed_icon' : feed_icon and feed_icon[0], - 'has_next_page' : len(recommended_feeds) > 1, - 'has_previous_page' : page != 0, - 'unmoderated' : unmoderated, - 'today' : datetime.datetime.now(), - 'page' : page, - }) + return render( + request, + "recommendations/render_recommended_feed.xhtml", + { + "recommended_feed": recommended_feed, + "description": recommended_feed.description or recommended_feed.feed.data.feed_tagline, + "usersub": usersub, + "feed_icon": feed_icon and feed_icon[0], + "has_next_page": len(recommended_feeds) > 1, + "has_previous_page": page != 0, + "unmoderated": unmoderated, + "today": datetime.datetime.now(), + "page": page, + }, + ) else: return HttpResponse("") - + + @json.json_view def load_feed_info(request, feed_id): feed = get_object_or_404(Feed, pk=feed_id) @@ -56,58 +65,56 @@ def load_feed_info(request, feed_id): recommended_feed = RecommendedFeed.objects.filter(user=request.user, feed=feed) if recommended_feed: previous_recommendation = recommended_feed[0].created_date - + return { - 'num_subscribers': feed.num_subscribers, - 'tagline': feed.data.feed_tagline, - 'previous_recommendation': previous_recommendation + "num_subscribers": feed.num_subscribers, + "tagline": feed.data.feed_tagline, + "previous_recommendation": previous_recommendation, } - + + @ajax_login_required @json.json_view def save_recommended_feed(request): - feed_id = request.POST['feed_id'] - feed = get_object_or_404(Feed, pk=int(feed_id)) - tagline = request.POST['tagline'] - twitter = request.POST.get('twitter') - code = 1 - + feed_id = request.POST["feed_id"] + feed = get_object_or_404(Feed, pk=int(feed_id)) + tagline = request.POST["tagline"] + twitter = request.POST.get("twitter") + code = 1 + recommended_feed, created = RecommendedFeed.objects.get_or_create( - feed=feed, - user=request.user, - defaults=dict( - description=tagline, - twitter=twitter - ) + feed=feed, user=request.user, defaults=dict(description=tagline, twitter=twitter) ) return dict(code=code if created else -1) - + + @admin_only @ajax_login_required def approve_feed(request): - feed_id = request.POST['feed_id'] - feed = get_object_or_404(Feed, pk=int(feed_id)) - date = request.POST['date'] + feed_id = request.POST["feed_id"] + feed = get_object_or_404(Feed, pk=int(feed_id)) + date = request.POST["date"] recommended_feed = RecommendedFeed.objects.filter(feed=feed)[0] - - year, month, day = re.search(r'(\d{4})-(\d{1,2})-(\d{1,2})', date).groups() + + year, month, day = re.search(r"(\d{4})-(\d{1,2})-(\d{1,2})", date).groups() recommended_feed.is_public = True recommended_feed.approved_date = datetime.date(int(year), int(month), int(day)) recommended_feed.save() - + return load_recommended_feed(request) + @admin_only @ajax_login_required def decline_feed(request): - feed_id = request.GET['feed_id'] - feed = get_object_or_404(Feed, pk=int(feed_id)) + feed_id = request.GET["feed_id"] + feed = get_object_or_404(Feed, pk=int(feed_id)) recommended_feeds = RecommendedFeed.objects.filter(feed=feed) - + for recommended_feed in recommended_feeds: recommended_feed.is_public = False recommended_feed.declined_date = datetime.datetime.now() 
recommended_feed.save() - + return load_recommended_feed(request) diff --git a/apps/rss_feeds/factories.py b/apps/rss_feeds/factories.py index 74abbdcc7..c6d6c0d62 100644 --- a/apps/rss_feeds/factories.py +++ b/apps/rss_feeds/factories.py @@ -8,24 +8,27 @@ from django.conf import settings NEWSBLUR_DIR = settings.NEWSBLUR_DIR fake = Faker() + def generate_address(): return f"{NEWSBLUR_DIR}/apps/analyzer/fixtures/{fake.word()}.xml" + class FeedFactory(DjangoModelFactory): feed_address = FuzzyAttribute(generate_address) feed_link = FuzzyAttribute(generate_address) - creation = factory.Faker('date') - feed_title = factory.Faker('sentence') - last_update = factory.Faker('date_time') - next_scheduled_update = factory.Faker('date_time') - last_story_date = factory.Faker('date_time') + creation = factory.Faker("date") + feed_title = factory.Faker("sentence") + last_update = factory.Faker("date_time") + next_scheduled_update = factory.Faker("date_time") + last_story_date = factory.Faker("date_time") min_to_decay = 1 - last_modified = factory.Faker('date_time') + last_modified = factory.Faker("date_time") hash_address_and_link = fake.sha1() class Meta: model = Feed + class DuplicateFeedFactory(DjangoModelFactory): class Meta: - model = DuplicateFeed \ No newline at end of file + model = DuplicateFeed diff --git a/apps/rss_feeds/icon_importer.py b/apps/rss_feeds/icon_importer.py index 95108b55c..9db42f1a4 100644 --- a/apps/rss_feeds/icon_importer.py +++ b/apps/rss_feeds/icon_importer.py @@ -33,7 +33,6 @@ from utils.feed_functions import TimeoutError, timelimit class IconImporter(object): - def __init__(self, feed, page_data=None, force=False): self.feed = feed self.force = force @@ -45,27 +44,27 @@ class IconImporter(object): # print 'Not found, skipping...' return if ( - not self.force - and not self.feed.favicon_not_found - and self.feed_icon.icon_url - and self.feed.s3_icon + not self.force + and not self.feed.favicon_not_found + and self.feed_icon.icon_url + and self.feed.s3_icon ): # print 'Found, but skipping...' 
return - if 'facebook.com' in self.feed.feed_address: + if "facebook.com" in self.feed.feed_address: image, image_file, icon_url = self.fetch_facebook_image() else: image, image_file, icon_url = self.fetch_image_from_page_data() if not image: image, image_file, icon_url = self.fetch_image_from_path(force=self.force) - + if not image: self.feed_icon.not_found = True self.feed_icon.save() self.feed.favicon_not_found = True self.feed.save() return False - + image = self.normalize_image(image) try: color = self.determine_dominant_color_in_image(image) @@ -79,49 +78,53 @@ class IconImporter(object): if len(image_str) > 500000: image = None - if (image and - (self.force or - self.feed_icon.data != image_str or - self.feed_icon.icon_url != icon_url or - self.feed_icon.not_found or - (settings.BACKED_BY_AWS.get('icons_on_s3') and not self.feed.s3_icon))): - logging.debug(" ---> [%-30s] ~SN~FBIcon difference:~FY color:%s (%s/%s) data:%s url:%s notfound:%s no-s3:%s" % ( - self.feed.log_title[:30], - self.feed_icon.color != color, self.feed_icon.color, color, - self.feed_icon.data != image_str, - self.feed_icon.icon_url != icon_url, - self.feed_icon.not_found, - settings.BACKED_BY_AWS.get('icons_on_s3') and not self.feed.s3_icon)) + if image and ( + self.force + or self.feed_icon.data != image_str + or self.feed_icon.icon_url != icon_url + or self.feed_icon.not_found + or (settings.BACKED_BY_AWS.get("icons_on_s3") and not self.feed.s3_icon) + ): + logging.debug( + " ---> [%-30s] ~SN~FBIcon difference:~FY color:%s (%s/%s) data:%s url:%s notfound:%s no-s3:%s" + % ( + self.feed.log_title[:30], + self.feed_icon.color != color, + self.feed_icon.color, + color, + self.feed_icon.data != image_str, + self.feed_icon.icon_url != icon_url, + self.feed_icon.not_found, + settings.BACKED_BY_AWS.get("icons_on_s3") and not self.feed.s3_icon, + ) + ) self.feed_icon.data = image_str self.feed_icon.icon_url = icon_url self.feed_icon.color = color self.feed_icon.not_found = False self.feed_icon.save() - if settings.BACKED_BY_AWS.get('icons_on_s3'): + if settings.BACKED_BY_AWS.get("icons_on_s3"): self.save_to_s3(image_str) if self.feed.favicon_color != color: self.feed.favicon_color = color self.feed.favicon_not_found = False - self.feed.save(update_fields=['favicon_color', 'favicon_not_found']) - + self.feed.save(update_fields=["favicon_color", "favicon_not_found"]) + return not self.feed.favicon_not_found def save_to_s3(self, image_str): expires = datetime.datetime.now() + datetime.timedelta(days=60) expires = expires.strftime("%a, %d %b %Y %H:%M:%S GMT") base64.b64decode(image_str) - settings.S3_CONN.Object(settings.S3_ICONS_BUCKET_NAME, - self.feed.s3_icons_key).put(Body=base64.b64decode(image_str), - ContentType='image/png', - Expires=expires, - ACL='public-read' - ) + settings.S3_CONN.Object(settings.S3_ICONS_BUCKET_NAME, self.feed.s3_icons_key).put( + Body=base64.b64decode(image_str), ContentType="image/png", Expires=expires, ACL="public-read" + ) self.feed.s3_icon = True self.feed.save() def load_icon(self, image_file, index=None): - ''' + """ DEPRECATED Load Windows ICO image. @@ -130,10 +133,10 @@ class IconImporter(object): description. 
Cribbed and modified from http://djangosnippets.org/snippets/1287/ - ''' + """ try: image_file.seek(0) - header = struct.unpack('<3H', image_file.read(6)) + header = struct.unpack("<3H", image_file.read(6)) except Exception: return @@ -144,7 +147,7 @@ class IconImporter(object): # Collect icon directories directories = [] for i in range(header[2]): - directory = list(struct.unpack('<4B2H2I', image_file.read(16))) + directory = list(struct.unpack("<4B2H2I", image_file.read(16))) for j in range(3): if not directory[j]: directory[j] = 256 @@ -175,7 +178,7 @@ class IconImporter(object): image = BmpImagePlugin.DibImageFile(image_file) except IOError: return - if image.mode == 'RGBA': + if image.mode == "RGBA": # Windows XP 32-bit color depth icon without AND bitmap pass else: @@ -194,10 +197,9 @@ class IconImporter(object): # Load AND bitmap image_file.seek(offset) string = image_file.read(size) - mask = Image.frombytes('1', image.size, string, 'raw', - ('1;I', stride, -1)) + mask = Image.frombytes("1", image.size, string, "raw", ("1;I", stride, -1)) - image = image.convert('RGBA') + image = image.convert("RGBA") image.putalpha(mask) return image @@ -208,7 +210,7 @@ class IconImporter(object): content = None if self.page_data: content = self.page_data - elif settings.BACKED_BY_AWS.get('pages_on_node'): + elif settings.BACKED_BY_AWS.get("pages_on_node"): domain = "node-page.service.consul:8008" if settings.DOCKERBUILD: domain = "node:8008" @@ -222,7 +224,7 @@ class IconImporter(object): content = page_response.content except requests.ConnectionError: pass - elif settings.BACKED_BY_AWS.get('pages_on_s3') and self.feed.s3_page: + elif settings.BACKED_BY_AWS.get("pages_on_s3") and self.feed.s3_page: key = settings.S3_CONN.Bucket(settings.S3_PAGES_BUCKET_NAME).Object(key=self.feed.s3_pages_key) compressed_content = key.get()["Body"].read() stream = BytesIO(compressed_content) @@ -238,28 +240,35 @@ class IconImporter(object): try: content = requests.get(self.cleaned_feed_link, timeout=10).content url = self._url_from_html(content) - except (AttributeError, SocketError, requests.ConnectionError, - requests.models.MissingSchema, requests.sessions.InvalidSchema, - requests.sessions.TooManyRedirects, - requests.models.InvalidURL, - requests.models.ChunkedEncodingError, - requests.models.ContentDecodingError, - http.client.IncompleteRead, - requests.adapters.ReadTimeout, - LocationParseError, OpenSSLError, PyAsn1Error, - ValueError) as e: + except ( + AttributeError, + SocketError, + requests.ConnectionError, + requests.models.MissingSchema, + requests.sessions.InvalidSchema, + requests.sessions.TooManyRedirects, + requests.models.InvalidURL, + requests.models.ChunkedEncodingError, + requests.models.ContentDecodingError, + http.client.IncompleteRead, + requests.adapters.ReadTimeout, + LocationParseError, + OpenSSLError, + PyAsn1Error, + ValueError, + ) as e: logging.debug(" ---> ~SN~FRFailed~FY to fetch ~FGfeed icon~FY: %s" % e) if url: image, image_file = self.get_image_from_url(url) return image, image_file, url - + @property def cleaned_feed_link(self): - if self.feed.feed_link.startswith('http'): + if self.feed.feed_link.startswith("http"): return self.feed.feed_link - return 'http://' + self.feed.feed_link - - def fetch_image_from_path(self, path='favicon.ico', force=False): + return "http://" + self.feed.feed_link + + def fetch_image_from_path(self, path="favicon.ico", force=False): image = None url = None @@ -267,7 +276,7 @@ class IconImporter(object): url = self.feed_icon.icon_url if not url and 
self.feed.feed_link and len(self.feed.feed_link) > 6: try: - url = urllib.parse.urljoin(self.feed.feed_link, 'favicon.ico') + url = urllib.parse.urljoin(self.feed.feed_link, "favicon.ico") except ValueError: url = None if not url: @@ -275,21 +284,21 @@ class IconImporter(object): image, image_file = self.get_image_from_url(url) if not image: - url = urllib.parse.urljoin(self.feed.feed_link, '/favicon.ico') + url = urllib.parse.urljoin(self.feed.feed_link, "/favicon.ico") image, image_file = self.get_image_from_url(url) # print 'Found: %s - %s' % (url, image) return image, image_file, url - + def fetch_facebook_image(self): facebook_fetcher = FacebookFetcher(self.feed) url = facebook_fetcher.favicon_url() image, image_file = self.get_image_from_url(url) if not image: - url = urllib.parse.urljoin(self.feed.feed_link, '/favicon.ico') + url = urllib.parse.urljoin(self.feed.feed_link, "/favicon.ico") image, image_file = self.get_image_from_url(url) # print 'Found: %s - %s' % (url, image) return image, image_file, url - + def get_image_from_url(self, url): # print 'Requesting: %s' % url if not url: @@ -298,15 +307,15 @@ class IconImporter(object): @timelimit(30) def _1(url): headers = { - 'User-Agent': 'NewsBlur Favicon Fetcher - %s subscriber%s - %s %s' % - ( - self.feed.num_subscribers, - 's' if self.feed.num_subscribers != 1 else '', - self.feed.permalink, - self.feed.fake_user_agent, - ), - 'Connection': 'close', - 'Accept': 'image/png,image/x-icon,image/*;q=0.9,*/*;q=0.8' + "User-Agent": "NewsBlur Favicon Fetcher - %s subscriber%s - %s %s" + % ( + self.feed.num_subscribers, + "s" if self.feed.num_subscribers != 1 else "", + self.feed.permalink, + self.feed.fake_user_agent, + ), + "Connection": "close", + "Accept": "image/png,image/x-icon,image/*;q=0.9,*/*;q=0.8", } try: request = urllib.request.Request(url, headers=headers) @@ -314,6 +323,7 @@ class IconImporter(object): except Exception: return None return icon + try: icon = _1(url) except TimeoutError: @@ -333,7 +343,7 @@ class IconImporter(object): return url try: if isinstance(content, str): - content = content.encode('utf-8') + content = content.encode("utf-8") icon_path = lxml.html.fromstring(content).xpath( '//link[@rel="icon" or @rel="shortcut icon"]/@href' ) @@ -341,7 +351,7 @@ class IconImporter(object): return url if icon_path: - if str(icon_path[0]).startswith('http'): + if str(icon_path[0]).startswith("http"): url = icon_path[0] else: url = urllib.parse.urljoin(self.feed.feed_link, icon_path[0]) @@ -350,9 +360,9 @@ class IconImporter(object): def normalize_image(self, image): # if image.size != (16, 16): # image = image.resize((16, 16), Image.BICUBIC) - if image.mode != 'RGBA': + if image.mode != "RGBA": try: - image = image.convert('RGBA') + image = image.convert("RGBA") except IOError: pass @@ -362,8 +372,8 @@ class IconImporter(object): NUM_CLUSTERS = 5 # Convert image into array of values for each point. - if image.mode == '1': - image.convert('L') + if image.mode == "1": + image.convert("L") ar = numpy.array(image) # ar = scipy.misc.fromimage(image) shape = ar.shape @@ -371,7 +381,7 @@ class IconImporter(object): # Reshape array of values to merge color bands. [[R], [G], [B], [A]] => [R, G, B, A] if len(shape) > 2: ar = ar.reshape(scipy.product(shape[:2]), shape[2]) - + # Get NUM_CLUSTERS worth of centroids. ar = ar.astype(numpy.float) codes, _ = scipy.cluster.vq.kmeans(ar, NUM_CLUSTERS) @@ -379,9 +389,16 @@ class IconImporter(object): # Pare centroids, removing blacks and whites and shades of really dark and really light. 
original_codes = codes for low, hi in [(60, 200), (35, 230), (10, 250)]: - codes = scipy.array([code for code in codes - if not ((code[0] < low and code[1] < low and code[2] < low) or - (code[0] > hi and code[1] > hi and code[2] > hi))]) + codes = scipy.array( + [ + code + for code in codes + if not ( + (code[0] < low and code[1] < low and code[2] < low) + or (code[0] > hi and code[1] > hi and code[2] > hi) + ) + ] + ) if not len(codes): codes = original_codes else: @@ -409,7 +426,7 @@ class IconImporter(object): def string_from_image(self, image): output = BytesIO() - image.save(output, 'png', quality=95) + image.save(output, "png", quality=95) contents = output.getvalue() output.close() return base64.b64encode(contents).decode() diff --git a/apps/rss_feeds/management/commands/calculate_scores.py b/apps/rss_feeds/management/commands/calculate_scores.py index 8f914f28c..7e371554a 100644 --- a/apps/rss_feeds/management/commands/calculate_scores.py +++ b/apps/rss_feeds/management/commands/calculate_scores.py @@ -7,55 +7,67 @@ import errno import re import datetime -class Command(BaseCommand): +class Command(BaseCommand): def add_arguments(self, parser): - parser.add_argument("-a", "--all", dest="all", action="store_true", help="All feeds, need it or not (can be combined with a user)"), - parser.add_argument("-s", "--silent", dest="silent", default=False, action="store_true", help="Inverse verbosity."), + parser.add_argument( + "-a", + "--all", + dest="all", + action="store_true", + help="All feeds, need it or not (can be combined with a user)", + ), + parser.add_argument( + "-s", "--silent", dest="silent", default=False, action="store_true", help="Inverse verbosity." + ), parser.add_argument("-u", "--user", dest="user", nargs=1, help="Specify user id or username"), parser.add_argument("-d", "--daemon", dest="daemonize", action="store_true"), - parser.add_argument("-D", "--days", dest="days", nargs=1, default=1, type='int'), - parser.add_argument("-O", "--offset", dest="offset", nargs=1, default=0, type='int'), + parser.add_argument("-D", "--days", dest="days", nargs=1, default=1, type="int"), + parser.add_argument("-O", "--offset", dest="offset", nargs=1, default=0, type="int"), def handle(self, *args, **options): settings.LOG_TO_STREAM = True - if options['daemonize']: + if options["daemonize"]: daemonize() - if options['user']: - if re.match(r"([0-9]+)", options['user']): - users = User.objects.filter(pk=int(options['user'])) + if options["user"]: + if re.match(r"([0-9]+)", options["user"]): + users = User.objects.filter(pk=int(options["user"])) else: - users = User.objects.filter(username=options['user']) + users = User.objects.filter(username=options["user"]) else: - users = User.objects.filter(profile__last_seen_on__gte=datetime.datetime.now()-datetime.timedelta(days=options['days'])).order_by('pk') - + users = User.objects.filter( + profile__last_seen_on__gte=datetime.datetime.now() - datetime.timedelta(days=options["days"]) + ).order_by("pk") + user_count = users.count() for i, u in enumerate(users): - if i < options['offset']: continue - if options['all']: + if i < options["offset"]: + continue + if options["all"]: usersubs = UserSubscription.objects.filter(user=u, active=True) else: usersubs = UserSubscription.objects.filter(user=u, needs_unread_recalc=True) - print((" ---> %s has %s feeds (%s/%s)" % (u.username, usersubs.count(), i+1, user_count))) + print((" ---> %s has %s feeds (%s/%s)" % (u.username, usersubs.count(), i + 1, user_count))) for sub in usersubs: try: - 
sub.calculate_feed_scores(silent=options['silent']) + sub.calculate_feed_scores(silent=options["silent"]) except Exception as e: print((" ***> Exception: %s" % e)) continue - + + def daemonize(): """ Detach from the terminal and continue as a daemon. """ # swiped from twisted/scripts/twistd.py # See http://www.erlenstar.demon.co.uk/unix/faq_toc.html#TOC16 - if os.fork(): # launch child and... - os._exit(0) # kill off parent + if os.fork(): # launch child and... + os._exit(0) # kill off parent os.setsid() - if os.fork(): # launch child and... - os._exit(0) # kill off parent again. + if os.fork(): # launch child and... + os._exit(0) # kill off parent again. os.umask(0o77) null = os.open("/dev/null", os.O_RDWR) for i in range(3): @@ -64,4 +76,4 @@ def daemonize(): except OSError as e: if e.errno != errno.EBADF: raise - os.close(null) \ No newline at end of file + os.close(null) diff --git a/apps/rss_feeds/management/commands/count_stories.py b/apps/rss_feeds/management/commands/count_stories.py index 06d7ba91b..41db438e6 100644 --- a/apps/rss_feeds/management/commands/count_stories.py +++ b/apps/rss_feeds/management/commands/count_stories.py @@ -1,23 +1,23 @@ from django.core.management.base import BaseCommand from apps.rss_feeds.models import Feed -class Command(BaseCommand): +class Command(BaseCommand): def add_arguments(self, parser): parser.add_argument("-f", "--feed", dest="feed", default=None) parser.add_argument("-t", "--title", dest="title", default=None) parser.add_argument("-V", "--verbose", dest="verbose", action="store_true") - + def handle(self, *args, **options): - if options['title']: - feeds = Feed.objects.filter(feed_title__icontains=options['title']) - elif options['feed']: - feeds = Feed.objects.filter(pk=options['feed']) + if options["title"]: + feeds = Feed.objects.filter(feed_title__icontains=options["title"]) + elif options["feed"]: + feeds = Feed.objects.filter(pk=options["feed"]) else: feeds = Feed.objects.all() # Count stories in past month to calculate next scheduled update for feed in feeds: - feed.count_stories(verbose=options['verbose']) - - print(("\nCounted %s feeds" % feeds.count())) \ No newline at end of file + feed.count_stories(verbose=options["verbose"]) + + print(("\nCounted %s feeds" % feeds.count())) diff --git a/apps/rss_feeds/management/commands/count_subscribers.py b/apps/rss_feeds/management/commands/count_subscribers.py index 40757a44d..a9e465f7b 100644 --- a/apps/rss_feeds/management/commands/count_subscribers.py +++ b/apps/rss_feeds/management/commands/count_subscribers.py @@ -1,34 +1,34 @@ from django.core.management.base import BaseCommand from apps.rss_feeds.models import Feed -class Command(BaseCommand): +class Command(BaseCommand): def add_arguments(self, parser): parser.add_argument("-f", "--feed", dest="feed", default=None) parser.add_argument("-t", "--title", dest="title", default=None) parser.add_argument("-V", "--verbose", dest="verbose", action="store_true") parser.add_argument("-D", "--delete", dest="delete", action="store_true") - + def handle(self, *args, **options): - if options['title']: - feeds = Feed.objects.filter(feed_title__icontains=options['title']) - elif options['feed']: - feeds = Feed.objects.filter(pk=options['feed']) + if options["title"]: + feeds = Feed.objects.filter(feed_title__icontains=options["title"]) + elif options["feed"]: + feeds = Feed.objects.filter(pk=options["feed"]) else: feeds = Feed.objects.all() - + feeds_count = feeds.count() - + for i in range(0, feeds_count, 100): - feeds = 
Feed.objects.all()[i:i+100] + feeds = Feed.objects.all()[i : i + 100] for feed in feeds.iterator(): - feed.count_subscribers(verbose=options['verbose']) - - if options['delete']: + feed.count_subscribers(verbose=options["verbose"]) + + if options["delete"]: print("# Deleting old feeds...") old_feeds = Feed.objects.filter(num_subscribers=0) for feed in old_feeds: feed.count_subscribers(verbose=True) if feed.num_subscribers == 0: - print((' ---> Deleting: [%s] %s' % (feed.pk, feed))) - feed.delete() \ No newline at end of file + print((" ---> Deleting: [%s] %s" % (feed.pk, feed))) + feed.delete() diff --git a/apps/rss_feeds/management/commands/mark_read.py b/apps/rss_feeds/management/commands/mark_read.py index f72d4f5f3..ae158ead4 100644 --- a/apps/rss_feeds/management/commands/mark_read.py +++ b/apps/rss_feeds/management/commands/mark_read.py @@ -3,28 +3,30 @@ from django.contrib.auth.models import User from apps.reader.models import UserSubscription import datetime -class Command(BaseCommand): +class Command(BaseCommand): def add_arguments(self, parser): parser.add_argument("-d", "--days", dest="days", nargs=1, default=1, help="Days of unread") parser.add_argument("-u", "--username", dest="username", nargs=1, help="Specify user id or username") parser.add_argument("-U", "--userid", dest="userid", nargs=1, help="Specify user id or username") - + def handle(self, *args, **options): - if options['userid']: - user = User.objects.filter(pk=options['userid'])[0] - elif options['username']: - user = User.objects.get(username__icontains=options['username']) + if options["userid"]: + user = User.objects.filter(pk=options["userid"])[0] + elif options["username"]: + user = User.objects.get(username__icontains=options["username"]) else: raise Exception("Need username or user id.") - + user.profile.last_seen_on = datetime.datetime.utcnow() user.profile.save() feeds = UserSubscription.objects.filter(user=user) for sub in feeds: - if options['days'] == 0: + if options["days"] == 0: sub.mark_feed_read() else: - sub.mark_read_date = datetime.datetime.utcnow() - datetime.timedelta(days=int(options['days'])) + sub.mark_read_date = datetime.datetime.utcnow() - datetime.timedelta( + days=int(options["days"]) + ) sub.needs_unread_recalc = True - sub.save() \ No newline at end of file + sub.save() diff --git a/apps/rss_feeds/management/commands/query_popularity.py b/apps/rss_feeds/management/commands/query_popularity.py index 65acd70fc..5afc87493 100644 --- a/apps/rss_feeds/management/commands/query_popularity.py +++ b/apps/rss_feeds/management/commands/query_popularity.py @@ -8,8 +8,8 @@ import errno import re import datetime -class Command(BaseCommand): +class Command(BaseCommand): def add_argument(self, parser): parser.add_argument("-q", "--query", dest="query", help="Search query") parser.add_argument("-l", "--limit", dest="limit", type="int", default=1000, help="Limit of stories") @@ -18,4 +18,4 @@ class Command(BaseCommand): # settings.LOG_TO_STREAM = True # Feed.query_popularity(options['query'], limit=options['limit']) - Feed.xls_query_popularity(options['query'], limit=options['limit']) \ No newline at end of file + Feed.xls_query_popularity(options["query"], limit=options["limit"]) diff --git a/apps/rss_feeds/management/commands/refresh_feed.py b/apps/rss_feeds/management/commands/refresh_feed.py index 63527d548..14a1772c1 100644 --- a/apps/rss_feeds/management/commands/refresh_feed.py +++ b/apps/rss_feeds/management/commands/refresh_feed.py @@ -2,8 +2,8 @@ from django.core.management.base import 
BaseCommand from apps.rss_feeds.models import Feed from utils.management_functions import daemonize -class Command(BaseCommand): +class Command(BaseCommand): def add_arguments(self, parser): parser.add_argument("-f", "--feed", dest="feed", default=None) parser.add_argument("-F", "--force", dest="force", action="store_true") @@ -11,11 +11,11 @@ class Command(BaseCommand): parser.add_argument("-d", "--daemon", dest="daemonize", action="store_true") def handle(self, *args, **options): - if options['daemonize']: + if options["daemonize"]: daemonize() - - if options['title']: - feed = Feed.objects.get(feed_title__icontains=options['title']) + + if options["title"]: + feed = Feed.objects.get(feed_title__icontains=options["title"]) else: - feed = Feed.get_by_id(options['feed']) - feed.update(force=options['force'], single_threaded=True, verbose=True) + feed = Feed.get_by_id(options["feed"]) + feed.update(force=options["force"], single_threaded=True, verbose=True) diff --git a/apps/rss_feeds/management/commands/refresh_feeds.py b/apps/rss_feeds/management/commands/refresh_feeds.py index 44b2ed505..ef176beeb 100644 --- a/apps/rss_feeds/management/commands/refresh_feeds.py +++ b/apps/rss_feeds/management/commands/refresh_feeds.py @@ -13,76 +13,85 @@ import datetime class Command(BaseCommand): - def add_arguments(self, parser): parser.add_argument("-f", "--feed", default=None) parser.add_argument("-d", "--daemon", dest="daemonize", action="store_true") parser.add_argument("-F", "--force", dest="force", action="store_true") parser.add_argument("-s", "--single_threaded", dest="single_threaded", action="store_true") - parser.add_argument('-t', '--timeout', type=int, default=10, - help='Wait timeout in seconds when connecting to feeds.') - parser.add_argument('-u', '--username', type=str, dest='username') - parser.add_argument('-V', '--verbose', action='store_true', - dest='verbose', default=False, help='Verbose output.') - parser.add_argument('-S', '--skip', type=int, - dest='skip', default=0, help='Skip stories per month < #.') - parser.add_argument('-w', '--workerthreads', type=int, default=4, - help='Worker threads that will fetch feeds in parallel.') + parser.add_argument( + "-t", "--timeout", type=int, default=10, help="Wait timeout in seconds when connecting to feeds." + ) + parser.add_argument("-u", "--username", type=str, dest="username") + parser.add_argument( + "-V", "--verbose", action="store_true", dest="verbose", default=False, help="Verbose output." + ) + parser.add_argument( + "-S", "--skip", type=int, dest="skip", default=0, help="Skip stories per month < #." 
+ ) + parser.add_argument( + "-w", + "--workerthreads", + type=int, + default=4, + help="Worker threads that will fetch feeds in parallel.", + ) def handle(self, *args, **options): - if options['daemonize']: + if options["daemonize"]: daemonize() - + settings.LOG_TO_STREAM = True now = datetime.datetime.utcnow() - - if options['skip']: - feeds = Feed.objects.filter(next_scheduled_update__lte=now, - average_stories_per_month__lt=options['skip'], - active=True) + + if options["skip"]: + feeds = Feed.objects.filter( + next_scheduled_update__lte=now, average_stories_per_month__lt=options["skip"], active=True + ) print(" ---> Skipping %s feeds" % feeds.count()) for feed in feeds: feed.set_next_scheduled_update() - print('.', end=' ') + print(".", end=" ") return - - socket.setdefaulttimeout(options['timeout']) - if options['force']: + + socket.setdefaulttimeout(options["timeout"]) + if options["force"]: feeds = Feed.objects.all() - elif options['username']: - usersubs = UserSubscription.objects.filter(user=User.objects.get(username=options['username']), active=True) - feeds = Feed.objects.filter(pk__in=usersubs.values('feed_id')) - elif options['feed']: - feeds = Feed.objects.filter(pk=options['feed']) + elif options["username"]: + usersubs = UserSubscription.objects.filter( + user=User.objects.get(username=options["username"]), active=True + ) + feeds = Feed.objects.filter(pk__in=usersubs.values("feed_id")) + elif options["feed"]: + feeds = Feed.objects.filter(pk=options["feed"]) else: feeds = Feed.objects.filter(next_scheduled_update__lte=now, active=True) - - feeds = feeds.order_by('?') - + + feeds = feeds.order_by("?") + for f in feeds: f.set_next_scheduled_update() - - num_workers = min(len(feeds), options['workerthreads']) - if options['single_threaded']: + + num_workers = min(len(feeds), options["workerthreads"]) + if options["single_threaded"]: num_workers = 1 - - options['compute_scores'] = True - options['quick'] = float(MStatistics.get('quick_fetch', 0)) - options['updates_off'] = MStatistics.get('updates_off', False) - - disp = feed_fetcher.Dispatcher(options, num_workers) - + + options["compute_scores"] = True + options["quick"] = float(MStatistics.get("quick_fetch", 0)) + options["updates_off"] = MStatistics.get("updates_off", False) + + disp = feed_fetcher.Dispatcher(options, num_workers) + feeds_queue = [] for _ in range(num_workers): feeds_queue.append([]) - + i = 0 for feed in feeds: - feeds_queue[i%num_workers].append(feed.pk) + feeds_queue[i % num_workers].append(feed.pk) i += 1 disp.add_jobs(feeds_queue, i) - + django.db.connection.close() - + print(" ---> Fetching %s feeds..." 
% feeds.count()) disp.run_jobs() diff --git a/apps/rss_feeds/management/commands/task_feeds.py b/apps/rss_feeds/management/commands/task_feeds.py index 794d8184b..664c250ea 100644 --- a/apps/rss_feeds/management/commands/task_feeds.py +++ b/apps/rss_feeds/management/commands/task_feeds.py @@ -5,16 +5,22 @@ import datetime class Command(BaseCommand): - def add_arguments(self, parser): parser.add_argument("-f", "--feed", default=None) - parser.add_argument("-a", "--all", default=False, action='store_true') - parser.add_argument("-b", "--broken", help="Task broken feeds that havent been fetched in a day.", default=False, action='store_true') - parser.add_argument('-V', '--verbose', action='store_true', - dest='verbose', default=False, help='Verbose output.') - + parser.add_argument("-a", "--all", default=False, action="store_true") + parser.add_argument( + "-b", + "--broken", + help="Task broken feeds that havent been fetched in a day.", + default=False, + action="store_true", + ) + parser.add_argument( + "-V", "--verbose", action="store_true", dest="verbose", default=False, help="Verbose output." + ) + def handle(self, *args, **options): - if options['broken']: + if options["broken"]: TaskBrokenFeeds.apply() else: TaskFeeds.apply() diff --git a/apps/rss_feeds/management/commands/trim_feeds.py b/apps/rss_feeds/management/commands/trim_feeds.py index 6d6c1090b..2cff87ad8 100644 --- a/apps/rss_feeds/management/commands/trim_feeds.py +++ b/apps/rss_feeds/management/commands/trim_feeds.py @@ -2,27 +2,23 @@ from django.core.management.base import BaseCommand from apps.rss_feeds.models import Feed import gc -class Command(BaseCommand): +class Command(BaseCommand): def add_arguments(self, parser): parser.add_argument("-f", "--feed", dest="feed", default=None), def handle(self, *args, **options): - if not options['feed']: - feeds = Feed.objects.filter( - fetched_once=True, - active_subscribers=0, - premium_subscribers=0 - ) + if not options["feed"]: + feeds = Feed.objects.filter(fetched_once=True, active_subscribers=0, premium_subscribers=0) else: - feeds = Feed.objects.filter(feed_id=options['feed']) + feeds = Feed.objects.filter(feed_id=options["feed"]) for f in queryset_iterator(feeds): f.trim_feed(verbose=True) - + def queryset_iterator(queryset, chunksize=100): - ''' + """ Iterate over a Django Queryset ordered by the primary key This method loads a maximum of chunksize (default: 1000) rows in it's @@ -31,12 +27,12 @@ def queryset_iterator(queryset, chunksize=100): classes. Note that the implementation of the iterator does not support ordered query sets. 
- ''' - last_pk = queryset.order_by('-pk')[0].pk - queryset = queryset.order_by('pk') + """ + last_pk = queryset.order_by("-pk")[0].pk + queryset = queryset.order_by("pk") pk = queryset[0].pk while pk < last_pk: for row in queryset.filter(pk__gte=pk, pk__lte=last_pk)[:chunksize]: yield row pk += chunksize - gc.collect() \ No newline at end of file + gc.collect() diff --git a/apps/rss_feeds/migrations/0001_initial.py b/apps/rss_feeds/migrations/0001_initial.py index e2e0255cd..5741ee8b5 100644 --- a/apps/rss_feeds/migrations/0001_initial.py +++ b/apps/rss_feeds/migrations/0001_initial.py @@ -6,83 +6,107 @@ import utils.fields class Migration(migrations.Migration): - initial = True - dependencies = [ - ] + dependencies = [] operations = [ migrations.CreateModel( - name='DuplicateFeed', + name="DuplicateFeed", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('duplicate_address', models.CharField(db_index=True, max_length=764)), - ('duplicate_link', models.CharField(db_index=True, max_length=764, null=True)), - ('duplicate_feed_id', models.CharField(db_index=True, max_length=255, null=True)), + ( + "id", + models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID"), + ), + ("duplicate_address", models.CharField(db_index=True, max_length=764)), + ("duplicate_link", models.CharField(db_index=True, max_length=764, null=True)), + ("duplicate_feed_id", models.CharField(db_index=True, max_length=255, null=True)), ], ), migrations.CreateModel( - name='Feed', + name="Feed", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('feed_address', models.URLField(db_index=True, max_length=764)), - ('feed_address_locked', models.NullBooleanField(default=False)), - ('feed_link', models.URLField(blank=True, default='', max_length=1000, null=True)), - ('feed_link_locked', models.BooleanField(default=False)), - ('hash_address_and_link', models.CharField(max_length=64, unique=True)), - ('feed_title', models.CharField(blank=True, default='[Untitled]', max_length=255, null=True)), - ('is_push', models.NullBooleanField(default=False)), - ('active', models.BooleanField(db_index=True, default=True)), - ('num_subscribers', models.IntegerField(default=-1)), - ('active_subscribers', models.IntegerField(db_index=True, default=-1)), - ('premium_subscribers', models.IntegerField(default=-1)), - ('active_premium_subscribers', models.IntegerField(default=-1)), - ('last_update', models.DateTimeField(db_index=True)), - ('next_scheduled_update', models.DateTimeField()), - ('last_story_date', models.DateTimeField(blank=True, null=True)), - ('fetched_once', models.BooleanField(default=False)), - ('known_good', models.BooleanField(default=False)), - ('has_feed_exception', models.BooleanField(db_index=True, default=False)), - ('has_page_exception', models.BooleanField(db_index=True, default=False)), - ('has_page', models.BooleanField(default=True)), - ('exception_code', models.IntegerField(default=0)), - ('errors_since_good', models.IntegerField(default=0)), - ('min_to_decay', models.IntegerField(default=0)), - ('days_to_trim', models.IntegerField(default=90)), - ('creation', models.DateField(auto_now_add=True)), - ('etag', models.CharField(blank=True, max_length=255, null=True)), - ('last_modified', models.DateTimeField(blank=True, null=True)), - ('stories_last_month', models.IntegerField(default=0)), - ('average_stories_per_month', models.IntegerField(default=0)), - 
('last_load_time', models.IntegerField(default=0)), - ('favicon_color', models.CharField(blank=True, max_length=6, null=True)), - ('favicon_not_found', models.BooleanField(default=False)), - ('s3_page', models.NullBooleanField(default=False)), - ('s3_icon', models.NullBooleanField(default=False)), - ('search_indexed', models.NullBooleanField(default=None)), - ('branch_from_feed', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='rss_feeds.Feed')), + ( + "id", + models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID"), + ), + ("feed_address", models.URLField(db_index=True, max_length=764)), + ("feed_address_locked", models.NullBooleanField(default=False)), + ("feed_link", models.URLField(blank=True, default="", max_length=1000, null=True)), + ("feed_link_locked", models.BooleanField(default=False)), + ("hash_address_and_link", models.CharField(max_length=64, unique=True)), + ("feed_title", models.CharField(blank=True, default="[Untitled]", max_length=255, null=True)), + ("is_push", models.NullBooleanField(default=False)), + ("active", models.BooleanField(db_index=True, default=True)), + ("num_subscribers", models.IntegerField(default=-1)), + ("active_subscribers", models.IntegerField(db_index=True, default=-1)), + ("premium_subscribers", models.IntegerField(default=-1)), + ("active_premium_subscribers", models.IntegerField(default=-1)), + ("last_update", models.DateTimeField(db_index=True)), + ("next_scheduled_update", models.DateTimeField()), + ("last_story_date", models.DateTimeField(blank=True, null=True)), + ("fetched_once", models.BooleanField(default=False)), + ("known_good", models.BooleanField(default=False)), + ("has_feed_exception", models.BooleanField(db_index=True, default=False)), + ("has_page_exception", models.BooleanField(db_index=True, default=False)), + ("has_page", models.BooleanField(default=True)), + ("exception_code", models.IntegerField(default=0)), + ("errors_since_good", models.IntegerField(default=0)), + ("min_to_decay", models.IntegerField(default=0)), + ("days_to_trim", models.IntegerField(default=90)), + ("creation", models.DateField(auto_now_add=True)), + ("etag", models.CharField(blank=True, max_length=255, null=True)), + ("last_modified", models.DateTimeField(blank=True, null=True)), + ("stories_last_month", models.IntegerField(default=0)), + ("average_stories_per_month", models.IntegerField(default=0)), + ("last_load_time", models.IntegerField(default=0)), + ("favicon_color", models.CharField(blank=True, max_length=6, null=True)), + ("favicon_not_found", models.BooleanField(default=False)), + ("s3_page", models.NullBooleanField(default=False)), + ("s3_icon", models.NullBooleanField(default=False)), + ("search_indexed", models.NullBooleanField(default=None)), + ( + "branch_from_feed", + models.ForeignKey( + blank=True, + null=True, + on_delete=django.db.models.deletion.CASCADE, + to="rss_feeds.Feed", + ), + ), ], options={ - 'db_table': 'feeds', - 'ordering': ['feed_title'], + "db_table": "feeds", + "ordering": ["feed_title"], }, ), migrations.CreateModel( - name='FeedData', + name="FeedData", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('feed_tagline', models.CharField(blank=True, max_length=1024, null=True)), - ('story_count_history', models.TextField(blank=True, null=True)), - ('feed_classifier_counts', models.TextField(blank=True, null=True)), - ('popular_tags', models.CharField(blank=True, max_length=1024, 
null=True)), - ('popular_authors', models.CharField(blank=True, max_length=2048, null=True)), - ('feed', utils.fields.AutoOneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='data', to='rss_feeds.Feed')), + ( + "id", + models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID"), + ), + ("feed_tagline", models.CharField(blank=True, max_length=1024, null=True)), + ("story_count_history", models.TextField(blank=True, null=True)), + ("feed_classifier_counts", models.TextField(blank=True, null=True)), + ("popular_tags", models.CharField(blank=True, max_length=1024, null=True)), + ("popular_authors", models.CharField(blank=True, max_length=2048, null=True)), + ( + "feed", + utils.fields.AutoOneToOneField( + on_delete=django.db.models.deletion.CASCADE, related_name="data", to="rss_feeds.Feed" + ), + ), ], ), migrations.AddField( - model_name='duplicatefeed', - name='feed', - field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='duplicate_addresses', to='rss_feeds.Feed'), + model_name="duplicatefeed", + name="feed", + field=models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + related_name="duplicate_addresses", + to="rss_feeds.Feed", + ), ), ] diff --git a/apps/rss_feeds/migrations/0002_remove_mongo_types.py b/apps/rss_feeds/migrations/0002_remove_mongo_types.py index a6b98c7ba..e34e70f9b 100644 --- a/apps/rss_feeds/migrations/0002_remove_mongo_types.py +++ b/apps/rss_feeds/migrations/0002_remove_mongo_types.py @@ -3,28 +3,27 @@ from django.db import migrations from django.conf import settings + def remove_mongo_types(apps, schema_editor): db = settings.MONGODB.newsblur_dev collections = db.collection_names() for collection_name in collections: collection = db[collection_name] print(" ---> %s..." % (collection_name)) - if 'system' in collection_name: continue + if "system" in collection_name: + continue collection.update({}, {"$unset": {"_types": 1}}, multi=True) index_information = collection.index_information() - indexes_to_drop = [key for key, value in index_information.items() - if 'types' in value] + indexes_to_drop = [key for key, value in index_information.items() if "types" in value] # print(index_information, indexes_) for index in indexes_to_drop: print(" ---> Dropping mongo index %s on %s..." 
% (index, collection_name)) collection.drop_index(index) + class Migration(migrations.Migration): - dependencies = [ - ('rss_feeds', '0001_initial'), + ("rss_feeds", "0001_initial"), ] - operations = [ - migrations.RunPython(remove_mongo_types, migrations.RunPython.noop) - ] + operations = [migrations.RunPython(remove_mongo_types, migrations.RunPython.noop)] diff --git a/apps/rss_feeds/migrations/0003_auto_20220110_2105.py b/apps/rss_feeds/migrations/0003_auto_20220110_2105.py index 9986d3c7e..e13a80de4 100644 --- a/apps/rss_feeds/migrations/0003_auto_20220110_2105.py +++ b/apps/rss_feeds/migrations/0003_auto_20220110_2105.py @@ -4,35 +4,34 @@ from django.db import migrations, models class Migration(migrations.Migration): - dependencies = [ - ('rss_feeds', '0002_remove_mongo_types'), + ("rss_feeds", "0002_remove_mongo_types"), ] operations = [ migrations.AlterField( - model_name='feed', - name='feed_address_locked', + model_name="feed", + name="feed_address_locked", field=models.BooleanField(blank=True, default=False, null=True), ), migrations.AlterField( - model_name='feed', - name='is_push', + model_name="feed", + name="is_push", field=models.BooleanField(blank=True, default=False, null=True), ), migrations.AlterField( - model_name='feed', - name='s3_icon', + model_name="feed", + name="s3_icon", field=models.BooleanField(blank=True, default=False, null=True), ), migrations.AlterField( - model_name='feed', - name='s3_page', + model_name="feed", + name="s3_page", field=models.BooleanField(blank=True, default=False, null=True), ), migrations.AlterField( - model_name='feed', - name='search_indexed', + model_name="feed", + name="search_indexed", field=models.BooleanField(blank=True, default=None, null=True), ), ] diff --git a/apps/rss_feeds/migrations/0003_mongo_version_4_0.py b/apps/rss_feeds/migrations/0003_mongo_version_4_0.py index 9a2999904..7595452d3 100644 --- a/apps/rss_feeds/migrations/0003_mongo_version_4_0.py +++ b/apps/rss_feeds/migrations/0003_mongo_version_4_0.py @@ -3,6 +3,7 @@ from django.db import migrations from django.conf import settings + def set_mongo_feature_compatibility_version(apps, schema_editor): new_version = "4.0" db = settings.MONGODB.admin @@ -13,14 +14,11 @@ def set_mongo_feature_compatibility_version(apps, schema_editor): if old_version != new_version: db.command({"setFeatureCompatibilityVersion": new_version}) print(f" ---> Updated MongoDB featureCompatibilityVersion: {new_version}") - + class Migration(migrations.Migration): - dependencies = [ - ('rss_feeds', '0002_remove_mongo_types'), + ("rss_feeds", "0002_remove_mongo_types"), ] - operations = [ - migrations.RunPython(set_mongo_feature_compatibility_version, migrations.RunPython.noop) - ] + operations = [migrations.RunPython(set_mongo_feature_compatibility_version, migrations.RunPython.noop)] diff --git a/apps/rss_feeds/migrations/0004_feed_pro_subscribers.py b/apps/rss_feeds/migrations/0004_feed_pro_subscribers.py index 7579e56ff..35bc6e6d4 100644 --- a/apps/rss_feeds/migrations/0004_feed_pro_subscribers.py +++ b/apps/rss_feeds/migrations/0004_feed_pro_subscribers.py @@ -4,15 +4,14 @@ from django.db import migrations, models class Migration(migrations.Migration): - dependencies = [ - ('rss_feeds', '0003_auto_20220110_2105'), + ("rss_feeds", "0003_auto_20220110_2105"), ] operations = [ migrations.AddField( - model_name='feed', - name='pro_subscribers', + model_name="feed", + name="pro_subscribers", field=models.IntegerField(blank=True, default=0, null=True), ), ] diff --git 
a/apps/rss_feeds/migrations/0005_feed_archive_subscribers.py b/apps/rss_feeds/migrations/0005_feed_archive_subscribers.py index 1d8152591..3d877b2bd 100644 --- a/apps/rss_feeds/migrations/0005_feed_archive_subscribers.py +++ b/apps/rss_feeds/migrations/0005_feed_archive_subscribers.py @@ -4,15 +4,14 @@ from django.db import migrations, models class Migration(migrations.Migration): - dependencies = [ - ('rss_feeds', '0004_feed_pro_subscribers'), + ("rss_feeds", "0004_feed_pro_subscribers"), ] operations = [ migrations.AddField( - model_name='feed', - name='archive_subscribers', + model_name="feed", + name="archive_subscribers", field=models.IntegerField(blank=True, default=0, null=True), ), ] diff --git a/apps/rss_feeds/migrations/0006_feed_fs_size_bytes.py b/apps/rss_feeds/migrations/0006_feed_fs_size_bytes.py index cebc86363..4123d3f8f 100644 --- a/apps/rss_feeds/migrations/0006_feed_fs_size_bytes.py +++ b/apps/rss_feeds/migrations/0006_feed_fs_size_bytes.py @@ -4,15 +4,14 @@ from django.db import migrations, models class Migration(migrations.Migration): - dependencies = [ - ('rss_feeds', '0005_feed_archive_subscribers'), + ("rss_feeds", "0005_feed_archive_subscribers"), ] operations = [ migrations.AddField( - model_name='feed', - name='fs_size_bytes', + model_name="feed", + name="fs_size_bytes", field=models.IntegerField(blank=True, null=True), ), ] diff --git a/apps/rss_feeds/migrations/0007_merge_20220517_1355.py b/apps/rss_feeds/migrations/0007_merge_20220517_1355.py index f9e6e7bde..ae30775af 100644 --- a/apps/rss_feeds/migrations/0007_merge_20220517_1355.py +++ b/apps/rss_feeds/migrations/0007_merge_20220517_1355.py @@ -4,11 +4,9 @@ from django.db import migrations class Migration(migrations.Migration): - dependencies = [ - ('rss_feeds', '0006_feed_fs_size_bytes'), - ('rss_feeds', '0003_mongo_version_4_0'), + ("rss_feeds", "0006_feed_fs_size_bytes"), + ("rss_feeds", "0003_mongo_version_4_0"), ] - operations = [ - ] + operations = [] diff --git a/apps/rss_feeds/migrations/0008_feed_archive_count.py b/apps/rss_feeds/migrations/0008_feed_archive_count.py index 0450de50f..bc7becf63 100644 --- a/apps/rss_feeds/migrations/0008_feed_archive_count.py +++ b/apps/rss_feeds/migrations/0008_feed_archive_count.py @@ -4,15 +4,14 @@ from django.db import migrations, models class Migration(migrations.Migration): - dependencies = [ - ('rss_feeds', '0007_merge_20220517_1355'), + ("rss_feeds", "0007_merge_20220517_1355"), ] operations = [ migrations.AddField( - model_name='feed', - name='archive_count', + model_name="feed", + name="archive_count", field=models.IntegerField(blank=True, null=True), ), ] diff --git a/apps/rss_feeds/models.py b/apps/rss_feeds/models.py index 41c71fa07..89a0999a8 100755 --- a/apps/rss_feeds/models.py +++ b/apps/rss_feeds/models.py @@ -18,6 +18,7 @@ from collections import defaultdict from operator import itemgetter from bson.objectid import ObjectId from bs4 import BeautifulSoup + # from nltk.collocations import TrigramCollocationFinder, BigramCollocationFinder, TrigramAssocMeasures, BigramAssocMeasures from django.db import models from django.db import IntegrityError @@ -69,7 +70,9 @@ class Feed(models.Model): archive_subscribers = models.IntegerField(default=0, null=True, blank=True) pro_subscribers = models.IntegerField(default=0, null=True, blank=True) active_premium_subscribers = models.IntegerField(default=-1) - branch_from_feed = models.ForeignKey('Feed', blank=True, null=True, db_index=True, on_delete=models.CASCADE) + branch_from_feed = models.ForeignKey( + 
"Feed", blank=True, null=True, db_index=True, on_delete=models.CASCADE + ) last_update = models.DateTimeField(db_index=True) next_scheduled_update = models.DateTimeField() last_story_date = models.DateTimeField(null=True, blank=True) @@ -97,18 +100,18 @@ class Feed(models.Model): archive_count = models.IntegerField(null=True, blank=True) class Meta: - db_table="feeds" - ordering=["feed_title"] + db_table = "feeds" + ordering = ["feed_title"] # unique_together=[('feed_address', 'feed_link')] - + def __str__(self): if not self.feed_title: self.feed_title = "[Untitled]" self.save() return "%s%s: %s - %s/%s/%s/%s/%s %s stories (%s bytes)" % ( - self.pk, + self.pk, (" [B: %s]" % self.branch_from_feed.pk if self.branch_from_feed else ""), - self.feed_title, + self.feed_title, self.num_subscribers, self.active_subscribers, self.active_premium_subscribers, @@ -116,46 +119,43 @@ class Feed(models.Model): self.pro_subscribers, self.archive_count, self.fs_size_bytes, - ) - + ) + @property def title(self): title = self.feed_title or "[Untitled]" if self.active_premium_subscribers >= 1: title = "%s*" % title[:29] return title - + @property def log_title(self): return self.__str__() - + @property def permalink(self): return "%s/site/%s/%s" % (settings.NEWSBLUR_URL, self.pk, slugify(self.feed_title.lower()[:50])) - + @property def favicon_url(self): - if settings.BACKED_BY_AWS['icons_on_s3'] and self.s3_icon: + if settings.BACKED_BY_AWS["icons_on_s3"] and self.s3_icon: return "https://s3.amazonaws.com/%s/%s.png" % (settings.S3_ICONS_BUCKET_NAME, self.pk) - return reverse('feed-favicon', kwargs={'feed_id': self.pk}) - + return reverse("feed-favicon", kwargs={"feed_id": self.pk}) + @property def favicon_url_fqdn(self): - if settings.BACKED_BY_AWS['icons_on_s3'] and self.s3_icon: + if settings.BACKED_BY_AWS["icons_on_s3"] and self.s3_icon: return self.favicon_url - return "https://%s%s" % ( - Site.objects.get_current().domain, - self.favicon_url - ) - + return "https://%s%s" % (Site.objects.get_current().domain, self.favicon_url) + @property def s3_pages_key(self): return "%s.gz.html" % self.pk - + @property def s3_icons_key(self): return "%s.png" % self.pk - + @property def unread_cutoff(self): if self.archive_subscribers and self.archive_subscribers > 0: @@ -164,117 +164,121 @@ class Feed(models.Model): return datetime.datetime.utcnow() - datetime.timedelta(days=settings.DAYS_OF_UNREAD) return datetime.datetime.utcnow() - datetime.timedelta(days=settings.DAYS_OF_UNREAD_FREE) - + @classmethod def days_of_story_hashes_for_feed(cls, feed_id): try: - feed = cls.objects.only('archive_subscribers').get(pk=feed_id) + feed = cls.objects.only("archive_subscribers").get(pk=feed_id) return feed.days_of_story_hashes except cls.DoesNotExist: return settings.DAYS_OF_STORY_HASHES - + @property def days_of_story_hashes(self): if self.archive_subscribers and self.archive_subscribers > 0: return settings.DAYS_OF_STORY_HASHES_ARCHIVE return settings.DAYS_OF_STORY_HASHES - + @property def story_hashes_in_unread_cutoff(self): r = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL) - current_time = int(time.time() + 60*60*24) - unread_cutoff = self.unread_cutoff.strftime('%s') - story_hashes = r.zrevrangebyscore('zF:%s' % self.pk, current_time, unread_cutoff) + current_time = int(time.time() + 60 * 60 * 24) + unread_cutoff = self.unread_cutoff.strftime("%s") + story_hashes = r.zrevrangebyscore("zF:%s" % self.pk, current_time, unread_cutoff) return story_hashes - + @classmethod def generate_hash_address_and_link(cls, 
feed_address, feed_link): - if not feed_address: feed_address = "" - if not feed_link: feed_link = "" - return hashlib.sha1((feed_address+feed_link).encode(encoding='utf-8')).hexdigest() - + if not feed_address: + feed_address = "" + if not feed_link: + feed_link = "" + return hashlib.sha1((feed_address + feed_link).encode(encoding="utf-8")).hexdigest() + @property def is_newsletter(self): - return self.feed_address.startswith('newsletter:') or self.feed_address.startswith('http://newsletter:') - + return self.feed_address.startswith("newsletter:") or self.feed_address.startswith( + "http://newsletter:" + ) + def canonical(self, full=False, include_favicon=True): feed = { - 'id': self.pk, - 'feed_title': self.feed_title, - 'feed_address': self.feed_address, - 'feed_link': self.feed_link, - 'num_subscribers': self.num_subscribers, - 'updated': relative_timesince(self.last_update), - 'updated_seconds_ago': seconds_timesince(self.last_update), - 'fs_size_bytes': self.fs_size_bytes, - 'archive_count': self.archive_count, - 'last_story_date': self.last_story_date, - 'last_story_seconds_ago': seconds_timesince(self.last_story_date), - 'stories_last_month': self.stories_last_month, - 'average_stories_per_month': self.average_stories_per_month, - 'min_to_decay': self.min_to_decay, - 'subs': self.num_subscribers, - 'is_push': self.is_push, - 'is_newsletter': self.is_newsletter, - 'fetched_once': self.fetched_once, - 'search_indexed': self.search_indexed, - 'not_yet_fetched': not self.fetched_once, # Legacy. Doh. - 'favicon_color': self.favicon_color, - 'favicon_fade': self.favicon_fade(), - 'favicon_border': self.favicon_border(), - 'favicon_text_color': self.favicon_text_color(), - 'favicon_fetching': self.favicon_fetching, - 'favicon_url': self.favicon_url, - 's3_page': self.s3_page, - 's3_icon': self.s3_icon, - 'disabled_page': not self.has_page, + "id": self.pk, + "feed_title": self.feed_title, + "feed_address": self.feed_address, + "feed_link": self.feed_link, + "num_subscribers": self.num_subscribers, + "updated": relative_timesince(self.last_update), + "updated_seconds_ago": seconds_timesince(self.last_update), + "fs_size_bytes": self.fs_size_bytes, + "archive_count": self.archive_count, + "last_story_date": self.last_story_date, + "last_story_seconds_ago": seconds_timesince(self.last_story_date), + "stories_last_month": self.stories_last_month, + "average_stories_per_month": self.average_stories_per_month, + "min_to_decay": self.min_to_decay, + "subs": self.num_subscribers, + "is_push": self.is_push, + "is_newsletter": self.is_newsletter, + "fetched_once": self.fetched_once, + "search_indexed": self.search_indexed, + "not_yet_fetched": not self.fetched_once, # Legacy. Doh. 
+ "favicon_color": self.favicon_color, + "favicon_fade": self.favicon_fade(), + "favicon_border": self.favicon_border(), + "favicon_text_color": self.favicon_text_color(), + "favicon_fetching": self.favicon_fetching, + "favicon_url": self.favicon_url, + "s3_page": self.s3_page, + "s3_icon": self.s3_icon, + "disabled_page": not self.has_page, } - + if include_favicon: try: feed_icon = MFeedIcon.objects.get(feed_id=self.pk) - feed['favicon'] = feed_icon.data + feed["favicon"] = feed_icon.data except MFeedIcon.DoesNotExist: pass if self.has_page_exception or self.has_feed_exception: - feed['has_exception'] = True - feed['exception_type'] = 'feed' if self.has_feed_exception else 'page' - feed['exception_code'] = self.exception_code + feed["has_exception"] = True + feed["exception_type"] = "feed" if self.has_feed_exception else "page" + feed["exception_code"] = self.exception_code elif full: - feed['has_exception'] = False - feed['exception_type'] = None - feed['exception_code'] = self.exception_code - + feed["has_exception"] = False + feed["exception_type"] = None + feed["exception_code"] = self.exception_code + if full: - feed['average_stories_per_month'] = self.average_stories_per_month - feed['tagline'] = self.data.feed_tagline - feed['feed_tags'] = json.decode(self.data.popular_tags) if self.data.popular_tags else [] - feed['feed_authors'] = json.decode(self.data.popular_authors) if self.data.popular_authors else [] - + feed["average_stories_per_month"] = self.average_stories_per_month + feed["tagline"] = self.data.feed_tagline + feed["feed_tags"] = json.decode(self.data.popular_tags) if self.data.popular_tags else [] + feed["feed_authors"] = json.decode(self.data.popular_authors) if self.data.popular_authors else [] + return feed - + def save(self, *args, **kwargs): if not self.last_update: self.last_update = datetime.datetime.utcnow() if not self.next_scheduled_update: self.next_scheduled_update = datetime.datetime.utcnow() self.fix_google_alerts_urls() - + feed_address = self.feed_address or "" feed_link = self.feed_link or "" self.hash_address_and_link = self.generate_hash_address_and_link(feed_address, feed_link) - - max_feed_title = Feed._meta.get_field('feed_title').max_length + + max_feed_title = Feed._meta.get_field("feed_title").max_length if len(self.feed_title) > max_feed_title: self.feed_title = self.feed_title[:max_feed_title] - max_feed_address = Feed._meta.get_field('feed_address').max_length + max_feed_address = Feed._meta.get_field("feed_address").max_length if len(feed_address) > max_feed_address: self.feed_address = feed_address[:max_feed_address] - max_feed_link = Feed._meta.get_field('feed_link').max_length + max_feed_link = Feed._meta.get_field("feed_link").max_length if len(feed_link) > max_feed_link: self.feed_link = feed_link[:max_feed_link] - + try: super(Feed, self).save(*args, **kwargs) except IntegrityError as e: @@ -284,108 +288,123 @@ class Feed(models.Model): hash_address_and_link = self.generate_hash_address_and_link(feed_address, feed_link) logging.debug(" ---> ~FRNo dupes, checking hash collision: %s" % hash_address_and_link) duplicate_feeds = Feed.objects.filter(hash_address_and_link=hash_address_and_link) - + if not duplicate_feeds: - duplicate_feeds = Feed.objects.filter(feed_address=self.feed_address, - feed_link=self.feed_link) + duplicate_feeds = Feed.objects.filter( + feed_address=self.feed_address, feed_link=self.feed_link + ) if not duplicate_feeds: # Feed has been deleted. Just ignore it. 
- logging.debug(" ***> Changed to: %s - %s: %s" % (self.feed_address, self.feed_link, duplicate_feeds)) - logging.debug(' ***> [%-30s] Feed deleted (%s).' % (self.log_title[:30], self.pk)) + logging.debug( + " ***> Changed to: %s - %s: %s" % (self.feed_address, self.feed_link, duplicate_feeds) + ) + logging.debug(" ***> [%-30s] Feed deleted (%s)." % (self.log_title[:30], self.pk)) return - + for duplicate_feed in duplicate_feeds: if duplicate_feed.pk != self.pk: - logging.debug(" ---> ~FRFound different feed (%s), merging %s in..." % (duplicate_feeds[0], self.pk)) + logging.debug( + " ---> ~FRFound different feed (%s), merging %s in..." % (duplicate_feeds[0], self.pk) + ) feed = Feed.get_by_id(merge_feeds(duplicate_feeds[0].pk, self.pk)) return feed else: logging.debug(" ---> ~FRFeed is its own dupe? %s == %s" % (self, duplicate_feeds)) except DatabaseError as e: - logging.debug(" ---> ~FBFeed update failed, no change: %s / %s..." % (kwargs.get('update_fields', None), e)) + logging.debug( + " ---> ~FBFeed update failed, no change: %s / %s..." % (kwargs.get("update_fields", None), e) + ) pass - + return self - + @classmethod def index_all_for_search(cls, offset=0, subscribers=2): if not offset: SearchFeed.create_elasticsearch_mapping(delete=True) - - last_pk = cls.objects.latest('pk').pk + + last_pk = cls.objects.latest("pk").pk for f in range(offset, last_pk, 1000): - print(" ---> {f} / {last_pk} ({pct}%)".format(f=f, last_pk=last_pk, pct=str(float(f)/last_pk*100)[:2])) - feeds = Feed.objects.filter(pk__in=range(f, f+1000), - active=True, - active_subscribers__gte=subscribers)\ - .values_list('pk') - for feed_id, in feeds: + print( + " ---> {f} / {last_pk} ({pct}%)".format( + f=f, last_pk=last_pk, pct=str(float(f) / last_pk * 100)[:2] + ) + ) + feeds = Feed.objects.filter( + pk__in=range(f, f + 1000), active=True, active_subscribers__gte=subscribers + ).values_list("pk") + for (feed_id,) in feeds: Feed.objects.get(pk=feed_id).index_feed_for_search() - + def index_feed_for_search(self): min_subscribers = 1 if settings.DEBUG: min_subscribers = 0 if self.num_subscribers > min_subscribers and not self.branch_from_feed and not self.is_newsletter: - SearchFeed.index(feed_id=self.pk, - title=self.feed_title, - address=self.feed_address, - link=self.feed_link, - num_subscribers=self.num_subscribers) - + SearchFeed.index( + feed_id=self.pk, + title=self.feed_title, + address=self.feed_address, + link=self.feed_link, + num_subscribers=self.num_subscribers, + ) + def index_stories_for_search(self): - if self.search_indexed: return - + if self.search_indexed: + return + stories = MStory.objects(story_feed_id=self.pk) for story in stories: story.index_story_for_search() self.search_indexed = True self.save() - + def sync_redis(self): return MStory.sync_feed_redis(self.pk) - + def expire_redis(self, r=None): if not r: r = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL) - r.expire('F:%s' % self.pk, self.days_of_story_hashes*24*60*60) - r.expire('zF:%s' % self.pk, self.days_of_story_hashes*24*60*60) - + r.expire("F:%s" % self.pk, self.days_of_story_hashes * 24 * 60 * 60) + r.expire("zF:%s" % self.pk, self.days_of_story_hashes * 24 * 60 * 60) + @classmethod def low_volume_feeds(cls, feed_ids, stories_per_month=30): try: stories_per_month = int(stories_per_month) except ValueError: stories_per_month = 30 - feeds = Feed.objects.filter(pk__in=feed_ids, average_stories_per_month__lte=stories_per_month).only('pk') - + feeds = Feed.objects.filter(pk__in=feed_ids, 
average_stories_per_month__lte=stories_per_month).only( + "pk" + ) + return [f.pk for f in feeds] - + @classmethod def autocomplete(self, prefix, limit=5): results = SearchFeed.query(prefix) - feed_ids = [result['_source']['feed_id'] for result in results[:5]] + feed_ids = [result["_source"]["feed_id"] for result in results[:5]] # results = SearchQuerySet().autocomplete(address=prefix).order_by('-num_subscribers')[:limit] - # + # # if len(results) < limit: # results += SearchQuerySet().autocomplete(title=prefix).order_by('-num_subscribers')[:limit-len(results)] - # + # return feed_ids - + @classmethod def find_or_create(cls, feed_address, feed_link, defaults=None, **kwargs): feeds = cls.objects.filter(feed_address=feed_address, feed_link=feed_link) if feeds: return feeds[0], False - if feed_link and feed_link.endswith('/'): + if feed_link and feed_link.endswith("/"): feeds = cls.objects.filter(feed_address=feed_address, feed_link=feed_link[:-1]) if feeds: return feeds[0], False - + try: feed = cls.objects.get(feed_address=feed_address, feed_link=feed_link) return feed, False @@ -393,34 +412,33 @@ class Feed(models.Model): feed = cls(**defaults) feed = feed.save() return feed, True - + @classmethod def merge_feeds(cls, *args, **kwargs): return merge_feeds(*args, **kwargs) - + def fix_google_alerts_urls(self): - if (self.feed_address.startswith('http://user/') and - '/state/com.google/alerts/' in self.feed_address): + if self.feed_address.startswith("http://user/") and "/state/com.google/alerts/" in self.feed_address: match = re.match(r"http://user/(\d+)/state/com.google/alerts/(\d+)", self.feed_address) if match: user_id, alert_id = match.groups() self.feed_address = "http://www.google.com/alerts/feeds/%s/%s" % (user_id, alert_id) - + @classmethod def schedule_feed_fetches_immediately(cls, feed_ids, user_id=None): if settings.DEBUG: - logging.info(" ---> ~SN~FMSkipping the scheduling immediate fetch of ~SB%s~SN feeds (in DEBUG)..." % - len(feed_ids)) + logging.info( + " ---> ~SN~FMSkipping the scheduling immediate fetch of ~SB%s~SN feeds (in DEBUG)..." + % len(feed_ids) + ) return - + if user_id: user = User.objects.get(pk=user_id) - logging.user(user, "~SN~FMScheduling immediate fetch of ~SB%s~SN feeds..." % - len(feed_ids)) + logging.user(user, "~SN~FMScheduling immediate fetch of ~SB%s~SN feeds..." % len(feed_ids)) else: - logging.debug(" ---> ~SN~FMScheduling immediate fetch of ~SB%s~SN feeds..." % - len(feed_ids)) - + logging.debug(" ---> ~SN~FMScheduling immediate fetch of ~SB%s~SN feeds..." 
% len(feed_ids)) + if len(feed_ids) > 100: logging.debug(" ---> ~SN~FMFeeds scheduled: %s" % feed_ids) day_ago = datetime.datetime.now() - datetime.timedelta(days=1) @@ -430,72 +448,78 @@ class Feed(models.Model): feed.count_subscribers() if not feed.active or feed.next_scheduled_update < day_ago: feed.schedule_feed_fetch_immediately(verbose=False) - + @property def favicon_fetching(self): return bool(not (self.favicon_not_found or self.favicon_color)) - + @classmethod def get_feed_by_url(self, *args, **kwargs): return self.get_feed_from_url(*args, **kwargs) - + @classmethod - def get_feed_from_url(cls, url, create=True, aggressive=False, fetch=True, offset=0, user=None, interactive=False): + def get_feed_from_url( + cls, url, create=True, aggressive=False, fetch=True, offset=0, user=None, interactive=False + ): feed = None without_rss = False original_url = url - - if url and url.startswith('newsletter:'): + + if url and url.startswith("newsletter:"): try: return cls.objects.get(feed_address=url) except cls.MultipleObjectsReturned: return cls.objects.filter(feed_address=url)[0] - if url and re.match('(https?://)?twitter.com/\w+/?', url): + if url and re.match("(https?://)?twitter.com/\w+/?", url): without_rss = True - if url and re.match(r'(https?://)?(www\.)?facebook.com/\w+/?$', url): + if url and re.match(r"(https?://)?(www\.)?facebook.com/\w+/?$", url): without_rss = True # Turn url @username@domain.com into domain.com/users/username.rss - if url and url.startswith('@') and '@' in url[1:]: - username, domain = url[1:].split('@') + if url and url.startswith("@") and "@" in url[1:]: + username, domain = url[1:].split("@") url = f"https://{domain}/users/{username}.rss" - if url and 'youtube.com/user/' in url: - username = re.search('youtube.com/user/(\w+)', url).group(1) + if url and "youtube.com/user/" in url: + username = re.search("youtube.com/user/(\w+)", url).group(1) url = "http://gdata.youtube.com/feeds/base/users/%s/uploads" % username without_rss = True - if url and 'youtube.com/@' in url: - username = url.split('youtube.com/@')[1] + if url and "youtube.com/@" in url: + username = url.split("youtube.com/@")[1] url = "http://gdata.youtube.com/feeds/base/users/%s/uploads" % username without_rss = True - if url and 'youtube.com/channel/' in url: - channel_id = re.search('youtube.com/channel/([-_\w]+)', url).group(1) + if url and "youtube.com/channel/" in url: + channel_id = re.search("youtube.com/channel/([-_\w]+)", url).group(1) url = "https://www.youtube.com/feeds/videos.xml?channel_id=%s" % channel_id without_rss = True - if url and 'youtube.com/feeds' in url: + if url and "youtube.com/feeds" in url: without_rss = True - if url and 'youtube.com/playlist' in url: + if url and "youtube.com/playlist" in url: without_rss = True - + def criteria(key, value): if aggressive: - return {'%s__icontains' % key: value} + return {"%s__icontains" % key: value} else: - return {'%s' % key: value} - + return {"%s" % key: value} + def by_url(address): - feed = cls.objects.filter( - branch_from_feed=None - ).filter(**criteria('feed_address', address)).order_by('-num_subscribers') + feed = ( + cls.objects.filter(branch_from_feed=None) + .filter(**criteria("feed_address", address)) + .order_by("-num_subscribers") + ) if not feed: - duplicate_feed = DuplicateFeed.objects.filter(**criteria('duplicate_address', address)) + duplicate_feed = DuplicateFeed.objects.filter(**criteria("duplicate_address", address)) if duplicate_feed and len(duplicate_feed) > offset: feed = [duplicate_feed[offset].feed] if 
not feed and aggressive: - feed = cls.objects.filter( - branch_from_feed=None - ).filter(**criteria('feed_link', address)).order_by('-num_subscribers') - + feed = ( + cls.objects.filter(branch_from_feed=None) + .filter(**criteria("feed_link", address)) + .order_by("-num_subscribers") + ) + return feed - + @timelimit(10) def _feedfinder_forman(url): found_feed_urls = feedfinder_forman.find_feeds(url) @@ -505,19 +529,21 @@ class Feed(models.Model): def _feedfinder_pilgrim(url): found_feed_urls = feedfinder_pilgrim.feeds(url) return found_feed_urls - + # Normalize and check for feed_address, dupes, and feed_link url = urlnorm.normalize(url) if not url: logging.debug(" ---> ~FRCouldn't normalize url: ~SB%s" % url) return - + feed = by_url(url) found_feed_urls = [] - + if interactive: - import pdb; pdb.set_trace() - + import pdb + + pdb.set_trace() + # Create if it looks good if feed and len(feed) > offset: feed = feed[offset] @@ -525,15 +551,15 @@ class Feed(models.Model): try: found_feed_urls = _feedfinder_forman(url) except TimeoutError: - logging.debug(' ---> Feed finder timed out...') + logging.debug(" ---> Feed finder timed out...") found_feed_urls = [] if not found_feed_urls: try: found_feed_urls = _feedfinder_pilgrim(url) except TimeoutError: - logging.debug(' ---> Feed finder old timed out...') + logging.debug(" ---> Feed finder old timed out...") found_feed_urls = [] - + if len(found_feed_urls): feed_finder_url = found_feed_urls[0] logging.debug(" ---> Found feed URLs for %s: %s" % (url, found_feed_urls)) @@ -550,17 +576,17 @@ class Feed(models.Model): logging.debug(" ---> Found without_rss feed: %s / %s" % (url, original_url)) feed = cls.objects.create(feed_address=url, feed_link=original_url) feed = feed.update(requesting_user_id=user.pk if user else None) - + # Check for JSON feed if not feed and fetch and create: try: r = requests.get(url) except (requests.ConnectionError, requests.models.InvalidURL): r = None - if r and 'application/json' in r.headers.get('Content-Type'): + if r and "application/json" in r.headers.get("Content-Type"): feed = cls.objects.create(feed_address=url) feed = feed.update() - + # Still nothing? Maybe the URL has some clues. if not feed and fetch and len(found_feed_urls): feed_finder_url = found_feed_urls[0] @@ -570,17 +596,18 @@ class Feed(models.Model): feed = feed.update() elif feed and len(feed) > offset: feed = feed[offset] - + # Not created and not within bounds, so toss results. if isinstance(feed, QuerySet): logging.debug(" ---> ~FRNot created and not within bounds, tossing: ~SB%s" % feed) return - + return feed - + @classmethod def task_feeds(cls, feeds, queue_size=12, verbose=True): - if not feeds: return + if not feeds: + return r = redis.Redis(connection_pool=settings.REDIS_FEED_UPDATE_POOL) if isinstance(feeds, Feed): @@ -589,50 +616,50 @@ class Feed(models.Model): feeds = [feeds.pk] elif verbose: logging.debug(" ---> ~SN~FBTasking ~SB~FC%s~FB~SN feeds..." 
% len(feeds)) - + if isinstance(feeds, QuerySet): feeds = [f.pk for f in feeds] - - r.srem('queued_feeds', *feeds) + + r.srem("queued_feeds", *feeds) now = datetime.datetime.now().strftime("%s") p = r.pipeline() for feed_id in feeds: - p.zadd('tasked_feeds', { feed_id: now }) + p.zadd("tasked_feeds", {feed_id: now}) p.execute() - + # for feed_ids in (feeds[pos:pos + queue_size] for pos in xrange(0, len(feeds), queue_size)): for feed_id in feeds: - UpdateFeeds.apply_async(args=(feed_id,), queue='update_feeds') - + UpdateFeeds.apply_async(args=(feed_id,), queue="update_feeds") + @classmethod def drain_task_feeds(cls): r = redis.Redis(connection_pool=settings.REDIS_FEED_UPDATE_POOL) - tasked_feeds = r.zrange('tasked_feeds', 0, -1) + tasked_feeds = r.zrange("tasked_feeds", 0, -1) if tasked_feeds: logging.debug(" ---> ~FRDraining %s tasked feeds..." % len(tasked_feeds)) - r.sadd('queued_feeds', *tasked_feeds) - r.zremrangebyrank('tasked_feeds', 0, -1) + r.sadd("queued_feeds", *tasked_feeds) + r.zremrangebyrank("tasked_feeds", 0, -1) else: logging.debug(" ---> No tasked feeds to drain") - - errored_feeds = r.zrange('error_feeds', 0, -1) + + errored_feeds = r.zrange("error_feeds", 0, -1) if errored_feeds: logging.debug(" ---> ~FRDraining %s errored feeds..." % len(errored_feeds)) - r.sadd('queued_feeds', *errored_feeds) - r.zremrangebyrank('error_feeds', 0, -1) + r.sadd("queued_feeds", *errored_feeds) + r.zremrangebyrank("error_feeds", 0, -1) else: logging.debug(" ---> No errored feeds to drain") def update_all_statistics(self, has_new_stories=False, force=False): - recount = not self.counts_converted_to_redis + recount = not self.counts_converted_to_redis count_extra = False if random.random() < 0.01 or not self.data.popular_tags or not self.data.popular_authors: count_extra = True - + self.count_subscribers(recount=recount) self.calculate_last_story_date() - + if force or has_new_stories or count_extra: self.save_feed_stories_last_month() @@ -642,15 +669,19 @@ class Feed(models.Model): if force or (has_new_stories and count_extra): self.save_popular_authors() self.save_popular_tags() - self.save_feed_story_history_statistics() - + self.save_feed_story_history_statistics() + def calculate_last_story_date(self): last_story_date = None try: - latest_story = MStory.objects( - story_feed_id=self.pk - ).limit(1).order_by('-story_date').only('story_date').first() + latest_story = ( + MStory.objects(story_feed_id=self.pk) + .limit(1) + .order_by("-story_date") + .only("story_date") + .first() + ) if latest_story: last_story_date = latest_story.story_date except MStory.DoesNotExist: @@ -658,15 +689,15 @@ class Feed(models.Model): if not last_story_date or seconds_timesince(last_story_date) < 0: last_story_date = datetime.datetime.now() - + if last_story_date != self.last_story_date: self.last_story_date = last_story_date - self.save(update_fields=['last_story_date']) - + self.save(update_fields=["last_story_date"]) + @classmethod def setup_feeds_for_premium_subscribers(cls, feed_ids): logging.info(f" ---> ~SN~FMScheduling immediate premium setup of ~SB{len(feed_ids)}~SN feeds...") - + feeds = Feed.objects.filter(pk__in=feed_ids) for feed in feeds: feed.setup_feed_for_premium_subscribers() @@ -675,7 +706,7 @@ class Feed(models.Model): self.count_subscribers() self.set_next_scheduled_update(verbose=settings.DEBUG) self.sync_redis() - + def check_feed_link_for_feed_address(self): @timelimit(10) def _1(): @@ -693,13 +724,16 @@ class Feed(models.Model): found_feed_urls = 
feedfinder_forman.find_feeds(self.feed_link) if len(found_feed_urls) and found_feed_urls[0] != self.feed_address: feed_address = found_feed_urls[0] - + if feed_address: - if any(ignored_domain in feed_address for ignored_domain in [ - 'feedburner.com/atom.xml', - 'feedburner.com/feed/', - 'feedsportal.com', - ]): + if any( + ignored_domain in feed_address + for ignored_domain in [ + "feedburner.com/atom.xml", + "feedburner.com/feed/", + "feedsportal.com", + ] + ): logging.debug(" ---> Feed points to 'Wierdo' or 'feedsportal', ignoring.") return False, self try: @@ -717,135 +751,140 @@ class Feed(models.Model): original_feed.save() merge_feeds(original_feed.pk, self.pk) return feed_address, feed - + if self.feed_address_locked: return False, self - + try: feed_address, feed = _1() except TimeoutError as e: - logging.debug(' ---> [%-30s] Feed address check timed out...' % (self.log_title[:30])) - self.save_feed_history(505, 'Timeout', e) + logging.debug(" ---> [%-30s] Feed address check timed out..." % (self.log_title[:30])) + self.save_feed_history(505, "Timeout", e) feed = self feed_address = None - + return bool(feed_address), feed def save_feed_history(self, status_code, message, exception=None, date=None): - fetch_history = MFetchHistory.add(feed_id=self.pk, - fetch_type='feed', - code=int(status_code), - date=date, - message=message, - exception=exception) - + fetch_history = MFetchHistory.add( + feed_id=self.pk, + fetch_type="feed", + code=int(status_code), + date=date, + message=message, + exception=exception, + ) + if status_code not in (200, 304): self.errors_since_good += 1 - self.count_errors_in_history('feed', status_code, fetch_history=fetch_history) + self.count_errors_in_history("feed", status_code, fetch_history=fetch_history) self.set_next_scheduled_update(verbose=settings.DEBUG) elif self.has_feed_exception or self.errors_since_good: self.errors_since_good = 0 self.has_feed_exception = False self.active = True self.save() - + def save_page_history(self, status_code, message, exception=None, date=None): - fetch_history = MFetchHistory.add(feed_id=self.pk, - fetch_type='page', - code=int(status_code), - date=date, - message=message, - exception=exception) - + fetch_history = MFetchHistory.add( + feed_id=self.pk, + fetch_type="page", + code=int(status_code), + date=date, + message=message, + exception=exception, + ) + if status_code not in (200, 304): - self.count_errors_in_history('page', status_code, fetch_history=fetch_history) + self.count_errors_in_history("page", status_code, fetch_history=fetch_history) elif self.has_page_exception or not self.has_page: self.has_page_exception = False self.has_page = True self.active = True self.save() - + def save_raw_feed(self, raw_feed, fetch_date): - MFetchHistory.add(feed_id=self.pk, - fetch_type='raw_feed', - code=200, - message=raw_feed, - date=fetch_date) - - def count_errors_in_history(self, exception_type='feed', status_code=None, fetch_history=None): + MFetchHistory.add(feed_id=self.pk, fetch_type="raw_feed", code=200, message=raw_feed, date=fetch_date) + + def count_errors_in_history(self, exception_type="feed", status_code=None, fetch_history=None): if not fetch_history: fetch_history = MFetchHistory.feed(self.pk) - fh = fetch_history[exception_type + '_fetch_history'] - non_errors = [h for h in fh if h['status_code'] and int(h['status_code']) in (200, 304)] - errors = [h for h in fh if h['status_code'] and int(h['status_code']) not in (200, 304)] - + fh = fetch_history[exception_type + "_fetch_history"] + non_errors = [h 
for h in fh if h["status_code"] and int(h["status_code"]) in (200, 304)] + errors = [h for h in fh if h["status_code"] and int(h["status_code"]) not in (200, 304)] + if len(non_errors) == 0 and len(errors) > 1: self.active = True - if exception_type == 'feed': + if exception_type == "feed": self.has_feed_exception = True # self.active = False # No longer, just geometrically fetch - elif exception_type == 'page': + elif exception_type == "page": self.has_page_exception = True self.exception_code = status_code or int(errors[0]) self.save() elif self.exception_code > 0: self.active = True self.exception_code = 0 - if exception_type == 'feed': + if exception_type == "feed": self.has_feed_exception = False - elif exception_type == 'page': + elif exception_type == "page": self.has_page_exception = False self.save() - - logging.debug(' ---> [%-30s] ~FBCounting any errors in history: %s (%s non errors)' % - (self.log_title[:30], len(errors), len(non_errors))) - + + logging.debug( + " ---> [%-30s] ~FBCounting any errors in history: %s (%s non errors)" + % (self.log_title[:30], len(errors), len(non_errors)) + ) + return errors, non_errors - def count_redirects_in_history(self, fetch_type='feed', fetch_history=None): - logging.debug(' ---> [%-30s] Counting redirects in history...' % (self.log_title[:30])) + def count_redirects_in_history(self, fetch_type="feed", fetch_history=None): + logging.debug(" ---> [%-30s] Counting redirects in history..." % (self.log_title[:30])) if not fetch_history: fetch_history = MFetchHistory.feed(self.pk) - fh = fetch_history[fetch_type+'_fetch_history'] - redirects = [h for h in fh if h['status_code'] and int(h['status_code']) in (301, 302)] - non_redirects = [h for h in fh if h['status_code'] and int(h['status_code']) not in (301, 302)] - + fh = fetch_history[fetch_type + "_fetch_history"] + redirects = [h for h in fh if h["status_code"] and int(h["status_code"]) in (301, 302)] + non_redirects = [h for h in fh if h["status_code"] and int(h["status_code"]) not in (301, 302)] + return redirects, non_redirects - + @property def original_feed_id(self): if self.branch_from_feed: return self.branch_from_feed.pk else: return self.pk - + @property def counts_converted_to_redis(self): SUBSCRIBER_EXPIRE_DATE = datetime.datetime.now() - datetime.timedelta(days=settings.SUBSCRIBER_EXPIRE) - subscriber_expire = int(SUBSCRIBER_EXPIRE_DATE.strftime('%s')) + subscriber_expire = int(SUBSCRIBER_EXPIRE_DATE.strftime("%s")) r = redis.Redis(connection_pool=settings.REDIS_FEED_SUB_POOL) total_key = "s:%s" % self.original_feed_id premium_key = "sp:%s" % self.original_feed_id - last_recount = r.zscore(total_key, -1) # Need to subtract this extra when counting subs + last_recount = r.zscore(total_key, -1) # Need to subtract this extra when counting subs # Check for expired feeds with no active users who would have triggered a cleanup if last_recount and last_recount > subscriber_expire: return True elif last_recount: - logging.info(" ---> [%-30s] ~SN~FBFeed has expired redis subscriber counts (%s < %s), clearing..." % ( - self.log_title[:30], last_recount, subscriber_expire)) + logging.info( + " ---> [%-30s] ~SN~FBFeed has expired redis subscriber counts (%s < %s), clearing..." 
+ % (self.log_title[:30], last_recount, subscriber_expire) + ) r.delete(total_key, -1) r.delete(premium_key, -1) - + return False - + def count_subscribers(self, recount=True, verbose=False): if recount or not self.counts_converted_to_redis: from apps.profile.models import Profile + Profile.count_feed_subscribers(feed_id=self.pk) SUBSCRIBER_EXPIRE_DATE = datetime.datetime.now() - datetime.timedelta(days=settings.SUBSCRIBER_EXPIRE) - subscriber_expire = int(SUBSCRIBER_EXPIRE_DATE.strftime('%s')) - now = int(datetime.datetime.now().strftime('%s')) + subscriber_expire = int(SUBSCRIBER_EXPIRE_DATE.strftime("%s")) + now = int(datetime.datetime.now().strftime("%s")) r = redis.Redis(connection_pool=settings.REDIS_FEED_SUB_POOL) total = 0 active = 0 @@ -853,9 +892,9 @@ class Feed(models.Model): archive = 0 pro = 0 active_premium = 0 - + # Include all branched feeds in counts - feed_ids = [f['id'] for f in Feed.objects.filter(branch_from_feed=self.original_feed_id).values('id')] + feed_ids = [f["id"] for f in Feed.objects.filter(branch_from_feed=self.original_feed_id).values("id")] feed_ids.append(self.original_feed_id) feed_ids = list(set(feed_ids)) @@ -863,21 +902,21 @@ class Feed(models.Model): # For each branched feed, count different subscribers for feed_id in feed_ids: pipeline = r.pipeline() - + # now+1 ensures `-1` flag will be corrected for later with - 1 total_key = "s:%s" % feed_id premium_key = "sp:%s" % feed_id archive_key = "sarchive:%s" % feed_id pro_key = "spro:%s" % feed_id pipeline.zcard(total_key) - pipeline.zcount(total_key, subscriber_expire, now+1) + pipeline.zcount(total_key, subscriber_expire, now + 1) pipeline.zcard(premium_key) - pipeline.zcount(premium_key, subscriber_expire, now+1) + pipeline.zcount(premium_key, subscriber_expire, now + 1) pipeline.zcard(archive_key) pipeline.zcard(pro_key) results = pipeline.execute() - + # -1 due to counts_converted_to_redis using key=-1 for last_recount date total += max(0, results[0] - 1) active += max(0, results[1] - 1) @@ -885,64 +924,69 @@ class Feed(models.Model): active_premium += max(0, results[3] - 1) archive += max(0, results[4] - 1) pro += max(0, results[5] - 1) - + original_num_subscribers = self.num_subscribers original_active_subs = self.active_subscribers original_premium_subscribers = self.premium_subscribers original_active_premium_subscribers = self.active_premium_subscribers original_archive_subscribers = self.archive_subscribers original_pro_subscribers = self.pro_subscribers - logging.info(" ---> [%-30s] ~SN~FBCounting subscribers from ~FCredis~FB: ~FMt:~SB~FM%s~SN a:~SB%s~SN p:~SB%s~SN ap:~SB%s~SN archive:~SB%s~SN pro:~SB%s ~SN~FC%s" % - (self.log_title[:30], total, active, premium, active_premium, archive, pro, "(%s branches)" % (len(feed_ids)-1) if len(feed_ids)>1 else "")) + logging.info( + " ---> [%-30s] ~SN~FBCounting subscribers from ~FCredis~FB: ~FMt:~SB~FM%s~SN a:~SB%s~SN p:~SB%s~SN ap:~SB%s~SN archive:~SB%s~SN pro:~SB%s ~SN~FC%s" + % ( + self.log_title[:30], + total, + active, + premium, + active_premium, + archive, + pro, + "(%s branches)" % (len(feed_ids) - 1) if len(feed_ids) > 1 else "", + ) + ) else: from apps.reader.models import UserSubscription - + subs = UserSubscription.objects.filter(feed__in=feed_ids) original_num_subscribers = self.num_subscribers total = subs.count() - + active_subs = UserSubscription.objects.filter( - feed__in=feed_ids, - active=True, - user__profile__last_seen_on__gte=SUBSCRIBER_EXPIRE_DATE + feed__in=feed_ids, active=True, 
user__profile__last_seen_on__gte=SUBSCRIBER_EXPIRE_DATE ) original_active_subs = self.active_subscribers active = active_subs.count() - + premium_subs = UserSubscription.objects.filter( - feed__in=feed_ids, - active=True, - user__profile__is_premium=True + feed__in=feed_ids, active=True, user__profile__is_premium=True ) original_premium_subscribers = self.premium_subscribers premium = premium_subs.count() - + archive_subs = UserSubscription.objects.filter( - feed__in=feed_ids, - active=True, - user__profile__is_archive=True + feed__in=feed_ids, active=True, user__profile__is_archive=True ) original_archive_subscribers = self.archive_subscribers archive = archive_subs.count() - + pro_subs = UserSubscription.objects.filter( - feed__in=feed_ids, - active=True, - user__profile__is_pro=True + feed__in=feed_ids, active=True, user__profile__is_pro=True ) original_pro_subscribers = self.pro_subscribers pro = pro_subs.count() - + active_premium_subscribers = UserSubscription.objects.filter( - feed__in=feed_ids, + feed__in=feed_ids, active=True, user__profile__is_premium=True, - user__profile__last_seen_on__gte=SUBSCRIBER_EXPIRE_DATE + user__profile__last_seen_on__gte=SUBSCRIBER_EXPIRE_DATE, ) original_active_premium_subscribers = self.active_premium_subscribers active_premium = active_premium_subscribers.count() - logging.debug(" ---> [%-30s] ~SN~FBCounting subscribers from ~FYpostgres~FB: ~FMt:~SB~FM%s~SN a:~SB%s~SN p:~SB%s~SN ap:~SB%s~SN archive:~SB%s~SN pro:~SB%s" % - (self.log_title[:30], total, active, premium, active_premium, archive, pro)) + logging.debug( + " ---> [%-30s] ~SN~FBCounting subscribers from ~FYpostgres~FB: ~FMt:~SB~FM%s~SN a:~SB%s~SN p:~SB%s~SN ap:~SB%s~SN archive:~SB%s~SN pro:~SB%s" + % (self.log_title[:30], total, active, premium, active_premium, archive, pro) + ) if settings.DOCKERBUILD: # Local installs enjoy 100% active feeds @@ -955,42 +999,55 @@ class Feed(models.Model): self.active_premium_subscribers = active_premium self.archive_subscribers = archive self.pro_subscribers = pro - if (self.num_subscribers != original_num_subscribers or - self.active_subscribers != original_active_subs or - self.premium_subscribers != original_premium_subscribers or - self.active_premium_subscribers != original_active_premium_subscribers or - self.archive_subscribers != original_archive_subscribers or - self.pro_subscribers != original_pro_subscribers): + if ( + self.num_subscribers != original_num_subscribers + or self.active_subscribers != original_active_subs + or self.premium_subscribers != original_premium_subscribers + or self.active_premium_subscribers != original_active_premium_subscribers + or self.archive_subscribers != original_archive_subscribers + or self.pro_subscribers != original_pro_subscribers + ): if original_premium_subscribers == -1 or original_active_premium_subscribers == -1: self.save() else: - self.save(update_fields=['num_subscribers', 'active_subscribers', - 'premium_subscribers', 'active_premium_subscribers', - 'archive_subscribers', 'pro_subscribers']) - + self.save( + update_fields=[ + "num_subscribers", + "active_subscribers", + "premium_subscribers", + "active_premium_subscribers", + "archive_subscribers", + "pro_subscribers", + ] + ) + if verbose: if self.num_subscribers <= 1: print(".", end=" ") else: - print("\n %s> %s subscriber%s: %s" % ( - '-' * min(self.num_subscribers, 20), - self.num_subscribers, - '' if self.num_subscribers == 1 else 's', - self.feed_title, - ), end=' ') - + print( + "\n %s> %s subscriber%s: %s" + % ( + "-" * 
min(self.num_subscribers, 20), + self.num_subscribers, + "" if self.num_subscribers == 1 else "s", + self.feed_title, + ), + end=" ", + ) + def _split_favicon_color(self, color=None): if not color: color = self.favicon_color if not color: return None, None, None - splitter = lambda s, p: [s[i:i+p] for i in range(0, len(s), p)] + splitter = lambda s, p: [s[i : i + p] for i in range(0, len(s), p)] red, green, blue = splitter(color[:6], 2) return red, green, blue - + def favicon_fade(self): return self.adjust_color(adjust=30) - + def adjust_color(self, color=None, adjust=0): red, green, blue = self._split_favicon_color(color=color) if red and green and blue: @@ -1002,11 +1059,11 @@ class Feed(models.Model): def favicon_border(self): red, green, blue = self._split_favicon_color() if red and green and blue: - fade_red = hex(min(int(int(red, 16) * .75), 255))[2:].zfill(2) - fade_green = hex(min(int(int(green, 16) * .75), 255))[2:].zfill(2) - fade_blue = hex(min(int(int(blue, 16) * .75), 255))[2:].zfill(2) + fade_red = hex(min(int(int(red, 16) * 0.75), 255))[2:].zfill(2) + fade_green = hex(min(int(int(green, 16) * 0.75), 255))[2:].zfill(2) + fade_blue = hex(min(int(int(blue, 16) * 0.75), 255))[2:].zfill(2) return "%s%s%s" % (fade_red, fade_green, fade_blue) - + def favicon_text_color(self): # Color format: {r: 1, g: .5, b: 0} def contrast(color1, color2): @@ -1018,10 +1075,10 @@ class Feed(models.Model): return (lum2 + 0.05) / (lum1 + 0.05) def luminosity(color): - r = color['red'] - g = color['green'] - b = color['blue'] - val = lambda c: c/12.92 if c <= 0.02928 else math.pow(((c + 0.055)/1.055), 2.4) + r = color["red"] + g = color["green"] + b = color["blue"] + val = lambda c: c / 12.92 if c <= 0.02928 else math.pow(((c + 0.055) / 1.055), 2.4) red = val(r) green = val(g) blue = val(b) @@ -1030,25 +1087,25 @@ class Feed(models.Model): red, green, blue = self._split_favicon_color() if red and green and blue: color = { - 'red': int(red, 16) / 256.0, - 'green': int(green, 16) / 256.0, - 'blue': int(blue, 16) / 256.0, + "red": int(red, 16) / 256.0, + "green": int(green, 16) / 256.0, + "blue": int(blue, 16) / 256.0, } white = { - 'red': 1, - 'green': 1, - 'blue': 1, + "red": 1, + "green": 1, + "blue": 1, } grey = { - 'red': 0.5, - 'green': 0.5, - 'blue': 0.5, + "red": 0.5, + "green": 0.5, + "blue": 0.5, } - + if contrast(color, white) > contrast(color, grey): - return 'white' + return "white" else: - return 'black' + return "black" def fill_out_archive_stories(self, force=False, starting_page=1): """ @@ -1058,33 +1115,34 @@ class Feed(models.Model): before_story_count = MStory.objects(story_feed_id=self.pk).count() if not force and not self.archive_subscribers: - logging.debug(" ---> [%-30s] ~FBNot filling out archive stories, no archive subscribers" % ( - self.log_title[:30])) + logging.debug( + " ---> [%-30s] ~FBNot filling out archive stories, no archive subscribers" + % (self.log_title[:30]) + ) return before_story_count, before_story_count self.update(archive_page=starting_page) after_story_count = MStory.objects(story_feed_id=self.pk).count() - logging.debug(" ---> [%-30s] ~FCFilled out archive, ~FM~SB%s~SN new stories~FC, total of ~SB%s~SN stories" % ( - self.log_title[:30], - after_story_count - before_story_count, - after_story_count)) - + logging.debug( + " ---> [%-30s] ~FCFilled out archive, ~FM~SB%s~SN new stories~FC, total of ~SB%s~SN stories" + % (self.log_title[:30], after_story_count - before_story_count, after_story_count) + ) + def save_feed_stories_last_month(self, verbose=False): 
month_ago = datetime.datetime.utcnow() - datetime.timedelta(days=30) - stories_last_month = MStory.objects(story_feed_id=self.pk, - story_date__gte=month_ago).count() + stories_last_month = MStory.objects(story_feed_id=self.pk, story_date__gte=month_ago).count() if self.stories_last_month != stories_last_month: self.stories_last_month = stories_last_month - self.save(update_fields=['stories_last_month']) - + self.save(update_fields=["stories_last_month"]) + if verbose: print(f" ---> {self.feed} [{self.pk}]: {self.stories_last_month} stories last month") - + def save_feed_story_history_statistics(self, current_counts=None): """ Fills in missing months between earlier occurances and now. - + Save format: [('YYYY-MM, #), ...] Example output: [(2010-12, 123), (2011-01, 146)] """ @@ -1096,7 +1154,7 @@ class Feed(models.Model): current_counts = self.data.story_count_history and json.decode(self.data.story_count_history) if isinstance(current_counts, dict): - current_counts = current_counts['months'] + current_counts = current_counts["months"] if not current_counts: current_counts = [] @@ -1118,15 +1176,15 @@ class Feed(models.Model): dates = defaultdict(int) hours = defaultdict(int) days = defaultdict(int) - results = MStory.objects(story_feed_id=self.pk).map_reduce(map_f, reduce_f, output='inline') + results = MStory.objects(story_feed_id=self.pk).map_reduce(map_f, reduce_f, output="inline") for result in results: - dates[result.value['month']] += 1 - hours[int(result.value['hour'])] += 1 - days[int(result.value['day'])] += 1 - year = int(re.findall(r"(\d{4})-\d{1,2}", result.value['month'])[0]) + dates[result.value["month"]] += 1 + hours[int(result.value["hour"])] += 1 + days[int(result.value["day"])] += 1 + year = int(re.findall(r"(\d{4})-\d{1,2}", result.value["month"])[0]) if year < min_year and year > 2000: min_year = year - + # Add on to existing months, always amending up, never down. (Current month # is guaranteed to be accurate, since trim_feeds won't delete it until after # a month. Hacker News can have 1,000+ and still be counted.) @@ -1136,38 +1194,37 @@ class Feed(models.Model): dates[current_month] = current_count if year < min_year and year > 2000: min_year = year - - # Assemble a list with 0's filled in for missing months, + + # Assemble a list with 0's filled in for missing months, # trimming left and right 0's. 
months = [] start = False - for year in range(min_year, now.year+1): - for month in range(1, 12+1): + for year in range(min_year, now.year + 1): + for month in range(1, 12 + 1): if datetime.datetime(year, month, 1) < now: - key = '%s-%s' % (year, month) + key = "%s-%s" % (year, month) if dates.get(key) or start: start = True months.append((key, dates.get(key, 0))) total += dates.get(key, 0) if dates.get(key, 0) > 0: - month_count += 1 # Only count months that have stories for the average + month_count += 1 # Only count months that have stories for the average original_story_count_history = self.data.story_count_history - self.data.story_count_history = json.encode({'months': months, 'hours': hours, 'days': days}) + self.data.story_count_history = json.encode({"months": months, "hours": hours, "days": days}) if self.data.story_count_history != original_story_count_history: - self.data.save(update_fields=['story_count_history']) - + self.data.save(update_fields=["story_count_history"]) + original_average_stories_per_month = self.average_stories_per_month if not total or not month_count: self.average_stories_per_month = 0 else: self.average_stories_per_month = int(round(total / float(month_count))) if self.average_stories_per_month != original_average_stories_per_month: - self.save(update_fields=['average_stories_per_month']) - - + self.save(update_fields=["average_stories_per_month"]) + def save_classifier_counts(self): from apps.analyzer.models import MClassifierTitle, MClassifierAuthor, MClassifierFeed, MClassifierTag - + def calculate_scores(cls, facet): map_f = """ function() { @@ -1176,7 +1233,9 @@ class Feed(models.Model): neg: this.score<0 ? Math.abs(this.score) : 0 }); } - """ % (facet) + """ % ( + facet + ) reduce_f = """ function(key, values) { var result = {pos: 0, neg: 0}; @@ -1188,68 +1247,72 @@ class Feed(models.Model): } """ scores = [] - res = cls.objects(feed_id=self.pk).map_reduce(map_f, reduce_f, output='inline') + res = cls.objects(feed_id=self.pk).map_reduce(map_f, reduce_f, output="inline") for r in res: - facet_values = dict([(k, int(v)) for k,v in r.value.items()]) + facet_values = dict([(k, int(v)) for k, v in r.value.items()]) facet_values[facet] = r.key - if facet_values['pos'] + facet_values['neg'] >= 1: + if facet_values["pos"] + facet_values["neg"] >= 1: scores.append(facet_values) - scores = sorted(scores, key=lambda v: v['neg'] - v['pos']) + scores = sorted(scores, key=lambda v: v["neg"] - v["pos"]) return scores - + scores = {} - for cls, facet in [(MClassifierTitle, 'title'), - (MClassifierAuthor, 'author'), - (MClassifierTag, 'tag'), - (MClassifierFeed, 'feed_id')]: + for cls, facet in [ + (MClassifierTitle, "title"), + (MClassifierAuthor, "author"), + (MClassifierTag, "tag"), + (MClassifierFeed, "feed_id"), + ]: scores[facet] = calculate_scores(cls, facet) - if facet == 'feed_id' and scores[facet]: - scores['feed'] = scores[facet] - del scores['feed_id'] + if facet == "feed_id" and scores[facet]: + scores["feed"] = scores[facet] + del scores["feed_id"] elif not scores[facet]: del scores[facet] - + if scores: self.data.feed_classifier_counts = json.encode(scores) self.data.save() - + return scores - + @property def user_agent(self): feed_parts = urllib.parse.urlparse(self.feed_address) - if feed_parts.netloc.find('.tumblr.com') != -1: + if feed_parts.netloc.find(".tumblr.com") != -1: # Certain tumblr feeds will redirect to tumblr's login page when fetching. # A known workaround is using facebook's user agent. 
- return 'facebookexternalhit/1.0 (+http://www.facebook.com/externalhit_uatext.php)' + return "facebookexternalhit/1.0 (+http://www.facebook.com/externalhit_uatext.php)" - ua = ('NewsBlur Feed Fetcher - %s subscriber%s - %s %s' % ( - self.num_subscribers, - 's' if self.num_subscribers != 1 else '', - self.permalink, - self.fake_user_agent, - )) + ua = "NewsBlur Feed Fetcher - %s subscriber%s - %s %s" % ( + self.num_subscribers, + "s" if self.num_subscribers != 1 else "", + self.permalink, + self.fake_user_agent, + ) return ua - + @property def fake_user_agent(self): - ua = ('("Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) ' - 'AppleWebKit/605.1.15 (KHTML, like Gecko) ' - 'Version/14.0.1 Safari/605.1.15")') - + ua = ( + '("Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) ' + "AppleWebKit/605.1.15 (KHTML, like Gecko) " + 'Version/14.0.1 Safari/605.1.15")' + ) + return ua - + def fetch_headers(self, fake=False): headers = { - 'User-Agent': self.user_agent if not fake else self.fake_user_agent, - 'Accept': 'application/atom+xml, application/rss+xml, application/xml;q=0.8, text/xml;q=0.6, */*;q=0.2', - 'Accept-Encoding': 'gzip, deflate', + "User-Agent": self.user_agent if not fake else self.fake_user_agent, + "Accept": "application/atom+xml, application/rss+xml, application/xml;q=0.8, text/xml;q=0.6, */*;q=0.2", + "Accept-Encoding": "gzip, deflate", } - + return headers - + def update(self, **kwargs): try: from utils import feed_fetcher @@ -1260,24 +1323,24 @@ class Feed(models.Model): original_feed_id = int(self.pk) options = { - 'verbose': kwargs.get('verbose'), - 'timeout': 10, - 'single_threaded': kwargs.get('single_threaded', True), - 'force': kwargs.get('force'), - 'force_fp': kwargs.get('force_fp'), - 'compute_scores': kwargs.get('compute_scores', True), - 'mongodb_replication_lag': kwargs.get('mongodb_replication_lag', None), - 'fake': kwargs.get('fake'), - 'quick': kwargs.get('quick'), - 'updates_off': kwargs.get('updates_off'), - 'debug': kwargs.get('debug'), - 'fpf': kwargs.get('fpf'), - 'feed_xml': kwargs.get('feed_xml'), - 'requesting_user_id': kwargs.get('requesting_user_id', None), - 'archive_page': kwargs.get('archive_page', None), + "verbose": kwargs.get("verbose"), + "timeout": 10, + "single_threaded": kwargs.get("single_threaded", True), + "force": kwargs.get("force"), + "force_fp": kwargs.get("force_fp"), + "compute_scores": kwargs.get("compute_scores", True), + "mongodb_replication_lag": kwargs.get("mongodb_replication_lag", None), + "fake": kwargs.get("fake"), + "quick": kwargs.get("quick"), + "updates_off": kwargs.get("updates_off"), + "debug": kwargs.get("debug"), + "fpf": kwargs.get("fpf"), + "feed_xml": kwargs.get("feed_xml"), + "requesting_user_id": kwargs.get("requesting_user_id", None), + "archive_page": kwargs.get("archive_page", None), } - - if getattr(settings, 'TEST_DEBUG', False) and "NEWSBLUR_DIR" in self.feed_address: + + if getattr(settings, "TEST_DEBUG", False) and "NEWSBLUR_DIR" in self.feed_address: print(" ---> Testing feed fetch: %s" % self.log_title) # options['force_fp'] = True # No, why would this be needed? 
original_feed_address = self.feed_address @@ -1286,39 +1349,42 @@ class Feed(models.Model): if self.feed_link: self.feed_link = self.feed_link.replace("%(NEWSBLUR_DIR)s", settings.NEWSBLUR_DIR) if self.feed_address != original_feed_address or self.feed_link != original_feed_link: - self.save(update_fields=['feed_address', 'feed_link']) - + self.save(update_fields=["feed_address", "feed_link"]) + if self.is_newsletter: feed = self.update_newsletter_icon() else: - disp = feed_fetcher.Dispatcher(options, 1) + disp = feed_fetcher.Dispatcher(options, 1) disp.add_jobs([[self.pk]]) feed = disp.run_jobs() - + if feed: feed = Feed.get_by_id(feed.pk) if feed: feed.last_update = datetime.datetime.utcnow() feed.set_next_scheduled_update(verbose=settings.DEBUG) - r.zadd('fetched_feeds_last_hour', { feed.pk: int(datetime.datetime.now().strftime('%s')) }) - + r.zadd("fetched_feeds_last_hour", {feed.pk: int(datetime.datetime.now().strftime("%s"))}) + if not feed or original_feed_id != feed.pk: - logging.info(" ---> ~FRFeed changed id, removing %s from tasked_feeds queue..." % original_feed_id) - r.zrem('tasked_feeds', original_feed_id) - r.zrem('error_feeds', original_feed_id) + logging.info( + " ---> ~FRFeed changed id, removing %s from tasked_feeds queue..." % original_feed_id + ) + r.zrem("tasked_feeds", original_feed_id) + r.zrem("error_feeds", original_feed_id) if feed: - r.zrem('tasked_feeds', feed.pk) - r.zrem('error_feeds', feed.pk) - + r.zrem("tasked_feeds", feed.pk) + r.zrem("error_feeds", feed.pk) + return feed - + def update_newsletter_icon(self): from apps.rss_feeds.icon_importer import IconImporter + icon_importer = IconImporter(self) icon_importer.save() - + return self - + @classmethod def get_by_id(cls, feed_id, feed_address=None): try: @@ -1333,41 +1399,43 @@ class Feed(models.Model): duplicate_feeds = DuplicateFeed.objects.filter(duplicate_address=feed_address) if duplicate_feeds: return duplicate_feeds[0].feed - + @classmethod def get_by_name(cls, query, limit=1): results = SearchFeed.query(query) feed_ids = [result.feed_id for result in results] - + if limit == 1: return Feed.get_by_id(feed_ids[0]) else: return [Feed.get_by_id(f) for f in feed_ids][:limit] - + def add_update_stories(self, stories, existing_stories, verbose=False, updates_off=False): ret_values = dict(new=0, updated=0, same=0, error=0) error_count = self.error_count - new_story_hashes = [s.get('story_hash') for s in stories] - + new_story_hashes = [s.get("story_hash") for s in stories] + if settings.DEBUG or verbose: - logging.debug(" ---> [%-30s] ~FBChecking ~SB%s~SN new/updated against ~SB%s~SN stories" % ( - self.log_title[:30], - len(stories), - len(list(existing_stories.keys())))) + logging.debug( + " ---> [%-30s] ~FBChecking ~SB%s~SN new/updated against ~SB%s~SN stories" + % (self.log_title[:30], len(stories), len(list(existing_stories.keys()))) + ) + @timelimit(5) def _1(story, story_content, existing_stories, new_story_hashes): - existing_story, story_has_changed = self._exists_story(story, story_content, - existing_stories, new_story_hashes) + existing_story, story_has_changed = self._exists_story( + story, story_content, existing_stories, new_story_hashes + ) return existing_story, story_has_changed - + for story in stories: if verbose: - logging.debug(" ---> [%-30s] ~FBChecking ~SB%s~SN / ~SB%s" % ( - self.log_title[:30], - story.get('title'), - story.get('guid'))) - - story_content = story.get('story_content') + logging.debug( + " ---> [%-30s] ~FBChecking ~SB%s~SN / ~SB%s" + % (self.log_title[:30], 
story.get("title"), story.get("guid")) + ) + + story_content = story.get("story_content") if error_count: story_content = strip_comments__lxml(story_content) else: @@ -1375,39 +1443,49 @@ class Feed(models.Model): story_tags = self.get_tags(story) story_link = self.get_permalink(story) replace_story_date = False - + try: - existing_story, story_has_changed = _1(story, story_content, - existing_stories, new_story_hashes) + existing_story, story_has_changed = _1( + story, story_content, existing_stories, new_story_hashes + ) except TimeoutError: - logging.debug(' ---> [%-30s] ~SB~FRExisting story check timed out...' % (self.log_title[:30])) + logging.debug( + " ---> [%-30s] ~SB~FRExisting story check timed out..." % (self.log_title[:30]) + ) existing_story = None story_has_changed = False - + if existing_story is None: if settings.DEBUG and False: - logging.debug(' ---> New story in feed (%s - %s): %s' % (self.feed_title, story.get('title'), len(story_content))) - - s = MStory(story_feed_id = self.pk, - story_date = story.get('published'), - story_title = story.get('title'), - story_content = story_content, - story_author_name = story.get('author'), - story_permalink = story_link, - story_guid = story.get('guid'), - story_tags = story_tags + logging.debug( + " ---> New story in feed (%s - %s): %s" + % (self.feed_title, story.get("title"), len(story_content)) + ) + + s = MStory( + story_feed_id=self.pk, + story_date=story.get("published"), + story_title=story.get("title"), + story_content=story_content, + story_author_name=story.get("author"), + story_permalink=story_link, + story_guid=story.get("guid"), + story_tags=story_tags, ) try: s.save() - ret_values['new'] += 1 + ret_values["new"] += 1 s.publish_to_subscribers() except (IntegrityError, OperationError) as e: - ret_values['error'] += 1 + ret_values["error"] += 1 if settings.DEBUG: - logging.info(' ---> [%-30s] ~SN~FRIntegrityError on new story: %s - %s' % (self.feed_title[:30], story.get('guid'), e)) + logging.info( + " ---> [%-30s] ~SN~FRIntegrityError on new story: %s - %s" + % (self.feed_title[:30], story.get("guid"), e) + ) if self.search_indexed: s.index_story_for_search() - elif existing_story and story_has_changed and not updates_off and ret_values['updated'] < 3: + elif existing_story and story_has_changed and not updates_off and ret_values["updated"] < 3: # update story original_content = None try: @@ -1415,19 +1493,22 @@ class Feed(models.Model): try: existing_story = MStory.objects.get(id=existing_story.id) except ValidationError: - existing_story, _ = MStory.find_story(existing_story.story_feed_id, - existing_story.id, - original_only=True) + existing_story, _ = MStory.find_story( + existing_story.story_feed_id, existing_story.id, original_only=True + ) elif existing_story and existing_story.story_hash: - existing_story, _ = MStory.find_story(existing_story.story_feed_id, - existing_story.story_hash, - original_only=True) + existing_story, _ = MStory.find_story( + existing_story.story_feed_id, existing_story.story_hash, original_only=True + ) else: raise MStory.DoesNotExist except (MStory.DoesNotExist, OperationError) as e: - ret_values['error'] += 1 + ret_values["error"] += 1 if verbose: - logging.info(' ---> [%-30s] ~SN~FROperation on existing story: %s - %s' % (self.feed_title[:30], story.get('guid'), e)) + logging.info( + " ---> [%-30s] ~SN~FROperation on existing story: %s - %s" + % (self.feed_title[:30], story.get("guid"), e) + ) continue if existing_story.story_original_content_z: original_content = 
zlib.decompress(existing_story.story_original_content_z) @@ -1445,60 +1526,71 @@ class Feed(models.Model): # logging.debug("\t\tDiff content: %s" % diff.getDiff()) # if existing_story.story_title != story.get('title'): # logging.debug('\tExisting title / New: : \n\t\t- %s\n\t\t- %s' % (existing_story.story_title, story.get('title'))) - if existing_story.story_hash != story.get('story_hash'): - self.update_story_with_new_guid(existing_story, story.get('guid')) + if existing_story.story_hash != story.get("story_hash"): + self.update_story_with_new_guid(existing_story, story.get("guid")) if verbose: - logging.debug('- Updated story in feed (%s - %s): %s / %s' % (self.feed_title, story.get('title'), len(story_content_diff), len(story_content))) - + logging.debug( + "- Updated story in feed (%s - %s): %s / %s" + % (self.feed_title, story.get("title"), len(story_content_diff), len(story_content)) + ) + existing_story.story_feed = self.pk - existing_story.story_title = story.get('title') + existing_story.story_title = story.get("title") existing_story.story_content = story_content_diff existing_story.story_latest_content = story_content existing_story.story_original_content = original_content - existing_story.story_author_name = story.get('author') + existing_story.story_author_name = story.get("author") existing_story.story_permalink = story_link - existing_story.story_guid = story.get('guid') + existing_story.story_guid = story.get("guid") existing_story.story_tags = story_tags - existing_story.original_text_z = None # Reset Text view cache + existing_story.original_text_z = None # Reset Text view cache # Do not allow publishers to change the story date once a story is published. # Leads to incorrect unread story counts. if replace_story_date: - existing_story.story_date = story.get('published') # Really shouldn't do this. + existing_story.story_date = story.get("published") # Really shouldn't do this. 
existing_story.extract_image_urls(force=True) try: existing_story.save() - ret_values['updated'] += 1 + ret_values["updated"] += 1 except (IntegrityError, OperationError): - ret_values['error'] += 1 + ret_values["error"] += 1 if verbose: - logging.info(' ---> [%-30s] ~SN~FRIntegrityError on updated story: %s' % (self.feed_title[:30], story.get('title')[:30])) + logging.info( + " ---> [%-30s] ~SN~FRIntegrityError on updated story: %s" + % (self.feed_title[:30], story.get("title")[:30]) + ) except ValidationError: - ret_values['error'] += 1 + ret_values["error"] += 1 if verbose: - logging.info(' ---> [%-30s] ~SN~FRValidationError on updated story: %s' % (self.feed_title[:30], story.get('title')[:30])) + logging.info( + " ---> [%-30s] ~SN~FRValidationError on updated story: %s" + % (self.feed_title[:30], story.get("title")[:30]) + ) if self.search_indexed: existing_story.index_story_for_search() else: - ret_values['same'] += 1 + ret_values["same"] += 1 if verbose: - logging.debug("Unchanged story (%s): %s / %s " % (story.get('story_hash'), story.get('guid'), story.get('title'))) - + logging.debug( + "Unchanged story (%s): %s / %s " + % (story.get("story_hash"), story.get("guid"), story.get("title")) + ) + return ret_values - + def update_story_with_new_guid(self, existing_story, new_story_guid): from apps.reader.models import RUserStory from apps.social.models import MSharedStory existing_story.remove_from_redis() existing_story.remove_from_search_index() - + old_hash = existing_story.story_hash new_hash = MStory.ensure_story_hash(new_story_guid, self.pk) RUserStory.switch_hash(feed=self, old_hash=old_hash, new_hash=new_hash) - - shared_stories = MSharedStory.objects.filter(story_feed_id=self.pk, - story_hash=old_hash) + + shared_stories = MSharedStory.objects.filter(story_feed_id=self.pk, story_hash=old_hash) for story in shared_stories: story.story_guid = new_story_guid story.story_hash = new_hash @@ -1507,18 +1599,19 @@ class Feed(models.Model): except NotUniqueError: # Story is already shared, skip. pass - + def save_popular_tags(self, feed_tags=None, verbose=False): if not feed_tags: - all_tags = MStory.objects(story_feed_id=self.pk, - story_tags__exists=True).item_frequencies('story_tags') - feed_tags = sorted([(k, v) for k, v in list(all_tags.items()) if int(v) > 0], - key=itemgetter(1), - reverse=True)[:25] + all_tags = MStory.objects(story_feed_id=self.pk, story_tags__exists=True).item_frequencies( + "story_tags" + ) + feed_tags = sorted( + [(k, v) for k, v in list(all_tags.items()) if int(v) > 0], key=itemgetter(1), reverse=True + )[:25] popular_tags = json.encode(feed_tags) if verbose: print("Found %s tags: %s" % (len(feed_tags), popular_tags)) - + # TODO: This len() bullshit will be gone when feeds move to mongo # On second thought, it might stay, because we don't want # popular tags the size of a small planet. 
I'm looking at you @@ -1526,7 +1619,7 @@ class Feed(models.Model): if len(popular_tags) < 1024: if self.data.popular_tags != popular_tags: self.data.popular_tags = popular_tags - self.data.save(update_fields=['popular_tags']) + self.data.save(update_fields=["popular_tags"]) return tags_list = [] @@ -1534,21 +1627,21 @@ class Feed(models.Model): tags_list = json.decode(feed_tags) if len(tags_list) >= 1: self.save_popular_tags(tags_list[:-1]) - + def save_popular_authors(self, feed_authors=None): if not feed_authors: authors = defaultdict(int) - for story in MStory.objects(story_feed_id=self.pk).only('story_author_name'): + for story in MStory.objects(story_feed_id=self.pk).only("story_author_name"): authors[story.story_author_name] += 1 - feed_authors = sorted([(k, v) for k, v in list(authors.items()) if k], - key=itemgetter(1), - reverse=True)[:20] + feed_authors = sorted( + [(k, v) for k, v in list(authors.items()) if k], key=itemgetter(1), reverse=True + )[:20] popular_authors = json.encode(feed_authors) if len(popular_authors) < 1023: if self.data.popular_authors != popular_authors: self.data.popular_authors = popular_authors - self.data.save(update_fields=['popular_authors']) + self.data.save(update_fields=["popular_authors"]) return if len(feed_authors) > 1: @@ -1558,19 +1651,24 @@ class Feed(models.Model): def trim_old_stories(cls, start=0, verbose=True, dryrun=False, total=0, end=None): now = datetime.datetime.now() month_ago = now - datetime.timedelta(days=settings.DAYS_OF_STORY_HASHES) - feed_count = end or Feed.objects.latest('pk').pk + feed_count = end or Feed.objects.latest("pk").pk for feed_id in range(start, feed_count): if feed_id % 1000 == 0: - print("\n\n -------------------------- %s (%s deleted so far) --------------------------\n\n" % (feed_id, total)) + print( + "\n\n -------------------------- %s (%s deleted so far) --------------------------\n\n" + % (feed_id, total) + ) try: feed = Feed.objects.get(pk=feed_id) except Feed.DoesNotExist: continue # Ensure only feeds with no active subscribers are being trimmed - if (feed.active_subscribers <= 0 and - (not feed.archive_subscribers or feed.archive_subscribers <= 0) and - (not feed.last_story_date or feed.last_story_date < month_ago)): + if ( + feed.active_subscribers <= 0 + and (not feed.archive_subscribers or feed.archive_subscribers <= 0) + and (not feed.last_story_date or feed.last_story_date < month_ago) + ): # 1 month since last story = keep 5 stories, >6 months since, only keep 1 story months_ago = 6 if feed.last_story_date: @@ -1585,18 +1683,17 @@ class Feed(models.Model): print(" DRYRUN: %s/%s cutoff - %s" % (cutoff, feed.story_cutoff, feed)) else: total += feed.trim_feed(verbose=verbose) - - + print(" ---> Deleted %s stories in total." 
% total) - + @property def story_cutoff(self): return self.number_of_stories_to_store() - + def number_of_stories_to_store(self, pre_archive=False): if self.archive_subscribers and self.archive_subscribers > 0 and not pre_archive: return 10000 - + cutoff = 500 if self.active_subscribers <= 0: cutoff = 25 @@ -1612,21 +1709,25 @@ class Feed(models.Model): cutoff = 400 elif self.active_premium_subscribers <= 20: cutoff = 450 - + if self.active_subscribers and self.average_stories_per_month < 5 and self.stories_last_month < 5: cutoff /= 2 - if self.active_premium_subscribers <= 1 and self.average_stories_per_month <= 1 and self.stories_last_month <= 1: + if ( + self.active_premium_subscribers <= 1 + and self.average_stories_per_month <= 1 + and self.stories_last_month <= 1 + ): cutoff /= 2 - + r = redis.Redis(connection_pool=settings.REDIS_FEED_READ_POOL) pipeline = r.pipeline() read_stories_per_week = [] now = datetime.datetime.now() # Check to see how many stories have been read each week since the feed's days of story hashes - for weeks_back in range(2*int(math.floor(settings.DAYS_OF_STORY_HASHES/7))): - weeks_ago = now - datetime.timedelta(days=7*weeks_back) - week_of_year = weeks_ago.strftime('%Y-%U') + for weeks_back in range(2 * int(math.floor(settings.DAYS_OF_STORY_HASHES / 7))): + weeks_ago = now - datetime.timedelta(days=7 * weeks_back) + week_of_year = weeks_ago.strftime("%Y-%U") feed_read_key = "fR:%s:%s" % (self.pk, week_of_year) pipeline.get(feed_read_key) read_stories_per_week = pipeline.execute() @@ -1635,16 +1736,26 @@ class Feed(models.Model): original_cutoff = cutoff cutoff = min(cutoff, 10) try: - logging.debug(" ---> [%-30s] ~FBTrimming down to ~SB%s (instead of %s)~SN stories (~FM%s~FB)" % (self.log_title[:30], cutoff, original_cutoff, self.last_story_date.strftime("%Y-%m-%d") if self.last_story_date else "No last story date")) + logging.debug( + " ---> [%-30s] ~FBTrimming down to ~SB%s (instead of %s)~SN stories (~FM%s~FB)" + % ( + self.log_title[:30], + cutoff, + original_cutoff, + self.last_story_date.strftime("%Y-%m-%d") + if self.last_story_date + else "No last story date", + ) + ) except ValueError as e: logging.debug(" ***> [%-30s] Error trimming: %s" % (self.log_title[:30], e)) pass - - if getattr(settings, 'OVERRIDE_STORY_COUNT_MAX', None): + + if getattr(settings, "OVERRIDE_STORY_COUNT_MAX", None): cutoff = settings.OVERRIDE_STORY_COUNT_MAX - + return int(cutoff) - + def trim_feed(self, verbose=False, cutoff=None): if not cutoff: cutoff = self.story_cutoff @@ -1664,21 +1775,25 @@ class Feed(models.Model): for story in stories: count += 1 story_with_content = story.to_mongo() - if story_with_content.get('story_content_z', None): - story_with_content['story_content'] = zlib.decompress(story_with_content['story_content_z']) - del story_with_content['story_content_z'] - if story_with_content.get('original_page_z', None): - story_with_content['original_page'] = zlib.decompress(story_with_content['original_page_z']) - del story_with_content['original_page_z'] - if story_with_content.get('original_text_z', None): - story_with_content['original_text'] = zlib.decompress(story_with_content['original_text_z']) - del story_with_content['original_text_z'] - if story_with_content.get('story_latest_content_z', None): - story_with_content['story_latest_content'] = zlib.decompress(story_with_content['story_latest_content_z']) - del story_with_content['story_latest_content_z'] - if story_with_content.get('story_original_content_z', None): - 
story_with_content['story_original_content'] = zlib.decompress(story_with_content['story_original_content_z']) - del story_with_content['story_original_content_z'] + if story_with_content.get("story_content_z", None): + story_with_content["story_content"] = zlib.decompress(story_with_content["story_content_z"]) + del story_with_content["story_content_z"] + if story_with_content.get("original_page_z", None): + story_with_content["original_page"] = zlib.decompress(story_with_content["original_page_z"]) + del story_with_content["original_page_z"] + if story_with_content.get("original_text_z", None): + story_with_content["original_text"] = zlib.decompress(story_with_content["original_text_z"]) + del story_with_content["original_text_z"] + if story_with_content.get("story_latest_content_z", None): + story_with_content["story_latest_content"] = zlib.decompress( + story_with_content["story_latest_content_z"] + ) + del story_with_content["story_latest_content_z"] + if story_with_content.get("story_original_content_z", None): + story_with_content["story_original_content"] = zlib.decompress( + story_with_content["story_original_content_z"] + ) + del story_with_content["story_original_content_z"] sum_bytes += len(bson.BSON.encode(story_with_content)) self.fs_size_bytes = sum_bytes @@ -1686,7 +1801,7 @@ class Feed(models.Model): self.save() return sum_bytes - + def purge_feed_stories(self, update=True): MStory.purge_feed_stories(feed=self, cutoff=self.story_cutoff) if update: @@ -1695,15 +1810,21 @@ class Feed(models.Model): def purge_author(self, author): all_stories = MStory.objects.filter(story_feed_id=self.pk) author_stories = MStory.objects.filter(story_feed_id=self.pk, story_author_name__iexact=author) - logging.debug(" ---> Deleting %s of %s stories in %s by '%s'." % (author_stories.count(), all_stories.count(), self, author)) + logging.debug( + " ---> Deleting %s of %s stories in %s by '%s'." + % (author_stories.count(), all_stories.count(), self, author) + ) author_stories.delete() def purge_tag(self, tag): all_stories = MStory.objects.filter(story_feed_id=self.pk) tagged_stories = MStory.objects.filter(story_feed_id=self.pk, story_tags__icontains=tag) - logging.debug(" ---> Deleting %s of %s stories in %s by '%s'." % (tagged_stories.count(), all_stories.count(), self, tag)) + logging.debug( + " ---> Deleting %s of %s stories in %s by '%s'." + % (tagged_stories.count(), all_stories.count(), self, tag) + ) tagged_stories.delete() - + # @staticmethod # def clean_invalid_ids(): # history = MFeedFetchHistory.objects(status_code=500, exception__contains='InvalidId:') @@ -1711,43 +1832,42 @@ class Feed(models.Model): # for h in history: # u = re.split('InvalidId: (.*?) 
is not a valid ObjectId\\n$', h.exception)[1] # urls.add((h.feed_id, u)) - # + # # for f, u in urls: # print "db.stories.remove({\"story_feed_id\": %s, \"_id\": \"%s\"})" % (f, u) - def get_stories(self, offset=0, limit=25, order="newest", force=False): if order == "newest": - stories_db = MStory.objects(story_feed_id=self.pk)[offset:offset+limit] + stories_db = MStory.objects(story_feed_id=self.pk)[offset : offset + limit] elif order == "oldest": - stories_db = MStory.objects(story_feed_id=self.pk).order_by('story_date')[offset:offset+limit] + stories_db = MStory.objects(story_feed_id=self.pk).order_by("story_date")[offset : offset + limit] stories = self.format_stories(stories_db, self.pk) - + return stories - + @classmethod def find_feed_stories(cls, feed_ids, query, order="newest", offset=0, limit=25): - story_ids = SearchStory.query(feed_ids=feed_ids, query=query, order=order, - offset=offset, limit=limit) - stories_db = MStory.objects( - story_hash__in=story_ids - ).order_by('-story_date' if order == "newest" else 'story_date') + story_ids = SearchStory.query(feed_ids=feed_ids, query=query, order=order, offset=offset, limit=limit) + stories_db = MStory.objects(story_hash__in=story_ids).order_by( + "-story_date" if order == "newest" else "story_date" + ) stories = cls.format_stories(stories_db) - + return stories - + @classmethod - def query_popularity(cls, query, limit, order='newest'): + def query_popularity(cls, query, limit, order="newest"): popularity = {} seen_feeds = set() feed_title_to_id = dict() - + # Collect stories, sort by feed story_ids = SearchStory.global_query(query, order=order, offset=0, limit=limit) for story_hash in story_ids: feed_id, story_id = MStory.split_story_hash(story_hash) feed = Feed.get_by_id(feed_id) - if not feed: continue + if not feed: + continue if feed.feed_title in seen_feeds: feed_id = feed_title_to_id[feed.feed_title] else: @@ -1758,250 +1878,349 @@ class Feed(models.Model): # classifiers = feed.save_classifier_counts() well_read_score = feed.well_read_score() popularity[feed_id] = { - 'feed_title': feed.feed_title, - 'feed_url': feed.feed_link, - 'num_subscribers': feed.num_subscribers, - 'feed_id': feed.pk, - 'story_ids': [], - 'authors': {}, - 'read_pct': well_read_score['read_pct'], - 'reader_count': well_read_score['reader_count'], - 'story_count': well_read_score['story_count'], - 'reach_score': well_read_score['reach_score'], - 'share_count': well_read_score['share_count'], - 'ps': 0, - 'ng': 0, - 'classifiers': json.decode(feed.data.feed_classifier_counts), + "feed_title": feed.feed_title, + "feed_url": feed.feed_link, + "num_subscribers": feed.num_subscribers, + "feed_id": feed.pk, + "story_ids": [], + "authors": {}, + "read_pct": well_read_score["read_pct"], + "reader_count": well_read_score["reader_count"], + "story_count": well_read_score["story_count"], + "reach_score": well_read_score["reach_score"], + "share_count": well_read_score["share_count"], + "ps": 0, + "ng": 0, + "classifiers": json.decode(feed.data.feed_classifier_counts), } - if popularity[feed_id]['classifiers']: - for classifier in 
popularity[feed_id]["classifiers"].get("feed", []): + if int(classifier["feed_id"]) == int(feed_id): + popularity[feed_id]["ps"] = classifier["pos"] + popularity[feed_id]["ng"] = -1 * classifier["neg"] + popularity[feed_id]["story_ids"].append(story_hash) + + sorted_popularity = sorted(list(popularity.values()), key=lambda x: x["reach_score"], reverse=True) + # Extract story authors from feeds for feed in sorted_popularity: - story_ids = feed['story_ids'] + story_ids = feed["story_ids"] stories_db = MStory.objects(story_hash__in=story_ids) stories = cls.format_stories(stories_db) for story in stories: - story['story_permalink'] = story['story_permalink'][:250] - if story['story_authors'] not in feed['authors']: - feed['authors'][story['story_authors']] = { - 'name': story['story_authors'], - 'count': 0, - 'ps': 0, - 'ng': 0, - 'tags': {}, - 'stories': [], + story["story_permalink"] = story["story_permalink"][:250] + if story["story_authors"] not in feed["authors"]: + feed["authors"][story["story_authors"]] = { + "name": story["story_authors"], + "count": 0, + "ps": 0, + "ng": 0, + "tags": {}, + "stories": [], } - author = feed['authors'][story['story_authors']] + author = feed["authors"][story["story_authors"]] seen = False - for seen_story in author['stories']: - if seen_story['url'] == story['story_permalink']: + for seen_story in author["stories"]: + if seen_story["url"] == story["story_permalink"]: seen = True break else: - author['stories'].append({ - 'title': story['story_title'], - 'url': story['story_permalink'], - 'date': story['story_date'], - }) - author['count'] += 1 - if seen: continue # Don't recount tags - - if feed['classifiers']: - for classifier in feed['classifiers'].get('author', []): - if classifier['author'] == author['name']: - author['ps'] = classifier['pos'] - author['ng'] = -1 * classifier['neg'] - - for tag in story['story_tags']: - if tag not in author['tags']: - author['tags'][tag] = {'name': tag, 'count': 0, 'ps': 0, 'ng': 0} - author['tags'][tag]['count'] += 1 - if feed['classifiers']: - for classifier in feed['classifiers'].get('tag', []): - if classifier['tag'] == tag: - author['tags'][tag]['ps'] = classifier['pos'] - author['tags'][tag]['ng'] = -1 * classifier['neg'] - - sorted_authors = sorted(list(feed['authors'].values()), key=lambda x: x['count']) - feed['authors'] = sorted_authors - + author["stories"].append( + { + "title": story["story_title"], + "url": story["story_permalink"], + "date": story["story_date"], + } + ) + author["count"] += 1 + if seen: + continue # Don't recount tags + + if feed["classifiers"]: + for classifier in feed["classifiers"].get("author", []): + if classifier["author"] == author["name"]: + author["ps"] = classifier["pos"] + author["ng"] = -1 * classifier["neg"] + + for tag in story["story_tags"]: + if tag not in author["tags"]: + author["tags"][tag] = {"name": tag, "count": 0, "ps": 0, "ng": 0} + author["tags"][tag]["count"] += 1 + if feed["classifiers"]: + for classifier in feed["classifiers"].get("tag", []): + if classifier["tag"] == tag: + author["tags"][tag]["ps"] = classifier["pos"] + author["tags"][tag]["ng"] = -1 * classifier["neg"] + + sorted_authors = sorted(list(feed["authors"].values()), key=lambda x: x["count"]) + feed["authors"] = sorted_authors + # pprint(sorted_popularity) return sorted_popularity - + def well_read_score(self): """Average percentage of stories read vs published across recently active subscribers""" from apps.reader.models import UserSubscription from apps.social.models import MSharedStory - + r 
= redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL) p = r.pipeline() - + shared_stories = MSharedStory.objects(story_feed_id=self.pk).count() - - subscribing_users = UserSubscription.objects.filter(feed_id=self.pk).values('user_id') - subscribing_user_ids = [sub['user_id'] for sub in subscribing_users] - + + subscribing_users = UserSubscription.objects.filter(feed_id=self.pk).values("user_id") + subscribing_user_ids = [sub["user_id"] for sub in subscribing_users] + for user_id in subscribing_user_ids: user_rs = "RS:%s:%s" % (user_id, self.pk) p.scard(user_rs) - + counts = p.execute() counts = [c for c in counts if c > 0] reader_count = len(counts) - - now = datetime.datetime.now().strftime('%s') - unread_cutoff = self.unread_cutoff.strftime('%s') + + now = datetime.datetime.now().strftime("%s") + unread_cutoff = self.unread_cutoff.strftime("%s") story_count = len(r.zrangebyscore("zF:%s" % self.pk, max=now, min=unread_cutoff)) if reader_count and story_count: average_pct = (sum(counts) / float(reader_count)) / float(story_count) else: average_pct = 0 - + reach_score = average_pct * reader_count * story_count - - return {'read_pct': average_pct, 'reader_count': reader_count, - 'reach_score': reach_score, 'story_count': story_count, - 'share_count': shared_stories} - + + return { + "read_pct": average_pct, + "reader_count": reader_count, + "reach_score": reach_score, + "story_count": story_count, + "share_count": shared_stories, + } + @classmethod def xls_query_popularity(cls, queries, limit): import xlsxwriter from xlsxwriter.utility import xl_rowcol_to_cell if isinstance(queries, str): - queries = [q.strip() for q in queries.split(',')] - - title = 'NewsBlur-%s.xlsx' % slugify('-'.join(queries)) + queries = [q.strip() for q in queries.split(",")] + + title = "NewsBlur-%s.xlsx" % slugify("-".join(queries)) workbook = xlsxwriter.Workbook(title) - bold = workbook.add_format({'bold': 1}) - date_format = workbook.add_format({'num_format': 'mmm d yyyy'}) - unread_format = workbook.add_format({'font_color': '#E0E0E0'}) - + bold = workbook.add_format({"bold": 1}) + date_format = workbook.add_format({"num_format": "mmm d yyyy"}) + unread_format = workbook.add_format({"font_color": "#E0E0E0"}) + for query in queries: worksheet = workbook.add_worksheet(query) row = 1 col = 0 - worksheet.write(0, col, 'Publisher', bold) - worksheet.set_column(col, col, 15); col += 1 - worksheet.write(0, col, 'Feed URL', bold) - worksheet.set_column(col, col, 20); col += 1 - worksheet.write(0, col, 'Reach score', bold) - worksheet.write_comment(0, col, 'Feeds are sorted based on this score. It\'s simply the # of readers * # of stories in the past 30 days * the percentage of stories that are actually read.') - worksheet.set_column(col, col, 9); col += 1 - worksheet.write(0, col, '# subs', bold) - worksheet.write_comment(0, col, 'Total number of subscribers on NewsBlur, not necessarily active') - worksheet.set_column(col, col, 5); col += 1 - worksheet.write(0, col, '# readers', bold) - worksheet.write_comment(0, col, 'Total number of active subscribers who have read a story from the feed in the past 30 days.') - worksheet.set_column(col, col, 8); col += 1 + worksheet.write(0, col, "Publisher", bold) + worksheet.set_column(col, col, 15) + col += 1 + worksheet.write(0, col, "Feed URL", bold) + worksheet.set_column(col, col, 20) + col += 1 + worksheet.write(0, col, "Reach score", bold) + worksheet.write_comment( + 0, + col, + "Feeds are sorted based on this score. 
It's simply the # of readers * # of stories in the past 30 days * the percentage of stories that are actually read.", + ) + worksheet.set_column(col, col, 9) + col += 1 + worksheet.write(0, col, "# subs", bold) + worksheet.write_comment(0, col, "Total number of subscribers on NewsBlur, not necessarily active") + worksheet.set_column(col, col, 5) + col += 1 + worksheet.write(0, col, "# readers", bold) + worksheet.write_comment( + 0, + col, + "Total number of active subscribers who have read a story from the feed in the past 30 days.", + ) + worksheet.set_column(col, col, 8) + col += 1 worksheet.write(0, col, "read pct", bold) - worksheet.write_comment(0, col, "Of the active subscribers reading this feed in the past 30 days, this is the percentage of stories the average subscriber reads. Values over 100 pct signify that the feed has many shared stories, which throws off the number slightly but not significantly.") - worksheet.set_column(col, col, 8); col += 1 - worksheet.write(0, col, '# stories 30d', bold) - worksheet.write_comment(0, col, "It's important to ignore feeds that haven't published anything in the last 30 days, which is why this is part of the Reach Score.") - worksheet.set_column(col, col, 10); col += 1 - worksheet.write(0, col, '# shared', bold) - worksheet.write_comment(0, col, 'Number of stories from this feed that were shared on NewsBlur. This is a strong signal of interest although it is not included in the Reach Score.') - worksheet.set_column(col, col, 7); col += 1 - worksheet.write(0, col, '# feed pos', bold) - worksheet.write_comment(0, col, 'Number of times this feed was trained with a thumbs up. Users use training to hide stories they don\'t want to see while highlighting those that they do.') - worksheet.set_column(col, col, 8); col += 1 - worksheet.write(0, col, '# feed neg', bold) - worksheet.write_comment(0, col, 'Number of times this feed was trained with a thumbs down. Users use training to hide stories they don\'t want to see while highlighting those that they do.') - worksheet.set_column(col, col, 8); col += 1 - worksheet.write(0, col, 'Author', bold) - worksheet.set_column(col, col, 15); col += 1 - worksheet.write(0, col, '# author pos', bold) - worksheet.write_comment(0, col, 'Number of times this author was trained with a thumbs up. Users use training to hide stories they don\'t want to see while highlighting those that they do.') - worksheet.set_column(col, col, 10); col += 1 - worksheet.write(0, col, '# author neg', bold) - worksheet.write_comment(0, col, 'Number of times this author was trained with a thumbs down. Users use training to hide stories they don\'t want to see while highlighting those that they do.') - worksheet.set_column(col, col, 10); col += 1 - worksheet.write(0, col, 'Story title', bold) - worksheet.set_column(col, col, 30); col += 1 - worksheet.write(0, col, 'Story URL', bold) - worksheet.set_column(col, col, 20); col += 1 - worksheet.write(0, col, 'Story date', bold) - worksheet.set_column(col, col, 10); col += 1 - worksheet.write(0, col, 'Tag', bold) - worksheet.set_column(col, col, 15); col += 1 - worksheet.write(0, col, 'Tag count', bold) - worksheet.write_comment(0, col, 'Number of times this tag is used in other stories that also contain the search query.') - worksheet.set_column(col, col, 8); col += 1 - worksheet.write(0, col, '# tag pos', bold) - worksheet.write_comment(0, col, 'Number of times this tag was trained with a thumbs up. 
Users use training to hide stories they don\'t want to see while highlighting those that they do.') - worksheet.set_column(col, col, 7); col += 1 - worksheet.write(0, col, '# tag neg', bold) - worksheet.write_comment(0, col, 'Number of times this tag was trained with a thumbs down. Users use training to hide stories they don\'t want to see while highlighting those that they do.') - worksheet.set_column(col, col, 7); col += 1 + worksheet.write_comment( + 0, + col, + "Of the active subscribers reading this feed in the past 30 days, this is the percentage of stories the average subscriber reads. Values over 100 pct signify that the feed has many shared stories, which throws off the number slightly but not significantly.", + ) + worksheet.set_column(col, col, 8) + col += 1 + worksheet.write(0, col, "# stories 30d", bold) + worksheet.write_comment( + 0, + col, + "It's important to ignore feeds that haven't published anything in the last 30 days, which is why this is part of the Reach Score.", + ) + worksheet.set_column(col, col, 10) + col += 1 + worksheet.write(0, col, "# shared", bold) + worksheet.write_comment( + 0, + col, + "Number of stories from this feed that were shared on NewsBlur. This is a strong signal of interest although it is not included in the Reach Score.", + ) + worksheet.set_column(col, col, 7) + col += 1 + worksheet.write(0, col, "# feed pos", bold) + worksheet.write_comment( + 0, + col, + "Number of times this feed was trained with a thumbs up. Users use training to hide stories they don't want to see while highlighting those that they do.", + ) + worksheet.set_column(col, col, 8) + col += 1 + worksheet.write(0, col, "# feed neg", bold) + worksheet.write_comment( + 0, + col, + "Number of times this feed was trained with a thumbs down. Users use training to hide stories they don't want to see while highlighting those that they do.", + ) + worksheet.set_column(col, col, 8) + col += 1 + worksheet.write(0, col, "Author", bold) + worksheet.set_column(col, col, 15) + col += 1 + worksheet.write(0, col, "# author pos", bold) + worksheet.write_comment( + 0, + col, + "Number of times this author was trained with a thumbs up. Users use training to hide stories they don't want to see while highlighting those that they do.", + ) + worksheet.set_column(col, col, 10) + col += 1 + worksheet.write(0, col, "# author neg", bold) + worksheet.write_comment( + 0, + col, + "Number of times this author was trained with a thumbs down. Users use training to hide stories they don't want to see while highlighting those that they do.", + ) + worksheet.set_column(col, col, 10) + col += 1 + worksheet.write(0, col, "Story title", bold) + worksheet.set_column(col, col, 30) + col += 1 + worksheet.write(0, col, "Story URL", bold) + worksheet.set_column(col, col, 20) + col += 1 + worksheet.write(0, col, "Story date", bold) + worksheet.set_column(col, col, 10) + col += 1 + worksheet.write(0, col, "Tag", bold) + worksheet.set_column(col, col, 15) + col += 1 + worksheet.write(0, col, "Tag count", bold) + worksheet.write_comment( + 0, + col, + "Number of times this tag is used in other stories that also contain the search query.", + ) + worksheet.set_column(col, col, 8) + col += 1 + worksheet.write(0, col, "# tag pos", bold) + worksheet.write_comment( + 0, + col, + "Number of times this tag was trained with a thumbs up. 
Users use training to hide stories they don't want to see while highlighting those that they do.", + ) + worksheet.set_column(col, col, 7) + col += 1 + worksheet.write(0, col, "# tag neg", bold) + worksheet.write_comment( + 0, + col, + "Number of times this tag was trained with a thumbs down. Users use training to hide stories they don't want to see while highlighting those that they do.", + ) + worksheet.set_column(col, col, 7) + col += 1 popularity = cls.query_popularity(query, limit=limit) - + for feed in popularity: col = 0 - worksheet.write(row, col, feed['feed_title']); col += 1 - worksheet.write_url(row, col, feed.get('feed_url') or ""); col += 1 - worksheet.conditional_format(row, col, row, col+8, {'type': 'cell', - 'criteria': '==', - 'value': 0, - 'format': unread_format}) - worksheet.write(row, col, "=%s*%s*%s" % ( - xl_rowcol_to_cell(row, col+2), - xl_rowcol_to_cell(row, col+3), - xl_rowcol_to_cell(row, col+4), - )); col += 1 - worksheet.write(row, col, feed['num_subscribers']); col += 1 - worksheet.write(row, col, feed['reader_count']); col += 1 - worksheet.write(row, col, feed['read_pct']); col += 1 - worksheet.write(row, col, feed['story_count']); col += 1 - worksheet.write(row, col, feed['share_count']); col += 1 - worksheet.write(row, col, feed['ps']); col += 1 - worksheet.write(row, col, feed['ng']); col += 1 - for author in feed['authors']: + worksheet.write(row, col, feed["feed_title"]) + col += 1 + worksheet.write_url(row, col, feed.get("feed_url") or "") + col += 1 + worksheet.conditional_format( + row, + col, + row, + col + 8, + {"type": "cell", "criteria": "==", "value": 0, "format": unread_format}, + ) + worksheet.write( + row, + col, + "=%s*%s*%s" + % ( + xl_rowcol_to_cell(row, col + 2), + xl_rowcol_to_cell(row, col + 3), + xl_rowcol_to_cell(row, col + 4), + ), + ) + col += 1 + worksheet.write(row, col, feed["num_subscribers"]) + col += 1 + worksheet.write(row, col, feed["reader_count"]) + col += 1 + worksheet.write(row, col, feed["read_pct"]) + col += 1 + worksheet.write(row, col, feed["story_count"]) + col += 1 + worksheet.write(row, col, feed["share_count"]) + col += 1 + worksheet.write(row, col, feed["ps"]) + col += 1 + worksheet.write(row, col, feed["ng"]) + col += 1 + for author in feed["authors"]: row += 1 - worksheet.conditional_format(row, col, row, col+2, {'type': 'cell', - 'criteria': '==', - 'value': 0, - 'format': unread_format}) - worksheet.write(row, col, author['name']) - worksheet.write(row, col+1, author['ps']) - worksheet.write(row, col+2, author['ng']) - for story in author['stories']: - worksheet.write(row, col+3, story['title']) - worksheet.write_url(row, col+4, story['url']) - worksheet.write_datetime(row, col+5, story['date'], date_format) + worksheet.conditional_format( + row, + col, + row, + col + 2, + {"type": "cell", "criteria": "==", "value": 0, "format": unread_format}, + ) + worksheet.write(row, col, author["name"]) + worksheet.write(row, col + 1, author["ps"]) + worksheet.write(row, col + 2, author["ng"]) + for story in author["stories"]: + worksheet.write(row, col + 3, story["title"]) + worksheet.write_url(row, col + 4, story["url"]) + worksheet.write_datetime(row, col + 5, story["date"], date_format) row += 1 - for tag in list(author['tags'].values()): - worksheet.conditional_format(row, col+7, row, col+9, {'type': 'cell', - 'criteria': '==', - 'value': 0, - 'format': unread_format}) - worksheet.write(row, col+6, tag['name']) - worksheet.write(row, col+7, tag['count']) - worksheet.write(row, col+8, tag['ps']) - 
worksheet.write(row, col+9, tag['ng']) + for tag in list(author["tags"].values()): + worksheet.conditional_format( + row, + col + 7, + row, + col + 9, + {"type": "cell", "criteria": "==", "value": 0, "format": unread_format}, + ) + worksheet.write(row, col + 6, tag["name"]) + worksheet.write(row, col + 7, tag["count"]) + worksheet.write(row, col + 8, tag["ps"]) + worksheet.write(row, col + 9, tag["ng"]) row += 1 workbook.close() return title - + def find_stories(self, query, order="newest", offset=0, limit=25): - story_ids = SearchStory.query(feed_ids=[self.pk], query=query, order=order, - offset=offset, limit=limit) - stories_db = MStory.objects( - story_hash__in=story_ids - ).order_by('-story_date' if order == "newest" else 'story_date') + story_ids = SearchStory.query( + feed_ids=[self.pk], query=query, order=order, offset=offset, limit=limit + ) + stories_db = MStory.objects(story_hash__in=story_ids).order_by( + "-story_date" if order == "newest" else "story_date" + ) stories = self.format_stories(stories_db, self.pk) - + return stories - + @classmethod def format_stories(cls, stories_db, feed_id=None, include_permalinks=False): stories = [] @@ -2009,33 +2228,34 @@ class Feed(models.Model): for story_db in stories_db: story = cls.format_story(story_db, feed_id, include_permalinks=include_permalinks) stories.append(story) - + return stories - + @classmethod - def format_story(cls, story_db, feed_id=None, text=False, include_permalinks=False, - show_changes=False): + def format_story(cls, story_db, feed_id=None, text=False, include_permalinks=False, show_changes=False): if isinstance(story_db.story_content_z, str): story_db.story_content_z = base64.b64decode(story_db.story_content_z) - - story_content = '' + + story_content = "" latest_story_content = None has_changes = False - if (not show_changes and - hasattr(story_db, 'story_latest_content_z') and - story_db.story_latest_content_z): + if ( + not show_changes + and hasattr(story_db, "story_latest_content_z") + and story_db.story_latest_content_z + ): try: latest_story_content = smart_str(zlib.decompress(story_db.story_latest_content_z)) except DjangoUnicodeDecodeError: latest_story_content = zlib.decompress(story_db.story_latest_content_z) if story_db.story_content_z: story_content = smart_str(zlib.decompress(story_db.story_content_z)) - - if ' 80: - story_title = story_title[:80] + '...' - - story = {} - story['story_hash'] = getattr(story_db, 'story_hash', None) - story['story_tags'] = story_db.story_tags or [] - story['story_date'] = story_db.story_date.replace(tzinfo=None) - story['story_timestamp'] = story_db.story_date.strftime('%s') - story['story_authors'] = story_db.story_author_name or "" - story['story_title'] = story_title + story_title = story_title[:80] + "..." 
+ + story = {} + story["story_hash"] = getattr(story_db, "story_hash", None) + story["story_tags"] = story_db.story_tags or [] + story["story_date"] = story_db.story_date.replace(tzinfo=None) + story["story_timestamp"] = story_db.story_date.strftime("%s") + story["story_authors"] = story_db.story_author_name or "" + story["story_title"] = story_title if blank_story_title: - story['story_title_blank'] = True - story['story_content'] = story_content - story['story_permalink'] = story_db.story_permalink - story['image_urls'] = story_db.image_urls - story['secure_image_urls']= cls.secure_image_urls(story_db.image_urls) - story['secure_image_thumbnails']= cls.secure_image_thumbnails(story_db.image_urls) - story['story_feed_id'] = feed_id or story_db.story_feed_id - story['has_modifications']= has_changes - story['comment_count'] = story_db.comment_count if hasattr(story_db, 'comment_count') else 0 - story['comment_user_ids'] = story_db.comment_user_ids if hasattr(story_db, 'comment_user_ids') else [] - story['share_count'] = story_db.share_count if hasattr(story_db, 'share_count') else 0 - story['share_user_ids'] = story_db.share_user_ids if hasattr(story_db, 'share_user_ids') else [] - story['guid_hash'] = story_db.guid_hash if hasattr(story_db, 'guid_hash') else None - if hasattr(story_db, 'source_user_id'): - story['source_user_id'] = story_db.source_user_id - story['id'] = story_db.story_guid or story_db.story_date - if hasattr(story_db, 'starred_date'): - story['starred_date'] = story_db.starred_date - if hasattr(story_db, 'user_tags'): - story['user_tags'] = story_db.user_tags - if hasattr(story_db, 'user_notes'): - story['user_notes'] = story_db.user_notes - if hasattr(story_db, 'highlights'): - story['highlights'] = story_db.highlights - if hasattr(story_db, 'shared_date'): - story['shared_date'] = story_db.shared_date - if hasattr(story_db, 'comments'): - story['comments'] = story_db.comments - if hasattr(story_db, 'user_id'): - story['user_id'] = story_db.user_id - if include_permalinks and hasattr(story_db, 'blurblog_permalink'): - story['blurblog_permalink'] = story_db.blurblog_permalink() + story["story_title_blank"] = True + story["story_content"] = story_content + story["story_permalink"] = story_db.story_permalink + story["image_urls"] = story_db.image_urls + story["secure_image_urls"] = cls.secure_image_urls(story_db.image_urls) + story["secure_image_thumbnails"] = cls.secure_image_thumbnails(story_db.image_urls) + story["story_feed_id"] = feed_id or story_db.story_feed_id + story["has_modifications"] = has_changes + story["comment_count"] = story_db.comment_count if hasattr(story_db, "comment_count") else 0 + story["comment_user_ids"] = story_db.comment_user_ids if hasattr(story_db, "comment_user_ids") else [] + story["share_count"] = story_db.share_count if hasattr(story_db, "share_count") else 0 + story["share_user_ids"] = story_db.share_user_ids if hasattr(story_db, "share_user_ids") else [] + story["guid_hash"] = story_db.guid_hash if hasattr(story_db, "guid_hash") else None + if hasattr(story_db, "source_user_id"): + story["source_user_id"] = story_db.source_user_id + story["id"] = story_db.story_guid or story_db.story_date + if hasattr(story_db, "starred_date"): + story["starred_date"] = story_db.starred_date + if hasattr(story_db, "user_tags"): + story["user_tags"] = story_db.user_tags + if hasattr(story_db, "user_notes"): + story["user_notes"] = story_db.user_notes + if hasattr(story_db, "highlights"): + story["highlights"] = story_db.highlights + if hasattr(story_db, 
"shared_date"): + story["shared_date"] = story_db.shared_date + if hasattr(story_db, "comments"): + story["comments"] = story_db.comments + if hasattr(story_db, "user_id"): + story["user_id"] = story_db.user_id + if include_permalinks and hasattr(story_db, "blurblog_permalink"): + story["blurblog_permalink"] = story_db.blurblog_permalink() if text: - soup = BeautifulSoup(story['story_content'], features="lxml") - text = ''.join(soup.findAll(text=True)) - text = re.sub(r'\n+', '\n\n', text) - text = re.sub(r'\t+', '\t', text) - story['text'] = text - + soup = BeautifulSoup(story["story_content"], features="lxml") + text = "".join(soup.findAll(text=True)) + text = re.sub(r"\n+", "\n\n", text) + text = re.sub(r"\t+", "\t", text) + story["text"] = text + return story - + @classmethod def secure_image_urls(cls, urls): - signed_urls = [create_imageproxy_signed_url(settings.IMAGES_URL, - settings.IMAGES_SECRET_KEY, - url) for url in urls] + signed_urls = [ + create_imageproxy_signed_url(settings.IMAGES_URL, settings.IMAGES_SECRET_KEY, url) for url in urls + ] return dict(zip(urls, signed_urls)) - + @classmethod def secure_image_thumbnails(cls, urls, size=192): - signed_urls = [create_imageproxy_signed_url(settings.IMAGES_URL, - settings.IMAGES_SECRET_KEY, - url, - size) for url in urls] + signed_urls = [ + create_imageproxy_signed_url(settings.IMAGES_URL, settings.IMAGES_SECRET_KEY, url, size) + for url in urls + ] return dict(zip(urls, signed_urls)) - + def get_tags(self, entry): fcat = [] - if 'tags' in entry: + if "tags" in entry: for tcat in entry.tags: term = None - if hasattr(tcat, 'label') and tcat.label: + if hasattr(tcat, "label") and tcat.label: term = tcat.label - elif hasattr(tcat, 'term') and tcat.term: + elif hasattr(tcat, "term") and tcat.term: term = tcat.term if not term or "CDATA" in term: continue qcat = term.strip() - if ',' in qcat or '/' in qcat: - qcat = qcat.replace(',', '/').split('/') + if "," in qcat or "/" in qcat: + qcat = qcat.replace(",", "/").split("/") else: qcat = [qcat] for zcat in qcat: tagname = zcat.lower() - while ' ' in tagname: - tagname = tagname.replace(' ', ' ') + while " " in tagname: + tagname = tagname.replace(" ", " ") tagname = tagname.strip() - if not tagname or tagname == ' ': + if not tagname or tagname == " ": continue fcat.append(tagname) fcat = [strip_tags(t)[:250] for t in fcat[:12]] return fcat - + @classmethod def get_permalink(cls, entry): - link = entry.get('link') + link = entry.get("link") if not link: - links = entry.get('links') + links = entry.get("links") if links: - link = links[0].get('href') + link = links[0].get("href") if not link: - link = entry.get('id') + link = entry.get("id") return link - + def _exists_story(self, story, story_content, existing_stories, new_story_hashes, lightweight=False): story_in_system = None story_has_changed = False story_link = self.get_permalink(story) existing_stories_hashes = list(existing_stories.keys()) - story_pub_date = story.get('published') + story_pub_date = story.get("published") # story_published_now = story.get('published_now', False) # start_date = story_pub_date - datetime.timedelta(hours=8) # end_date = story_pub_date + datetime.timedelta(hours=8) @@ -2166,110 +2386,146 @@ class Feed(models.Model): if isinstance(existing_story.id, str): # Correcting a MongoDB bug existing_story.story_guid = existing_story.id - - if story.get('story_hash') == existing_story.story_hash: + + if story.get("story_hash") == existing_story.story_hash: story_in_system = existing_story - elif 
(story.get('story_hash') in existing_stories_hashes and - story.get('story_hash') != existing_story.story_hash): + elif ( + story.get("story_hash") in existing_stories_hashes + and story.get("story_hash") != existing_story.story_hash + ): # Story already exists but is not this one continue - elif (existing_story.story_hash in new_story_hashes and - story.get('story_hash') != existing_story.story_hash): - # Story coming up later + elif ( + existing_story.story_hash in new_story_hashes + and story.get("story_hash") != existing_story.story_hash + ): + # Story coming up later continue - if 'story_latest_content_z' in existing_story: + if "story_latest_content_z" in existing_story: existing_story_content = smart_str(zlib.decompress(existing_story.story_latest_content_z)) - elif 'story_latest_content' in existing_story: + elif "story_latest_content" in existing_story: existing_story_content = existing_story.story_latest_content - elif 'story_content_z' in existing_story: + elif "story_content_z" in existing_story: existing_story_content = smart_str(zlib.decompress(existing_story.story_content_z)) - elif 'story_content' in existing_story: + elif "story_content" in existing_story: existing_story_content = existing_story.story_content else: - existing_story_content = '' - - + existing_story_content = "" + # Title distance + content distance, checking if story changed - story_title_difference = abs(levenshtein_distance(story.get('title'), - existing_story.story_title)) - - title_ratio = difflib.SequenceMatcher(None, story.get('title', ""), - existing_story.story_title).ratio() - if title_ratio < .75: continue - + story_title_difference = abs(levenshtein_distance(story.get("title"), existing_story.story_title)) + + title_ratio = difflib.SequenceMatcher( + None, story.get("title", ""), existing_story.story_title + ).ratio() + if title_ratio < 0.75: + continue + story_timedelta = existing_story.story_date - story_pub_date # logging.debug('Story pub date: %s %s (%s, %s)' % (existing_story.story_date, story_pub_date, title_ratio, story_timedelta)) - if abs(story_timedelta.days) >= 2: continue - - seq = difflib.SequenceMatcher(None, story_content, existing_story_content) - - similiar_length_min = 1000 - if (existing_story.story_permalink == story_link and - existing_story.story_title == story.get('title')): - similiar_length_min = 20 - - # Skip content check if already failed due to a timeout. This way we catch titles - if lightweight: continue + if abs(story_timedelta.days) >= 2: + continue - if (seq + seq = difflib.SequenceMatcher(None, story_content, existing_story_content) + + similiar_length_min = 1000 + if existing_story.story_permalink == story_link and existing_story.story_title == story.get( + "title" + ): + similiar_length_min = 20 + + # Skip content check if already failed due to a timeout. 
This way we catch titles + if lightweight: + continue + + if ( + seq and story_content and len(story_content) > similiar_length_min and existing_story_content - and seq.real_quick_ratio() > .9 - and seq.quick_ratio() > .95): + and seq.real_quick_ratio() > 0.9 + and seq.quick_ratio() > 0.95 + ): content_ratio = seq.ratio() - if story_title_difference > 0 and content_ratio > .98: + if story_title_difference > 0 and content_ratio > 0.98: story_in_system = existing_story if story_title_difference > 0 or content_ratio < 1.0: if settings.DEBUG: - logging.debug(" ---> Title difference - %s/%s (%s): %s" % (story.get('title'), existing_story.story_title, story_title_difference, content_ratio)) + logging.debug( + " ---> Title difference - %s/%s (%s): %s" + % ( + story.get("title"), + existing_story.story_title, + story_title_difference, + content_ratio, + ) + ) story_has_changed = True break - + # More restrictive content distance, still no story match - if not story_in_system and content_ratio > .98: + if not story_in_system and content_ratio > 0.98: if settings.DEBUG: - logging.debug(" ---> Content difference - %s/%s (%s): %s" % (story.get('title'), existing_story.story_title, story_title_difference, content_ratio)) + logging.debug( + " ---> Content difference - %s/%s (%s): %s" + % ( + story.get("title"), + existing_story.story_title, + story_title_difference, + content_ratio, + ) + ) story_in_system = existing_story story_has_changed = True break - + if story_in_system and not story_has_changed: if story_content != existing_story_content: if settings.DEBUG: - logging.debug(" ---> Content difference - %s (%s)/%s (%s)" % (story.get('title'), len(story_content), existing_story.story_title, len(existing_story_content))) + logging.debug( + " ---> Content difference - %s (%s)/%s (%s)" + % ( + story.get("title"), + len(story_content), + existing_story.story_title, + len(existing_story_content), + ) + ) story_has_changed = True if story_link != existing_story.story_permalink: if settings.DEBUG: - logging.debug(" ---> Permalink difference - %s/%s" % (story_link, existing_story.story_permalink)) + logging.debug( + " ---> Permalink difference - %s/%s" + % (story_link, existing_story.story_permalink) + ) story_has_changed = True # if story_pub_date != existing_story.story_date: # story_has_changed = True break - - + # if story_has_changed or not story_in_system: - # print 'New/updated story: %s' % (story), + # print 'New/updated story: %s' % (story), return story_in_system, story_has_changed - + def get_next_scheduled_update(self, force=False, verbose=True, premium_speed=False, pro_speed=False): if self.min_to_decay and not force and not premium_speed: return self.min_to_decay - + from apps.notifications.models import MUserFeedNotification - + if premium_speed: self.active_premium_subscribers += 1 if pro_speed: self.pro_subscribers += 1 - - spd = self.stories_last_month / 30.0 - subs = (self.active_premium_subscribers + - ((self.active_subscribers - self.active_premium_subscribers) / 10.0)) + + spd = self.stories_last_month / 30.0 + subs = self.active_premium_subscribers + ( + (self.active_subscribers - self.active_premium_subscribers) / 10.0 + ) notification_count = MUserFeedNotification.objects.filter(feed_id=self.pk).count() - # Calculate sub counts: + # Calculate sub counts: # SELECT COUNT(*) FROM feeds WHERE active_premium_subscribers > 10 AND stories_last_month >= 30; # SELECT COUNT(*) FROM feeds WHERE active_premium_subscribers > 1 AND active_premium_subscribers < 10 AND stories_last_month >= 30; # 
SELECT COUNT(*) FROM feeds WHERE active_premium_subscribers = 1 AND stories_last_month >= 30; @@ -2295,7 +2551,7 @@ class Feed(models.Model): if subs > 1: total = 60 - (spd * 60) else: - total = 60*6 - (spd * 60*6) + total = 60 * 6 - (spd * 60 * 6) elif spd == 0: if subs > 1: total = 60 * 6 @@ -2303,7 +2559,7 @@ total = 60 * 12 else: total = 60 * 24 - months_since_last_story = seconds_timesince(self.last_story_date) / (60*60*24*30) + months_since_last_story = seconds_timesince(self.last_story_date) / (60 * 60 * 24 * 30) total *= max(1, months_since_last_story) # updates_per_day_delay = 3 * 60 / max(.25, ((max(0, self.active_subscribers)**.2) # * (self.stories_last_month**0.25))) @@ -2324,27 +2580,27 @@ if self.is_push: fetch_history = MFetchHistory.feed(self.pk) - if len(fetch_history['push_history']): + if len(fetch_history["push_history"]): total = total * 12 - + # Any notifications means a 30 min minimum if notification_count > 0: total = min(total, 30) # 4 hour max for premiums, 48 hour max for free if subs >= 1: - total = min(total, 60*4*1) + total = min(total, 60 * 4 * 1) else: - total = min(total, 60*24*2) + total = min(total, 60 * 24 * 2) # Craigslist feeds get 6 hours minimum - if 'craigslist' in self.feed_address: - total = max(total, 60*6) + if "craigslist" in self.feed_address: + total = max(total, 60 * 6) # Twitter feeds get 2 hours minimum - if 'twitter' in self.feed_address: - total = max(total, 60*2) + if "twitter" in self.feed_address: + total = max(total, 60 * 2) - + # Pro subscribers get absolute minimum if self.pro_subscribers and self.pro_subscribers >= 1: if self.stories_last_month == 0: @@ -2353,72 +2609,80 @@ total = min(total, settings.PRO_MINUTES_BETWEEN_FETCHES) if verbose: - logging.debug(" ---> [%-30s] Fetched every %s min - Subs: %s/%s/%s/%s/%s Stories/day: %s" % ( - self.log_title[:30], total, - self.num_subscribers, - self.active_subscribers, - self.active_premium_subscribers, - self.archive_subscribers, - self.pro_subscribers, - spd)) + logging.debug( + " ---> [%-30s] Fetched every %s min - Subs: %s/%s/%s/%s/%s Stories/day: %s" + % ( + self.log_title[:30], + total, + self.num_subscribers, + self.active_subscribers, + self.active_premium_subscribers, + self.archive_subscribers, + self.pro_subscribers, + spd, + ) + ) return total - + def set_next_scheduled_update(self, verbose=False, skip_scheduling=False): r = redis.Redis(connection_pool=settings.REDIS_FEED_UPDATE_POOL) total = self.get_next_scheduled_update(force=True, verbose=verbose) error_count = self.error_count - + if error_count: total = total * error_count - total = min(total, 60*24*7) + total = min(total, 60 * 24 * 7) if verbose: - logging.debug(' ---> [%-30s] ~FBScheduling feed fetch geometrically: ' - '~SB%s errors. Time: %s min' % ( - self.log_title[:30], self.errors_since_good, total)) + logging.debug( + " ---> [%-30s] ~FBScheduling feed fetch geometrically: " + "~SB%s errors. 
Time: %s min" % (self.log_title[:30], self.errors_since_good, total) + ) + random_factor = random.randint(0, int(total)) / 4 - next_scheduled_update = datetime.datetime.utcnow() + datetime.timedelta( - minutes = total + random_factor) + next_scheduled_update = datetime.datetime.utcnow() + datetime.timedelta(minutes=total + random_factor) original_min_to_decay = self.min_to_decay self.min_to_decay = total - + delta = self.next_scheduled_update - datetime.datetime.now() minutes_to_next_fetch = (delta.seconds + (delta.days * 24 * 3600)) / 60 if minutes_to_next_fetch > self.min_to_decay or not skip_scheduling: self.next_scheduled_update = next_scheduled_update if self.active_subscribers >= 1: - r.zadd('scheduled_updates', { self.pk: self.next_scheduled_update.strftime('%s') }) - r.zrem('tasked_feeds', self.pk) - r.srem('queued_feeds', self.pk) - - updated_fields = ['last_update', 'next_scheduled_update'] + r.zadd("scheduled_updates", {self.pk: self.next_scheduled_update.strftime("%s")}) + r.zrem("tasked_feeds", self.pk) + r.srem("queued_feeds", self.pk) + + updated_fields = ["last_update", "next_scheduled_update"] if self.min_to_decay != original_min_to_decay: - updated_fields.append('min_to_decay') + updated_fields.append("min_to_decay") self.save(update_fields=updated_fields) - + @property def error_count(self): r = redis.Redis(connection_pool=settings.REDIS_FEED_UPDATE_POOL) - fetch_errors = int(r.zscore('error_feeds', self.pk) or 0) - + fetch_errors = int(r.zscore("error_feeds", self.pk) or 0) + return fetch_errors + self.errors_since_good - + def schedule_feed_fetch_immediately(self, verbose=True): r = redis.Redis(connection_pool=settings.REDIS_FEED_UPDATE_POOL) if not self.num_subscribers: - logging.debug(' ---> [%-30s] Not scheduling feed fetch immediately, no subs.' % (self.log_title[:30])) + logging.debug( + " ---> [%-30s] Not scheduling feed fetch immediately, no subs." % (self.log_title[:30]) + ) return self - + if verbose: - logging.debug(' ---> [%-30s] Scheduling feed fetch immediately...' % (self.log_title[:30])) - + logging.debug(" ---> [%-30s] Scheduling feed fetch immediately..." % (self.log_title[:30])) + self.next_scheduled_update = datetime.datetime.utcnow() - r.zadd('scheduled_updates', { self.pk: self.next_scheduled_update.strftime('%s') }) + r.zadd("scheduled_updates", {self.pk: self.next_scheduled_update.strftime("%s")}) return self.save() - + def setup_push(self): from apps.push.models import PushSubscription + try: push = self.push except PushSubscription.DoesNotExist: @@ -2426,35 +2690,38 @@ class Feed(models.Model): else: self.is_push = push.verified self.save() - + def queue_pushed_feed_xml(self, xml, latest_push_date_delta=None): r = redis.Redis(connection_pool=settings.REDIS_FEED_UPDATE_POOL) queue_size = r.llen("push_feeds") - + if latest_push_date_delta: - latest_push_date_delta = "%s" % str(latest_push_date_delta).split('.', 2)[0] + latest_push_date_delta = "%s" % str(latest_push_date_delta).split(".", 2)[0] if queue_size > 1000: self.schedule_feed_fetch_immediately() else: - logging.debug(' ---> [%-30s] [%s] ~FB~SBQueuing pushed stories, last pushed %s...' % (self.log_title[:30], self.pk, latest_push_date_delta)) + logging.debug( + " ---> [%-30s] [%s] ~FB~SBQueuing pushed stories, last pushed %s..." 
+ % (self.log_title[:30], self.pk, latest_push_date_delta) + ) self.set_next_scheduled_update() - PushFeeds.apply_async(args=(self.pk, xml), queue='push_feeds') - + PushFeeds.apply_async(args=(self.pk, xml), queue="push_feeds") + # def calculate_collocations_story_content(self, # collocation_measures=TrigramAssocMeasures, # collocation_finder=TrigramCollocationFinder): # stories = MStory.objects.filter(story_feed_id=self.pk) # story_content = ' '.join([s.story_content for s in stories if s.story_content]) # return self.calculate_collocations(story_content, collocation_measures, collocation_finder) - # + # # def calculate_collocations_story_title(self, # collocation_measures=BigramAssocMeasures, # collocation_finder=BigramCollocationFinder): # stories = MStory.objects.filter(story_feed_id=self.pk) # story_titles = ' '.join([s.story_title for s in stories if s.story_title]) # return self.calculate_collocations(story_titles, collocation_measures, collocation_finder) - # + # # def calculate_collocations(self, content, # collocation_measures=TrigramAssocMeasures, # collocation_finder=TrigramCollocationFinder): @@ -2467,35 +2734,37 @@ class Feed(models.Model): # print "ValueError, ignoring: %s" % e # content = re.sub(r']*>', '', content) # content = re.split(r"[^A-Za-z-'&]+", content) - # + # # finder = collocation_finder.from_words(content) # finder.apply_freq_filter(3) # best = finder.nbest(collocation_measures.pmi, 10) # phrases = [' '.join(phrase) for phrase in best] - # + # # return phrases # class FeedCollocations(models.Model): # feed = models.ForeignKey(Feed) # phrase = models.CharField(max_length=500) - + + class FeedData(models.Model): - feed = AutoOneToOneField(Feed, related_name='data', on_delete=models.CASCADE) + feed = AutoOneToOneField(Feed, related_name="data", on_delete=models.CASCADE) feed_tagline = models.CharField(max_length=1024, blank=True, null=True) story_count_history = models.TextField(blank=True, null=True) feed_classifier_counts = models.TextField(blank=True, null=True) popular_tags = models.CharField(max_length=1024, blank=True, null=True) popular_authors = models.CharField(max_length=2048, blank=True, null=True) - + def save(self, *args, **kwargs): if self.feed_tagline and len(self.feed_tagline) >= 1000: self.feed_tagline = self.feed_tagline[:1000] - - try: + + try: super(FeedData, self).save(*args, **kwargs) except (IntegrityError, OperationError): - if hasattr(self, 'id') and self.id: self.delete() + if hasattr(self, "id") and self.id: + self.delete() except DatabaseError as e: # Nothing updated logging.debug(" ---> ~FRNothing updated in FeedData (%s): %s" % (self.feed, e)) @@ -2503,49 +2772,49 @@ class FeedData(models.Model): class MFeedIcon(mongo.Document): - feed_id = mongo.IntField(primary_key=True) - color = mongo.StringField(max_length=6) - data = mongo.StringField() - icon_url = mongo.StringField() - not_found = mongo.BooleanField(default=False) - + feed_id = mongo.IntField(primary_key=True) + color = mongo.StringField(max_length=6) + data = mongo.StringField() + icon_url = mongo.StringField() + not_found = mongo.BooleanField(default=False) + meta = { - 'collection' : 'feed_icons', - 'allow_inheritance' : False, + "collection": "feed_icons", + "allow_inheritance": False, } - + @classmethod def get_feed(cls, feed_id, create=True): try: - feed_icon = cls.objects.read_preference(pymongo.ReadPreference.PRIMARY)\ - .get(feed_id=feed_id) + feed_icon = cls.objects.read_preference(pymongo.ReadPreference.PRIMARY).get(feed_id=feed_id) except cls.DoesNotExist: if 
create: feed_icon = cls.objects.create(feed_id=feed_id) else: feed_icon = None - + return feed_icon - + def save(self, *args, **kwargs): if self.icon_url: self.icon_url = str(self.icon_url) - try: + try: return super(MFeedIcon, self).save(*args, **kwargs) except (IntegrityError, OperationError): # print "Error on Icon: %s" % e - if hasattr(self, '_id'): self.delete() + if hasattr(self, "_id"): + self.delete() class MFeedPage(mongo.Document): feed_id = mongo.IntField(primary_key=True) page_data = mongo.BinaryField() - + meta = { - 'collection': 'feed_pages', - 'allow_inheritance': False, + "collection": "feed_pages", + "allow_inheritance": False, } - + def page(self): try: return zlib.decompress(self.page_data) @@ -2553,8 +2822,8 @@ class MFeedPage(mongo.Document): logging.debug(" ***> Zlib decompress error: %s" % e) self.page_data = None self.save() - return - + return + @classmethod def get_data(cls, feed_id): data = None @@ -2568,8 +2837,8 @@ class MFeedPage(mongo.Document): logging.debug(" ***> Zlib decompress error: %s" % e) self.page_data = None self.save() - return - + return + if not data: dupe_feed = DuplicateFeed.objects.filter(duplicate_feed_id=feed_id) if dupe_feed: @@ -2582,66 +2851,71 @@ class MFeedPage(mongo.Document): return data + class MStory(mongo.Document): - '''A feed item''' - story_feed_id = mongo.IntField() - story_date = mongo.DateTimeField() - story_title = mongo.StringField(max_length=1024) - story_content = mongo.StringField() - story_content_z = mongo.BinaryField() - story_original_content = mongo.StringField() + """A feed item""" + + story_feed_id = mongo.IntField() + story_date = mongo.DateTimeField() + story_title = mongo.StringField(max_length=1024) + story_content = mongo.StringField() + story_content_z = mongo.BinaryField() + story_original_content = mongo.StringField() story_original_content_z = mongo.BinaryField() - story_latest_content = mongo.StringField() - story_latest_content_z = mongo.BinaryField() - original_text_z = mongo.BinaryField() - original_page_z = mongo.BinaryField() - story_content_type = mongo.StringField(max_length=255) - story_author_name = mongo.StringField() - story_permalink = mongo.StringField() - story_guid = mongo.StringField() - story_hash = mongo.StringField() - image_urls = mongo.ListField(mongo.StringField(max_length=1024)) - story_tags = mongo.ListField(mongo.StringField(max_length=250)) - comment_count = mongo.IntField() - comment_user_ids = mongo.ListField(mongo.IntField()) - share_count = mongo.IntField() - share_user_ids = mongo.ListField(mongo.IntField()) + story_latest_content = mongo.StringField() + story_latest_content_z = mongo.BinaryField() + original_text_z = mongo.BinaryField() + original_page_z = mongo.BinaryField() + story_content_type = mongo.StringField(max_length=255) + story_author_name = mongo.StringField() + story_permalink = mongo.StringField() + story_guid = mongo.StringField() + story_hash = mongo.StringField() + image_urls = mongo.ListField(mongo.StringField(max_length=1024)) + story_tags = mongo.ListField(mongo.StringField(max_length=250)) + comment_count = mongo.IntField() + comment_user_ids = mongo.ListField(mongo.IntField()) + share_count = mongo.IntField() + share_user_ids = mongo.ListField(mongo.IntField()) meta = { - 'collection': 'stories', - 'indexes': [('story_feed_id', '-story_date'), - {'fields': ['story_hash'], - 'unique': True, - }], - 'ordering': ['-story_date'], - 'allow_inheritance': False, - 'cascade': False, - 'strict': False, + "collection": "stories", + "indexes": [ + 
("story_feed_id", "-story_date"), + { + "fields": ["story_hash"], + "unique": True, + }, + ], + "ordering": ["-story_date"], + "allow_inheritance": False, + "cascade": False, + "strict": False, } - + RE_STORY_HASH = re.compile(r"^(\d{1,10}):(\w{6})$") RE_RS_KEY = re.compile(r"^RS:(\d+):(\d+)$") def __str__(self): content = self.story_content_z if self.story_content_z else "" return f"{self.story_hash}: {self.story_title[:20]} ({len(self.story_content_z) if self.story_content_z else 0} bytes)" - + @property def guid_hash(self): - return hashlib.sha1((self.story_guid).encode(encoding='utf-8')).hexdigest()[:6] + return hashlib.sha1((self.story_guid).encode(encoding="utf-8")).hexdigest()[:6] @classmethod def guid_hash_unsaved(self, guid): - return hashlib.sha1(guid.encode(encoding='utf-8')).hexdigest()[:6] + return hashlib.sha1(guid.encode(encoding="utf-8")).hexdigest()[:6] @property def feed_guid_hash(self): return "%s:%s" % (self.story_feed_id, self.guid_hash) - + @classmethod def feed_guid_hash_unsaved(cls, feed_id, guid): return "%s:%s" % (feed_id, cls.guid_hash_unsaved(guid)) - + @property def decoded_story_title(self): return html.unescape(self.story_title) @@ -2653,17 +2927,16 @@ class MStory(mongo.Document): story_content = smart_str(zlib.decompress(self.story_content_z)) else: story_content = smart_str(story_content) - + return story_content - def save(self, *args, **kwargs): - story_title_max = MStory._fields['story_title'].max_length - story_content_type_max = MStory._fields['story_content_type'].max_length + story_title_max = MStory._fields["story_title"].max_length + story_content_type_max = MStory._fields["story_content_type"].max_length self.story_hash = self.feed_guid_hash - + self.extract_image_urls() - + if self.story_content: self.story_content_z = zlib.compress(smart_bytes(self.story_content)) self.story_content = None @@ -2677,48 +2950,52 @@ class MStory(mongo.Document): self.story_title = self.story_title[:story_title_max] if self.story_content_type and len(self.story_content_type) > story_content_type_max: self.story_content_type = self.story_content_type[:story_content_type_max] - + super(MStory, self).save(*args, **kwargs) - + self.sync_redis() - + return self - + def delete(self, *args, **kwargs): self.remove_from_redis() self.remove_from_search_index() - + super(MStory, self).delete(*args, **kwargs) - + def publish_to_subscribers(self): try: r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL) - r.publish("%s:story" % (self.story_feed_id), '%s,%s' % (self.story_hash, self.story_date.strftime('%s'))) + r.publish( + "%s:story" % (self.story_feed_id), "%s,%s" % (self.story_hash, self.story_date.strftime("%s")) + ) except redis.ConnectionError: - logging.debug(" ***> [%-30s] ~BMRedis is unavailable for real-time." % (Feed.get_by_id(self.story_feed_id).title[:30],)) - + logging.debug( + " ***> [%-30s] ~BMRedis is unavailable for real-time." + % (Feed.get_by_id(self.story_feed_id).title[:30],) + ) + @classmethod def purge_feed_stories(cls, feed, cutoff, verbose=True): stories = cls.objects(story_feed_id=feed.pk) logging.debug(" ---> Deleting %s stories from %s" % (stories.count(), feed)) - if stories.count() > cutoff*1.25: + if stories.count() > cutoff * 1.25: logging.debug(" ***> ~FRToo many stories in %s, not purging..." 
% (feed)) return stories.delete() - + @classmethod def index_all_for_search(cls, offset=0): if not offset: SearchStory.create_elasticsearch_mapping(delete=True) - - last_pk = Feed.objects.latest('pk').pk + + last_pk = Feed.objects.latest("pk").pk for f in range(offset, last_pk, 1000): - print(" ---> %s / %s (%.2s%%)" % (f, last_pk, float(f)/last_pk*100)) - feeds = Feed.objects.filter(pk__in=list(range(f, f+1000)), - active=True, - active_subscribers__gte=1)\ - .values_list('pk') - for f, in feeds: + print(" ---> %s / %s (%.2s%%)" % (f, last_pk, float(f) / last_pk * 100)) + feeds = Feed.objects.filter( + pk__in=list(range(f, f + 1000)), active=True, active_subscribers__gte=1 + ).values_list("pk") + for (f,) in feeds: stories = cls.objects.filter(story_feed_id=f) if not len(stories): continue @@ -2730,14 +3007,16 @@ class MStory(mongo.Document): story_content = self.story_content or "" if self.story_content_z: story_content = zlib.decompress(self.story_content_z) - SearchStory.index(story_hash=self.story_hash, - story_title=self.story_title, - story_content=prep_for_search(story_content), - story_tags=self.story_tags, - story_author=self.story_author_name, - story_feed_id=self.story_feed_id, - story_date=self.story_date) - + SearchStory.index( + story_hash=self.story_hash, + story_title=self.story_title, + story_content=prep_for_search(story_content), + story_tags=self.story_tags, + story_author=self.story_author_name, + story_feed_id=self.story_feed_id, + story_date=self.story_date, + ) + def remove_from_search_index(self): try: SearchStory.remove(self.story_hash) @@ -2750,50 +3029,50 @@ class MStory(mongo.Document): cutoff = int(cutoff) if not feed_id and not feed: return extra_stories_count - + if not feed_id: feed_id = feed.pk if not feed: feed = feed_id - - stories = cls.objects( - story_feed_id=feed_id - ).only('story_date').order_by('-story_date') - + + stories = cls.objects(story_feed_id=feed_id).only("story_date").order_by("-story_date") + if stories.count() > cutoff: - logging.debug(' ---> [%-30s] ~FMFound %s stories. Trimming to ~SB%s~SN...' % - (str(feed)[:30], stories.count(), cutoff)) + logging.debug( + " ---> [%-30s] ~FMFound %s stories. Trimming to ~SB%s~SN..." + % (str(feed)[:30], stories.count(), cutoff) + ) try: story_trim_date = stories[cutoff].story_date if story_trim_date == stories[0].story_date: # Handle case where every story is the same time story_trim_date = story_trim_date - datetime.timedelta(seconds=1) except IndexError as e: - logging.debug(' ***> [%-30s] ~BRError trimming feed: %s' % (str(feed)[:30], e)) + logging.debug(" ***> [%-30s] ~BRError trimming feed: %s" % (str(feed)[:30], e)) return extra_stories_count - - extra_stories = cls.objects(story_feed_id=feed_id, - story_date__lte=story_trim_date) + + extra_stories = cls.objects(story_feed_id=feed_id, story_date__lte=story_trim_date) extra_stories_count = extra_stories.count() shared_story_count = 0 for story in extra_stories: - if story.share_count: + if story.share_count: shared_story_count += 1 extra_stories_count -= 1 continue story.delete() if verbose: existing_story_count = cls.objects(story_feed_id=feed_id).count() - logging.debug(" ---> Deleted %s stories, %s (%s shared) left." % ( - extra_stories_count, - existing_story_count, - shared_story_count)) + logging.debug( + " ---> Deleted %s stories, %s (%s shared) left." 
+ % (extra_stories_count, existing_story_count, shared_story_count) + ) return extra_stories_count - + @classmethod def find_story(cls, story_feed_id=None, story_id=None, story_hash=None, original_only=False): from apps.social.models import MSharedStory + original_found = False if story_hash: story_id = story_hash @@ -2804,61 +3083,73 @@ class MStory(mongo.Document): story = cls.objects(id=story_id).limit(1).first() else: story = cls.objects(story_hash=story_hash).limit(1).first() - + if story: original_found = True if not story and not original_only: - story = MSharedStory.objects.filter(story_feed_id=story_feed_id, - story_hash=story_hash).limit(1).first() + story = ( + MSharedStory.objects.filter(story_feed_id=story_feed_id, story_hash=story_hash) + .limit(1) + .first() + ) if not story and not original_only: - story = MStarredStory.objects.filter(story_feed_id=story_feed_id, - story_hash=story_hash).limit(1).first() - + story = ( + MStarredStory.objects.filter(story_feed_id=story_feed_id, story_hash=story_hash) + .limit(1) + .first() + ) + return story, original_found - + @classmethod def find_by_id(cls, story_ids): from apps.social.models import MSharedStory + count = len(story_ids) multiple = isinstance(story_ids, list) or isinstance(story_ids, tuple) - + stories = list(cls.objects(id__in=story_ids)) if len(stories) < count: shared_stories = list(MSharedStory.objects(id__in=story_ids)) stories.extend(shared_stories) - + if not multiple: stories = stories[0] - + return stories - + @classmethod def find_by_story_hashes(cls, story_hashes): from apps.social.models import MSharedStory + count = len(story_hashes) multiple = isinstance(story_hashes, list) or isinstance(story_hashes, tuple) - + stories = list(cls.objects(story_hash__in=story_hashes)) if len(stories) < count: hashes_found = [s.story_hash for s in stories] remaining_hashes = list(set(story_hashes) - set(hashes_found)) - story_feed_ids = [h.split(':')[0] for h in remaining_hashes] - shared_stories = list(MSharedStory.objects(story_feed_id__in=story_feed_ids, - story_hash__in=remaining_hashes)) + story_feed_ids = [h.split(":")[0] for h in remaining_hashes] + shared_stories = list( + MSharedStory.objects(story_feed_id__in=story_feed_ids, story_hash__in=remaining_hashes) + ) stories.extend(shared_stories) - + if not multiple: stories = stories[0] - + return stories - + @classmethod def ensure_story_hash(cls, story_id, story_feed_id): if not cls.RE_STORY_HASH.match(story_id): - story_id = "%s:%s" % (story_feed_id, hashlib.sha1(story_id.encode(encoding='utf-8')).hexdigest()[:6]) - + story_id = "%s:%s" % ( + story_feed_id, + hashlib.sha1(story_id.encode(encoding="utf-8")).hexdigest()[:6], + ) + return story_id - + @classmethod def split_story_hash(cls, story_hash): matches = cls.RE_STORY_HASH.match(story_hash) @@ -2866,7 +3157,7 @@ class MStory(mongo.Document): groups = matches.groups() return groups[0], groups[1] return None, None - + @classmethod def split_rs_key(cls, rs_key): matches = cls.RE_RS_KEY.match(rs_key) @@ -2874,36 +3165,37 @@ class MStory(mongo.Document): groups = matches.groups() return groups[0], groups[1] return None, None - + @classmethod def story_hashes(cls, story_ids): story_hashes = [] for story_id in story_ids: story_hash = cls.ensure_story_hash(story_id) - if not story_hash: continue + if not story_hash: + continue story_hashes.append(story_hash) - + return story_hashes - + def sync_redis(self, r=None): if not r: r = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL) feed = 
Feed.get_by_id(self.story_feed_id) if self.id and self.story_date > feed.unread_cutoff: - feed_key = 'F:%s' % self.story_feed_id + feed_key = "F:%s" % self.story_feed_id r.sadd(feed_key, self.story_hash) - r.expire(feed_key, feed.days_of_story_hashes*24*60*60) - - r.zadd('z' + feed_key, { self.story_hash: time.mktime(self.story_date.timetuple()) }) - r.expire('z' + feed_key, feed.days_of_story_hashes*24*60*60) - + r.expire(feed_key, feed.days_of_story_hashes * 24 * 60 * 60) + + r.zadd("z" + feed_key, {self.story_hash: time.mktime(self.story_date.timetuple())}) + r.expire("z" + feed_key, feed.days_of_story_hashes * 24 * 60 * 60) + def remove_from_redis(self, r=None): if not r: r = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL) if self.id: - r.srem('F:%s' % self.story_feed_id, self.story_hash) - r.zrem('zF:%s' % self.story_feed_id, self.story_hash) + r.srem("F:%s" % self.story_feed_id, self.story_hash) + r.zrem("zF:%s" % self.story_feed_id, self.story_hash) @classmethod def sync_feed_redis(cls, story_feed_id): @@ -2916,30 +3208,34 @@ class MStory(mongo.Document): # r.delete('F:%s' % story_feed_id) # r.delete('zF:%s' % story_feed_id) - logging.info(" ---> [%-30s] ~FMSyncing ~SB%s~SN stories to redis" % (feed and feed.log_title[:30] or story_feed_id, stories.count())) + logging.info( + " ---> [%-30s] ~FMSyncing ~SB%s~SN stories to redis" + % (feed and feed.log_title[:30] or story_feed_id, stories.count()) + ) p = r.pipeline() for story in stories: story.sync_redis(r=p) p.execute() - + def count_comments(self): from apps.social.models import MSharedStory + params = { - 'story_guid': self.story_guid, - 'story_feed_id': self.story_feed_id, + "story_guid": self.story_guid, + "story_feed_id": self.story_feed_id, } - comments = MSharedStory.objects.filter(has_comments=True, **params).only('user_id') - shares = MSharedStory.objects.filter(**params).only('user_id') + comments = MSharedStory.objects.filter(has_comments=True, **params).only("user_id") + shares = MSharedStory.objects.filter(**params).only("user_id") self.comment_count = comments.count() - self.comment_user_ids = [c['user_id'] for c in comments] + self.comment_user_ids = [c["user_id"] for c in comments] self.share_count = shares.count() - self.share_user_ids = [s['user_id'] for s in shares] + self.share_user_ids = [s["user_id"] for s in shares] self.save() - + def extract_image_urls(self, force=False, text=False): if self.image_urls and not force and not text: return self.image_urls - + story_content = None if not text: story_content = self.story_content_str @@ -2948,7 +3244,7 @@ class MStory(mongo.Document): story_content = smart_str(zlib.decompress(self.original_text_z)) if not story_content: return - + try: soup = BeautifulSoup(story_content, features="lxml") except UserWarning as e: @@ -2960,27 +3256,29 @@ class MStory(mongo.Document): else: return - images = soup.findAll('img') - - # Add youtube thumbnail and insert appropriately before/after images. + images = soup.findAll("img") + + # Add youtube thumbnail and insert appropriately before/after images. # Give the Youtube a bit of an edge. 
- video_thumbnails = soup.findAll('iframe', src=lambda x: x and any(y in x for y in ['youtube.com', 'ytimg.com'])) + video_thumbnails = soup.findAll( + "iframe", src=lambda x: x and any(y in x for y in ["youtube.com", "ytimg.com"]) + ) for video_thumbnail in video_thumbnails: - video_src = video_thumbnail.get('src') - video_id = re.search('.*?youtube.com/embed/([A-Za-z0-9\-_]+)', video_src) + video_src = video_thumbnail.get("src") + video_id = re.search(".*?youtube.com/embed/([A-Za-z0-9\-_]+)", video_src) if not video_id: - video_id = re.search('.*?youtube.com/v/([A-Za-z0-9\-_]+)', video_src) + video_id = re.search(".*?youtube.com/v/([A-Za-z0-9\-_]+)", video_src) if not video_id: - video_id = re.search('.*?ytimg.com/vi/([A-Za-z0-9\-_]+)', video_src) + video_id = re.search(".*?ytimg.com/vi/([A-Za-z0-9\-_]+)", video_src) if not video_id: - video_id = re.search('.*?youtube.com/watch\?v=([A-Za-z0-9\-_]+)', video_src) + video_id = re.search(".*?youtube.com/watch\?v=([A-Za-z0-9\-_]+)", video_src) if not video_id: logging.debug(f" ***> Couldn't find youtube url in {video_thumbnail}: {video_src}") continue video_img_url = f"https://img.youtube.com/vi/{video_id.groups()[0]}/0.jpg" - iframe_index = story_content.index('= 1024: continue - if 'feedburner.com' in image_url: + if "feedburner.com" in image_url: continue image_url = urllib.parse.urljoin(self.story_permalink, image_url) image_urls.append(image_url) - + if not image_urls: if not text: return self.extract_image_urls(force=force, text=True) else: return - + if text: urls = [] for url in image_urls: - if 'http://' in url[1:] or 'https://' in url[1:]: + if "http://" in url[1:] or "https://" in url[1:]: continue urls.append(url) image_urls = urls - + ordered_image_urls = [] for image_url in list(set(image_urls)): - if 'feedburner' in image_url: + if "feedburner" in image_url: ordered_image_urls.append(image_url) else: ordered_image_urls.insert(0, image_url) image_urls = ordered_image_urls - + if len(image_urls): self.image_urls = [u for u in image_urls if u] else: return - + max_length = MStory.image_urls.field.max_length - while len(''.join(self.image_urls)) > max_length: + while len("".join(self.image_urls)) > max_length: if len(self.image_urls) <= 1: - self.image_urls[0] = self.image_urls[0][:max_length-1] + self.image_urls[0] = self.image_urls[0][: max_length - 1] break else: self.image_urls.pop() @@ -3051,23 +3349,24 @@ class MStory(mongo.Document): def fetch_original_text(self, force=False, request=None, debug=False): original_text_z = self.original_text_z - + if not original_text_z or force: feed = Feed.get_by_id(self.story_feed_id) self.extract_image_urls(force=force, text=False) ti = TextImporter(self, feed=feed, request=request, debug=debug) original_doc = ti.fetch(return_document=True) - original_text = original_doc.get('content') if original_doc else None + original_text = original_doc.get("content") if original_doc else None self.extract_image_urls(force=force, text=True) self.save() else: logging.user(request, "~FYFetching ~FGoriginal~FY story text, ~SBfound.") original_text = zlib.decompress(original_text_z) - + return original_text def fetch_original_page(self, force=False, request=None, debug=False): from apps.rss_feeds.page_importer import PageImporter + if not self.original_page_z or force: feed = Feed.get_by_id(self.story_feed_id) importer = PageImporter(request=request, feed=feed, story=self) @@ -3075,42 +3374,47 @@ class MStory(mongo.Document): else: logging.user(request, "~FYFetching ~FGoriginal~FY story page, ~SBfound.") 
original_page = zlib.decompress(self.original_page_z) - + return original_page class MStarredStory(mongo.DynamicDocument): """Like MStory, but not inherited due to large overhead of _cls and _type in - mongoengine's inheritance model on every single row.""" - user_id = mongo.IntField(unique_with=('story_guid',)) - starred_date = mongo.DateTimeField() - starred_updated = mongo.DateTimeField() - story_feed_id = mongo.IntField() - story_date = mongo.DateTimeField() - story_title = mongo.StringField(max_length=1024) - story_content = mongo.StringField() - story_content_z = mongo.BinaryField() - story_original_content = mongo.StringField() + mongoengine's inheritance model on every single row.""" + + user_id = mongo.IntField(unique_with=("story_guid",)) + starred_date = mongo.DateTimeField() + starred_updated = mongo.DateTimeField() + story_feed_id = mongo.IntField() + story_date = mongo.DateTimeField() + story_title = mongo.StringField(max_length=1024) + story_content = mongo.StringField() + story_content_z = mongo.BinaryField() + story_original_content = mongo.StringField() story_original_content_z = mongo.BinaryField() - original_text_z = mongo.BinaryField() - story_content_type = mongo.StringField(max_length=255) - story_author_name = mongo.StringField() - story_permalink = mongo.StringField() - story_guid = mongo.StringField() - story_hash = mongo.StringField() - story_tags = mongo.ListField(mongo.StringField(max_length=250)) - user_notes = mongo.StringField() - user_tags = mongo.ListField(mongo.StringField(max_length=128)) - highlights = mongo.ListField(mongo.StringField(max_length=16384)) - image_urls = mongo.ListField(mongo.StringField(max_length=1024)) + original_text_z = mongo.BinaryField() + story_content_type = mongo.StringField(max_length=255) + story_author_name = mongo.StringField() + story_permalink = mongo.StringField() + story_guid = mongo.StringField() + story_hash = mongo.StringField() + story_tags = mongo.ListField(mongo.StringField(max_length=250)) + user_notes = mongo.StringField() + user_tags = mongo.ListField(mongo.StringField(max_length=128)) + highlights = mongo.ListField(mongo.StringField(max_length=16384)) + image_urls = mongo.ListField(mongo.StringField(max_length=1024)) meta = { - 'collection': 'starred_stories', - 'indexes': [('user_id', '-starred_date'), ('user_id', 'story_feed_id'), - ('user_id', 'story_hash'), 'story_feed_id'], - 'ordering': ['-starred_date'], - 'allow_inheritance': False, - 'strict': False, + "collection": "starred_stories", + "indexes": [ + ("user_id", "-starred_date"), + ("user_id", "story_feed_id"), + ("user_id", "story_hash"), + "story_feed_id", + ], + "ordering": ["-starred_date"], + "allow_inheritance": False, + "strict": False, } def __unicode__(self): @@ -3118,11 +3422,9 @@ class MStarredStory(mongo.DynamicDocument): user = User.objects.get(pk=self.user_id) username = user.username except User.DoesNotExist: - username = '[deleted]' - return "%s: %s (%s)" % (username, - self.story_title[:20], - self.story_feed_id) - + username = "[deleted]" + return "%s: %s (%s)" % (username, self.story_title[:20], self.story_feed_id) + def save(self, *args, **kwargs): if self.story_content: self.story_content_z = zlib.compress(smart_bytes(self.story_content)) @@ -3134,100 +3436,106 @@ class MStarredStory(mongo.DynamicDocument): self.starred_updated = datetime.datetime.now() return super(MStarredStory, self).save(*args, **kwargs) - + @classmethod def find_stories(cls, query, user_id, tag=None, offset=0, limit=25, order="newest"): stories_db = 
cls.objects( - Q(user_id=user_id) & - (Q(story_title__icontains=query) | - Q(story_author_name__icontains=query) | - Q(story_tags__icontains=query)) + Q(user_id=user_id) + & ( + Q(story_title__icontains=query) + | Q(story_author_name__icontains=query) + | Q(story_tags__icontains=query) + ) ) if tag: stories_db = stories_db.filter(user_tags__contains=tag) - - stories_db = stories_db.order_by('%sstarred_date' % - ('-' if order == "newest" else ""))[offset:offset+limit] + + stories_db = stories_db.order_by("%sstarred_date" % ("-" if order == "newest" else ""))[ + offset : offset + limit + ] stories = Feed.format_stories(stories_db) - + return stories - + @classmethod def find_stories_by_user_tag(cls, user_tag, user_id, offset=0, limit=25): - stories_db = cls.objects( - Q(user_id=user_id), - Q(user_tags__icontains=user_tag) - ).order_by('-starred_date')[offset:offset+limit] + stories_db = cls.objects(Q(user_id=user_id), Q(user_tags__icontains=user_tag)).order_by( + "-starred_date" + )[offset : offset + limit] stories = Feed.format_stories(stories_db) - + return stories @classmethod def trim_old_stories(cls, stories=10, days=90, dryrun=False): print(" ---> Fetching starred story counts...") - stats = settings.MONGODB.newsblur.starred_stories.aggregate([{ - "$group": { - "_id": "$user_id", - "stories": {"$sum": 1}, - }, - }, { - "$match": { - "stories": {"$gte": stories} - }, - }]) + stats = settings.MONGODB.newsblur.starred_stories.aggregate( + [ + { + "$group": { + "_id": "$user_id", + "stories": {"$sum": 1}, + }, + }, + { + "$match": {"stories": {"$gte": stories}}, + }, + ] + ) month_ago = datetime.datetime.now() - datetime.timedelta(days=days) user_ids = list(stats) - user_ids = sorted(user_ids, key=lambda x:x['stories'], reverse=True) + user_ids = sorted(user_ids, key=lambda x: x["stories"], reverse=True) print(" ---> Found %s users with more than %s starred stories" % (len(user_ids), stories)) total = 0 for stat in user_ids: try: - user = User.objects.select_related('profile').get(pk=stat['_id']) + user = User.objects.select_related("profile").get(pk=stat["_id"]) except User.DoesNotExist: user = None - + if user and (user.profile.is_premium or user.profile.last_seen_on > month_ago): continue - - total += stat['stories'] - username = "%s (%s)" % (user and user.username or " - ", stat['_id']) - print(" ---> %19.19s: %-20.20s %s stories" % (user and user.profile.last_seen_on or "Deleted", - username, - stat['stories'])) - if not dryrun and stat['_id']: - cls.objects.filter(user_id=stat['_id']).delete() - elif not dryrun and stat['_id'] == 0: + + total += stat["stories"] + username = "%s (%s)" % (user and user.username or " - ", stat["_id"]) + print( + " ---> %19.19s: %-20.20s %s stories" + % (user and user.profile.last_seen_on or "Deleted", username, stat["stories"]) + ) + if not dryrun and stat["_id"]: + cls.objects.filter(user_id=stat["_id"]).delete() + elif not dryrun and stat["_id"] == 0: print(" ---> Deleting unstarred stories (user_id = 0)") - cls.objects.filter(user_id=stat['_id']).delete() - - + cls.objects.filter(user_id=stat["_id"]).delete() + print(" ---> Deleted %s stories in total." 
% total) @property def guid_hash(self): - return hashlib.sha1(self.story_guid.encode(encoding='utf-8')).hexdigest()[:6] + return hashlib.sha1(self.story_guid.encode(encoding="utf-8")).hexdigest()[:6] @property def feed_guid_hash(self): return "%s:%s" % (self.story_feed_id or "0", self.guid_hash) - + def fetch_original_text(self, force=False, request=None, debug=False): original_text_z = self.original_text_z feed = Feed.get_by_id(self.story_feed_id) - + if not original_text_z or force: ti = TextImporter(self, feed=feed, request=request, debug=debug) original_text = ti.fetch() else: logging.user(request, "~FYFetching ~FGoriginal~FY story text, ~SBfound.") original_text = zlib.decompress(original_text_z) - + return original_text - + def fetch_original_page(self, force=False, request=None, debug=False): return None - + + class MStarredStoryCounts(mongo.Document): user_id = mongo.IntField() tag = mongo.StringField(max_length=128) @@ -3237,12 +3545,12 @@ class MStarredStoryCounts(mongo.Document): count = mongo.IntField(default=0) meta = { - 'collection': 'starred_stories_counts', - 'indexes': ['user_id'], - 'ordering': ['tag'], - 'allow_inheritance': False, + "collection": "starred_stories_counts", + "indexes": ["user_id"], + "ordering": ["tag"], + "allow_inheritance": False, } - + def __unicode__(self): if self.tag: return "Tag: %s (%s)" % (self.tag, self.count) @@ -3250,69 +3558,74 @@ class MStarredStoryCounts(mongo.Document): return "Feed: %s (%s)" % (self.feed_id, self.count) elif self.is_highlights: return "Highlights: %s (%s)" % (self.is_highlights, self.count) - + return "%s/%s/%s" % (self.tag, self.feed_id, self.is_highlights) @property def rss_url(self, secret_token=None): if self.feed_id: return - + if not secret_token: - user = User.objects.select_related('profile').get(pk=self.user_id) + user = User.objects.select_related("profile").get(pk=self.user_id) secret_token = user.profile.secret_token - + slug = self.slug if self.slug else "" if not self.slug and self.tag: slug = slugify(self.tag) self.slug = slug self.save() - return "%s/reader/starred_rss/%s/%s/%s" % (settings.NEWSBLUR_URL, self.user_id, - secret_token, slug) - + return "%s/reader/starred_rss/%s/%s/%s" % (settings.NEWSBLUR_URL, self.user_id, secret_token, slug) + @classmethod def user_counts(cls, user_id, include_total=False, try_counting=True): counts = cls.objects.filter(user_id=user_id) - counts = sorted([{'tag': c.tag, - 'count': c.count, - 'is_highlights': c.is_highlights, - 'feed_address': c.rss_url, - 'active': True, - 'feed_id': c.feed_id} - for c in counts], - key=lambda x: (x.get('tag', '') or '').lower()) - + counts = sorted( + [ + { + "tag": c.tag, + "count": c.count, + "is_highlights": c.is_highlights, + "feed_address": c.rss_url, + "active": True, + "feed_id": c.feed_id, + } + for c in counts + ], + key=lambda x: (x.get("tag", "") or "").lower(), + ) + total = 0 feed_total = 0 for c in counts: - if not c['tag'] and not c['feed_id'] and not c['is_highlights']: - total = c['count'] - if c['feed_id']: - feed_total += c['count'] - + if not c["tag"] and not c["feed_id"] and not c["is_highlights"]: + total = c["count"] + if c["feed_id"]: + feed_total += c["count"] + if try_counting and (total != feed_total or not len(counts)): user = User.objects.get(pk=user_id) - logging.user(user, "~FC~SBCounting~SN saved stories (%s total vs. %s counted)..." % - (total, feed_total)) + logging.user( + user, "~FC~SBCounting~SN saved stories (%s total vs. %s counted)..." 
% (total, feed_total) + ) cls.count_for_user(user_id) - return cls.user_counts(user_id, include_total=include_total, - try_counting=False) - + return cls.user_counts(user_id, include_total=include_total, try_counting=False) + if include_total: return counts, total return counts - + @classmethod def schedule_count_tags_for_user(cls, user_id): ScheduleCountTagsForUser.apply_async(kwargs=dict(user_id=user_id)) - + @classmethod def count_for_user(cls, user_id, total_only=False): user_tags = [] user_feeds = [] highlights = 0 - + if not total_only: cls.objects(user_id=user_id).delete() try: @@ -3323,45 +3636,47 @@ class MStarredStoryCounts(mongo.Document): logging.debug(" ---> ~FBOperationError on mongo: ~SB%s" % e) total_stories_count = MStarredStory.objects(user_id=user_id).count() - cls.objects(user_id=user_id, tag=None, feed_id=None, is_highlights=None).update_one(set__count=total_stories_count, - upsert=True) + cls.objects(user_id=user_id, tag=None, feed_id=None, is_highlights=None).update_one( + set__count=total_stories_count, upsert=True + ) return dict(total=total_stories_count, tags=user_tags, feeds=user_feeds, highlights=highlights) @classmethod def count_tags_for_user(cls, user_id): - all_tags = MStarredStory.objects(user_id=user_id, - user_tags__exists=True).item_frequencies('user_tags') - user_tags = sorted([(k, v) for k, v in list(all_tags.items()) if int(v) > 0 and k], - key=lambda x: x[0].lower(), - reverse=True) - + all_tags = MStarredStory.objects(user_id=user_id, user_tags__exists=True).item_frequencies( + "user_tags" + ) + user_tags = sorted( + [(k, v) for k, v in list(all_tags.items()) if int(v) > 0 and k], + key=lambda x: x[0].lower(), + reverse=True, + ) + for tag, count in list(dict(user_tags).items()): - cls.objects(user_id=user_id, tag=tag, slug=slugify(tag)).update_one(set__count=count, - upsert=True) - + cls.objects(user_id=user_id, tag=tag, slug=slugify(tag)).update_one(set__count=count, upsert=True) + return user_tags - + @classmethod def count_highlights_for_user(cls, user_id): - highlighted_count = MStarredStory.objects(user_id=user_id, - highlights__exists=True, - __raw__={"$where": "this.highlights.length > 0"}).count() + highlighted_count = MStarredStory.objects( + user_id=user_id, highlights__exists=True, __raw__={"$where": "this.highlights.length > 0"} + ).count() if highlighted_count > 0: - cls.objects(user_id=user_id, - is_highlights=True, - slug="highlights" - ).update_one(set__count=highlighted_count, upsert=True) + cls.objects(user_id=user_id, is_highlights=True, slug="highlights").update_one( + set__count=highlighted_count, upsert=True + ) else: cls.objects(user_id=user_id, is_highlights=True, slug="highlights").delete() - + return highlighted_count - + @classmethod def count_feeds_for_user(cls, user_id): - all_feeds = MStarredStory.objects(user_id=user_id).item_frequencies('story_feed_id') + all_feeds = MStarredStory.objects(user_id=user_id).item_frequencies("story_feed_id") user_feeds = dict([(k, v) for k, v in list(all_feeds.items()) if v]) - + # Clean up None'd and 0'd feed_ids, so they can be counted against the total if user_feeds.get(None, False): user_feeds[0] = user_feeds.get(0, 0) @@ -3370,26 +3685,26 @@ class MStarredStoryCounts(mongo.Document): if user_feeds.get(0, False): user_feeds[-1] = user_feeds.get(0, 0) del user_feeds[0] - + too_many_feeds = False if len(user_feeds) < 1000 else True for feed_id, count in list(user_feeds.items()): - if too_many_feeds and count <= 1: continue - cls.objects(user_id=user_id, - feed_id=feed_id, - 
slug="feed:%s" % feed_id).update_one(set__count=count, - upsert=True) - + if too_many_feeds and count <= 1: + continue + cls.objects(user_id=user_id, feed_id=feed_id, slug="feed:%s" % feed_id).update_one( + set__count=count, upsert=True + ) + return user_feeds - + @classmethod def adjust_count(cls, user_id, feed_id=None, tag=None, highlights=None, amount=0): params = dict(user_id=user_id) if feed_id: - params['feed_id'] = feed_id + params["feed_id"] = feed_id if tag: - params['tag'] = tag + params["tag"] = tag if highlights: - params['is_highlights'] = True + params["is_highlights"] = True cls.objects(**params).update_one(inc__count=amount, upsert=True) try: @@ -3399,6 +3714,7 @@ class MStarredStoryCounts(mongo.Document): if story_count and story_count.count <= 0: story_count.delete() + class MSavedSearch(mongo.Document): user_id = mongo.IntField() query = mongo.StringField(max_length=1024) @@ -3406,58 +3722,61 @@ class MSavedSearch(mongo.Document): slug = mongo.StringField(max_length=128) meta = { - 'collection': 'saved_searches', - 'indexes': ['user_id', - {'fields': ['user_id', 'feed_id', 'query'], - 'unique': True, - }], - 'ordering': ['query'], - 'allow_inheritance': False, + "collection": "saved_searches", + "indexes": [ + "user_id", + { + "fields": ["user_id", "feed_id", "query"], + "unique": True, + }, + ], + "ordering": ["query"], + "allow_inheritance": False, } @property def rss_url(self, secret_token=None): if not secret_token: - user = User.objects.select_related('profile').get(pk=self.user_id) + user = User.objects.select_related("profile").get(pk=self.user_id) secret_token = user.profile.secret_token - + slug = self.slug if self.slug else "" - return "%s/reader/saved_search/%s/%s/%s" % (settings.NEWSBLUR_URL, self.user_id, - secret_token, slug) - + return "%s/reader/saved_search/%s/%s/%s" % (settings.NEWSBLUR_URL, self.user_id, secret_token, slug) + @classmethod def user_searches(cls, user_id): searches = cls.objects.filter(user_id=user_id) - searches = sorted([{'query': s.query, - 'feed_address': s.rss_url, - 'feed_id': s.feed_id, - 'active': True, - } for s in searches], - key=lambda x: (x.get('query', '') or '').lower()) + searches = sorted( + [ + { + "query": s.query, + "feed_address": s.rss_url, + "feed_id": s.feed_id, + "active": True, + } + for s in searches + ], + key=lambda x: (x.get("query", "") or "").lower(), + ) return searches - + @classmethod def save_search(cls, user_id, feed_id, query): user = User.objects.get(pk=user_id) - params = dict(user_id=user_id, - feed_id=feed_id, - query=query, - slug=slugify(query)) + params = dict(user_id=user_id, feed_id=feed_id, query=query, slug=slugify(query)) try: saved_search = cls.objects.get(**params) logging.user(user, "~FRSaved search already exists: ~SB%s" % query) except cls.DoesNotExist: logging.user(user, "~FCCreating a saved search: ~SB%s~SN/~SB%s" % (feed_id, query)) saved_search = cls.objects.create(**params) - + return saved_search - + @classmethod def delete_search(cls, user_id, feed_id, query): user = User.objects.get(pk=user_id) - params = dict(user_id=user_id, - feed_id=feed_id, - query=query) + params = dict(user_id=user_id, feed_id=feed_id, query=query) try: saved_search = cls.objects.get(**params) logging.user(user, "~FCDeleting saved search: ~SB%s" % query) @@ -3465,89 +3784,90 @@ class MSavedSearch(mongo.Document): except cls.DoesNotExist: logging.user(user, "~FRCan't delete saved search, missing: ~SB%s~SN/~SB%s" % (feed_id, query)) except cls.MultipleObjectsReturned: - logging.user(user, "~FRFound 
multiple saved searches, deleting: ~SB%s~SN/~SB%s" % (feed_id, query)) + logging.user( + user, "~FRFound multiple saved searches, deleting: ~SB%s~SN/~SB%s" % (feed_id, query) + ) cls.objects(**params).delete() - - + + class MFetchHistory(mongo.Document): feed_id = mongo.IntField(unique=True) feed_fetch_history = mongo.DynamicField() page_fetch_history = mongo.DynamicField() push_history = mongo.DynamicField() raw_feed_history = mongo.DynamicField() - + meta = { - 'db_alias': 'nbanalytics', - 'collection': 'fetch_history', - 'allow_inheritance': False, + "db_alias": "nbanalytics", + "collection": "fetch_history", + "allow_inheritance": False, } @classmethod def feed(cls, feed_id, timezone=None, fetch_history=None): if not fetch_history: try: - fetch_history = cls.objects.read_preference(pymongo.ReadPreference.PRIMARY)\ - .get(feed_id=feed_id) + fetch_history = cls.objects.read_preference(pymongo.ReadPreference.PRIMARY).get( + feed_id=feed_id + ) except cls.DoesNotExist: fetch_history = cls.objects.create(feed_id=feed_id) history = {} - for fetch_type in ['feed_fetch_history', 'page_fetch_history', 'push_history']: + for fetch_type in ["feed_fetch_history", "page_fetch_history", "push_history"]: history[fetch_type] = getattr(fetch_history, fetch_type) if not history[fetch_type]: history[fetch_type] = [] for f, fetch in enumerate(history[fetch_type]): - date_key = 'push_date' if fetch_type == 'push_history' else 'fetch_date' + date_key = "push_date" if fetch_type == "push_history" else "fetch_date" history[fetch_type][f] = { - date_key: localtime_for_timezone(fetch[0], - timezone).strftime("%Y-%m-%d %H:%M:%S"), - 'status_code': fetch[1], - 'message': fetch[2] + date_key: localtime_for_timezone(fetch[0], timezone).strftime("%Y-%m-%d %H:%M:%S"), + "status_code": fetch[1], + "message": fetch[2], } return history - + @classmethod def add(cls, feed_id, fetch_type, date=None, message=None, code=None, exception=None): if not date: date = datetime.datetime.now() try: - fetch_history = cls.objects.read_preference(pymongo.ReadPreference.PRIMARY)\ - .get(feed_id=feed_id) + fetch_history = cls.objects.read_preference(pymongo.ReadPreference.PRIMARY).get(feed_id=feed_id) except cls.DoesNotExist: fetch_history = cls.objects.create(feed_id=feed_id) - - if fetch_type == 'feed': + + if fetch_type == "feed": history = fetch_history.feed_fetch_history or [] - elif fetch_type == 'page': + elif fetch_type == "page": history = fetch_history.page_fetch_history or [] - elif fetch_type == 'push': + elif fetch_type == "push": history = fetch_history.push_history or [] - elif fetch_type == 'raw_feed': + elif fetch_type == "raw_feed": history = fetch_history.raw_feed_history or [] history = [[date, code, message]] + history any_exceptions = any([c for d, c, m in history if c not in [200, 304]]) if any_exceptions: history = history[:25] - elif fetch_type == 'raw_feed': + elif fetch_type == "raw_feed": history = history[:10] else: history = history[:5] - if fetch_type == 'feed': + if fetch_type == "feed": fetch_history.feed_fetch_history = history - elif fetch_type == 'page': + elif fetch_type == "page": fetch_history.page_fetch_history = history - elif fetch_type == 'push': + elif fetch_type == "push": fetch_history.push_history = history - elif fetch_type == 'raw_feed': + elif fetch_type == "raw_feed": fetch_history.raw_feed_history = history - + fetch_history.save() - - if fetch_type == 'feed': - RStats.add('feed_fetch') - + + if fetch_type == "feed": + RStats.add("feed_fetch") + return cls.feed(feed_id, 
fetch_history=fetch_history) @@ -3555,33 +3875,34 @@ class DuplicateFeed(models.Model): duplicate_address = models.CharField(max_length=764, db_index=True) duplicate_link = models.CharField(max_length=764, null=True, db_index=True) duplicate_feed_id = models.CharField(max_length=255, null=True, db_index=True) - feed = models.ForeignKey(Feed, related_name='duplicate_addresses', on_delete=models.CASCADE) - + feed = models.ForeignKey(Feed, related_name="duplicate_addresses", on_delete=models.CASCADE) + def __str__(self): return "%s: %s / %s" % (self.feed, self.duplicate_address, self.duplicate_link) - + def canonical(self): return { - 'duplicate_address': self.duplicate_address, - 'duplicate_link': self.duplicate_link, - 'duplicate_feed_id': self.duplicate_feed_id, - 'feed_id': self.feed_id + "duplicate_address": self.duplicate_address, + "duplicate_link": self.duplicate_link, + "duplicate_feed_id": self.duplicate_feed_id, + "feed_id": self.feed_id, } - + def save(self, *args, **kwargs): - max_address = DuplicateFeed._meta.get_field('duplicate_address').max_length + max_address = DuplicateFeed._meta.get_field("duplicate_address").max_length if len(self.duplicate_address) > max_address: self.duplicate_address = self.duplicate_address[:max_address] - max_link = DuplicateFeed._meta.get_field('duplicate_link').max_length + max_link = DuplicateFeed._meta.get_field("duplicate_link").max_length if self.duplicate_link and len(self.duplicate_link) > max_link: self.duplicate_link = self.duplicate_link[:max_link] - + super(DuplicateFeed, self).save(*args, **kwargs) + def merge_feeds(original_feed_id, duplicate_feed_id, force=False): from apps.reader.models import UserSubscription from apps.social.models import MSharedStory - + if original_feed_id == duplicate_feed_id: logging.info(" ***> Merging the same feed. 
Ignoring...") return original_feed_id @@ -3591,7 +3912,7 @@ def merge_feeds(original_feed_id, duplicate_feed_id, force=False): except Feed.DoesNotExist: logging.info(" ***> Already deleted feed: %s" % duplicate_feed_id) return original_feed_id - + heavier_dupe = original_feed.num_subscribers < duplicate_feed.num_subscribers branched_original = original_feed.branch_from_feed and not duplicate_feed.branch_from_feed if (heavier_dupe or branched_original) and not force: @@ -3599,74 +3920,86 @@ def merge_feeds(original_feed_id, duplicate_feed_id, force=False): original_feed_id, duplicate_feed_id = duplicate_feed_id, original_feed_id if branched_original: original_feed.feed_address = duplicate_feed.feed_address - - logging.info(" ---> Feed: [%s - %s] %s - %s" % (original_feed_id, duplicate_feed_id, - original_feed, original_feed.feed_link)) - logging.info(" Orig ++> %s: (%s subs) %s / %s %s" % (original_feed.pk, - original_feed.num_subscribers, - original_feed.feed_address, - original_feed.feed_link, - " [B: %s]" % original_feed.branch_from_feed.pk if original_feed.branch_from_feed else "")) - logging.info(" Dupe --> %s: (%s subs) %s / %s %s" % (duplicate_feed.pk, - duplicate_feed.num_subscribers, - duplicate_feed.feed_address, - duplicate_feed.feed_link, - " [B: %s]" % duplicate_feed.branch_from_feed.pk if duplicate_feed.branch_from_feed else "")) + + logging.info( + " ---> Feed: [%s - %s] %s - %s" + % (original_feed_id, duplicate_feed_id, original_feed, original_feed.feed_link) + ) + logging.info( + " Orig ++> %s: (%s subs) %s / %s %s" + % ( + original_feed.pk, + original_feed.num_subscribers, + original_feed.feed_address, + original_feed.feed_link, + " [B: %s]" % original_feed.branch_from_feed.pk if original_feed.branch_from_feed else "", + ) + ) + logging.info( + " Dupe --> %s: (%s subs) %s / %s %s" + % ( + duplicate_feed.pk, + duplicate_feed.num_subscribers, + duplicate_feed.feed_address, + duplicate_feed.feed_link, + " [B: %s]" % duplicate_feed.branch_from_feed.pk if duplicate_feed.branch_from_feed else "", + ) + ) original_feed.branch_from_feed = None - - user_subs = UserSubscription.objects.filter(feed=duplicate_feed).order_by('-pk') + + user_subs = UserSubscription.objects.filter(feed=duplicate_feed).order_by("-pk") for user_sub in user_subs: user_sub.switch_feed(original_feed, duplicate_feed) - def delete_story_feed(model, feed_field='feed_id'): + def delete_story_feed(model, feed_field="feed_id"): duplicate_stories = model.objects(**{feed_field: duplicate_feed.pk}) # if duplicate_stories.count(): # logging.info(" ---> Deleting %s %s" % (duplicate_stories.count(), model)) duplicate_stories.delete() - - delete_story_feed(MStory, 'story_feed_id') - delete_story_feed(MFeedPage, 'feed_id') + + delete_story_feed(MStory, "story_feed_id") + delete_story_feed(MFeedPage, "feed_id") try: DuplicateFeed.objects.create( duplicate_address=duplicate_feed.feed_address, duplicate_link=duplicate_feed.feed_link, duplicate_feed_id=duplicate_feed.pk, - feed=original_feed + feed=original_feed, ) except (IntegrityError, OperationError) as e: logging.info(" ***> Could not save DuplicateFeed: %s" % e) - + # Switch this dupe feed's dupe feeds over to the new original. 
duplicate_feeds_duplicate_feeds = DuplicateFeed.objects.filter(feed=duplicate_feed) for dupe_feed in duplicate_feeds_duplicate_feeds: dupe_feed.feed = original_feed dupe_feed.duplicate_feed_id = duplicate_feed.pk dupe_feed.save() - - logging.debug(' ---> Dupe subscribers (%s): %s, Original subscribers (%s): %s' % - (duplicate_feed.pk, duplicate_feed.num_subscribers, - original_feed.pk, original_feed.num_subscribers)) + + logging.debug( + " ---> Dupe subscribers (%s): %s, Original subscribers (%s): %s" + % (duplicate_feed.pk, duplicate_feed.num_subscribers, original_feed.pk, original_feed.num_subscribers) + ) if duplicate_feed.pk != original_feed.pk: duplicate_feed.delete() else: logging.debug(" ***> Duplicate feed is the same as original feed. Panic!") - logging.debug(' ---> Deleted duplicate feed: %s/%s' % (duplicate_feed, duplicate_feed_id)) + logging.debug(" ---> Deleted duplicate feed: %s/%s" % (duplicate_feed, duplicate_feed_id)) original_feed.branch_from_feed = None original_feed.count_subscribers() original_feed.save() - logging.debug(' ---> Now original subscribers: %s' % - (original_feed.num_subscribers)) - - + logging.debug(" ---> Now original subscribers: %s" % (original_feed.num_subscribers)) + MSharedStory.switch_feed(original_feed_id, duplicate_feed_id) - + return original_feed_id - + + def rewrite_folders(folders, original_feed, duplicate_feed): new_folders = [] - + for k, folder in enumerate(folders): if isinstance(folder, int): if folder == duplicate_feed.pk: diff --git a/apps/rss_feeds/page_importer.py b/apps/rss_feeds/page_importer.py index a3d2f321e..58b11cda3 100644 --- a/apps/rss_feeds/page_importer.py +++ b/apps/rss_feeds/page_importer.py @@ -26,51 +26,55 @@ from utils.feed_functions import TimeoutError, timelimit # from utils.feed_functions import mail_feed_error_to_admin BROKEN_PAGES = [ - 'tag:', - 'info:', - 'uuid:', - 'urn:', - '[]', + "tag:", + "info:", + "uuid:", + "urn:", + "[]", ] # Also change in reader_utils.js. 
BROKEN_PAGE_URLS = [ - 'nytimes.com', - 'github.com', - 'washingtonpost.com', - 'stackoverflow.com', - 'stackexchange.com', - 'twitter.com', - 'rankexploits', - 'gamespot.com', - 'espn.com', - 'royalroad.com', + "nytimes.com", + "github.com", + "washingtonpost.com", + "stackoverflow.com", + "stackexchange.com", + "twitter.com", + "rankexploits", + "gamespot.com", + "espn.com", + "royalroad.com", ] + class PageImporter(object): - def __init__(self, feed, story=None, request=None): self.feed = feed self.story = story self.request = request - + @property def headers(self): return { - 'User-Agent': 'NewsBlur Page Fetcher - %s subscriber%s - %s %s' % ( + "User-Agent": "NewsBlur Page Fetcher - %s subscriber%s - %s %s" + % ( self.feed.num_subscribers, - 's' if self.feed.num_subscribers != 1 else '', + "s" if self.feed.num_subscribers != 1 else "", self.feed.permalink, self.feed.fake_user_agent, ), } - + def fetch_page(self, urllib_fallback=False, requests_exception=None): try: self.fetch_page_timeout(urllib_fallback=urllib_fallback, requests_exception=requests_exception) except TimeoutError: - logging.user(self.request, ' ***> [%-30s] ~FBPage fetch ~SN~FRfailed~FB due to timeout' % (self.feed.log_title[:30])) - + logging.user( + self.request, + " ***> [%-30s] ~FBPage fetch ~SN~FRfailed~FB due to timeout" % (self.feed.log_title[:30]), + ) + @timelimit(10) def fetch_page_timeout(self, urllib_fallback=False, requests_exception=None): html = None @@ -79,8 +83,8 @@ class PageImporter(object): self.save_no_page(reason="No feed link") return - if feed_link.startswith('www'): - self.feed.feed_link = 'http://' + feed_link + if feed_link.startswith("www"): + self.feed.feed_link = "http://" + feed_link try: if any(feed_link.startswith(s) for s in BROKEN_PAGES): self.save_no_page(reason="Broken page") @@ -88,36 +92,45 @@ class PageImporter(object): elif any(s in feed_link.lower() for s in BROKEN_PAGE_URLS): self.save_no_page(reason="Banned") return - elif feed_link.startswith('http'): + elif feed_link.startswith("http"): if urllib_fallback: request = urllib.request.Request(feed_link, headers=self.headers) response = urllib.request.urlopen(request) - time.sleep(0.01) # Grrr, GIL. - data = response.read().decode(response.headers.get_content_charset() or 'utf-8') + time.sleep(0.01) # Grrr, GIL. 
+ data = response.read().decode(response.headers.get_content_charset() or "utf-8") else: try: response = requests.get(feed_link, headers=self.headers, timeout=10) response.connection.close() except requests.exceptions.TooManyRedirects: response = requests.get(feed_link, timeout=10) - except (AttributeError, SocketError, OpenSSLError, PyAsn1Error, TypeError, - requests.adapters.ReadTimeout) as e: - logging.debug(' ***> [%-30s] Page fetch failed using requests: %s' % (self.feed.log_title[:30], e)) + except ( + AttributeError, + SocketError, + OpenSSLError, + PyAsn1Error, + TypeError, + requests.adapters.ReadTimeout, + ) as e: + logging.debug( + " ***> [%-30s] Page fetch failed using requests: %s" + % (self.feed.log_title[:30], e) + ) self.save_no_page(reason="Page fetch failed") return data = response.text - if response.encoding and response.encoding.lower() != 'utf-8': + if response.encoding and response.encoding.lower() != "utf-8": logging.debug(f" -> ~FBEncoding is {response.encoding}, re-encoding...") try: - data = data.encode('utf-8').decode('utf-8') + data = data.encode("utf-8").decode("utf-8") except (LookupError, UnicodeEncodeError): logging.debug(f" -> ~FRRe-encoding failed!") pass else: try: - data = open(feed_link, 'r').read() + data = open(feed_link, "r").read() except IOError: - self.feed.feed_link = 'http://' + feed_link + self.feed.feed_link = "http://" + feed_link self.fetch_page(urllib_fallback=True) return if data: @@ -130,40 +143,45 @@ class PageImporter(object): else: self.save_no_page(reason="No data found") return - except (ValueError, urllib.error.URLError, http.client.BadStatusLine, http.client.InvalidURL, - requests.exceptions.ConnectionError) as e: - logging.debug(' ***> [%-30s] Page fetch failed: %s' % (self.feed.log_title[:30], e)) + except ( + ValueError, + urllib.error.URLError, + http.client.BadStatusLine, + http.client.InvalidURL, + requests.exceptions.ConnectionError, + ) as e: + logging.debug(" ***> [%-30s] Page fetch failed: %s" % (self.feed.log_title[:30], e)) self.feed.save_page_history(401, "Bad URL", e) try: fp = feedparser.parse(self.feed.feed_address) except (urllib.error.HTTPError, urllib.error.URLError) as e: return html - feed_link = fp.feed.get('link', "") + feed_link = fp.feed.get("link", "") self.feed.save() - except (http.client.IncompleteRead) as e: - logging.debug(' ***> [%-30s] Page fetch failed: %s' % (self.feed.log_title[:30], e)) + except http.client.IncompleteRead as e: + logging.debug(" ***> [%-30s] Page fetch failed: %s" % (self.feed.log_title[:30], e)) self.feed.save_page_history(500, "IncompleteRead", e) - except (requests.exceptions.RequestException, - requests.packages.urllib3.exceptions.HTTPError) as e: - logging.debug(' ***> [%-30s] Page fetch failed using requests: %s' % (self.feed.log_title[:30], e)) + except (requests.exceptions.RequestException, requests.packages.urllib3.exceptions.HTTPError) as e: + logging.debug( + " ***> [%-30s] Page fetch failed using requests: %s" % (self.feed.log_title[:30], e) + ) # mail_feed_error_to_admin(self.feed, e, local_vars=locals()) return self.fetch_page(urllib_fallback=True, requests_exception=e) except Exception as e: - logging.debug('[%d] ! -------------------------' % (self.feed.id,)) + logging.debug("[%d] ! -------------------------" % (self.feed.id,)) tb = traceback.format_exc() logging.debug(tb) - logging.debug('[%d] ! -------------------------' % (self.feed.id,)) + logging.debug("[%d] ! 
-------------------------" % (self.feed.id,)) self.feed.save_page_history(500, "Error", tb) # mail_feed_error_to_admin(self.feed, e, local_vars=locals()) - if (not settings.DEBUG and hasattr(settings, 'SENTRY_DSN') and - settings.SENTRY_DSN): + if not settings.DEBUG and hasattr(settings, "SENTRY_DSN") and settings.SENTRY_DSN: capture_exception(e) flush() if not urllib_fallback: self.fetch_page(urllib_fallback=True) else: self.feed.save_page_history(200, "OK") - + return html def fetch_story(self): @@ -174,62 +192,75 @@ class PageImporter(object): logging.user(self.request, "~SN~FRFailed~FY to fetch ~FGoriginal story~FY: timed out") except requests.exceptions.TooManyRedirects: logging.user(self.request, "~SN~FRFailed~FY to fetch ~FGoriginal story~FY: too many redirects") - + return html @timelimit(10) def _fetch_story(self): html = None story_permalink = self.story.story_permalink - + if not self.feed: return if any(story_permalink.startswith(s) for s in BROKEN_PAGES): return if any(s in story_permalink.lower() for s in BROKEN_PAGE_URLS): return - if not story_permalink.startswith('http'): + if not story_permalink.startswith("http"): return try: response = requests.get(story_permalink, headers=self.headers, timeout=10) response.connection.close() - except (AttributeError, SocketError, OpenSSLError, PyAsn1Error, - requests.exceptions.ConnectionError, - requests.exceptions.TooManyRedirects, - requests.adapters.ReadTimeout) as e: + except ( + AttributeError, + SocketError, + OpenSSLError, + PyAsn1Error, + requests.exceptions.ConnectionError, + requests.exceptions.TooManyRedirects, + requests.adapters.ReadTimeout, + ) as e: try: response = requests.get(story_permalink, timeout=10) - except (AttributeError, SocketError, OpenSSLError, PyAsn1Error, - requests.exceptions.ConnectionError, - requests.exceptions.TooManyRedirects, - requests.adapters.ReadTimeout) as e: - logging.debug(' ***> [%-30s] Original story fetch failed using requests: %s' % (self.feed.log_title[:30], e)) + except ( + AttributeError, + SocketError, + OpenSSLError, + PyAsn1Error, + requests.exceptions.ConnectionError, + requests.exceptions.TooManyRedirects, + requests.adapters.ReadTimeout, + ) as e: + logging.debug( + " ***> [%-30s] Original story fetch failed using requests: %s" + % (self.feed.log_title[:30], e) + ) return # try: data = response.text # except (LookupError, TypeError): # data = response.content - # import pdb; pdb.set_trace() + # import pdb; pdb.set_trace() - if response.encoding and response.encoding.lower() != 'utf-8': + if response.encoding and response.encoding.lower() != "utf-8": logging.debug(f" -> ~FBEncoding is {response.encoding}, re-encoding...") try: - data = data.encode('utf-8').decode('utf-8') + data = data.encode("utf-8").decode("utf-8") except (LookupError, UnicodeEncodeError): logging.debug(f" -> ~FRRe-encoding failed!") pass if data: - data = data.replace("\xc2\xa0", " ") # Non-breaking space, is mangled when encoding is not utf-8 - data = data.replace("\\u00a0", " ") # Non-breaking space, is mangled when encoding is not utf-8 + data = data.replace("\xc2\xa0", " ") # Non-breaking space, is mangled when encoding is not utf-8 + data = data.replace("\\u00a0", " ") # Non-breaking space, is mangled when encoding is not utf-8 html = self.rewrite_page(data) if not html: return self.save_story(html) - + return html - + def save_story(self, html): self.story.original_page_z = zlib.compress(smart_bytes(html)) try: @@ -237,77 +268,83 @@ class PageImporter(object): except NotUniqueError: pass - def 
save_no_page(self, reason=None): - logging.debug(' ---> [%-30s] ~FYNo original page: %s / %s' % (self.feed.log_title[:30], reason, self.feed.feed_link)) + logging.debug( + " ---> [%-30s] ~FYNo original page: %s / %s" + % (self.feed.log_title[:30], reason, self.feed.feed_link) + ) self.feed.has_page = False self.feed.save() self.feed.save_page_history(404, f"Feed has no original page: {reason}") def rewrite_page(self, response): - BASE_RE = re.compile(r'', re.I) + BASE_RE = re.compile(r"", re.I) base_code = '' % (self.feed.feed_link,) - html = BASE_RE.sub(' '+base_code, response) - - if ' " + base_code, response) + + if " tags. You know, like # Google Analytics. Ugh. - + FIND_RE = re.compile(r'\b(href|src)\s*=\s*("[^"]*"|\'[^\']*\'|[^"\'<>=\s]+)') ret = [] last_end = 0 - + for match in FIND_RE.finditer(document): url = match.group(2) if url[0] in "\"'": url = url.strip(url[0]) parsed = urllib.parse.urlparse(url) - if parsed.scheme == parsed.netloc == '': #relative to domain + if parsed.scheme == parsed.netloc == "": # relative to domain url = urllib.parse.urljoin(self.feed.feed_link, url) - ret.append(document[last_end:match.start(2)]) + ret.append(document[last_end : match.start(2)]) ret.append('"%s"' % (url,)) last_end = match.end(2) ret.append(document[last_end:]) - - return ''.join(ret) - + + return "".join(ret) + def save_page(self, html): saved = False - + if not html or len(html) < 100: return - - if settings.BACKED_BY_AWS.get('pages_on_node'): + + if settings.BACKED_BY_AWS.get("pages_on_node"): saved = self.save_page_node(html) - if saved and self.feed.s3_page and settings.BACKED_BY_AWS.get('pages_on_s3'): + if saved and self.feed.s3_page and settings.BACKED_BY_AWS.get("pages_on_s3"): self.delete_page_s3() - - if settings.BACKED_BY_AWS.get('pages_on_s3') and not saved: + + if settings.BACKED_BY_AWS.get("pages_on_s3") and not saved: saved = self.save_page_s3(html) - + if not saved: try: feed_page = MFeedPage.objects.get(feed_id=self.feed.pk) # feed_page.page_data = html.encode('utf-8') if feed_page.page() == html: - logging.debug(' ---> [%-30s] ~FYNo change in page data: %s' % (self.feed.log_title[:30], self.feed.feed_link)) + logging.debug( + " ---> [%-30s] ~FYNo change in page data: %s" + % (self.feed.log_title[:30], self.feed.feed_link) + ) else: # logging.debug(' ---> [%-30s] ~FYChange in page data: %s (%s/%s %s/%s)' % (self.feed.log_title[:30], self.feed.feed_link, type(html), type(feed_page.page()), len(html), len(feed_page.page()))) feed_page.page_data = zlib.compress(smart_bytes(html)) feed_page.save() except MFeedPage.DoesNotExist: - feed_page = MFeedPage.objects.create(feed_id=self.feed.pk, - page_data=zlib.compress(smart_bytes(html))) + feed_page = MFeedPage.objects.create( + feed_id=self.feed.pk, page_data=zlib.compress(smart_bytes(html)) + ) return feed_page - + def save_page_node(self, html): domain = "node-page.service.consul:8008" if settings.DOCKERBUILD: @@ -317,42 +354,47 @@ class PageImporter(object): self.feed.pk, ) compressed_html = zlib.compress(smart_bytes(html)) - response = requests.post(url, files={ - 'original_page': compressed_html, - # 'original_page': html, - }) + response = requests.post( + url, + files={ + "original_page": compressed_html, + # 'original_page': html, + }, + ) if response.status_code == 200: return True else: - logging.debug(' ---> [%-30s] ~FRFailed to save page to node: %s (%s bytes)' % (self.feed.log_title[:30], response.status_code, len(compressed_html))) + logging.debug( + " ---> [%-30s] ~FRFailed to save page to node: %s (%s bytes)" 
+ % (self.feed.log_title[:30], response.status_code, len(compressed_html)) + ) - def save_page_s3(self, html): - s3_object = settings.S3_CONN.Object(settings.S3_PAGES_BUCKET_NAME, - self.feed.s3_pages_key) - s3_object.put(Body=compress_string_with_gzip(html.encode('utf-8')), - ContentType='text/html', - ContentEncoding='gzip', - Expires=expires, - ACL='public-read' - ) - + s3_object = settings.S3_CONN.Object(settings.S3_PAGES_BUCKET_NAME, self.feed.s3_pages_key) + s3_object.put( + Body=compress_string_with_gzip(html.encode("utf-8")), + ContentType="text/html", + ContentEncoding="gzip", + Expires=expires, + ACL="public-read", + ) + try: feed_page = MFeedPage.objects.get(feed_id=self.feed.pk) feed_page.delete() - logging.debug(' ---> [%-30s] ~FYTransfering page data to S3...' % (self.feed.log_title[:30])) + logging.debug(" ---> [%-30s] ~FYTransfering page data to S3..." % (self.feed.log_title[:30])) except MFeedPage.DoesNotExist: pass - + if not self.feed.s3_page: self.feed.s3_page = True self.feed.save() - + return True - + def delete_page_s3(self): k = settings.S3_CONN.Bucket(settings.S3_PAGES_BUCKET_NAME).Object(key=self.feed.s3_pages_key) k.delete() - + self.feed.s3_page = False self.feed.save() diff --git a/apps/rss_feeds/tasks.py b/apps/rss_feeds/tasks.py index 1ad600358..2340e55b5 100644 --- a/apps/rss_feeds/tasks.py +++ b/apps/rss_feeds/tasks.py @@ -14,204 +14,227 @@ from utils.redis_raw_log_middleware import RedisDumpMiddleware FEED_TASKING_MAX = 10000 -@app.task(name='task-feeds') + +@app.task(name="task-feeds") def TaskFeeds(): - from apps.rss_feeds.models import Feed + from apps.rss_feeds.models import Feed + settings.LOG_TO_STREAM = True now = datetime.datetime.utcnow() start = time.time() r = redis.Redis(connection_pool=settings.REDIS_FEED_UPDATE_POOL) - tasked_feeds_size = r.zcard('tasked_feeds') - + tasked_feeds_size = r.zcard("tasked_feeds") + hour_ago = now - datetime.timedelta(hours=1) - r.zremrangebyscore('fetched_feeds_last_hour', 0, int(hour_ago.strftime('%s'))) - + r.zremrangebyscore("fetched_feeds_last_hour", 0, int(hour_ago.strftime("%s"))) + now_timestamp = int(now.strftime("%s")) - queued_feeds = r.zrangebyscore('scheduled_updates', 0, now_timestamp) - r.zremrangebyscore('scheduled_updates', 0, now_timestamp) + queued_feeds = r.zrangebyscore("scheduled_updates", 0, now_timestamp) + r.zremrangebyscore("scheduled_updates", 0, now_timestamp) if not queued_feeds: logging.debug(" ---> ~SN~FB~BMNo feeds to queue! Exiting...") return - - r.sadd('queued_feeds', *queued_feeds) - logging.debug(" ---> ~SN~FBQueuing ~SB%s~SN stale feeds (~SB%s~SN/~FG%s~FB~SN/%s tasked/queued/scheduled)" % ( - len(queued_feeds), - r.zcard('tasked_feeds'), - r.scard('queued_feeds'), - r.zcard('scheduled_updates'))) - + + r.sadd("queued_feeds", *queued_feeds) + logging.debug( + " ---> ~SN~FBQueuing ~SB%s~SN stale feeds (~SB%s~SN/~FG%s~FB~SN/%s tasked/queued/scheduled)" + % (len(queued_feeds), r.zcard("tasked_feeds"), r.scard("queued_feeds"), r.zcard("scheduled_updates")) + ) + # Regular feeds if tasked_feeds_size < FEED_TASKING_MAX: - feeds = r.srandmember('queued_feeds', FEED_TASKING_MAX) + feeds = r.srandmember("queued_feeds", FEED_TASKING_MAX) Feed.task_feeds(feeds, verbose=True) active_count = len(feeds) else: logging.debug(" ---> ~SN~FBToo many tasked feeds. ~SB%s~SN tasked." 
% tasked_feeds_size) active_count = 0 feeds = [] - - logging.debug(" ---> ~SN~FBTasking %s feeds took ~SB%s~SN seconds (~SB%s~SN/~FG%s~FB~SN/%s tasked/queued/scheduled)" % ( - active_count, - int((time.time() - start)), - r.zcard('tasked_feeds'), - r.scard('queued_feeds'), - r.zcard('scheduled_updates'))) + + logging.debug( + " ---> ~SN~FBTasking %s feeds took ~SB%s~SN seconds (~SB%s~SN/~FG%s~FB~SN/%s tasked/queued/scheduled)" + % ( + active_count, + int((time.time() - start)), + r.zcard("tasked_feeds"), + r.scard("queued_feeds"), + r.zcard("scheduled_updates"), + ) + ) logging.debug(" ---> ~FBFeeds being tasked: ~SB%s" % feeds) -@app.task(name='task-broken-feeds') + +@app.task(name="task-broken-feeds") def TaskBrokenFeeds(): - from apps.rss_feeds.models import Feed + from apps.rss_feeds.models import Feed + settings.LOG_TO_STREAM = True now = datetime.datetime.utcnow() start = time.time() r = redis.Redis(connection_pool=settings.REDIS_FEED_UPDATE_POOL) - + logging.debug(" ---> ~SN~FBQueuing broken feeds...") - + # Force refresh feeds - refresh_feeds = Feed.objects.filter( - active=True, - fetched_once=False, - active_subscribers__gte=1 - ).order_by('?')[:100] + refresh_feeds = Feed.objects.filter(active=True, fetched_once=False, active_subscribers__gte=1).order_by( + "?" + )[:100] refresh_count = refresh_feeds.count() cp1 = time.time() - + logging.debug(" ---> ~SN~FBFound %s active, unfetched broken feeds" % refresh_count) # Mistakenly inactive feeds - hours_ago = (now - datetime.timedelta(minutes=10)).strftime('%s') - old_tasked_feeds = r.zrangebyscore('tasked_feeds', 0, hours_ago) + hours_ago = (now - datetime.timedelta(minutes=10)).strftime("%s") + old_tasked_feeds = r.zrangebyscore("tasked_feeds", 0, hours_ago) inactive_count = len(old_tasked_feeds) if inactive_count: - r.zremrangebyscore('tasked_feeds', 0, hours_ago) + r.zremrangebyscore("tasked_feeds", 0, hours_ago) # r.sadd('queued_feeds', *old_tasked_feeds) for feed_id in old_tasked_feeds: - r.zincrby('error_feeds', 1, feed_id) + r.zincrby("error_feeds", 1, feed_id) feed = Feed.get_by_id(feed_id) feed.set_next_scheduled_update() - logging.debug(" ---> ~SN~FBRe-queuing ~SB%s~SN dropped/broken feeds (~SB%s/%s~SN queued/tasked)" % ( - inactive_count, - r.scard('queued_feeds'), - r.zcard('tasked_feeds'))) + logging.debug( + " ---> ~SN~FBRe-queuing ~SB%s~SN dropped/broken feeds (~SB%s/%s~SN queued/tasked)" + % (inactive_count, r.scard("queued_feeds"), r.zcard("tasked_feeds")) + ) cp2 = time.time() - + old = now - datetime.timedelta(days=1) - old_feeds = Feed.objects.filter( - next_scheduled_update__lte=old, - active_subscribers__gte=1 - ).order_by('?')[:500] + old_feeds = Feed.objects.filter(next_scheduled_update__lte=old, active_subscribers__gte=1).order_by("?")[ + :500 + ] old_count = old_feeds.count() cp3 = time.time() - - logging.debug(" ---> ~SN~FBTasking ~SBrefresh:~FC%s~FB inactive:~FC%s~FB old:~FC%s~SN~FB broken feeds... (%.4s/%.4s/%.4s)" % ( - refresh_count, - inactive_count, - old_count, - cp1 - start, - cp2 - cp1, - cp3 - cp2, - )) - + + logging.debug( + " ---> ~SN~FBTasking ~SBrefresh:~FC%s~FB inactive:~FC%s~FB old:~FC%s~SN~FB broken feeds... 
(%.4s/%.4s/%.4s)" + % ( + refresh_count, + inactive_count, + old_count, + cp1 - start, + cp2 - cp1, + cp3 - cp2, + ) + ) + Feed.task_feeds(refresh_feeds, verbose=False) Feed.task_feeds(old_feeds, verbose=False) - - logging.debug(" ---> ~SN~FBTasking broken feeds took ~SB%s~SN seconds (~SB%s~SN/~FG%s~FB~SN/%s tasked/queued/scheduled)" % ( - int((time.time() - start)), - r.zcard('tasked_feeds'), - r.scard('queued_feeds'), - r.zcard('scheduled_updates'))) - -@app.task(name='update-feeds', time_limit=10*60, soft_time_limit=9*60, ignore_result=True) + + logging.debug( + " ---> ~SN~FBTasking broken feeds took ~SB%s~SN seconds (~SB%s~SN/~FG%s~FB~SN/%s tasked/queued/scheduled)" + % ( + int((time.time() - start)), + r.zcard("tasked_feeds"), + r.scard("queued_feeds"), + r.zcard("scheduled_updates"), + ) + ) + + +@app.task(name="update-feeds", time_limit=10 * 60, soft_time_limit=9 * 60, ignore_result=True) def UpdateFeeds(feed_pks): from apps.rss_feeds.models import Feed from apps.statistics.models import MStatistics + r = redis.Redis(connection_pool=settings.REDIS_FEED_UPDATE_POOL) - mongodb_replication_lag = int(MStatistics.get('mongodb_replication_lag', 0)) + mongodb_replication_lag = int(MStatistics.get("mongodb_replication_lag", 0)) compute_scores = bool(mongodb_replication_lag < 10) - + profiler = DBProfilerMiddleware() profiler_activated = profiler.process_celery() if profiler_activated: settings.MONGO_COMMAND_LOGGER.process_celery(profiler) redis_middleware = RedisDumpMiddleware() redis_middleware.process_celery(profiler) - + options = { - 'quick': float(MStatistics.get('quick_fetch', 0)), - 'updates_off': MStatistics.get('updates_off', False), - 'compute_scores': compute_scores, - 'mongodb_replication_lag': mongodb_replication_lag, + "quick": float(MStatistics.get("quick_fetch", 0)), + "updates_off": MStatistics.get("updates_off", False), + "compute_scores": compute_scores, + "mongodb_replication_lag": mongodb_replication_lag, } - + if not isinstance(feed_pks, list): feed_pks = [feed_pks] - + for feed_pk in feed_pks: feed = Feed.get_by_id(feed_pk) if not feed or feed.pk != int(feed_pk): - logging.info(" ---> ~FRRemoving feed_id %s from tasked_feeds queue, points to %s..." % (feed_pk, feed and feed.pk)) - r.zrem('tasked_feeds', feed_pk) + logging.info( + " ---> ~FRRemoving feed_id %s from tasked_feeds queue, points to %s..." + % (feed_pk, feed and feed.pk) + ) + r.zrem("tasked_feeds", feed_pk) if not feed: continue try: feed.update(**options) except SoftTimeLimitExceeded as e: - feed.save_feed_history(505, 'Timeout', e) + feed.save_feed_history(505, "Timeout", e) logging.info(" ---> [%-30s] ~BR~FWTime limit hit!~SB~FR Moving on to next feed..." 
% feed) - if profiler_activated: profiler.process_celery_finished() + if profiler_activated: + profiler.process_celery_finished() -@app.task(name='new-feeds', time_limit=10*60, soft_time_limit=9*60, ignore_result=True) + +@app.task(name="new-feeds", time_limit=10 * 60, soft_time_limit=9 * 60, ignore_result=True) def NewFeeds(feed_pks): from apps.rss_feeds.models import Feed + if not isinstance(feed_pks, list): feed_pks = [feed_pks] - + options = {} for feed_pk in feed_pks: feed = Feed.get_by_id(feed_pk) - if not feed: continue + if not feed: + continue feed.update(options=options) -@app.task(name='push-feeds', ignore_result=True) + +@app.task(name="push-feeds", ignore_result=True) def PushFeeds(feed_id, xml): from apps.rss_feeds.models import Feed from apps.statistics.models import MStatistics - - mongodb_replication_lag = int(MStatistics.get('mongodb_replication_lag', 0)) + + mongodb_replication_lag = int(MStatistics.get("mongodb_replication_lag", 0)) compute_scores = bool(mongodb_replication_lag < 60) - + options = { - 'feed_xml': xml, - 'compute_scores': compute_scores, - 'mongodb_replication_lag': mongodb_replication_lag, + "feed_xml": xml, + "compute_scores": compute_scores, + "mongodb_replication_lag": mongodb_replication_lag, } feed = Feed.get_by_id(feed_id) if feed: feed.update(options=options) + @app.task() def ScheduleImmediateFetches(feed_ids, user_id=None): from apps.rss_feeds.models import Feed - + if not isinstance(feed_ids, list): feed_ids = [feed_ids] - + Feed.schedule_feed_fetches_immediately(feed_ids, user_id=user_id) @app.task() def SchedulePremiumSetup(feed_ids): from apps.rss_feeds.models import Feed - + if not isinstance(feed_ids, list): feed_ids = [feed_ids] - + Feed.setup_feeds_for_premium_subscribers(feed_ids) - + + @app.task() def ScheduleCountTagsForUser(user_id): from apps.rss_feeds.models import MStarredStoryCounts - + MStarredStoryCounts.count_for_user(user_id) diff --git a/apps/rss_feeds/test_rss_feeds.py b/apps/rss_feeds/test_rss_feeds.py index 8e3ca41e2..6ad0dc764 100644 --- a/apps/rss_feeds/test_rss_feeds.py +++ b/apps/rss_feeds/test_rss_feeds.py @@ -10,31 +10,34 @@ from mongoengine.connection import connect, disconnect class Test_Feed(TestCase): - - fixtures = ['initial_data.json'] + fixtures = ["initial_data.json"] def setUp(self): disconnect() - settings.MONGODB = connect('test_newsblur') - settings.REDIS_STORY_HASH_POOL = redis.ConnectionPool(host=settings.REDIS_STORY['host'], port=6379, db=10) - settings.REDIS_FEED_READ_POOL = redis.ConnectionPool(host=settings.REDIS_SESSIONS['host'], port=6379, db=10) + settings.MONGODB = connect("test_newsblur") + settings.REDIS_STORY_HASH_POOL = redis.ConnectionPool( + host=settings.REDIS_STORY["host"], port=6379, db=10 + ) + settings.REDIS_FEED_READ_POOL = redis.ConnectionPool( + host=settings.REDIS_SESSIONS["host"], port=6379, db=10 + ) r = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL) - r.delete('RS:1') - r.delete('lRS:1') - r.delete('RS:1:766') - r.delete('zF:766') - r.delete('F:766') - + r.delete("RS:1") + r.delete("lRS:1") + r.delete("RS:1:766") + r.delete("zF:766") + r.delete("F:766") + self.client = Client() def tearDown(self): - settings.MONGODB.drop_database('test_newsblur') + settings.MONGODB.drop_database("test_newsblur") def test_load_feeds__gawker(self): - self.client.login(username='conesus', password='test') + self.client.login(username="conesus", password="test") - management.call_command('loaddata', 'gawker1.json', verbosity=0, skip_checks=False) + management.call_command("loaddata", 
"gawker1.json", verbosity=0, skip_checks=False) feed = Feed.objects.get(pk=10) stories = MStory.objects(story_feed_id=feed.pk) @@ -45,7 +48,7 @@ class Test_Feed(TestCase): stories = MStory.objects(story_feed_id=feed.pk) self.assertEqual(stories.count(), 38) - management.call_command('loaddata', 'gawker2.json', verbosity=0, skip_checks=False) + management.call_command("loaddata", "gawker2.json", verbosity=0, skip_checks=False) feed.update(force=True) @@ -53,16 +56,16 @@ class Test_Feed(TestCase): stories = MStory.objects(story_feed_id=feed.pk) self.assertEqual(stories.count(), 38) - url = reverse('load-single-feed', kwargs=dict(feed_id=10)) + url = reverse("load-single-feed", kwargs=dict(feed_id=10)) response = self.client.get(url) feed = json.decode(response.content) - self.assertEqual(len(feed['stories']), 6) + self.assertEqual(len(feed["stories"]), 6) def test_load_feeds__gothamist(self): - self.client.login(username='conesus', password='test') + self.client.login(username="conesus", password="test") - management.call_command('loaddata', 'gothamist_aug_2009_1.json', verbosity=0, skip_checks=False) - feed = Feed.objects.get(feed_link__contains='gothamist') + management.call_command("loaddata", "gothamist_aug_2009_1.json", verbosity=0, skip_checks=False) + feed = Feed.objects.get(feed_link__contains="gothamist") stories = MStory.objects(story_feed_id=feed.pk) self.assertEqual(stories.count(), 0) @@ -71,177 +74,179 @@ class Test_Feed(TestCase): stories = MStory.objects(story_feed_id=feed.pk) self.assertEqual(stories.count(), 42) - url = reverse('load-single-feed', kwargs=dict(feed_id=4)) + url = reverse("load-single-feed", kwargs=dict(feed_id=4)) response = self.client.get(url) content = json.decode(response.content) - self.assertEqual(len(content['stories']), 6) + self.assertEqual(len(content["stories"]), 6) - management.call_command('loaddata', 'gothamist_aug_2009_2.json', verbosity=0, skip_checks=False) + management.call_command("loaddata", "gothamist_aug_2009_2.json", verbosity=0, skip_checks=False) feed.update(force=True) stories = MStory.objects(story_feed_id=feed.pk) self.assertEqual(stories.count(), 42) - url = reverse('load-single-feed', kwargs=dict(feed_id=4)) + url = reverse("load-single-feed", kwargs=dict(feed_id=4)) response = self.client.get(url) # print [c['story_title'] for c in json.decode(response.content)] content = json.decode(response.content) # Test: 1 changed char in title - self.assertEqual(len(content['stories']), 6) + self.assertEqual(len(content["stories"]), 6) def test_load_feeds__slashdot(self): - self.client.login(username='conesus', password='test') + self.client.login(username="conesus", password="test") old_story_guid = "tag:google.com,2005:reader/item/4528442633bc7b2b" - management.call_command('loaddata', 'slashdot1.json', verbosity=0, skip_checks=False) + management.call_command("loaddata", "slashdot1.json", verbosity=0, skip_checks=False) - feed = Feed.objects.get(feed_link__contains='slashdot') + feed = Feed.objects.get(feed_link__contains="slashdot") stories = MStory.objects(story_feed_id=feed.pk) self.assertEqual(stories.count(), 0) - management.call_command('refresh_feed', force=1, feed=5, daemonize=False, skip_checks=False) + management.call_command("refresh_feed", force=1, feed=5, daemonize=False, skip_checks=False) stories = MStory.objects(story_feed_id=feed.pk) self.assertEqual(stories.count(), 38) - response = self.client.get(reverse('load-feeds')) + response = self.client.get(reverse("load-feeds")) content = json.decode(response.content) - 
self.assertEqual(content['feeds']['5']['nt'], 38) + self.assertEqual(content["feeds"]["5"]["nt"], 38) - self.client.post(reverse('mark-story-as-read'), {'story_id': old_story_guid, 'feed_id': 5}) + self.client.post(reverse("mark-story-as-read"), {"story_id": old_story_guid, "feed_id": 5}) - response = self.client.get(reverse('refresh-feeds')) + response = self.client.get(reverse("refresh-feeds")) content = json.decode(response.content) - self.assertEqual(content['feeds']['5']['nt'], 37) + self.assertEqual(content["feeds"]["5"]["nt"], 37) - management.call_command('loaddata', 'slashdot2.json', verbosity=0, skip_checks=False) - management.call_command('refresh_feed', force=1, feed=5, daemonize=False, skip_checks=False) + management.call_command("loaddata", "slashdot2.json", verbosity=0, skip_checks=False) + management.call_command("refresh_feed", force=1, feed=5, daemonize=False, skip_checks=False) stories = MStory.objects(story_feed_id=feed.pk) self.assertEqual(stories.count(), 38) - url = reverse('load-single-feed', kwargs=dict(feed_id=5)) + url = reverse("load-single-feed", kwargs=dict(feed_id=5)) response = self.client.get(url) # pprint([c['story_title'] for c in json.decode(response.content)]) feed = json.decode(response.content) # Test: 1 changed char in title - self.assertEqual(len(feed['stories']), 6) + self.assertEqual(len(feed["stories"]), 6) - response = self.client.get(reverse('refresh-feeds')) + response = self.client.get(reverse("refresh-feeds")) content = json.decode(response.content) - self.assertEqual(content['feeds']['5']['nt'], 37) + self.assertEqual(content["feeds"]["5"]["nt"], 37) def test_load_feeds__motherjones(self): - self.client.login(username='conesus', password='test') + self.client.login(username="conesus", password="test") - management.call_command('loaddata', 'motherjones1.json', verbosity=0, skip_checks=False) + management.call_command("loaddata", "motherjones1.json", verbosity=0, skip_checks=False) - feed = Feed.objects.get(feed_link__contains='motherjones') + feed = Feed.objects.get(feed_link__contains="motherjones") stories = MStory.objects(story_feed_id=feed.pk) self.assertEqual(stories.count(), 0) - management.call_command('refresh_feed', force=1, feed=feed.pk, daemonize=False, skip_checks=False) + management.call_command("refresh_feed", force=1, feed=feed.pk, daemonize=False, skip_checks=False) stories = MStory.objects(story_feed_id=feed.pk) self.assertEqual(stories.count(), 10) - response = self.client.get(reverse('load-feeds')) + response = self.client.get(reverse("load-feeds")) content = json.decode(response.content) - self.assertEqual(content['feeds'][str(feed.pk)]['nt'], 10) + self.assertEqual(content["feeds"][str(feed.pk)]["nt"], 10) - self.client.post(reverse('mark-story-as-read'), {'story_id': stories[0].story_guid, 'feed_id': feed.pk}) + self.client.post( + reverse("mark-story-as-read"), {"story_id": stories[0].story_guid, "feed_id": feed.pk} + ) - response = self.client.get(reverse('refresh-feeds')) + response = self.client.get(reverse("refresh-feeds")) content = json.decode(response.content) - self.assertEqual(content['feeds'][str(feed.pk)]['nt'], 9) + self.assertEqual(content["feeds"][str(feed.pk)]["nt"], 9) - management.call_command('loaddata', 'motherjones2.json', verbosity=0, skip_checks=False) - management.call_command('refresh_feed', force=1, feed=feed.pk, daemonize=False, skip_checks=False) + management.call_command("loaddata", "motherjones2.json", verbosity=0, skip_checks=False) + management.call_command("refresh_feed", force=1, 
feed=feed.pk, daemonize=False, skip_checks=False) stories = MStory.objects(story_feed_id=feed.pk) self.assertEqual(stories.count(), 10) - url = reverse('load-single-feed', kwargs=dict(feed_id=feed.pk)) + url = reverse("load-single-feed", kwargs=dict(feed_id=feed.pk)) response = self.client.get(url) # pprint([c['story_title'] for c in json.decode(response.content)]) feed = json.decode(response.content) # Test: 1 changed char in title - self.assertEqual(len(feed['stories']), 6) + self.assertEqual(len(feed["stories"]), 6) - response = self.client.get(reverse('refresh-feeds')) + response = self.client.get(reverse("refresh-feeds")) content = json.decode(response.content) - self.assertEqual(content['feeds'][str(feed['feed_id'])]['nt'], 9) + self.assertEqual(content["feeds"][str(feed["feed_id"])]["nt"], 9) def test_load_feeds__google(self): # Freezegun the date to 2017-04-30 - - self.client.login(username='conesus', password='test') + + self.client.login(username="conesus", password="test") old_story_guid = "blog.google:443/topics/inside-google/google-earths-incredible-3d-imagery-explained/" - management.call_command('loaddata', 'google1.json', verbosity=1, skip_checks=False) + management.call_command("loaddata", "google1.json", verbosity=1, skip_checks=False) print((Feed.objects.all())) feed = Feed.objects.get(pk=766) print((" Testing test_load_feeds__google: %s" % feed)) stories = MStory.objects(story_feed_id=feed.pk) self.assertEqual(stories.count(), 0) - management.call_command('refresh_feed', force=False, feed=766, daemonize=False, skip_checks=False) + management.call_command("refresh_feed", force=False, feed=766, daemonize=False, skip_checks=False) stories = MStory.objects(story_feed_id=feed.pk) self.assertEqual(stories.count(), 20) - response = self.client.get(reverse('load-feeds')+"?update_counts=true") + response = self.client.get(reverse("load-feeds") + "?update_counts=true") content = json.decode(response.content) - self.assertEqual(content['feeds']['766']['nt'], 20) + self.assertEqual(content["feeds"]["766"]["nt"], 20) old_story = MStory.objects.get(story_feed_id=feed.pk, story_guid__contains=old_story_guid) - self.client.post(reverse('mark-story-hashes-as-read'), {'story_hash': old_story.story_hash}) + self.client.post(reverse("mark-story-hashes-as-read"), {"story_hash": old_story.story_hash}) - response = self.client.get(reverse('refresh-feeds')) + response = self.client.get(reverse("refresh-feeds")) content = json.decode(response.content) - self.assertEqual(content['feeds']['766']['nt'], 19) + self.assertEqual(content["feeds"]["766"]["nt"], 19) - management.call_command('loaddata', 'google2.json', verbosity=1, skip_checks=False) - management.call_command('refresh_feed', force=False, feed=766, daemonize=False, skip_checks=False) + management.call_command("loaddata", "google2.json", verbosity=1, skip_checks=False) + management.call_command("refresh_feed", force=False, feed=766, daemonize=False, skip_checks=False) stories = MStory.objects(story_feed_id=feed.pk) self.assertEqual(stories.count(), 20) - url = reverse('load-single-feed', kwargs=dict(feed_id=766)) + url = reverse("load-single-feed", kwargs=dict(feed_id=766)) response = self.client.get(url) # pprint([c['story_title'] for c in json.decode(response.content)]) feed = json.decode(response.content) # Test: 1 changed char in title - self.assertEqual(len(feed['stories']), 6) + self.assertEqual(len(feed["stories"]), 6) - response = self.client.get(reverse('refresh-feeds')) + response = self.client.get(reverse("refresh-feeds")) 
content = json.decode(response.content) - self.assertEqual(content['feeds']['766']['nt'], 19) - + self.assertEqual(content["feeds"]["766"]["nt"], 19) + def test_load_feeds__brokelyn__invalid_xml(self): BROKELYN_FEED_ID = 16 - self.client.login(username='conesus', password='test') - management.call_command('loaddata', 'brokelyn.json', verbosity=0) + self.client.login(username="conesus", password="test") + management.call_command("loaddata", "brokelyn.json", verbosity=0) self.assertEquals(Feed.objects.get(pk=BROKELYN_FEED_ID).pk, BROKELYN_FEED_ID) - management.call_command('refresh_feed', force=1, feed=BROKELYN_FEED_ID, daemonize=False) + management.call_command("refresh_feed", force=1, feed=BROKELYN_FEED_ID, daemonize=False) - management.call_command('loaddata', 'brokelyn.json', verbosity=0, skip_checks=False) - management.call_command('refresh_feed', force=1, feed=16, daemonize=False, skip_checks=False) + management.call_command("loaddata", "brokelyn.json", verbosity=0, skip_checks=False) + management.call_command("refresh_feed", force=1, feed=16, daemonize=False, skip_checks=False) - url = reverse('load-single-feed', kwargs=dict(feed_id=BROKELYN_FEED_ID)) + url = reverse("load-single-feed", kwargs=dict(feed_id=BROKELYN_FEED_ID)) response = self.client.get(url) # pprint([c['story_title'] for c in json.decode(response.content)]) feed = json.decode(response.content) # Test: 1 changed char in title - self.assertEqual(len(feed['stories']), 6) + self.assertEqual(len(feed["stories"]), 6) def test_all_feeds(self): pass diff --git a/apps/rss_feeds/text_importer.py b/apps/rss_feeds/text_importer.py index d89d8af05..1f2771428 100644 --- a/apps/rss_feeds/text_importer.py +++ b/apps/rss_feeds/text_importer.py @@ -18,15 +18,14 @@ from django.utils.encoding import smart_bytes from django.contrib.sites.models import Site from bs4 import BeautifulSoup from urllib.parse import urljoin - + BROKEN_URLS = [ "gamespot.com", - 'thedailyskip.com', + "thedailyskip.com", ] class TextImporter: - def __init__(self, story=None, feed=None, story_url=None, request=None, debug=False): self.story = story self.story_url = story_url @@ -38,31 +37,36 @@ class TextImporter: @property def headers(self): - num_subscribers = getattr(self.feed, 'num_subscribers', 0) + num_subscribers = getattr(self.feed, "num_subscribers", 0) return { - 'User-Agent': 'NewsBlur Content Fetcher - %s subscriber%s - %s %s' % ( - num_subscribers, - 's' if num_subscribers != 1 else '', - getattr(self.feed, 'permalink', ''), - getattr(self.feed, 'fake_user_agent', ''), - ), + "User-Agent": "NewsBlur Content Fetcher - %s subscriber%s - %s %s" + % ( + num_subscribers, + "s" if num_subscribers != 1 else "", + getattr(self.feed, "permalink", ""), + getattr(self.feed, "fake_user_agent", ""), + ), } def fetch(self, skip_save=False, return_document=False, use_mercury=True): if self.story_url and any(broken_url in self.story_url for broken_url in BROKEN_URLS): logging.user(self.request, "~SN~FRFailed~FY to fetch ~FGoriginal text~FY: banned") return - + if use_mercury: results = self.fetch_mercury(skip_save=skip_save, return_document=return_document) - + if not use_mercury or not results: - logging.user(self.request, "~SN~FRFailed~FY to fetch ~FGoriginal text~FY with Mercury, trying readability...", warn_color=False) + logging.user( + self.request, + "~SN~FRFailed~FY to fetch ~FGoriginal text~FY with Mercury, trying readability...", + warn_color=False, + ) results = self.fetch_manually(skip_save=skip_save, return_document=return_document) - + return results - + 
def fetch_mercury(self, skip_save=False, return_document=False): try: resp = self.fetch_request(use_mercury=True) @@ -72,29 +76,35 @@ class TextImporter: except requests.exceptions.TooManyRedirects: logging.user(self.request, "~SN~FRFailed~FY to fetch ~FGoriginal text~FY: too many redirects") resp = None - + if not resp: return - + try: doc = resp.json() except JSONDecodeError: doc = None - if not doc or doc.get('error', False): - logging.user(self.request, "~SN~FRFailed~FY to fetch ~FGoriginal text~FY: %s" % (doc and doc.get('messages', None) or "[unknown mercury error]")) + if not doc or doc.get("error", False): + logging.user( + self.request, + "~SN~FRFailed~FY to fetch ~FGoriginal text~FY: %s" + % (doc and doc.get("messages", None) or "[unknown mercury error]"), + ) return - - text = doc['content'] - title = doc['title'] - url = doc['url'] - image = doc['lead_image_url'] - - if image and ('http://' in image[1:] or 'https://' in image[1:]): + + text = doc["content"] + title = doc["title"] + url = doc["url"] + image = doc["lead_image_url"] + + if image and ("http://" in image[1:] or "https://" in image[1:]): logging.user(self.request, "~SN~FRRemoving broken image from text: %s" % image) image = None - - return self.process_content(text, title, url, image, skip_save=skip_save, return_document=return_document) - + + return self.process_content( + text, title, url, image, skip_save=skip_save, return_document=return_document + ) + def fetch_manually(self, skip_save=False, return_document=False): try: resp = self.fetch_request(use_mercury=False) @@ -115,15 +125,16 @@ class TextImporter: except (LookupError, TypeError): text = resp.content return text + try: text = extract_text(resp) except TimeoutError: logging.user(self.request, "~SN~FRFailed~FY to fetch ~FGoriginal text~FY: timed out on resp.text") return - + # if self.debug: # logging.user(self.request, "~FBOriginal text's website: %s" % text) - + # if resp.encoding and resp.encoding != 'utf-8': # try: # text = text.encode(resp.encoding) @@ -131,11 +142,12 @@ class TextImporter: # pass if text: - text = text.replace("\xc2\xa0", " ") # Non-breaking space, is mangled when encoding is not utf-8 - text = text.replace("\\u00a0", " ") # Non-breaking space, is mangled when encoding is not utf-8 + text = text.replace("\xc2\xa0", " ") # Non-breaking space, is mangled when encoding is not utf-8 + text = text.replace("\\u00a0", " ") # Non-breaking space, is mangled when encoding is not utf-8 - original_text_doc = readability.Document(text, url=resp.url, - positive_keywords="post, entry, postProp, article, postContent, postField") + original_text_doc = readability.Document( + text, url=resp.url, positive_keywords="post, entry, postProp, article, postContent, postField" + ) try: content = original_text_doc.summary(html_partial=True) except (ParserError, Unparseable) as e: @@ -148,18 +160,29 @@ class TextImporter: title = "" url = resp.url - - return self.process_content(content, title, url, image=None, skip_save=skip_save, return_document=return_document, - original_text_doc=original_text_doc) - - def process_content(self, content, title, url, image, skip_save=False, return_document=False, original_text_doc=None): - original_story_content = self.story and self.story.story_content_z and zlib.decompress(self.story.story_content_z) + + return self.process_content( + content, + title, + url, + image=None, + skip_save=skip_save, + return_document=return_document, + original_text_doc=original_text_doc, + ) + + def process_content( + self, content, title, 
url, image, skip_save=False, return_document=False, original_text_doc=None
+    ):
+        original_story_content = (
+            self.story and self.story.story_content_z and zlib.decompress(self.story.story_content_z)
+        )
         if not original_story_content:
             original_story_content = ""
         story_image_urls = self.story and self.story.image_urls
         if not story_image_urls:
             story_image_urls = []
-
+
         content = self.add_hero_image(content, story_image_urls)
         if content:
             content = self.rewrite_content(content)
@@ -169,25 +192,36 @@ class TextImporter:
             full_content_is_longer = True
         elif len(content) > len(original_story_content):
             full_content_is_longer = True
-
+
         if content and full_content_is_longer:
             if self.story and not skip_save:
                 self.story.original_text_z = zlib.compress(smart_bytes(content))
                 try:
                     self.story.save()
                 except NotUniqueError as e:
-                    logging.user(self.request, ("~SN~FYFetched ~FGoriginal text~FY: %s" % (e)), warn_color=False)
+                    logging.user(
+                        self.request, ("~SN~FYFetched ~FGoriginal text~FY: %s" % (e)), warn_color=False
+                    )
                     pass
-            logging.user(self.request, ("~SN~FYFetched ~FGoriginal text~FY: now ~SB%s bytes~SN vs. was ~SB%s bytes" % (
-                len(content),
-                len(original_story_content)
-            )), warn_color=False)
+            logging.user(
+                self.request,
+                (
+                    "~SN~FYFetched ~FGoriginal text~FY: now ~SB%s bytes~SN vs. was ~SB%s bytes"
+                    % (len(content), len(original_story_content))
+                ),
+                warn_color=False,
+            )
         else:
-            logging.user(self.request, ("~SN~FRFailed~FY to fetch ~FGoriginal text~FY: was ~SB%s bytes" % (
-                len(original_story_content)
-            )), warn_color=False)
+            logging.user(
+                self.request,
+                (
+                    "~SN~FRFailed~FY to fetch ~FGoriginal text~FY: was ~SB%s bytes"
+                    % (len(original_story_content))
+                ),
+                warn_color=False,
+            )
             return
-
+
         if return_document:
             return dict(content=content, title=title, url=url, doc=original_text_doc, image=image)
@@ -195,21 +229,22 @@ class TextImporter:
     def add_hero_image(self, content, image_urls):
         # Need to have images in the original story to add to the text that may not have any images
-        if not len(image_urls):
+        if not len(image_urls):
             return content
-
+
         content_soup = BeautifulSoup(content, features="lxml")
-        content_imgs = content_soup.findAll('img')
+        content_imgs = content_soup.findAll("img")
         for img in content_imgs:
             # Since NewsBlur proxies all http images over https, the url can change, so acknowledge urls
             # that are https on the original text but http on the feed
-            if not img.get('src'): continue
-            if img.get('src') in image_urls:
-                image_urls.remove(img.get('src'))
-            elif img.get('src').replace('https:', 'http:') in image_urls:
-                image_urls.remove(img.get('src').replace('https:', 'http:'))
-
+            if not img.get("src"):
+                continue
+            if img.get("src") in image_urls:
+                image_urls.remove(img.get("src"))
+            elif img.get("src").replace("https:", "http:") in image_urls:
+                image_urls.remove(img.get("src").replace("https:", "http:"))
+
         if len(image_urls):
             image_content = f'<img src="{image_urls[0]}">'
             content = f"{image_content}\n {content}"
@@ -218,48 +253,55 @@ class TextImporter:
     def rewrite_content(self, content):
         soup = BeautifulSoup(content, features="lxml")
-
-        for noscript in soup.findAll('noscript'):
+
+        for noscript in soup.findAll("noscript"):
             if len(noscript.contents) > 0:
                 noscript.replaceWith(noscript.contents[0])
-
+
         content = str(soup)
-
-        images = set([img.attrs['src'] for img in soup.findAll('img') if 'src' in img.attrs])
+
+        images = set([img.attrs["src"] for img in soup.findAll("img") if "src" in img.attrs])
         for image_url in images:
             abs_image_url = urljoin(self.story_url, image_url)
             content = content.replace(image_url, abs_image_url)
-
+
         return content
-
+
     @timelimit(10)
     def fetch_request(self, use_mercury=True):
         headers = self.headers
         url = self.story_url
-
+
         if use_mercury:
-            mercury_api_key = getattr(settings, 'MERCURY_PARSER_API_KEY', 'abc123')
+            mercury_api_key = getattr(settings, "MERCURY_PARSER_API_KEY", "abc123")
             headers["content-type"] = "application/json"
             headers["x-api-key"] = mercury_api_key
             domain = Site.objects.get_current().domain
             protocol = "https"
             if settings.DOCKERBUILD:
-                domain = 'haproxy'
+                domain = "haproxy"
                 protocol = "http"
             url = f"{protocol}://{domain}/rss_feeds/original_text_fetcher?url={url}"
-
+
         try:
             r = requests.get(url, headers=headers, timeout=15)
             r.connection.close()
-        except (AttributeError, SocketError, requests.ConnectionError,
-                requests.models.MissingSchema, requests.sessions.InvalidSchema,
-                requests.sessions.TooManyRedirects,
-                requests.models.InvalidURL,
-                requests.models.ChunkedEncodingError,
-                requests.models.ContentDecodingError,
-                requests.adapters.ReadTimeout,
-                urllib3.exceptions.LocationValueError,
-                LocationParseError, OpenSSLError, PyAsn1Error) as e:
+        except (
+            AttributeError,
+            SocketError,
+            requests.ConnectionError,
+            requests.models.MissingSchema,
+            requests.sessions.InvalidSchema,
+            requests.sessions.TooManyRedirects,
+            requests.models.InvalidURL,
+            requests.models.ChunkedEncodingError,
+            requests.models.ContentDecodingError,
+            requests.adapters.ReadTimeout,
+            urllib3.exceptions.LocationValueError,
+            LocationParseError,
+            OpenSSLError,
+            PyAsn1Error,
+        ) as e:
            logging.user(self.request, "~SN~FRFailed~FY to fetch ~FGoriginal text~FY: %s" % e)
             return
         return r
diff --git a/apps/rss_feeds/urls.py b/apps/rss_feeds/urls.py
index f1d7e1f7f..890b0c814 100644
--- a/apps/rss_feeds/urls.py
+++ b/apps/rss_feeds/urls.py
@@ -2,19 +2,27 @@ from django.conf.urls import url
 from apps.rss_feeds import views
 urlpatterns = [
-    url(r'^feed_autocomplete', views.feed_autocomplete, name='feed-autocomplete'),
-    url(r'^search_feed', views.search_feed, name='search-feed'),
-    url(r'^statistics/(?P<feed_id>\d+)', views.load_feed_statistics, name='feed-statistics'),
-    url(r'^statistics_embedded/(?P<feed_id>\d+)', views.load_feed_statistics_embedded, name='feed-statistics-embedded'),
-    url(r'^feed_settings/(?P<feed_id>\d+)', views.load_feed_settings, name='feed-settings'),
-    url(r'^feed/(?P<feed_id>\d+)/?', views.load_single_feed, name='feed-info'),
-    url(r'^icon/(?P<feed_id>\d+)/?', views.load_feed_favicon, name='feed-favicon'),
-    url(r'^exception_retry', views.exception_retry, name='exception-retry'),
-    url(r'^exception_change_feed_address', views.exception_change_feed_address, name='exception-change-feed-address'),
-    url(r'^exception_change_feed_link', views.exception_change_feed_link, name='exception-change-feed-link'),
-    url(r'^status', views.status, name='status'),
-    url(r'^load_single_feed', views.load_single_feed, name='feed-canonical'),
-    url(r'^original_text', views.original_text, name='original-text'),
-    url(r'^original_story', views.original_story, name='original-story'),
-    url(r'^story_changes', views.story_changes, name='story-changes'),
+    url(r"^feed_autocomplete", views.feed_autocomplete, name="feed-autocomplete"),
+    url(r"^search_feed", views.search_feed, name="search-feed"),
+    url(r"^statistics/(?P<feed_id>\d+)", views.load_feed_statistics, name="feed-statistics"),
+    url(
+        r"^statistics_embedded/(?P<feed_id>\d+)",
+        views.load_feed_statistics_embedded,
+        name="feed-statistics-embedded",
+    ),
+    url(r"^feed_settings/(?P<feed_id>\d+)", views.load_feed_settings, name="feed-settings"),
+    url(r"^feed/(?P<feed_id>\d+)/?", views.load_single_feed, name="feed-info"),
+    url(r"^icon/(?P<feed_id>\d+)/?", views.load_feed_favicon, name="feed-favicon"),
+    url(r"^exception_retry", views.exception_retry, name="exception-retry"),
+    url(
+        r"^exception_change_feed_address",
+        views.exception_change_feed_address,
+        name="exception-change-feed-address",
+    ),
+    url(r"^exception_change_feed_link", views.exception_change_feed_link, name="exception-change-feed-link"),
+    url(r"^status", views.status, name="status"),
+    url(r"^load_single_feed", views.load_single_feed, name="feed-canonical"),
+    url(r"^original_text", views.original_text, name="original-text"),
+    url(r"^original_story", views.original_story, name="original-story"),
+    url(r"^story_changes", views.story_changes, name="story-changes"),
 ]
diff --git a/apps/rss_feeds/views.py b/apps/rss_feeds/views.py
index 91a916b4a..5a062a6e5 100644
--- a/apps/rss_feeds/views.py
+++ b/apps/rss_feeds/views.py
@@ -9,6 +9,7 @@ from django.http import HttpResponseForbidden, HttpResponseRedirect, HttpRespons
 from django.conf import settings
 from django.contrib.auth.decorators import login_required
 from django.contrib.auth.models import User
+
 # from django.db import IntegrityError
 from apps.rss_feeds.models import Feed, merge_feeds
 from apps.rss_feeds.models import MFetchHistory
@@ -35,16 +36,17 @@ IGNORE_AUTOCOMPLETE = [
     "latitude",
 ]
+
 @ajax_login_required
 @json.json_view
 def search_feed(request):
-    address = request.GET.get('address')
-    offset = int(request.GET.get('offset', 0))
+    address = request.GET.get("address")
+    offset = int(request.GET.get("offset", 0))
     if not address:
         return dict(code=-1, message="Please provide a URL/address.")
-
+
     logging.user(request.user, "~FBFinding feed (search_feed): %s" % address)
-    ip = request.META.get('HTTP_X_FORWARDED_FOR', None) or request.META['REMOTE_ADDR']
+    ip = request.META.get("HTTP_X_FORWARDED_FOR", None) or request.META["REMOTE_ADDR"]
     logging.user(request.user, "~FBIP: %s" % ip)
     aggressive = request.user.is_authenticated
     feed = Feed.get_feed_from_url(address, create=False, aggressive=aggressive, offset=offset)
@@ -52,7 +54,8 @@ def search_feed(request):
     if feed:
         return feed.canonical()
     else:
         return dict(code=-1, message="No feed found matching that XML or website address.")
-
+
+
 @json.json_view
 def load_single_feed(request, feed_id):
     user = get_user(request)
@@ -60,18 +63,20 @@ def load_single_feed(request, feed_id):
     classifiers = get_classifiers_for_user(user, feed_id=feed.pk)
     payload = feed.canonical(full=True)
-    payload['classifiers'] = classifiers
+    payload["classifiers"] = classifiers
     return payload
+
 def feed_favicon_etag(request, feed_id):
     try:
         feed_icon = MFeedIcon.objects.get(feed_id=feed_id)
     except MFeedIcon.DoesNotExist:
         return
-
+
     return feed_icon.color
-
+
+
 @condition(etag_func=feed_favicon_etag)
 def load_feed_favicon(request, feed_id):
     not_found = False
@@ -80,112 +85,126 @@ def load_feed_favicon(request, feed_id):
     except MFeedIcon.DoesNotExist:
         logging.user(request, "~FBNo feed icon found: %s" % feed_id)
         not_found = True
-
+
     if not_found or not feed_icon.data:
-        return HttpResponseRedirect(settings.MEDIA_URL + 'img/icons/nouns/world.svg')
-
+        return HttpResponseRedirect(settings.MEDIA_URL + "img/icons/nouns/world.svg")
+
     icon_data = base64.b64decode(feed_icon.data)
-    return HttpResponse(icon_data, content_type='image/png')
+    return HttpResponse(icon_data, content_type="image/png")
+
 @json.json_view
 def feed_autocomplete(request):
-    query = request.GET.get('term') or request.GET.get('query')
-    version = 
int(request.GET.get('v', 1)) - autocomplete_format = request.GET.get('format', 'autocomplete') - + query = request.GET.get("term") or request.GET.get("query") + version = int(request.GET.get("v", 1)) + autocomplete_format = request.GET.get("format", "autocomplete") + # user = get_user(request) # if True or not user.profile.is_premium: # return dict(code=-1, message="Overloaded, no autocomplete results.", feeds=[], term=query) - + if not query: return dict(code=-1, message="Specify a search 'term'.", feeds=[], term=query) - - if '.' in query: + + if "." in query: try: parts = urlparse(query) - if not parts.hostname and not query.startswith('http'): - parts = urlparse('http://%s' % query) + if not parts.hostname and not query.startswith("http"): + parts = urlparse("http://%s" % query) if parts.hostname: query = [parts.hostname] - query.extend([p for p in parts.path.split('/') if p]) - query = ' '.join(query) + query.extend([p for p in parts.path.split("/") if p]) + query = " ".join(query) except: logging.user(request, "~FGAdd search, could not parse url in ~FR%s" % query) - - query_params = query.split(' ') + + query_params = query.split(" ") tries_left = 5 while len(query_params) and tries_left: tries_left -= 1 - feed_ids = Feed.autocomplete(' '.join(query_params)) + feed_ids = Feed.autocomplete(" ".join(query_params)) if feed_ids: break else: query_params = query_params[:-1] - + feeds = list(set([Feed.get_by_id(feed_id) for feed_id in feed_ids])) feeds = [feed for feed in feeds if feed and not feed.branch_from_feed] feeds = [feed for feed in feeds if all([x not in feed.feed_address for x in IGNORE_AUTOCOMPLETE])] - - if autocomplete_format == 'autocomplete': - feeds = [{ - 'id': feed.pk, - 'value': feed.feed_address, - 'label': feed.feed_title, - 'tagline': feed.data and feed.data.feed_tagline, - 'num_subscribers': feed.num_subscribers, - } for feed in feeds] + + if autocomplete_format == "autocomplete": + feeds = [ + { + "id": feed.pk, + "value": feed.feed_address, + "label": feed.feed_title, + "tagline": feed.data and feed.data.feed_tagline, + "num_subscribers": feed.num_subscribers, + } + for feed in feeds + ] else: feeds = [feed.canonical(full=True) for feed in feeds] - feeds = sorted(feeds, key=lambda f: -1 * f['num_subscribers']) - - feed_ids = [f['id'] for f in feeds] - feed_icons = dict((icon.feed_id, icon) for icon in MFeedIcon.objects.filter(feed_id__in=feed_ids)) - - for feed in feeds: - if feed['id'] in feed_icons: - feed_icon = feed_icons[feed['id']] - if feed_icon.data: - feed['favicon_color'] = feed_icon.color - feed['favicon'] = feed_icon.data + feeds = sorted(feeds, key=lambda f: -1 * f["num_subscribers"]) + + feed_ids = [f["id"] for f in feeds] + feed_icons = dict((icon.feed_id, icon) for icon in MFeedIcon.objects.filter(feed_id__in=feed_ids)) + + for feed in feeds: + if feed["id"] in feed_icons: + feed_icon = feed_icons[feed["id"]] + if feed_icon.data: + feed["favicon_color"] = feed_icon.color + feed["favicon"] = feed_icon.data + + logging.user( + request, + "~FGAdd Search: ~SB%s ~SN(%s matches)" + % ( + query, + len(feeds), + ), + ) - logging.user(request, "~FGAdd Search: ~SB%s ~SN(%s matches)" % (query, len(feeds),)) - if version > 1: return { - 'feeds': feeds, - 'term': query, + "feeds": feeds, + "term": query, } else: return feeds - + + @ratelimit(minutes=1, requests=30) @json.json_view def load_feed_statistics(request, feed_id): user = get_user(request) feed = get_object_or_404(Feed, pk=feed_id) stats = assemble_statistics(user, feed_id) - + logging.user(request, 
"~FBStatistics: ~SB%s" % (feed)) return stats + def load_feed_statistics_embedded(request, feed_id): user = get_user(request) feed = get_object_or_404(Feed, pk=feed_id) stats = assemble_statistics(user, feed_id) - + logging.user(request, "~FBStatistics (~FCembedded~FB): ~SB%s" % (feed)) - + return render( request, - 'rss_feeds/statistics.xhtml', + "rss_feeds/statistics.xhtml", { - 'stats': json.json_encode(stats), - 'feed_js': json.json_encode(feed.canonical()), - 'feed': feed, - } + "stats": json.json_encode(stats), + "feed_js": json.json_encode(feed.canonical()), + "feed": feed, + }, ) + def assemble_statistics(user, feed_id): user_timezone = user.profile.timezone stats = dict() @@ -194,76 +213,82 @@ def assemble_statistics(user, feed_id): feed.set_next_scheduled_update(verbose=True, skip_scheduling=True) feed.save_feed_story_history_statistics() feed.save_classifier_counts() - + # Dates of last and next update - stats['active'] = feed.active - stats['last_update'] = relative_timesince(feed.last_update) - stats['next_update'] = relative_timeuntil(feed.next_scheduled_update) - stats['push'] = feed.is_push - stats['fs_size_bytes'] = feed.fs_size_bytes - stats['archive_count'] = feed.archive_count + stats["active"] = feed.active + stats["last_update"] = relative_timesince(feed.last_update) + stats["next_update"] = relative_timeuntil(feed.next_scheduled_update) + stats["push"] = feed.is_push + stats["fs_size_bytes"] = feed.fs_size_bytes + stats["archive_count"] = feed.archive_count if feed.is_push: try: - stats['push_expires'] = localtime_for_timezone(feed.push.lease_expires, - user_timezone).strftime("%Y-%m-%d %H:%M:%S") + stats["push_expires"] = localtime_for_timezone(feed.push.lease_expires, user_timezone).strftime( + "%Y-%m-%d %H:%M:%S" + ) except PushSubscription.DoesNotExist: - stats['push_expires'] = 'Missing push' + stats["push_expires"] = "Missing push" feed.is_push = False feed.save() # Minutes between updates update_interval_minutes = feed.get_next_scheduled_update(force=True, verbose=False) - stats['update_interval_minutes'] = update_interval_minutes + stats["update_interval_minutes"] = update_interval_minutes original_active_premium_subscribers = feed.active_premium_subscribers original_premium_subscribers = feed.premium_subscribers - feed.active_premium_subscribers = max(feed.active_premium_subscribers+1, 1) + feed.active_premium_subscribers = max(feed.active_premium_subscribers + 1, 1) feed.premium_subscribers += 1 - premium_update_interval_minutes = feed.get_next_scheduled_update(force=True, verbose=False, - premium_speed=True) + premium_update_interval_minutes = feed.get_next_scheduled_update( + force=True, verbose=False, premium_speed=True + ) feed.active_premium_subscribers = original_active_premium_subscribers feed.premium_subscribers = original_premium_subscribers - stats['premium_update_interval_minutes'] = premium_update_interval_minutes - stats['errors_since_good'] = feed.errors_since_good - + stats["premium_update_interval_minutes"] = premium_update_interval_minutes + stats["errors_since_good"] = feed.errors_since_good + # Stories per month - average and month-by-month breakout - average_stories_per_month, story_count_history = feed.average_stories_per_month, feed.data.story_count_history - stats['average_stories_per_month'] = average_stories_per_month + average_stories_per_month, story_count_history = ( + feed.average_stories_per_month, + feed.data.story_count_history, + ) + stats["average_stories_per_month"] = average_stories_per_month story_count_history = 
story_count_history and json.decode(story_count_history) if story_count_history and isinstance(story_count_history, dict): - stats['story_count_history'] = story_count_history['months'] - stats['story_days_history'] = story_count_history['days'] - stats['story_hours_history'] = story_count_history['hours'] + stats["story_count_history"] = story_count_history["months"] + stats["story_days_history"] = story_count_history["days"] + stats["story_hours_history"] = story_count_history["hours"] else: - stats['story_count_history'] = story_count_history - + stats["story_count_history"] = story_count_history + # Rotate hours to match user's timezone offset localoffset = user_timezone.utcoffset(datetime.datetime.utcnow()) hours_offset = int(localoffset.total_seconds() / 3600) rotated_hours = {} - for hour, value in list(stats['story_hours_history'].items()): - rotated_hours[str(int(hour)+hours_offset)] = value - stats['story_hours_history'] = rotated_hours - + for hour, value in list(stats["story_hours_history"].items()): + rotated_hours[str(int(hour) + hours_offset)] = value + stats["story_hours_history"] = rotated_hours + # Subscribers - stats['subscriber_count'] = feed.num_subscribers - stats['num_subscribers'] = feed.num_subscribers - stats['stories_last_month'] = feed.stories_last_month - stats['last_load_time'] = feed.last_load_time - stats['premium_subscribers'] = feed.premium_subscribers - stats['active_subscribers'] = feed.active_subscribers - stats['active_premium_subscribers'] = feed.active_premium_subscribers + stats["subscriber_count"] = feed.num_subscribers + stats["num_subscribers"] = feed.num_subscribers + stats["stories_last_month"] = feed.stories_last_month + stats["last_load_time"] = feed.last_load_time + stats["premium_subscribers"] = feed.premium_subscribers + stats["active_subscribers"] = feed.active_subscribers + stats["active_premium_subscribers"] = feed.active_premium_subscribers # Classifier counts - stats['classifier_counts'] = json.decode(feed.data.feed_classifier_counts) - + stats["classifier_counts"] = json.decode(feed.data.feed_classifier_counts) + # Fetch histories fetch_history = MFetchHistory.feed(feed_id, timezone=user_timezone) - stats['feed_fetch_history'] = fetch_history['feed_fetch_history'] - stats['page_fetch_history'] = fetch_history['page_fetch_history'] - stats['feed_push_history'] = fetch_history['push_history'] - + stats["feed_fetch_history"] = fetch_history["feed_fetch_history"] + stats["page_fetch_history"] = fetch_history["page_fetch_history"] + stats["feed_push_history"] = fetch_history["push_history"] + return stats + @json.json_view def load_feed_settings(request, feed_id): stats = dict() @@ -272,25 +297,26 @@ def load_feed_settings(request, feed_id): timezone = user.profile.timezone fetch_history = MFetchHistory.feed(feed_id, timezone=timezone) - stats['feed_fetch_history'] = fetch_history['feed_fetch_history'] - stats['page_fetch_history'] = fetch_history['page_fetch_history'] - stats['feed_push_history'] = fetch_history['push_history'] - stats['duplicate_addresses'] = feed.duplicate_addresses.all() - + stats["feed_fetch_history"] = fetch_history["feed_fetch_history"] + stats["page_fetch_history"] = fetch_history["page_fetch_history"] + stats["feed_push_history"] = fetch_history["push_history"] + stats["duplicate_addresses"] = feed.duplicate_addresses.all() + return stats + @ratelimit(minutes=1, requests=30) @json.json_view def exception_retry(request): user = get_user(request) - feed_id = get_argument_or_404(request, 'feed_id') - reset_fetch = 
json.decode(request.POST['reset_fetch']) + feed_id = get_argument_or_404(request, "feed_id") + reset_fetch = json.decode(request.POST["reset_fetch"]) feed = Feed.get_by_id(feed_id) original_feed = feed - + if not feed: raise Http404 - + feed.schedule_feed_fetch_immediately() changed = False if feed.has_page_exception: @@ -303,18 +329,18 @@ def exception_retry(request): changed = True feed.active = True if changed: - feed.save(update_fields=['has_page_exception', 'has_feed_exception', 'active']) - + feed.save(update_fields=["has_page_exception", "has_feed_exception", "active"]) + original_fetched_once = feed.fetched_once if reset_fetch: logging.user(request, "~FRRefreshing exception feed: ~SB%s" % (feed)) feed.fetched_once = False else: logging.user(request, "~FRForcing refreshing feed: ~SB%s" % (feed)) - + feed.fetched_once = True if feed.fetched_once != original_fetched_once: - feed.save(update_fields=['fetched_once']) + feed.save(update_fields=["fetched_once"]) feed = feed.update(force=True, compute_scores=False, verbose=True) feed = Feed.get_by_id(feed.pk) @@ -327,26 +353,30 @@ def exception_retry(request): usersub = usersubs[0] usersub.switch_feed(feed, original_feed) else: - return {'code': -1} + return {"code": -1} usersub.calculate_feed_scores(silent=False) - + feeds = {feed.pk: usersub and usersub.canonical(full=True), feed_id: usersub.canonical(full=True)} - return {'code': 1, 'feeds': feeds} - - + return {"code": 1, "feeds": feeds} + + @ajax_login_required @json.json_view def exception_change_feed_address(request): - feed_id = request.POST['feed_id'] + feed_id = request.POST["feed_id"] feed = get_object_or_404(Feed, pk=feed_id) original_feed = feed - feed_address = request.POST['feed_address'] + feed_address = request.POST["feed_address"] timezone = request.user.profile.timezone code = -1 if False and (feed.has_page_exception or feed.has_feed_exception): # Fix broken feed - logging.user(request, "~FRFixing feed exception by address: %s - ~SB%s~SN to ~SB%s" % (feed, feed.feed_address, feed_address)) + logging.user( + request, + "~FRFixing feed exception by address: %s - ~SB%s~SN to ~SB%s" + % (feed, feed.feed_address, feed_address), + ) feed.has_feed_exception = False feed.active = True feed.fetched_once = False @@ -364,9 +394,13 @@ def exception_change_feed_address(request): merge_feeds(new_feed.pk, feed.pk) else: # Branch good feed - logging.user(request, "~FRBranching feed by address: ~SB%s~SN to ~SB%s" % (feed.feed_address, feed_address)) + logging.user( + request, "~FRBranching feed by address: ~SB%s~SN to ~SB%s" % (feed.feed_address, feed_address) + ) try: - feed = Feed.objects.get(hash_address_and_link=Feed.generate_hash_address_and_link(feed_address, feed.feed_link)) + feed = Feed.objects.get( + hash_address_and_link=Feed.generate_hash_address_and_link(feed_address, feed.feed_link) + ) except Feed.DoesNotExist: feed = Feed.objects.create(feed_address=feed_address, feed_link=feed.feed_link) code = 1 @@ -390,47 +424,50 @@ def exception_change_feed_address(request): else: fetch_history = MFetchHistory.feed(feed_id, timezone=timezone) return { - 'code': -1, - 'feed_fetch_history': fetch_history['feed_fetch_history'], - 'page_fetch_history': fetch_history['page_fetch_history'], - 'push_history': fetch_history['push_history'], + "code": -1, + "feed_fetch_history": fetch_history["feed_fetch_history"], + "page_fetch_history": fetch_history["page_fetch_history"], + "push_history": fetch_history["push_history"], } usersub.calculate_feed_scores(silent=False) - + 
feed.update_all_statistics() classifiers = get_classifiers_for_user(usersub.user, feed_id=usersub.feed_id) - + feeds = { - original_feed.pk: usersub and usersub.canonical(full=True, classifiers=classifiers), + original_feed.pk: usersub and usersub.canonical(full=True, classifiers=classifiers), } - + if feed and feed.has_feed_exception: code = -1 fetch_history = MFetchHistory.feed(feed_id, timezone=timezone) return { - 'code': code, - 'feeds': feeds, - 'new_feed_id': usersub.feed_id, - 'feed_fetch_history': fetch_history['feed_fetch_history'], - 'page_fetch_history': fetch_history['page_fetch_history'], - 'push_history': fetch_history['push_history'], + "code": code, + "feeds": feeds, + "new_feed_id": usersub.feed_id, + "feed_fetch_history": fetch_history["feed_fetch_history"], + "page_fetch_history": fetch_history["page_fetch_history"], + "push_history": fetch_history["push_history"], } - + + @ajax_login_required @json.json_view def exception_change_feed_link(request): - feed_id = request.POST['feed_id'] + feed_id = request.POST["feed_id"] feed = get_object_or_404(Feed, pk=feed_id) original_feed = feed - feed_link = request.POST['feed_link'] + feed_link = request.POST["feed_link"] timezone = request.user.profile.timezone code = -1 - + if False and (feed.has_page_exception or feed.has_feed_exception): # Fix broken feed - logging.user(request, "~FRFixing feed exception by link: ~SB%s~SN to ~SB%s" % (feed.feed_link, feed_link)) + logging.user( + request, "~FRFixing feed exception by link: ~SB%s~SN to ~SB%s" % (feed.feed_link, feed_link) + ) found_feed_urls = feedfinder.find_feeds(feed_link) if len(found_feed_urls): code = 1 @@ -451,7 +488,9 @@ def exception_change_feed_link(request): # Branch good feed logging.user(request, "~FRBranching feed by link: ~SB%s~SN to ~SB%s" % (feed.feed_link, feed_link)) try: - feed = Feed.objects.get(hash_address_and_link=Feed.generate_hash_address_and_link(feed.feed_address, feed_link)) + feed = Feed.objects.get( + hash_address_and_link=Feed.generate_hash_address_and_link(feed.feed_address, feed_link) + ) except Feed.DoesNotExist: feed = Feed.objects.create(feed_address=feed.feed_address, feed_link=feed_link) code = 1 @@ -476,81 +515,82 @@ def exception_change_feed_link(request): else: fetch_history = MFetchHistory.feed(feed_id, timezone=timezone) return { - 'code': -1, - 'feed_fetch_history': fetch_history['feed_fetch_history'], - 'page_fetch_history': fetch_history['page_fetch_history'], - 'push_history': fetch_history['push_history'], + "code": -1, + "feed_fetch_history": fetch_history["feed_fetch_history"], + "page_fetch_history": fetch_history["page_fetch_history"], + "push_history": fetch_history["push_history"], } - + usersub.calculate_feed_scores(silent=False) - + feed.update_all_statistics() classifiers = get_classifiers_for_user(usersub.user, feed_id=usersub.feed_id) - + if feed and feed.has_feed_exception: code = -1 - + feeds = { - original_feed.pk: usersub.canonical(full=True, classifiers=classifiers), + original_feed.pk: usersub.canonical(full=True, classifiers=classifiers), } fetch_history = MFetchHistory.feed(feed_id, timezone=timezone) return { - 'code': code, - 'feeds': feeds, - 'new_feed_id': usersub.feed_id, - 'feed_fetch_history': fetch_history['feed_fetch_history'], - 'page_fetch_history': fetch_history['page_fetch_history'], - 'push_history': fetch_history['push_history'], + "code": code, + "feeds": feeds, + "new_feed_id": usersub.feed_id, + "feed_fetch_history": fetch_history["feed_fetch_history"], + "page_fetch_history": 
fetch_history["page_fetch_history"], + "push_history": fetch_history["push_history"], } + @login_required def status(request): if not request.user.is_staff and not settings.DEBUG: logging.user(request, "~SKNON-STAFF VIEWING RSS FEEDS STATUS!") assert False return HttpResponseForbidden() - minutes = int(request.GET.get('minutes', 1)) - now = datetime.datetime.now() + minutes = int(request.GET.get("minutes", 1)) + now = datetime.datetime.now() hour_ago = now + datetime.timedelta(minutes=minutes) - username = request.GET.get('user', '') or request.GET.get('username', '') + username = request.GET.get("user", "") or request.GET.get("username", "") if username == "all": - feeds = Feed.objects.filter(next_scheduled_update__lte=hour_ago).order_by('next_scheduled_update') + feeds = Feed.objects.filter(next_scheduled_update__lte=hour_ago).order_by("next_scheduled_update") else: if username: user = User.objects.get(username=username) else: user = request.user usersubs = UserSubscription.objects.filter(user=user) - feed_ids = usersubs.values('feed_id') + feed_ids = usersubs.values("feed_id") if minutes > 0: - feeds = Feed.objects.filter(pk__in=feed_ids, next_scheduled_update__lte=hour_ago).order_by('next_scheduled_update') + feeds = Feed.objects.filter(pk__in=feed_ids, next_scheduled_update__lte=hour_ago).order_by( + "next_scheduled_update" + ) else: - feeds = Feed.objects.filter(pk__in=feed_ids, last_update__gte=hour_ago).order_by('-last_update') - + feeds = Feed.objects.filter(pk__in=feed_ids, last_update__gte=hour_ago).order_by("-last_update") + r = redis.Redis(connection_pool=settings.REDIS_FEED_UPDATE_POOL) queues = { - 'tasked_feeds': r.zcard('tasked_feeds'), - 'queued_feeds': r.scard('queued_feeds'), - 'scheduled_updates': r.zcard('scheduled_updates'), + "tasked_feeds": r.zcard("tasked_feeds"), + "queued_feeds": r.scard("queued_feeds"), + "scheduled_updates": r.zcard("scheduled_updates"), } - return render(request, 'rss_feeds/status.xhtml', { - 'feeds': feeds, - 'queues': queues - }) + return render(request, "rss_feeds/status.xhtml", {"feeds": feeds, "queues": queues}) + @json.json_view def original_text(request): # iOS sends a POST, web sends a GET GET_POST = getattr(request, request.method) - story_id = GET_POST.get('story_id') - feed_id = GET_POST.get('feed_id') - story_hash = GET_POST.get('story_hash', None) - force = GET_POST.get('force', False) - debug = GET_POST.get('debug', False) + story_id = GET_POST.get("story_id") + feed_id = GET_POST.get("feed_id") + story_hash = GET_POST.get("story_hash", None) + force = GET_POST.get("force", False) + debug = GET_POST.get("debug", False) if not story_hash and not story_id: - return {'code': -1, 'message': 'Missing story_hash.', 'original_text': None, 'failed': True} - + return {"code": -1, "message": "Missing story_hash.", "original_text": None, "failed": True} + if story_hash: story, _ = MStory.find_story(story_hash=story_hash) else: @@ -558,25 +598,26 @@ def original_text(request): if not story: logging.user(request, "~FYFetching ~FGoriginal~FY story text: ~FRstory not found") - return {'code': -1, 'message': 'Story not found.', 'original_text': None, 'failed': True} - + return {"code": -1, "message": "Story not found.", "original_text": None, "failed": True} + original_text = story.fetch_original_text(force=force, request=request, debug=debug) return { - 'feed_id': story.story_feed_id, - 'story_hash': story.story_hash, - 'story_id': story.story_guid, - 'image_urls': story.image_urls, - 'secure_image_urls': 
Feed.secure_image_urls(story.image_urls), - 'original_text': original_text, - 'failed': not original_text or len(original_text) < 100, + "feed_id": story.story_feed_id, + "story_hash": story.story_hash, + "story_id": story.story_guid, + "image_urls": story.image_urls, + "secure_image_urls": Feed.secure_image_urls(story.image_urls), + "original_text": original_text, + "failed": not original_text or len(original_text) < 100, } -@required_params('story_hash', method="GET") + +@required_params("story_hash", method="GET") def original_story(request): - story_hash = request.GET.get('story_hash') - force = request.GET.get('force', False) - debug = request.GET.get('debug', False) + story_hash = request.GET.get("story_hash") + force = request.GET.get("force", False) + debug = request.GET.get("debug", False) story, _ = MStory.find_story(story_hash=story_hash) @@ -584,22 +625,20 @@ def original_story(request): logging.user(request, "~FYFetching ~FGoriginal~FY story page: ~FRstory not found") # return {'code': -1, 'message': 'Story not found.', 'original_page': None, 'failed': True} raise Http404 - + original_page = story.fetch_original_page(force=force, request=request, debug=debug) return HttpResponse(original_page or "") -@required_params('story_hash', method="GET") + +@required_params("story_hash", method="GET") @json.json_view def story_changes(request): - story_hash = request.GET.get('story_hash', None) - show_changes = is_true(request.GET.get('show_changes', True)) + story_hash = request.GET.get("story_hash", None) + show_changes = is_true(request.GET.get("show_changes", True)) story, _ = MStory.find_story(story_hash=story_hash) if not story: logging.user(request, "~FYFetching ~FGoriginal~FY story page: ~FRstory not found") - return {'code': -1, 'message': 'Story not found.', 'original_page': None, 'failed': True} - - return { - 'story': Feed.format_story(story, show_changes=show_changes) - } - \ No newline at end of file + return {"code": -1, "message": "Story not found.", "original_page": None, "failed": True} + + return {"story": Feed.format_story(story, show_changes=show_changes)} diff --git a/apps/search/management/commands/index_feeds.py b/apps/search/management/commands/index_feeds.py index c3e2ee37d..92623d723 100644 --- a/apps/search/management/commands/index_feeds.py +++ b/apps/search/management/commands/index_feeds.py @@ -1,14 +1,22 @@ from django.core.management.base import BaseCommand from apps.rss_feeds.models import Feed -class Command(BaseCommand): +class Command(BaseCommand): def add_arguments(self, parser): - parser.add_argument("-o", "--offset", dest="offset", type=int, default=0, help="Specify offset to start at") - parser.add_argument("-s", "--subscribers", dest="subscribers", type=int, default=2, help="Specify minimum number of subscribers") + parser.add_argument( + "-o", "--offset", dest="offset", type=int, default=0, help="Specify offset to start at" + ) + parser.add_argument( + "-s", + "--subscribers", + dest="subscribers", + type=int, + default=2, + help="Specify minimum number of subscribers", + ) def handle(self, *args, **options): - offset = options['offset'] - subscribers = options.get('subscribers', None) + offset = options["offset"] + subscribers = options.get("subscribers", None) Feed.index_all_for_search(offset=offset, subscribers=subscribers) - \ No newline at end of file diff --git a/apps/search/management/commands/index_stories.py b/apps/search/management/commands/index_stories.py index b63faa98c..7a673b0c9 100644 --- 
a/apps/search/management/commands/index_stories.py +++ b/apps/search/management/commands/index_stories.py @@ -4,33 +4,33 @@ from django.contrib.auth.models import User from apps.rss_feeds.models import Feed, MStory from apps.reader.models import UserSubscription -class Command(BaseCommand): +class Command(BaseCommand): def add_arguments(self, parser): parser.add_argument("-u", "--user", dest="user", type=str, help="Specify user id or username") - parser.add_argument("-R", "--reindex", dest="reindex", action="store_true", help="Drop index and reindex all stories.") - + parser.add_argument( + "-R", "--reindex", dest="reindex", action="store_true", help="Drop index and reindex all stories." + ) def handle(self, *args, **options): - if options['reindex']: + if options["reindex"]: MStory.index_all_for_search() return - - if not options['user']: + + if not options["user"]: print("Missing user. Did you want to reindex everything? Use -R.") return - - if re.match(r"([0-9]+)", options['user']): - user = User.objects.get(pk=int(options['user'])) + + if re.match(r"([0-9]+)", options["user"]): + user = User.objects.get(pk=int(options["user"])) else: - user = User.objects.get(username=options['user']) - + user = User.objects.get(username=options["user"]) + subscriptions = UserSubscription.objects.filter(user=user) print(" ---> Indexing %s feeds..." % subscriptions.count()) - + for sub in subscriptions: try: sub.feed.index_stories_for_search() except Feed.DoesNotExist: print(" ***> Couldn't find %s" % sub.feed_id) - \ No newline at end of file diff --git a/apps/search/models.py b/apps/search/models.py index d4a6b3fe5..59479f874 100644 --- a/apps/search/models.py +++ b/apps/search/models.py @@ -17,32 +17,33 @@ from apps.search.tasks import IndexFeedsForSearch from utils import log as logging from utils.feed_functions import chunks + class MUserSearch(mongo.Document): - '''Search index state of a user's subscriptions.''' - user_id = mongo.IntField(unique=True) - last_search_date = mongo.DateTimeField() - subscriptions_indexed = mongo.BooleanField() - subscriptions_indexing = mongo.BooleanField() - + """Search index state of a user's subscriptions.""" + + user_id = mongo.IntField(unique=True) + last_search_date = mongo.DateTimeField() + subscriptions_indexed = mongo.BooleanField() + subscriptions_indexing = mongo.BooleanField() + meta = { - 'collection': 'user_search', - 'indexes': ['user_id'], - 'allow_inheritance': False, + "collection": "user_search", + "indexes": ["user_id"], + "allow_inheritance": False, } - + @classmethod def get_user(cls, user_id, create=True): try: - user_search = cls.objects.read_preference(pymongo.ReadPreference.PRIMARY)\ - .get(user_id=user_id) + user_search = cls.objects.read_preference(pymongo.ReadPreference.PRIMARY).get(user_id=user_id) except cls.DoesNotExist: if create: user_search = cls.objects.create(user_id=user_id) else: user_search = None - + return user_search - + def touch_search_date(self): if not self.subscriptions_indexed and not self.subscriptions_indexing: self.schedule_index_subscriptions_for_search() @@ -52,62 +53,63 @@ class MUserSearch(mongo.Document): self.save() def schedule_index_subscriptions_for_search(self): - IndexSubscriptionsForSearch.apply_async(kwargs=dict(user_id=self.user_id), - queue='search_indexer') - + IndexSubscriptionsForSearch.apply_async(kwargs=dict(user_id=self.user_id), queue="search_indexer") + # Should be run as a background task def index_subscriptions_for_search(self): from apps.rss_feeds.models import Feed from apps.reader.models 
import UserSubscription - + SearchStory.create_elasticsearch_mapping() - + start = time.time() user = User.objects.get(pk=self.user_id) r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL) - r.publish(user.username, 'search_index_complete:start') - - subscriptions = UserSubscription.objects.filter(user=user).only('feed') + r.publish(user.username, "search_index_complete:start") + + subscriptions = UserSubscription.objects.filter(user=user).only("feed") total = subscriptions.count() - + feed_ids = [] for sub in subscriptions: try: feed_ids.append(sub.feed.pk) except Feed.DoesNotExist: continue - + feed_id_chunks = [c for c in chunks(feed_ids, 6)] - logging.user(user, "~FCIndexing ~SB%s feeds~SN in %s chunks..." % - (total, len(feed_id_chunks))) - - search_chunks = [IndexSubscriptionsChunkForSearch.s(feed_ids=feed_id_chunk, - user_id=self.user_id - ).set(queue='search_indexer') - for feed_id_chunk in feed_id_chunks] - callback = FinishIndexSubscriptionsForSearch.s(user_id=self.user_id, - start=start).set(queue='search_indexer') + logging.user(user, "~FCIndexing ~SB%s feeds~SN in %s chunks..." % (total, len(feed_id_chunks))) + + search_chunks = [ + IndexSubscriptionsChunkForSearch.s(feed_ids=feed_id_chunk, user_id=self.user_id).set( + queue="search_indexer" + ) + for feed_id_chunk in feed_id_chunks + ] + callback = FinishIndexSubscriptionsForSearch.s(user_id=self.user_id, start=start).set( + queue="search_indexer" + ) celery.chord(search_chunks)(callback) def finish_index_subscriptions_for_search(self, start): from apps.reader.models import UserSubscription - + r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL) user = User.objects.get(pk=self.user_id) - subscriptions = UserSubscription.objects.filter(user=user).only('feed') + subscriptions = UserSubscription.objects.filter(user=user).only("feed") total = subscriptions.count() duration = time.time() - start - logging.user(user, "~FCIndexed ~SB%s feeds~SN in ~FM~SB%s~FC~SN sec." % - (total, round(duration, 2))) - r.publish(user.username, 'search_index_complete:done') - + logging.user(user, "~FCIndexed ~SB%s feeds~SN in ~FM~SB%s~FC~SN sec." % (total, round(duration, 2))) + r.publish(user.username, "search_index_complete:done") + self.subscriptions_indexed = True self.subscriptions_indexing = False self.save() - + def index_subscriptions_chunk_for_search(self, feed_ids): from apps.rss_feeds.models import Feed + r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL) user = User.objects.get(pk=self.user_id) @@ -115,40 +117,41 @@ class MUserSearch(mongo.Document): for feed_id in feed_ids: feed = Feed.get_by_id(feed_id) - if not feed: continue - + if not feed: + continue + feed.index_stories_for_search() - - r.publish(user.username, 'search_index_complete:feeds:%s' % - ','.join([str(f) for f in feed_ids])) - + + r.publish(user.username, "search_index_complete:feeds:%s" % ",".join([str(f) for f in feed_ids])) + @classmethod def schedule_index_feeds_for_search(cls, feed_ids, user_id): user_search = cls.get_user(user_id, create=False) - if (not user_search or - not user_search.subscriptions_indexed or - user_search.subscriptions_indexing): + if not user_search or not user_search.subscriptions_indexed or user_search.subscriptions_indexing: # User hasn't searched before. 
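# Illustrative sketch, not part of the patch: the chunked indexing above fans the
# feed ids out into small Celery subtasks and joins them with a chord, so a single
# callback fires once every chunk has been indexed. Task names, the broker URL and
# the chunk size below are hypothetical; only the chord(header)(callback) shape
# mirrors the code in this hunk.
from celery import Celery, chord

app = Celery("sketch", broker="redis://localhost:6379/0")  # assumed broker

@app.task
def index_chunk(feed_ids, user_id):
    # Placeholder for per-chunk indexing work.
    return len(feed_ids)

@app.task
def finish_indexing(results, user_id, start):
    # Chord callback: receives the list of per-chunk return values.
    return sum(results)

def dispatch_indexing(feed_ids, user_id, start, chunk_size=6):
    chunked = [feed_ids[i : i + chunk_size] for i in range(0, len(feed_ids), chunk_size)]
    header = [index_chunk.s(chunk, user_id).set(queue="search_indexer") for chunk in chunked]
    callback = finish_indexing.s(user_id=user_id, start=start).set(queue="search_indexer")
    return chord(header)(callback)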
return - + if not isinstance(feed_ids, list): feed_ids = [feed_ids] - IndexFeedsForSearch.apply_async(kwargs=dict(feed_ids=feed_ids, user_id=user_id), - queue='search_indexer') - + IndexFeedsForSearch.apply_async( + kwargs=dict(feed_ids=feed_ids, user_id=user_id), queue="search_indexer" + ) + @classmethod def index_feeds_for_search(cls, feed_ids, user_id): from apps.rss_feeds.models import Feed + user = User.objects.get(pk=user_id) logging.user(user, "~SB~FCIndexing %s~FC by request..." % feed_ids) for feed_id in feed_ids: feed = Feed.get_by_id(feed_id) - if not feed: continue - + if not feed: + continue + feed.index_stories_for_search() - + @classmethod def remove_all(cls, drop_index=False): # You only need to drop the index if there is data you want to clear. @@ -156,7 +159,7 @@ class MUserSearch(mongo.Document): if drop_index: logging.info(" ---> ~FRRemoving stories search index...") SearchStory.drop() - + user_searches = cls.objects.all() logging.info(" ---> ~SN~FRRemoving ~SB%s~SN user searches..." % user_searches.count()) for user_search in user_searches: @@ -164,7 +167,7 @@ class MUserSearch(mongo.Document): user_search.remove() except Exception as e: print(" ****> Error on search removal: %s" % e) - + def remove(self): from apps.rss_feeds.models import Feed from apps.reader.models import UserSubscription @@ -173,7 +176,7 @@ class MUserSearch(mongo.Document): subscriptions = UserSubscription.objects.filter(user=self.user_id) total = subscriptions.count() removed = 0 - + for sub in subscriptions: try: feed = sub.feed @@ -184,33 +187,36 @@ class MUserSearch(mongo.Document): feed.search_indexed = False feed.save() removed += 1 - - logging.user(user, "~FCRemoved ~SB%s/%s feed's search indexes~SN for ~SB~FB%s~FC~SN." % - (removed, total, user.username)) + + logging.user( + user, + "~FCRemoved ~SB%s/%s feed's search indexes~SN for ~SB~FB%s~FC~SN." 
+ % (removed, total, user.username), + ) self.delete() + class SearchStory: - _es_client = None name = "stories" - + @classmethod def ES(cls): if cls._es_client is None: cls._es_client = elasticsearch.Elasticsearch(settings.ELASTICSEARCH_STORY_HOST) cls.create_elasticsearch_mapping() return cls._es_client - + @classmethod def index_name(cls): return "%s-index" % cls.name - + @classmethod def doc_type(cls): - if settings.DOCKERBUILD or getattr(settings, 'ES_IGNORE_TYPE', True): + if settings.DOCKERBUILD or getattr(settings, "ES_IGNORE_TYPE", True): return None return "%s-type" % cls.name - + @classmethod def create_elasticsearch_mapping(cls, delete=False): if delete: @@ -222,83 +228,76 @@ class SearchStory: if cls.ES().indices.exists(cls.index_name()): return - + try: cls.ES().indices.create(cls.index_name()) logging.debug(" ---> ~FCCreating search index for ~FM%s" % cls.index_name()) except elasticsearch.exceptions.RequestError as e: logging.debug(" ***> ~FRCould not create search index for ~FM%s: %s" % (cls.index_name(), e)) return - except (elasticsearch.exceptions.ConnectionError, - urllib3.exceptions.NewConnectionError, - urllib3.exceptions.ConnectTimeoutError) as e: - logging.debug( - f" ***> ~FRNo search server available for creating story mapping: {e}") + except ( + elasticsearch.exceptions.ConnectionError, + urllib3.exceptions.NewConnectionError, + urllib3.exceptions.ConnectTimeoutError, + ) as e: + logging.debug(f" ***> ~FRNo search server available for creating story mapping: {e}") return mapping = { - 'title': { - 'store': False, - 'type': 'text', - 'analyzer': 'snowball', - "term_vector": "yes", - }, - 'content': { - 'store': False, - 'type': 'text', - 'analyzer': 'snowball', - "term_vector": "yes", - }, - 'tags': { - 'store': False, + "title": { + "store": False, "type": "text", - "fields": { - "raw": { - "type": "text", - "analyzer": "keyword", - "term_vector": "yes" - } - } + "analyzer": "snowball", + "term_vector": "yes", }, - 'author': { - 'store': False, - 'type': 'text', - 'analyzer': 'default', + "content": { + "store": False, + "type": "text", + "analyzer": "snowball", + "term_vector": "yes", }, - 'feed_id': { - 'store': False, - 'type': 'integer' + "tags": { + "store": False, + "type": "text", + "fields": {"raw": {"type": "text", "analyzer": "keyword", "term_vector": "yes"}}, + }, + "author": { + "store": False, + "type": "text", + "analyzer": "default", + }, + "feed_id": {"store": False, "type": "integer"}, + "date": { + "store": False, + "type": "date", }, - 'date': { - 'store': False, - 'type': 'date', - } } - cls.ES().indices.put_mapping(body={ - 'properties': mapping, - }, index=cls.index_name()) + cls.ES().indices.put_mapping( + body={ + "properties": mapping, + }, + index=cls.index_name(), + ) cls.ES().indices.flush(cls.index_name()) @classmethod - def index(cls, story_hash, story_title, story_content, story_tags, story_author, story_feed_id, - story_date): + def index( + cls, story_hash, story_title, story_content, story_tags, story_author, story_feed_id, story_date + ): cls.create_elasticsearch_mapping() doc = { "content": story_content, "title": story_title, - "tags": ', '.join(story_tags), + "tags": ", ".join(story_tags), "author": story_author, "feed_id": story_feed_id, "date": story_date, } try: - cls.ES().create(index=cls.index_name(), id=story_hash, - body=doc, doc_type=cls.doc_type()) - except (elasticsearch.exceptions.ConnectionError, - urllib3.exceptions.NewConnectionError) as e: - logging.debug( - f" ***> ~FRNo search server available for story 
indexing: {e}") + cls.ES().create(index=cls.index_name(), id=story_hash, body=doc, doc_type=cls.doc_type()) + except (elasticsearch.exceptions.ConnectionError, urllib3.exceptions.NewConnectionError) as e: + logging.debug(f" ***> ~FRNo search server available for story indexing: {e}") except elasticsearch.exceptions.ConflictError as e: logging.debug(f" ***> ~FBAlready indexed story: {e}") # if settings.DEBUG: @@ -312,10 +311,10 @@ class SearchStory: try: cls.ES().delete(index=cls.index_name(), id=story_hash, doc_type=cls.doc_type()) except elasticsearch.exceptions.NotFoundError: - cls.ES().delete(index=cls.index_name(), id=story_hash, doc_type='story-type') + cls.ES().delete(index=cls.index_name(), id=story_hash, doc_type="story-type") except elasticsearch.exceptions.NotFoundError as e: logging.debug(f" ***> ~FRNo search server available for story deletion: {e}") - + @classmethod def drop(cls): try: @@ -323,7 +322,6 @@ class SearchStory: except elasticsearch.exceptions.NotFoundError: logging.debug(" ***> ~FBNo index found, nothing to drop.") - @classmethod def query(cls, feed_ids, query, order, offset, limit, strip=False): try: @@ -331,26 +329,26 @@ class SearchStory: except elasticsearch.exceptions.NotFoundError as e: logging.debug(f" ***> ~FRNo search server available: {e}") return [] - + if strip: - query = re.sub(r'([^\s\w_\-])+', ' ', query) # Strip non-alphanumeric + query = re.sub(r"([^\s\w_\-])+", " ", query) # Strip non-alphanumeric query = html.unescape(query) body = { "query": { "bool": { "must": [ - {"query_string": { "query": query, "default_operator": "AND" }}, - {"terms": { "feed_id": feed_ids[:2000] }}, + {"query_string": {"query": query, "default_operator": "AND"}}, + {"terms": {"feed_id": feed_ids[:2000]}}, ] } }, - 'sort': [{'date': {'order': 'desc' if order == "newest" else "asc"}}], - 'from': offset, - 'size': limit + "sort": [{"date": {"order": "desc" if order == "newest" else "asc"}}], + "from": offset, + "size": limit, } try: - results = cls.ES().search(body=body, index=cls.index_name(), doc_type=cls.doc_type()) + results = cls.ES().search(body=body, index=cls.index_name(), doc_type=cls.doc_type()) except elasticsearch.exceptions.RequestError as e: logging.debug(" ***> ~FRNo search server available for querying: %s" % e) return [] @@ -373,44 +371,46 @@ class SearchStory: # logging.debug(" ***> ~FRNo search server available.") # return [] - logging.info(" ---> ~FG~SNSearch ~FCstories~FG for: ~SB%s~SN, ~SB%s~SN results (across %s feed%s)" % - (query, len(results['hits']['hits']), len(feed_ids), 's' if len(feed_ids) != 1 else '')) - + logging.info( + " ---> ~FG~SNSearch ~FCstories~FG for: ~SB%s~SN, ~SB%s~SN results (across %s feed%s)" + % (query, len(results["hits"]["hits"]), len(feed_ids), "s" if len(feed_ids) != 1 else "") + ) + try: - result_ids = [r['_id'] for r in results['hits']['hits']] + result_ids = [r["_id"] for r in results["hits"]["hits"]] except Exception as e: - logging.info(" ---> ~FRInvalid search query \"%s\": %s" % (query, e)) + logging.info(' ---> ~FRInvalid search query "%s": %s' % (query, e)) return [] - + return result_ids - + @classmethod def global_query(cls, query, order, offset, limit, strip=False): cls.create_elasticsearch_mapping() cls.ES().indices.flush() - + if strip: - query = re.sub(r'([^\s\w_\-])+', ' ', query) # Strip non-alphanumeric + query = re.sub(r"([^\s\w_\-])+", " ", query) # Strip non-alphanumeric query = html.unescape(query) body = { "query": { "bool": { "must": [ - {"query_string": { "query": query, "default_operator": "AND" 
}}, + {"query_string": {"query": query, "default_operator": "AND"}}, ] } }, - 'sort': [{'date': {'order': 'desc' if order == "newest" else "asc"}}], - 'from': offset, - 'size': limit + "sort": [{"date": {"order": "desc" if order == "newest" else "asc"}}], + "from": offset, + "size": limit, } try: - results = cls.ES().search(body=body, index=cls.index_name(), doc_type=cls.doc_type()) + results = cls.ES().search(body=body, index=cls.index_name(), doc_type=cls.doc_type()) except elasticsearch.exceptions.RequestError as e: logging.debug(" ***> ~FRNo search server available for querying: %s" % e) return [] - + # sort = "date:desc" if order == "newest" else "date:asc" # string_q = pyes.query.QueryStringQuery(query, default_operator="AND") # try: @@ -420,17 +420,16 @@ class SearchStory: # logging.debug(" ***> ~FRNo search server available.") # return [] - logging.info(" ---> ~FG~SNSearch ~FCstories~FG for: ~SB%s~SN (across all feeds)" % - (query)) - + logging.info(" ---> ~FG~SNSearch ~FCstories~FG for: ~SB%s~SN (across all feeds)" % (query)) + try: - result_ids = [r['_id'] for r in results['hits']['hits']] + result_ids = [r["_id"] for r in results["hits"]["hits"]] except Exception as e: - logging.info(" ---> ~FRInvalid search query \"%s\": %s" % (query, e)) + logging.info(' ---> ~FRInvalid search query "%s": %s' % (query, e)) return [] - + return result_ids - + @classmethod def more_like_this(cls, feed_ids, story_hash, order, offset, limit): try: @@ -438,52 +437,54 @@ class SearchStory: except elasticsearch.exceptions.NotFoundError as e: logging.debug(f" ***> ~FRNo search server available: {e}") return [] - + body = { "query": { "bool": { - "filter": [{ - "more_like_this": { - "fields": [ "title", "content" ], - "like": [ - { - "_index": cls.index_name(), - "_id": story_hash, - } - ], - "min_term_freq": 3, - "min_doc_freq": 2, - "min_word_length": 4, + "filter": [ + { + "more_like_this": { + "fields": ["title", "content"], + "like": [ + { + "_index": cls.index_name(), + "_id": story_hash, + } + ], + "min_term_freq": 3, + "min_doc_freq": 2, + "min_word_length": 4, + }, }, - },{ - "terms": { "feed_id": feed_ids[:2000] } - }], + {"terms": {"feed_id": feed_ids[:2000]}}, + ], } }, - 'sort': [{'date': {'order': 'desc' if order == "newest" else "asc"}}], - 'from': offset, - 'size': limit + "sort": [{"date": {"order": "desc" if order == "newest" else "asc"}}], + "from": offset, + "size": limit, } try: - results = cls.ES().search(body=body, index=cls.index_name(), doc_type=cls.doc_type()) + results = cls.ES().search(body=body, index=cls.index_name(), doc_type=cls.doc_type()) except elasticsearch.exceptions.RequestError as e: logging.debug(" ***> ~FRNo search server available for querying: %s" % e) return [] - logging.info(" ---> ~FG~SNMore like this ~FCstories~FG for: ~SB%s~SN, ~SB%s~SN results (across %s feed%s)" % - (story_hash, len(results['hits']['hits']), len(feed_ids), 's' if len(feed_ids) != 1 else '')) - + logging.info( + " ---> ~FG~SNMore like this ~FCstories~FG for: ~SB%s~SN, ~SB%s~SN results (across %s feed%s)" + % (story_hash, len(results["hits"]["hits"]), len(feed_ids), "s" if len(feed_ids) != 1 else "") + ) + try: - result_ids = [r['_id'] for r in results['hits']['hits']] + result_ids = [r["_id"] for r in results["hits"]["hits"]] except Exception as e: - logging.info(" ---> ~FRInvalid search query \"%s\": %s" % (query, e)) + logging.info(' ---> ~FRInvalid search query "%s": %s' % (query, e)) return [] - + return result_ids class SearchFeed: - _es_client = None name = "feeds" @@ -493,18 
+494,18 @@ class SearchFeed: cls._es_client = elasticsearch.Elasticsearch(settings.ELASTICSEARCH_FEED_HOST) cls.create_elasticsearch_mapping() return cls._es_client - + @classmethod def index_name(cls): # feeds-index return "%s-index" % cls.name - + @classmethod def doc_type(cls): - if settings.DOCKERBUILD or getattr(settings, 'ES_IGNORE_TYPE', True): + if settings.DOCKERBUILD or getattr(settings, "ES_IGNORE_TYPE", True): return None return "%s-type" % cls.name - + @classmethod def create_elasticsearch_mapping(cls, delete=False): if delete: @@ -518,22 +519,18 @@ class SearchFeed: return index_settings = { - "index" : { + "index": { "analysis": { "analyzer": { "edgengram_analyzer": { "filter": ["edgengram_analyzer"], "tokenizer": "lowercase", - "type": "custom" + "type": "custom", }, }, "filter": { - "edgengram_analyzer": { - "max_gram": "15", - "min_gram": "1", - "type": "edge_ngram" - }, - } + "edgengram_analyzer": {"max_gram": "15", "min_gram": "1", "type": "edge_ngram"}, + }, } } } @@ -544,43 +541,42 @@ class SearchFeed: except elasticsearch.exceptions.RequestError as e: logging.debug(" ***> ~FRCould not create search index for ~FM%s: %s" % (cls.index_name(), e)) return - except (elasticsearch.exceptions.ConnectionError, - urllib3.exceptions.NewConnectionError, - urllib3.exceptions.ConnectTimeoutError) as e: + except ( + elasticsearch.exceptions.ConnectionError, + urllib3.exceptions.NewConnectionError, + urllib3.exceptions.ConnectTimeoutError, + ) as e: logging.debug(f" ***> ~FRNo search server available for creating feed mapping: {e}") return - + mapping = { "feed_address": { - 'analyzer': 'snowball', + "analyzer": "snowball", "store": False, "term_vector": "with_positions_offsets", - "type": "text" - }, - "feed_id": { - "store": True, - "type": "text" - }, - "num_subscribers": { - "store": True, - "type": "long" + "type": "text", }, + "feed_id": {"store": True, "type": "text"}, + "num_subscribers": {"store": True, "type": "long"}, "title": { "analyzer": "snowball", "store": False, "term_vector": "with_positions_offsets", - "type": "text" + "type": "text", }, "link": { "analyzer": "snowball", "store": False, "term_vector": "with_positions_offsets", - "type": "text" - } + "type": "text", + }, } - cls.ES().indices.put_mapping(body={ - 'properties': mapping, - }, index=cls.index_name()) + cls.ES().indices.put_mapping( + body={ + "properties": mapping, + }, + index=cls.index_name(), + ) cls.ES().indices.flush(cls.index_name()) @classmethod @@ -594,8 +590,7 @@ class SearchFeed: } try: cls.ES().create(index=cls.index_name(), id=feed_id, body=doc, doc_type=cls.doc_type()) - except (elasticsearch.exceptions.ConnectionError, - urllib3.exceptions.NewConnectionError) as e: + except (elasticsearch.exceptions.ConnectionError, urllib3.exceptions.NewConnectionError) as e: logging.debug(f" ***> ~FRNo search server available for feed indexing: {e}") @classmethod @@ -615,21 +610,45 @@ class SearchFeed: if settings.DEBUG: max_subscribers = 1 - + body = { "query": { "bool": { "should": [ - {"match": { "address": { "query": text, 'cutoff_frequency': "0.0005", 'minimum_should_match': "75%" } }}, - {"match": { "title": { "query": text, 'cutoff_frequency': "0.0005", 'minimum_should_match': "75%" } }}, - {"match": { "link": { "query": text, 'cutoff_frequency': "0.0005", 'minimum_should_match': "75%" } }}, + { + "match": { + "address": { + "query": text, + "cutoff_frequency": "0.0005", + "minimum_should_match": "75%", + } + } + }, + { + "match": { + "title": { + "query": text, + "cutoff_frequency": "0.0005", + 
"minimum_should_match": "75%", + } + } + }, + { + "match": { + "link": { + "query": text, + "cutoff_frequency": "0.0005", + "minimum_should_match": "75%", + } + } + }, ] } }, - 'sort': [{'num_subscribers': {'order': 'desc'}}], + "sort": [{"num_subscribers": {"order": "desc"}}], } try: - results = cls.ES().search(body=body, index=cls.index_name(), doc_type=cls.doc_type()) + results = cls.ES().search(body=body, index=cls.index_name(), doc_type=cls.doc_type()) except elasticsearch.exceptions.RequestError as e: logging.debug(" ***> ~FRNo search server available for querying: %s" % e) return [] @@ -651,19 +670,22 @@ class SearchFeed: # q.add_should(pyes.query.MatchQuery('title', text, analyzer="simple", cutoff_frequency=0.0005, minimum_should_match="75%")) # q = pyes.Search(q, min_score=1) # results = cls.ES().search(query=q, size=max_subscribers, sort="num_subscribers:desc") - - logging.info("~FGSearch ~FCfeeds~FG: ~SB%s~SN, ~SB%s~SN results" % (text, len(results['hits']['hits']))) - return results['hits']['hits'] - + logging.info( + "~FGSearch ~FCfeeds~FG: ~SB%s~SN, ~SB%s~SN results" % (text, len(results["hits"]["hits"])) + ) + + return results["hits"]["hits"] + @classmethod def export_csv(cls): import djqscsv from apps.rss_feeds.models import Feed - qs = Feed.objects.filter(num_subscribers__gte=20).values('id', 'feed_title', 'feed_address', 'feed_link', 'num_subscribers') + qs = Feed.objects.filter(num_subscribers__gte=20).values( + "id", "feed_title", "feed_address", "feed_link", "num_subscribers" + ) csv = djqscsv.render_to_csv_response(qs).content - f = open('feeds.csv', 'w+') + f = open("feeds.csv", "w+") f.write(csv) f.close() - diff --git a/apps/search/tasks.py b/apps/search/tasks.py index 3ae7acf84..d56c892cf 100644 --- a/apps/search/tasks.py +++ b/apps/search/tasks.py @@ -1,27 +1,31 @@ from newsblur_web.celeryapp import app from utils import log as logging + @app.task() def IndexSubscriptionsForSearch(user_id): from apps.search.models import MUserSearch - + user_search = MUserSearch.get_user(user_id) user_search.index_subscriptions_for_search() + @app.task() def IndexSubscriptionsChunkForSearch(feed_ids, user_id): logging.debug(" ---> Indexing: %s for %s" % (feed_ids, user_id)) from apps.search.models import MUserSearch - + user_search = MUserSearch.get_user(user_id) user_search.index_subscriptions_chunk_for_search(feed_ids) + @app.task() def IndexFeedsForSearch(feed_ids, user_id): from apps.search.models import MUserSearch - + MUserSearch.index_feeds_for_search(feed_ids, user_id) + @app.task() def FinishIndexSubscriptionsForSearch(results, user_id, start): logging.debug(" ---> Indexing finished for %s" % (user_id)) diff --git a/apps/search/urls.py b/apps/search/urls.py index e29e860a9..fcc841b86 100644 --- a/apps/search/urls.py +++ b/apps/search/urls.py @@ -3,5 +3,5 @@ from apps.search import views urlpatterns = [ # url(r'^$', views.index), - url(r'^more_like_this', views.more_like_this, name='more-like-this'), + url(r"^more_like_this", views.more_like_this, name="more-like-this"), ] diff --git a/apps/search/views.py b/apps/search/views.py index e06731571..37630b5ee 100644 --- a/apps/search/views.py +++ b/apps/search/views.py @@ -5,23 +5,24 @@ from utils import json_functions as json from utils.view_functions import required_params from utils.user_functions import get_user, ajax_login_required + # @required_params('story_hash') @json.json_view def more_like_this(request): user = get_user(request) get_post = getattr(request, request.method) - order = get_post.get('order', 'newest') - 
page = int(get_post.get('page', 1)) - limit = int(get_post.get('limit', 10)) - offset = limit * (page-1) - story_hash = get_post.get('story_hash') - + order = get_post.get("order", "newest") + page = int(get_post.get("page", 1)) + limit = int(get_post.get("limit", 10)) + offset = limit * (page - 1) + story_hash = get_post.get("story_hash") + feed_ids = [us.feed_id for us in UserSubscription.objects.filter(user=user)] feed_ids, _ = MStory.split_story_hash(story_hash) story_ids = SearchStory.more_like_this([feed_ids], story_hash, order, offset=offset, limit=limit) - stories_db = MStory.objects( - story_hash__in=story_ids - ).order_by('-story_date' if order == "newest" else 'story_date') + stories_db = MStory.objects(story_hash__in=story_ids).order_by( + "-story_date" if order == "newest" else "story_date" + ) stories = Feed.format_stories(stories_db) return { diff --git a/apps/social/management/commands/popular_stories.py b/apps/social/management/commands/popular_stories.py index 93cb394e0..093dc049a 100644 --- a/apps/social/management/commands/popular_stories.py +++ b/apps/social/management/commands/popular_stories.py @@ -1,7 +1,7 @@ from django.core.management.base import BaseCommand from apps.social.models import MSharedStory -class Command(BaseCommand): +class Command(BaseCommand): def handle(self, *args, **options): - MSharedStory.share_popular_stories() \ No newline at end of file + MSharedStory.share_popular_stories() diff --git a/apps/social/migrations/0001_username_unique.py b/apps/social/migrations/0001_username_unique.py index f9f45fb47..ba55f6399 100644 --- a/apps/social/migrations/0001_username_unique.py +++ b/apps/social/migrations/0001_username_unique.py @@ -4,19 +4,17 @@ from django.db import migrations from django.conf import settings import pymongo + def remove_unique_index(apps, schema_editor): - social_profile = sp = settings.MONGODB[settings.MONGO_DB_NAME].social_profile + social_profile = sp = settings.MONGODB[settings.MONGO_DB_NAME].social_profile try: - social_profile.drop_index('username_1') + social_profile.drop_index("username_1") except pymongo.errors.OperationFailure: print(" ***> Couldn't delete username_1 index on social_profile collection. 
Already deleted?") pass + class Migration(migrations.Migration): + dependencies = [] - dependencies = [ - ] - - operations = [ - migrations.RunPython(remove_unique_index) - ] + operations = [migrations.RunPython(remove_unique_index)] diff --git a/apps/social/models.py b/apps/social/models.py index 2b62e679b..f4950a6a2 100644 --- a/apps/social/models.py +++ b/apps/social/models.py @@ -26,7 +26,12 @@ from django.core.mail import EmailMultiAlternatives from django.utils.encoding import smart_bytes, smart_str from apps.reader.models import UserSubscription, RUserStory from apps.analyzer.models import MClassifierFeed, MClassifierAuthor, MClassifierTag, MClassifierTitle -from apps.analyzer.models import apply_classifier_titles, apply_classifier_feeds, apply_classifier_authors, apply_classifier_tags +from apps.analyzer.models import ( + apply_classifier_titles, + apply_classifier_feeds, + apply_classifier_authors, + apply_classifier_tags, +) from apps.rss_feeds.models import Feed, MStory from apps.rss_feeds.text_importer import TextImporter from apps.rss_feeds.page_importer import PageImporter @@ -47,36 +52,35 @@ except ImportError: pass RECOMMENDATIONS_LIMIT = 5 -IGNORE_IMAGE_SOURCES = [ - "http://feeds.feedburner.com" -] +IGNORE_IMAGE_SOURCES = ["http://feeds.feedburner.com"] + class MRequestInvite(mongo.Document): - email = mongo.EmailField() - request_date = mongo.DateTimeField(default=datetime.datetime.now) - invite_sent = mongo.BooleanField(default=False) + email = mongo.EmailField() + request_date = mongo.DateTimeField(default=datetime.datetime.now) + invite_sent = mongo.BooleanField(default=False) invite_sent_date = mongo.DateTimeField() meta = { - 'collection': 'social_invites', - 'allow_inheritance': False, + "collection": "social_invites", + "allow_inheritance": False, } - + def __str__(self): - return "%s%s" % (self.email, '*' if self.invite_sent else '') - + return "%s%s" % (self.email, "*" if self.invite_sent else "") + @classmethod def blast(cls): invites = cls.objects.filter(email_sent=None) - print(' ---> Found %s invites...' % invites.count()) - + print(" ---> Found %s invites..." % invites.count()) + for invite in invites: try: invite.send_email() except: - print(' ***> Could not send invite to: %s. Deleting.' % invite.username) + print(" ***> Could not send invite to: %s. Deleting." % invite.username) invite.delete() - + def send_email(self): user = User.objects.filter(username__iexact=self.username) if not user: @@ -86,84 +90,88 @@ class MRequestInvite(mongo.Document): email = user.email or self.username else: user = { - 'username': self.username, - 'profile': { - 'autologin_url': '/', - } + "username": self.username, + "profile": { + "autologin_url": "/", + }, } email = self.username params = { - 'user': user, + "user": user, } - text = render_to_string('mail/email_social_beta.txt', params) - html = render_to_string('mail/email_social_beta.xhtml', params) + text = render_to_string("mail/email_social_beta.txt", params) + html = render_to_string("mail/email_social_beta.xhtml", params) subject = "Psst, you're in..." 
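# Illustrative sketch, not part of the patch: the invite mail above is a standard
# Django multipart message, plain text as the body with an HTML alternative attached.
# The template names and addresses here are hypothetical stand-ins.
from django.core.mail import EmailMultiAlternatives
from django.template.loader import render_to_string

def send_invite_email(recipient, params):
    text = render_to_string("mail/example_invite.txt", params)    # assumed template
    html = render_to_string("mail/example_invite.xhtml", params)  # assumed template
    msg = EmailMultiAlternatives(
        subject="You're in",
        body=text,
        from_email="Example <hello@example.com>",
        to=[recipient],
    )
    msg.attach_alternative(html, "text/html")  # HTML-capable clients render this part
    return msg.send()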
- msg = EmailMultiAlternatives(subject, text, - from_email='NewsBlur <%s>' % settings.HELLO_EMAIL, - to=['<%s>' % (email)]) + msg = EmailMultiAlternatives( + subject, text, from_email="NewsBlur <%s>" % settings.HELLO_EMAIL, to=["<%s>" % (email)] + ) msg.attach_alternative(html, "text/html") msg.send() - + self.email_sent = True self.save() - + logging.debug(" ---> ~BB~FM~SBSending email for social beta: %s" % self.username) class MSocialProfile(mongo.Document): - user_id = mongo.IntField(unique=True) - username = mongo.StringField(max_length=30) - email = mongo.StringField() - bio = mongo.StringField(max_length=160) - blurblog_title = mongo.StringField(max_length=256) - custom_bgcolor = mongo.StringField(max_length=50) - custom_css = mongo.StringField() - photo_url = mongo.StringField() - photo_service = mongo.StringField() - location = mongo.StringField(max_length=40) - website = mongo.StringField(max_length=200) - bb_permalink_direct = mongo.BooleanField() - subscription_count = mongo.IntField(default=0) + user_id = mongo.IntField(unique=True) + username = mongo.StringField(max_length=30) + email = mongo.StringField() + bio = mongo.StringField(max_length=160) + blurblog_title = mongo.StringField(max_length=256) + custom_bgcolor = mongo.StringField(max_length=50) + custom_css = mongo.StringField() + photo_url = mongo.StringField() + photo_service = mongo.StringField() + location = mongo.StringField(max_length=40) + website = mongo.StringField(max_length=200) + bb_permalink_direct = mongo.BooleanField() + subscription_count = mongo.IntField(default=0) shared_stories_count = mongo.IntField(default=0) - following_count = mongo.IntField(default=0) - follower_count = mongo.IntField(default=0) - following_user_ids = mongo.ListField(mongo.IntField()) - follower_user_ids = mongo.ListField(mongo.IntField()) - unfollowed_user_ids = mongo.ListField(mongo.IntField()) + following_count = mongo.IntField(default=0) + follower_count = mongo.IntField(default=0) + following_user_ids = mongo.ListField(mongo.IntField()) + follower_user_ids = mongo.ListField(mongo.IntField()) + unfollowed_user_ids = mongo.ListField(mongo.IntField()) requested_follow_user_ids = mongo.ListField(mongo.IntField()) - muting_user_ids = mongo.ListField(mongo.IntField()) - muted_by_user_ids = mongo.ListField(mongo.IntField()) - popular_publishers = mongo.StringField() - stories_last_month = mongo.IntField(default=0) + muting_user_ids = mongo.ListField(mongo.IntField()) + muted_by_user_ids = mongo.ListField(mongo.IntField()) + popular_publishers = mongo.StringField() + stories_last_month = mongo.IntField(default=0) average_stories_per_month = mongo.IntField(default=0) - story_count_history = mongo.ListField() - story_days_history = mongo.DictField() - story_hours_history = mongo.DictField() - story_email_history = mongo.ListField() + story_count_history = mongo.ListField() + story_days_history = mongo.DictField() + story_hours_history = mongo.DictField() + story_email_history = mongo.ListField() feed_classifier_counts = mongo.DictField() - favicon_color = mongo.StringField(max_length=6) - protected = mongo.BooleanField() - private = mongo.BooleanField() - + favicon_color = mongo.StringField(max_length=6) + protected = mongo.BooleanField() + private = mongo.BooleanField() + meta = { - 'collection': 'social_profile', - 'indexes': [ - 'user_id', - 'username', - 'following_user_ids', - 'follower_user_ids', - 'unfollowed_user_ids', - 'requested_follow_user_ids', - 'muting_user_ids', - 'muted_by_user_ids', + "collection": "social_profile", 
+ "indexes": [ + "user_id", + "username", + "following_user_ids", + "follower_user_ids", + "unfollowed_user_ids", + "requested_follow_user_ids", + "muting_user_ids", + "muted_by_user_ids", ], - 'allow_inheritance': False, + "allow_inheritance": False, } - + def __str__(self): - return "%s following %s/%s, shared %s" % (self.user, - self.following_count, self.follower_count, self.shared_stories_count) - + return "%s following %s/%s, shared %s" % ( + self.user, + self.following_count, + self.follower_count, + self.shared_stories_count, + ) + @classmethod def get_user(cls, user_id): try: @@ -176,7 +184,7 @@ class MSocialProfile(mongo.Document): profile.save() return profile - + @property def user(self): try: @@ -199,35 +207,38 @@ class MSocialProfile(mongo.Document): self.location = strip_tags(self.location) if self.custom_css: self.custom_css = strip_tags(self.custom_css) - + super(MSocialProfile, self).save(*args, **kwargs) if self.user_id not in self.following_user_ids: self.follow_user(self.user_id, force=True) self.count_follows() - + return self - + @property def blurblog_url(self): - return "https://%s.%s/" % ( - self.username_slug, - Site.objects.get_current().domain.replace('www.', '')) - + return "https://%s.%s/" % (self.username_slug, Site.objects.get_current().domain.replace("www.", "")) + @property def blurblog_rss(self): - return "%s%s" % (self.blurblog_url, reverse('shared-stories-rss-feed', - kwargs={'user_id': self.user_id, - 'username': self.username_slug})) + return "%s%s" % ( + self.blurblog_url, + reverse( + "shared-stories-rss-feed", kwargs={"user_id": self.user_id, "username": self.username_slug} + ), + ) def find_stories(self, query, offset=0, limit=25): stories_db = MSharedStory.objects( - Q(user_id=self.user_id) & - (Q(story_title__icontains=query) | - Q(story_author_name__icontains=query) | - Q(story_tags__icontains=query)) - ).order_by('-shared_date')[offset:offset+limit] + Q(user_id=self.user_id) + & ( + Q(story_title__icontains=query) + | Q(story_author_name__icontains=query) + | Q(story_tags__icontains=query) + ) + ).order_by("-shared_date")[offset : offset + limit] stories = Feed.format_stories(stories_db) - + return stories def recommended_users(self): @@ -235,13 +246,21 @@ class MSocialProfile(mongo.Document): following_key = "F:%s:F" % (self.user_id) social_follow_key = "FF:%s:F" % (self.user_id) profile_user_ids = [] - + # Find potential twitter/fb friends services = MSocialServices.get_user(self.user_id) - facebook_user_ids = [u.user_id for u in - MSocialServices.objects.filter(facebook_uid__in=services.facebook_friend_ids).only('user_id')] - twitter_user_ids = [u.user_id for u in - MSocialServices.objects.filter(twitter_uid__in=services.twitter_friend_ids).only('user_id')] + facebook_user_ids = [ + u.user_id + for u in MSocialServices.objects.filter(facebook_uid__in=services.facebook_friend_ids).only( + "user_id" + ) + ] + twitter_user_ids = [ + u.user_id + for u in MSocialServices.objects.filter(twitter_uid__in=services.twitter_friend_ids).only( + "user_id" + ) + ] social_user_ids = facebook_user_ids + twitter_user_ids # Find users not currently followed by this user r.delete(social_follow_key) @@ -251,10 +270,10 @@ class MSocialProfile(mongo.Document): nonfriend_user_ids = r.sdiff(social_follow_key, following_key) profile_user_ids = [int(f) for f in nonfriend_user_ids] r.delete(social_follow_key) - + # Not enough? Grab popular users. 
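# Illustrative sketch, not part of the patch: the recommendation pass above stages
# candidate ids (friends found via connected social services) in a temporary Redis
# set and diffs it against the "already following" set, so only not-yet-followed
# users survive. Key names below are hypothetical; the code above uses its own
# "F:<user_id>:F" / "FF:<user_id>:F" scheme.
import redis

def not_yet_followed(r: redis.Redis, user_id, candidate_ids):
    following_key = f"following:{user_id}"     # assumed: set of ids the user follows
    staging_key = f"candidates:{user_id}:tmp"  # scratch set, removed when done
    if not candidate_ids:
        return []
    r.delete(staging_key)
    r.sadd(staging_key, *candidate_ids)
    leftover = r.sdiff(staging_key, following_key)  # candidates minus already-followed
    r.delete(staging_key)
    return [int(uid) for uid in leftover]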
if len(nonfriend_user_ids) < RECOMMENDATIONS_LIMIT: - homepage_user = User.objects.get(username='popular') + homepage_user = User.objects.get(username="popular") suggested_users_list = r.sdiff("F:%s:F" % homepage_user.pk, following_key) suggested_users_list = [int(f) for f in suggested_users_list] suggested_user_ids = [] @@ -262,32 +281,40 @@ class MSocialProfile(mongo.Document): for slot in range(slots_left): suggested_user_ids.append(random.choice(suggested_users_list)) profile_user_ids.extend(suggested_user_ids) - + # Sort by shared story count - profiles = MSocialProfile.profiles(profile_user_ids).order_by('-shared_stories_count')[:RECOMMENDATIONS_LIMIT] + profiles = MSocialProfile.profiles(profile_user_ids).order_by("-shared_stories_count")[ + :RECOMMENDATIONS_LIMIT + ] return profiles - + @property def username_slug(self): return slugify(self.user.username if self.user else "[deleted]") - + def count_stories(self): # Popular Publishers self.save_popular_publishers() - + def save_popular_publishers(self, feed_publishers=None): if not feed_publishers: publishers = defaultdict(int) - for story in MSharedStory.objects(user_id=self.user_id).only('story_feed_id')[:500]: + for story in MSharedStory.objects(user_id=self.user_id).only("story_feed_id")[:500]: publishers[story.story_feed_id] += 1 - feed_titles = dict((f.id, f.feed_title) - for f in Feed.objects.filter(pk__in=list(publishers.keys())).only('id', 'feed_title')) - feed_publishers = sorted([{'id': k, 'feed_title': feed_titles[k], 'story_count': v} - for k, v in list(publishers.items()) - if k in feed_titles], - key=lambda f: f['story_count'], - reverse=True)[:20] + feed_titles = dict( + (f.id, f.feed_title) + for f in Feed.objects.filter(pk__in=list(publishers.keys())).only("id", "feed_title") + ) + feed_publishers = sorted( + [ + {"id": k, "feed_title": feed_titles[k], "story_count": v} + for k, v in list(publishers.items()) + if k in feed_titles + ], + key=lambda f: f["story_count"], + reverse=True, + )[:20] popular_publishers = json.encode(feed_publishers) if len(popular_publishers) < 1023: @@ -297,12 +324,12 @@ class MSocialProfile(mongo.Document): if len(popular_publishers) > 1: self.save_popular_publishers(feed_publishers=feed_publishers[:-1]) - + @classmethod def profile(cls, user_id, include_follows=True): profile = cls.get_user(user_id) return profile.canonical(include_follows=True) - + @classmethod def profiles(cls, user_ids): profiles = cls.objects.filter(user_id__in=user_ids) @@ -313,148 +340,180 @@ class MSocialProfile(mongo.Document): profiles = cls.objects.filter(user_id__in=user_ids) profiles = dict((p.user_id, p.feed()) for p in profiles) return profiles - + @classmethod def sync_all_redis(cls): for profile in cls.objects.all(): profile.sync_redis(force=True) - + def sync_redis(self, force=False): self.following_user_ids = list(set(self.following_user_ids)) self.save() - + for user_id in self.following_user_ids: self.follow_user(user_id, force=force) - + self.follow_user(self.user_id, force=force) - + @property def title(self): - return self.blurblog_title if self.blurblog_title else (self.user.username if self.user else "[deleted]") + "'s blurblog" - + return ( + self.blurblog_title + if self.blurblog_title + else (self.user.username if self.user else "[deleted]") + "'s blurblog" + ) + def feed(self): params = self.canonical(compact=True) - params.update({ - 'feed_title': self.title, - 'page_url': reverse('load-social-page', kwargs={'user_id': self.user_id, 'username': self.username_slug}), - 
'shared_stories_count': self.shared_stories_count, - }) + params.update( + { + "feed_title": self.title, + "page_url": reverse( + "load-social-page", kwargs={"user_id": self.user_id, "username": self.username_slug} + ), + "shared_stories_count": self.shared_stories_count, + } + ) return params - + def page(self): params = self.canonical(include_follows=True) - params.update({ - 'feed_title': self.title, - 'custom_css': self.custom_css, - }) + params.update( + { + "feed_title": self.title, + "custom_css": self.custom_css, + } + ) return params - + @property def profile_photo_url(self): if self.photo_url: return self.photo_url - return settings.MEDIA_URL + 'img/reader/default_profile_photo.png' - + return settings.MEDIA_URL + "img/reader/default_profile_photo.png" + @property def large_photo_url(self): photo_url = self.email_photo_url - if 'graph.facebook.com' in photo_url: - return photo_url + '?type=large' - elif 'twimg' in photo_url: - return photo_url.replace('_normal', '') - elif '/avatars/' in photo_url: - return photo_url.replace('thumbnail_', 'large_') + if "graph.facebook.com" in photo_url: + return photo_url + "?type=large" + elif "twimg" in photo_url: + return photo_url.replace("_normal", "") + elif "/avatars/" in photo_url: + return photo_url.replace("thumbnail_", "large_") return photo_url - + @property def email_photo_url(self): if self.photo_url: - if self.photo_url.startswith('//'): - self.photo_url = 'https:' + self.photo_url + if self.photo_url.startswith("//"): + self.photo_url = "https:" + self.photo_url return self.photo_url domain = Site.objects.get_current().domain - return 'https://' + domain + settings.MEDIA_URL + 'img/reader/default_profile_photo.png' - - def canonical(self, compact=False, include_follows=False, common_follows_with_user=None, - include_settings=False, include_following_user=None): + return "https://" + domain + settings.MEDIA_URL + "img/reader/default_profile_photo.png" + + def canonical( + self, + compact=False, + include_follows=False, + common_follows_with_user=None, + include_settings=False, + include_following_user=None, + ): domain = Site.objects.get_current().domain params = { - 'id': 'social:%s' % self.user_id, - 'user_id': self.user_id, - 'username': self.user.username if self.user else "[deleted]", - 'photo_url': self.email_photo_url, - 'large_photo_url': self.large_photo_url, - 'location': self.location, - 'num_subscribers': self.follower_count, - 'feed_title': self.title, - 'feed_address': "http://%s%s" % (domain, reverse('shared-stories-rss-feed', - kwargs={'user_id': self.user_id, 'username': self.username_slug})), - 'feed_link': self.blurblog_url, - 'protected': self.protected, - 'private': self.private, - 'active': True, + "id": "social:%s" % self.user_id, + "user_id": self.user_id, + "username": self.user.username if self.user else "[deleted]", + "photo_url": self.email_photo_url, + "large_photo_url": self.large_photo_url, + "location": self.location, + "num_subscribers": self.follower_count, + "feed_title": self.title, + "feed_address": "http://%s%s" + % ( + domain, + reverse( + "shared-stories-rss-feed", + kwargs={"user_id": self.user_id, "username": self.username_slug}, + ), + ), + "feed_link": self.blurblog_url, + "protected": self.protected, + "private": self.private, + "active": True, } if not compact: - params.update({ - 'large_photo_url': self.large_photo_url, - 'bio': self.bio, - 'website': self.website, - 'shared_stories_count': self.shared_stories_count, - 'following_count': self.following_count, - 'follower_count': 
self.follower_count, - 'popular_publishers': json.decode(self.popular_publishers), - 'stories_last_month': self.stories_last_month, - 'average_stories_per_month': self.average_stories_per_month, - }) + params.update( + { + "large_photo_url": self.large_photo_url, + "bio": self.bio, + "website": self.website, + "shared_stories_count": self.shared_stories_count, + "following_count": self.following_count, + "follower_count": self.follower_count, + "popular_publishers": json.decode(self.popular_publishers), + "stories_last_month": self.stories_last_month, + "average_stories_per_month": self.average_stories_per_month, + } + ) if include_settings: - params.update({ - 'custom_css': self.custom_css, - 'custom_bgcolor': self.custom_bgcolor, - 'bb_permalink_direct': self.bb_permalink_direct, - }) + params.update( + { + "custom_css": self.custom_css, + "custom_bgcolor": self.custom_bgcolor, + "bb_permalink_direct": self.bb_permalink_direct, + } + ) if include_follows: - params.update({ - 'photo_service': self.photo_service, - 'following_user_ids': self.following_user_ids_without_self[:48], - 'follower_user_ids': self.follower_user_ids_without_self[:48], - }) + params.update( + { + "photo_service": self.photo_service, + "following_user_ids": self.following_user_ids_without_self[:48], + "follower_user_ids": self.follower_user_ids_without_self[:48], + } + ) if common_follows_with_user: FOLLOWERS_LIMIT = 128 with_user = MSocialProfile.get_user(common_follows_with_user) - followers_youknow, followers_everybody = with_user.common_follows(self.user_id, direction='followers') - following_youknow, following_everybody = with_user.common_follows(self.user_id, direction='following') - params['followers_youknow'] = followers_youknow[:FOLLOWERS_LIMIT] - params['followers_everybody'] = followers_everybody[:FOLLOWERS_LIMIT] - params['following_youknow'] = following_youknow[:FOLLOWERS_LIMIT] - params['following_everybody'] = following_everybody[:FOLLOWERS_LIMIT] - params['requested_follow'] = common_follows_with_user in self.requested_follow_user_ids + followers_youknow, followers_everybody = with_user.common_follows( + self.user_id, direction="followers" + ) + following_youknow, following_everybody = with_user.common_follows( + self.user_id, direction="following" + ) + params["followers_youknow"] = followers_youknow[:FOLLOWERS_LIMIT] + params["followers_everybody"] = followers_everybody[:FOLLOWERS_LIMIT] + params["following_youknow"] = following_youknow[:FOLLOWERS_LIMIT] + params["following_everybody"] = following_everybody[:FOLLOWERS_LIMIT] + params["requested_follow"] = common_follows_with_user in self.requested_follow_user_ids if include_following_user or common_follows_with_user: if not include_following_user: include_following_user = common_follows_with_user if include_following_user != self.user_id: - params['followed_by_you'] = bool(self.is_followed_by_user(include_following_user)) - params['following_you'] = self.is_following_user(include_following_user) - params['muted'] = include_following_user in self.muted_by_user_ids + params["followed_by_you"] = bool(self.is_followed_by_user(include_following_user)) + params["following_you"] = self.is_following_user(include_following_user) + params["muted"] = include_following_user in self.muted_by_user_ids return params - + @property def following_user_ids_without_self(self): if self.user_id in self.following_user_ids: return [u for u in self.following_user_ids if u != self.user_id] return self.following_user_ids - + @property def follower_user_ids_without_self(self): 
if self.user_id in self.follower_user_ids: return [u for u in self.follower_user_ids if u != self.user_id] return self.follower_user_ids - + def import_user_fields(self): user = User.objects.get(pk=self.user_id) self.username = user.username self.email = user.email - + def count_follows(self, skip_save=False): self.subscription_count = UserSubscription.objects.filter(user__pk=self.user_id).count() self.shared_stories_count = MSharedStory.objects.filter(user_id=self.user_id).count() @@ -462,31 +521,31 @@ class MSocialProfile(mongo.Document): self.follower_count = len(self.follower_user_ids_without_self) if not skip_save: self.save() - + def follow_user(self, user_id, check_unfollowed=False, force=False): r = redis.Redis(connection_pool=settings.REDIS_POOL) - + if check_unfollowed and user_id in self.unfollowed_user_ids: return - + if self.user_id == user_id: followee = self else: followee = MSocialProfile.get_user(user_id) - + logging.debug(" ---> ~FB~SB%s~SN (%s) following %s" % (self.user.username, self.user_id, user_id)) - + if not followee.protected or force: if user_id not in self.following_user_ids: self.following_user_ids.append(user_id) elif not force: return - + if user_id in self.unfollowed_user_ids: self.unfollowed_user_ids.remove(user_id) self.count_follows() self.save() - + if followee.protected and user_id != self.user_id and not force: if self.user_id not in followee.requested_follow_user_ids: followee.requested_follow_user_ids.append(self.user_id) @@ -498,11 +557,13 @@ class MSocialProfile(mongo.Document): if followee.protected and user_id != self.user_id and not force: from apps.social.tasks import EmailFollowRequest - EmailFollowRequest.apply_async(kwargs=dict(follower_user_id=self.user_id, - followee_user_id=user_id), - countdown=settings.SECONDS_TO_DELAY_CELERY_EMAILS) + + EmailFollowRequest.apply_async( + kwargs=dict(follower_user_id=self.user_id, followee_user_id=user_id), + countdown=settings.SECONDS_TO_DELAY_CELERY_EMAILS, + ) return - + following_key = "F:%s:F" % (self.user_id) r.sadd(following_key, user_id) follower_key = "F:%s:f" % (user_id) @@ -511,7 +572,7 @@ class MSocialProfile(mongo.Document): if user_id != self.user_id: MInteraction.new_follow(follower_user_id=self.user_id, followee_user_id=user_id) MActivity.new_follow(follower_user_id=self.user_id, followee_user_id=user_id) - + params = dict(user_id=self.user_id, subscription_user_id=user_id) try: socialsub = MSocialSubscription.objects.get(**params) @@ -519,31 +580,33 @@ class MSocialProfile(mongo.Document): socialsub = MSocialSubscription.objects.create(**params) socialsub.needs_unread_recalc = True socialsub.save() - + MFollowRequest.remove(self.user_id, user_id) - + if not force: from apps.social.tasks import EmailNewFollower - EmailNewFollower.apply_async(kwargs=dict(follower_user_id=self.user_id, - followee_user_id=user_id), - countdown=settings.SECONDS_TO_DELAY_CELERY_EMAILS) - + + EmailNewFollower.apply_async( + kwargs=dict(follower_user_id=self.user_id, followee_user_id=user_id), + countdown=settings.SECONDS_TO_DELAY_CELERY_EMAILS, + ) + return socialsub - + def is_following_user(self, user_id): # XXX TODO: Outsource to redis return user_id in self.following_user_ids - + def is_followed_by_user(self, user_id): # XXX TODO: Outsource to redis return user_id in self.follower_user_ids - + def unfollow_user(self, user_id): r = redis.Redis(connection_pool=settings.REDIS_POOL) - + if not isinstance(user_id, int): user_id = int(user_id) - + if user_id == self.user_id: # Only unfollow other people, not 
yourself. return @@ -554,7 +617,7 @@ class MSocialProfile(mongo.Document): self.unfollowed_user_ids.append(user_id) self.count_follows() self.save() - + followee = MSocialProfile.get_user(user_id) if self.user_id in followee.follower_user_ids: followee.follower_user_ids.remove(self.user_id) @@ -565,34 +628,34 @@ class MSocialProfile(mongo.Document): followee.count_follows() followee.save() MFollowRequest.remove(self.user_id, user_id) - + following_key = "F:%s:F" % (self.user_id) r.srem(following_key, user_id) follower_key = "F:%s:f" % (user_id) r.srem(follower_key, self.user_id) - + try: MSocialSubscription.objects.get(user_id=self.user_id, subscription_user_id=user_id).delete() except MSocialSubscription.DoesNotExist: return False - - def common_follows(self, user_id, direction='followers'): + + def common_follows(self, user_id, direction="followers"): r = redis.Redis(connection_pool=settings.REDIS_POOL) - - my_followers = "F:%s:%s" % (self.user_id, 'F' if direction == 'followers' else 'F') - their_followers = "F:%s:%s" % (user_id, 'f' if direction == 'followers' else 'F') - follows_inter = r.sinter(their_followers, my_followers) - follows_diff = r.sdiff(their_followers, my_followers) - follows_inter = [int(f) for f in follows_inter] - follows_diff = [int(f) for f in follows_diff] - + + my_followers = "F:%s:%s" % (self.user_id, "F" if direction == "followers" else "F") + their_followers = "F:%s:%s" % (user_id, "f" if direction == "followers" else "F") + follows_inter = r.sinter(their_followers, my_followers) + follows_diff = r.sdiff(their_followers, my_followers) + follows_inter = [int(f) for f in follows_inter] + follows_diff = [int(f) for f in follows_diff] + if user_id in follows_inter: follows_inter.remove(user_id) if user_id in follows_diff: follows_diff.remove(user_id) - + return follows_inter, follows_diff - + def send_email_for_new_follower(self, follower_user_id): user = User.objects.get(pk=self.user_id) if follower_user_id not in self.follower_user_ids: @@ -606,45 +669,49 @@ class MSocialProfile(mongo.Document): return if self.user_id == follower_user_id: return - - emails_sent = MSentEmail.objects.filter(receiver_user_id=user.pk, - sending_user_id=follower_user_id, - email_type='new_follower') + + emails_sent = MSentEmail.objects.filter( + receiver_user_id=user.pk, sending_user_id=follower_user_id, email_type="new_follower" + ) day_ago = datetime.datetime.now() - datetime.timedelta(days=1) for email in emails_sent: if email.date_sent > day_ago: logging.user(user, "~SK~FMNot sending new follower email, already sent before. 
NBD.") return - + follower_profile = MSocialProfile.get_user(follower_user_id) - common_followers, _ = self.common_follows(follower_user_id, direction='followers') - common_followings, _ = self.common_follows(follower_user_id, direction='following') + common_followers, _ = self.common_follows(follower_user_id, direction="followers") + common_followings, _ = self.common_follows(follower_user_id, direction="following") if self.user_id in common_followers: common_followers.remove(self.user_id) if self.user_id in common_followings: common_followings.remove(self.user_id) common_followers = MSocialProfile.profiles(common_followers) common_followings = MSocialProfile.profiles(common_followings) - + data = { - 'user': user, - 'follower_profile': follower_profile, - 'common_followers': common_followers, - 'common_followings': common_followings, + "user": user, + "follower_profile": follower_profile, + "common_followers": common_followers, + "common_followings": common_followings, } - - text = render_to_string('mail/email_new_follower.txt', data) - html = render_to_string('mail/email_new_follower.xhtml', data) + + text = render_to_string("mail/email_new_follower.txt", data) + html = render_to_string("mail/email_new_follower.xhtml", data) subject = "%s is now following your Blurblog on NewsBlur!" % follower_profile.user.username - msg = EmailMultiAlternatives(subject, text, - from_email='NewsBlur <%s>' % settings.HELLO_EMAIL, - to=['%s <%s>' % (user.username, user.email)]) + msg = EmailMultiAlternatives( + subject, + text, + from_email="NewsBlur <%s>" % settings.HELLO_EMAIL, + to=["%s <%s>" % (user.username, user.email)], + ) msg.attach_alternative(html, "text/html") msg.send() - - MSentEmail.record(receiver_user_id=user.pk, sending_user_id=follower_user_id, - email_type='new_follower') - + + MSentEmail.record( + receiver_user_id=user.pk, sending_user_id=follower_user_id, email_type="new_follower" + ) + logging.user(user, "~BB~FM~SBSending email for new follower: %s" % follower_profile.user.username) def send_email_for_follow_request(self, follower_user_id): @@ -660,57 +727,61 @@ class MSocialProfile(mongo.Document): return if self.user_id == follower_user_id: return - - emails_sent = MSentEmail.objects.filter(receiver_user_id=user.pk, - sending_user_id=follower_user_id, - email_type='follow_request') + + emails_sent = MSentEmail.objects.filter( + receiver_user_id=user.pk, sending_user_id=follower_user_id, email_type="follow_request" + ) day_ago = datetime.datetime.now() - datetime.timedelta(days=1) for email in emails_sent: if email.date_sent > day_ago: logging.user(user, "~SK~FMNot sending follow request email, already sent before. 
NBD.") return - + follower_profile = MSocialProfile.get_user(follower_user_id) - common_followers, _ = self.common_follows(follower_user_id, direction='followers') - common_followings, _ = self.common_follows(follower_user_id, direction='following') + common_followers, _ = self.common_follows(follower_user_id, direction="followers") + common_followings, _ = self.common_follows(follower_user_id, direction="following") if self.user_id in common_followers: common_followers.remove(self.user_id) if self.user_id in common_followings: common_followings.remove(self.user_id) common_followers = MSocialProfile.profiles(common_followers) common_followings = MSocialProfile.profiles(common_followings) - + data = { - 'user': user, - 'follower_profile': follower_profile, - 'common_followers': common_followers, - 'common_followings': common_followings, + "user": user, + "follower_profile": follower_profile, + "common_followers": common_followers, + "common_followings": common_followings, } - - text = render_to_string('mail/email_follow_request.txt', data) - html = render_to_string('mail/email_follow_request.xhtml', data) + + text = render_to_string("mail/email_follow_request.txt", data) + html = render_to_string("mail/email_follow_request.xhtml", data) subject = "%s has requested to follow your Blurblog on NewsBlur" % follower_profile.user.username - msg = EmailMultiAlternatives(subject, text, - from_email='NewsBlur <%s>' % settings.HELLO_EMAIL, - to=['%s <%s>' % (user.username, user.email)]) + msg = EmailMultiAlternatives( + subject, + text, + from_email="NewsBlur <%s>" % settings.HELLO_EMAIL, + to=["%s <%s>" % (user.username, user.email)], + ) msg.attach_alternative(html, "text/html") msg.send() - - MSentEmail.record(receiver_user_id=user.pk, sending_user_id=follower_user_id, - email_type='follow_request') - + + MSentEmail.record( + receiver_user_id=user.pk, sending_user_id=follower_user_id, email_type="follow_request" + ) + logging.user(user, "~BB~FM~SBSending email for follow request: %s" % follower_profile.user.username) - + def mute_user(self, muting_user_id): if muting_user_id not in self.muting_user_ids: self.muting_user_ids.append(muting_user_id) self.save() - + muting_user_profile = MSocialProfile.get_user(muting_user_id) if self.user_id not in muting_user_profile.muted_by_user_ids: muting_user_profile.muted_by_user_ids.append(self.user_id) muting_user_profile.save() - + def unmute_user(self, muting_user_id): if muting_user_id in self.muting_user_ids: self.muting_user_ids.remove(muting_user_id) @@ -720,11 +791,11 @@ class MSocialProfile(mongo.Document): if self.user_id in muting_user_profile.muted_by_user_ids: muting_user_profile.muted_by_user_ids.remove(self.user_id) muting_user_profile.save() - + def save_feed_story_history_statistics(self): """ Fills in missing months between earlier occurances and now. - + Save format: [('YYYY-MM, #), ...] 
Example output: [(2010-12, 123), (2011-01, 146)] """ @@ -750,23 +821,23 @@ class MSocialProfile(mongo.Document): dates = defaultdict(int) hours = defaultdict(int) days = defaultdict(int) - results = MSharedStory.objects(user_id=self.user_id).map_reduce(map_f, reduce_f, output='inline') + results = MSharedStory.objects(user_id=self.user_id).map_reduce(map_f, reduce_f, output="inline") for result in results: - dates[result.value['month']] += 1 - hours[str(int(result.value['hour']))] += 1 - days[str(int(result.value['day']))] += 1 - year = int(re.findall(r"(\d{4})-\d{1,2}", result.value['month'])[0]) + dates[result.value["month"]] += 1 + hours[str(int(result.value["hour"]))] += 1 + days[str(int(result.value["day"]))] += 1 + year = int(re.findall(r"(\d{4})-\d{1,2}", result.value["month"])[0]) if year < min_year: min_year = year - - # Assemble a list with 0's filled in for missing months, + + # Assemble a list with 0's filled in for missing months, # trimming left and right 0's. months = [] start = False - for year in range(min_year, now.year+1): - for month in range(1, 12+1): + for year in range(min_year, now.year + 1): + for month in range(1, 12 + 1): if datetime.datetime(year, month, 1) < now: - key = '%s-%s' % (year, month) + key = "%s-%s" % (year, month) if dates.get(key) or start: start = True months.append((key, dates.get(key, 0))) @@ -778,9 +849,8 @@ class MSocialProfile(mongo.Document): self.story_hours_history = hours self.average_stories_per_month = total / max(1, month_count) self.save() - + def save_classifier_counts(self): - def calculate_scores(cls, facet): map_f = """ function() { @@ -789,7 +859,9 @@ class MSocialProfile(mongo.Document): neg: this.score<0 ? Math.abs(this.score) : 0 }); } - """ % (facet) + """ % ( + facet + ) reduce_f = """ function(key, values) { var result = {pos: 0, neg: 0}; @@ -801,40 +873,42 @@ class MSocialProfile(mongo.Document): } """ scores = [] - res = cls.objects(social_user_id=self.user_id).map_reduce(map_f, reduce_f, output='inline') + res = cls.objects(social_user_id=self.user_id).map_reduce(map_f, reduce_f, output="inline") for r in res: - facet_values = dict([(k, int(v)) for k,v in list(r.value.items())]) + facet_values = dict([(k, int(v)) for k, v in list(r.value.items())]) facet_values[facet] = r.key scores.append(facet_values) - scores = sorted(scores, key=lambda v: v['neg'] - v['pos']) + scores = sorted(scores, key=lambda v: v["neg"] - v["pos"]) return scores - + scores = {} - for cls, facet in [(MClassifierTitle, 'title'), - (MClassifierAuthor, 'author'), - (MClassifierTag, 'tag'), - (MClassifierFeed, 'feed_id')]: + for cls, facet in [ + (MClassifierTitle, "title"), + (MClassifierAuthor, "author"), + (MClassifierTag, "tag"), + (MClassifierFeed, "feed_id"), + ]: scores[facet] = calculate_scores(cls, facet) - if facet == 'feed_id' and scores[facet]: - scores['feed'] = scores[facet] - del scores['feed_id'] + if facet == "feed_id" and scores[facet]: + scores["feed"] = scores[facet] + del scores["feed_id"] elif not scores[facet]: del scores[facet] - + if scores: self.feed_classifier_counts = scores self.save() - + def save_sent_email(self, max_quota=20): if not self.story_email_history: self.story_email_history = [] - + self.story_email_history.insert(0, datetime.datetime.now()) self.story_email_history = self.story_email_history[:max_quota] - + self.save() - + def over_story_email_quota(self, quota=1, hours=24): counted = 0 day_ago = datetime.datetime.now() - datetime.timedelta(hours=hours) @@ -846,17 +920,18 @@ class 
MSocialProfile(mongo.Document): for sent_date in sent_emails: if sent_date > day_ago: counted += 1 - + if counted >= quota: return True - + return False - + + class MSocialSubscription(mongo.Document): UNREAD_CUTOFF = datetime.datetime.utcnow() - datetime.timedelta(days=settings.DAYS_OF_UNREAD) user_id = mongo.IntField() - subscription_user_id = mongo.IntField(unique_with='user_id') + subscription_user_id = mongo.IntField(unique_with="user_id") follow_date = mongo.DateTimeField(default=datetime.datetime.utcnow()) last_read_date = mongo.DateTimeField(default=UNREAD_CUTOFF) mark_read_date = mongo.DateTimeField(default=UNREAD_CUTOFF) @@ -869,24 +944,31 @@ class MSocialSubscription(mongo.Document): feed_opens = mongo.IntField(default=0) is_trained = mongo.BooleanField(default=False) active = mongo.BooleanField(default=True) - + meta = { - 'collection': 'social_subscription', - 'indexes': [('user_id', 'subscription_user_id')], - 'allow_inheritance': False, - 'strict': False, + "collection": "social_subscription", + "indexes": [("user_id", "subscription_user_id")], + "allow_inheritance": False, + "strict": False, } def __str__(self): user = User.objects.get(pk=self.user_id) subscription_user = User.objects.get(pk=self.subscription_user_id) return "Socialsub %s:%s" % (user, subscription_user) - + @classmethod - def feeds(cls, user_id=None, subscription_user_id=None, calculate_all_scores=False, - update_counts=False, *args, **kwargs): + def feeds( + cls, + user_id=None, + subscription_user_id=None, + calculate_all_scores=False, + update_counts=False, + *args, + **kwargs, + ): params = { - 'user_id': user_id, + "user_id": user_id, } if subscription_user_id: params["subscription_user_id"] = subscription_user_id @@ -895,125 +977,139 @@ class MSocialSubscription(mongo.Document): social_feeds = [] if social_subs: if calculate_all_scores: - for s in social_subs: s.calculate_feed_scores() + for s in social_subs: + s.calculate_feed_scores() # Fetch user profiles of subscriptions social_user_ids = [sub.subscription_user_id for sub in social_subs] social_profiles = MSocialProfile.profile_feeds(social_user_ids) for social_sub in social_subs: user_id = social_sub.subscription_user_id - if social_profiles[user_id]['shared_stories_count'] <= 0: + if social_profiles[user_id]["shared_stories_count"] <= 0: continue if update_counts and social_sub.needs_unread_recalc: social_sub.calculate_feed_scores() - + # Combine subscription read counts with feed/user info feed = dict(list(social_sub.canonical().items()) + list(social_profiles[user_id].items())) social_feeds.append(feed) return social_feeds - + @classmethod def feeds_with_updated_counts(cls, user, social_feed_ids=None): feeds = {} - + # Get social subscriptions for user user_subs = cls.objects.filter(user_id=user.pk) if social_feed_ids: - social_user_ids = [int(f.replace('social:', '')) for f in social_feed_ids] + social_user_ids = [int(f.replace("social:", "")) for f in social_feed_ids] user_subs = user_subs.filter(subscription_user_id__in=social_user_ids) profiles = MSocialProfile.objects.filter(user_id__in=social_user_ids) profiles = dict((p.user_id, p) for p in profiles) - + for i, sub in enumerate(user_subs): # Count unreads if subscription is stale. 
- if (sub.needs_unread_recalc or - (sub.unread_count_updated and - sub.unread_count_updated < user.profile.unread_cutoff) or - (sub.oldest_unread_story_date and - sub.oldest_unread_story_date < user.profile.unread_cutoff)): + if ( + sub.needs_unread_recalc + or (sub.unread_count_updated and sub.unread_count_updated < user.profile.unread_cutoff) + or ( + sub.oldest_unread_story_date and sub.oldest_unread_story_date < user.profile.unread_cutoff + ) + ): sub = sub.calculate_feed_scores(force=True, silent=True) feed_id = "social:%s" % sub.subscription_user_id feeds[feed_id] = { - 'ps': sub.unread_count_positive, - 'nt': sub.unread_count_neutral, - 'ng': sub.unread_count_negative, - 'id': feed_id, + "ps": sub.unread_count_positive, + "nt": sub.unread_count_neutral, + "ng": sub.unread_count_negative, + "id": feed_id, } if social_feed_ids and sub.subscription_user_id in profiles: - feeds[feed_id]['shared_stories_count'] = profiles[sub.subscription_user_id].shared_stories_count + feeds[feed_id]["shared_stories_count"] = profiles[ + sub.subscription_user_id + ].shared_stories_count return feeds - + def canonical(self): return { - 'user_id': self.user_id, - 'active': self.active, - 'subscription_user_id': self.subscription_user_id, - 'nt': self.unread_count_neutral, - 'ps': self.unread_count_positive, - 'ng': self.unread_count_negative, - 'is_trained': self.is_trained, - 'feed_opens': self.feed_opens, + "user_id": self.user_id, + "active": self.active, + "subscription_user_id": self.subscription_user_id, + "nt": self.unread_count_neutral, + "ps": self.unread_count_positive, + "ng": self.unread_count_negative, + "is_trained": self.is_trained, + "feed_opens": self.feed_opens, } @classmethod def subs_for_users(cls, user_id, subscription_user_ids=None, read_filter="unread"): socialsubs = cls.objects if read_filter == "unread": - socialsubs = socialsubs.filter(Q(unread_count_neutral__gt=0) | - Q(unread_count_positive__gt=0)) + socialsubs = socialsubs.filter(Q(unread_count_neutral__gt=0) | Q(unread_count_positive__gt=0)) if not subscription_user_ids: - socialsubs = socialsubs.filter(user_id=user_id)\ - .only('subscription_user_id', 'mark_read_date', 'is_trained') + socialsubs = socialsubs.filter(user_id=user_id).only( + "subscription_user_id", "mark_read_date", "is_trained" + ) else: - socialsubs = socialsubs.filter(user_id=user_id, - subscription_user_id__in=subscription_user_ids)\ - .only('subscription_user_id', 'mark_read_date', 'is_trained') - + socialsubs = socialsubs.filter( + user_id=user_id, subscription_user_id__in=subscription_user_ids + ).only("subscription_user_id", "mark_read_date", "is_trained") + return socialsubs @classmethod - def story_hashes(cls, user_id, relative_user_id, subscription_user_ids=None, socialsubs=None, - read_filter="unread", order="newest", - include_timestamps=False, group_by_user=True, cutoff_date=None): + def story_hashes( + cls, + user_id, + relative_user_id, + subscription_user_ids=None, + socialsubs=None, + read_filter="unread", + order="newest", + include_timestamps=False, + group_by_user=True, + cutoff_date=None, + ): r = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL) pipeline = r.pipeline() story_hashes = {} if group_by_user else [] if not socialsubs: - socialsubs = cls.subs_for_users(relative_user_id, - subscription_user_ids=subscription_user_ids, - read_filter=read_filter) + socialsubs = cls.subs_for_users( + relative_user_id, subscription_user_ids=subscription_user_ids, read_filter=read_filter + ) subscription_user_ids = [sub.subscription_user_id 
for sub in socialsubs] if not subscription_user_ids: return story_hashes - - current_time = int(time.time() + 60*60*24) + + current_time = int(time.time() + 60 * 60 * 24) if not cutoff_date: cutoff_date = datetime.datetime.now() - datetime.timedelta(days=settings.DAYS_OF_STORY_HASHES) - unread_timestamp = int(time.mktime(cutoff_date.timetuple()))-1000 + unread_timestamp = int(time.mktime(cutoff_date.timetuple())) - 1000 feed_counter = 0 read_dates = dict() for us in socialsubs: - read_dates[us.subscription_user_id] = int(max(us.mark_read_date, cutoff_date).strftime('%s')) + read_dates[us.subscription_user_id] = int(max(us.mark_read_date, cutoff_date).strftime("%s")) for sub_user_id_group in chunks(subscription_user_ids, 20): pipeline = r.pipeline() for sub_user_id in sub_user_id_group: - stories_key = 'B:%s' % (sub_user_id) - sorted_stories_key = 'zB:%s' % (sub_user_id) - read_stories_key = 'RS:%s' % (user_id) - read_social_stories_key = 'RS:%s:B:%s' % (user_id, sub_user_id) - unread_stories_key = 'UB:%s:%s' % (user_id, sub_user_id) - sorted_stories_key = 'zB:%s' % (sub_user_id) - unread_ranked_stories_key = 'zUB:%s:%s' % (user_id, sub_user_id) + stories_key = "B:%s" % (sub_user_id) + sorted_stories_key = "zB:%s" % (sub_user_id) + read_stories_key = "RS:%s" % (user_id) + read_social_stories_key = "RS:%s:B:%s" % (user_id, sub_user_id) + unread_stories_key = "UB:%s:%s" % (user_id, sub_user_id) + sorted_stories_key = "zB:%s" % (sub_user_id) + unread_ranked_stories_key = "zUB:%s:%s" % (user_id, sub_user_id) expire_unread_stories_key = False - + max_score = current_time - if read_filter == 'unread': + if read_filter == "unread": # +1 for the intersection b/w zF and F, which carries an implicit score of 1. min_score = read_dates[sub_user_id] + 1 pipeline.sdiffstore(unread_stories_key, stories_key, read_stories_key) @@ -1023,67 +1119,78 @@ class MSocialSubscription(mongo.Document): min_score = unread_timestamp unread_stories_key = stories_key - if order == 'oldest': + if order == "oldest": byscorefunc = pipeline.zrangebyscore else: byscorefunc = pipeline.zrevrangebyscore min_score, max_score = max_score, min_score - + pipeline.zinterstore(unread_ranked_stories_key, [sorted_stories_key, unread_stories_key]) byscorefunc(unread_ranked_stories_key, min_score, max_score, withscores=include_timestamps) pipeline.delete(unread_ranked_stories_key) if expire_unread_stories_key: pipeline.delete(unread_stories_key) - results = pipeline.execute() - + for hashes in results: - if not isinstance(hashes, list): continue + if not isinstance(hashes, list): + continue if group_by_user: story_hashes[subscription_user_ids[feed_counter]] = hashes feed_counter += 1 else: story_hashes.extend(hashes) - + return story_hashes - - def get_stories(self, offset=0, limit=6, order='newest', read_filter='all', - withscores=False, hashes_only=False, cutoff_date=None, - mark_read_complement=False): + + def get_stories( + self, + offset=0, + limit=6, + order="newest", + read_filter="all", + withscores=False, + hashes_only=False, + cutoff_date=None, + mark_read_complement=False, + ): r = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL) ignore_user_stories = False - - stories_key = 'B:%s' % (self.subscription_user_id) - read_stories_key = 'RS:%s' % (self.user_id) - read_social_stories_key = 'RS:%s:B:%s' % (self.user_id, self.subscription_user_id) - unread_stories_key = 'UB:%s:%s' % (self.user_id, self.subscription_user_id) + + stories_key = "B:%s" % (self.subscription_user_id) + read_stories_key = "RS:%s" % (self.user_id) + 
read_social_stories_key = "RS:%s:B:%s" % (self.user_id, self.subscription_user_id) + unread_stories_key = "UB:%s:%s" % (self.user_id, self.subscription_user_id) if not r.exists(stories_key): return [] - elif read_filter != 'unread' or not r.exists(read_stories_key): + elif read_filter != "unread" or not r.exists(read_stories_key): ignore_user_stories = True unread_stories_key = stories_key else: r.sdiffstore(unread_stories_key, stories_key, read_stories_key) r.sdiffstore(unread_stories_key, unread_stories_key, read_social_stories_key) - sorted_stories_key = 'zB:%s' % (self.subscription_user_id) - unread_ranked_stories_key = 'z%sUB:%s:%s' % ('h' if hashes_only else '', - self.user_id, self.subscription_user_id) + sorted_stories_key = "zB:%s" % (self.subscription_user_id) + unread_ranked_stories_key = "z%sUB:%s:%s" % ( + "h" if hashes_only else "", + self.user_id, + self.subscription_user_id, + ) r.zinterstore(unread_ranked_stories_key, [sorted_stories_key, unread_stories_key]) - - now = datetime.datetime.now() - current_time = int(time.time() + 60*60*24) - mark_read_time = int(time.mktime(self.mark_read_date.timetuple())) + 1 + + now = datetime.datetime.now() + current_time = int(time.time() + 60 * 60 * 24) + mark_read_time = int(time.mktime(self.mark_read_date.timetuple())) + 1 if cutoff_date: - mark_read_time = int(time.mktime(cutoff_date.timetuple())) + 1 - - if order == 'oldest': + mark_read_time = int(time.mktime(cutoff_date.timetuple())) + 1 + + if order == "oldest": byscorefunc = r.zrangebyscore min_score = mark_read_time max_score = current_time - else: # newest + else: # newest byscorefunc = r.zrevrangebyscore min_score = current_time if mark_read_complement: @@ -1092,44 +1199,58 @@ class MSocialSubscription(mongo.Document): unread_cutoff = cutoff_date if not unread_cutoff: unread_cutoff = now - datetime.timedelta(days=settings.DAYS_OF_UNREAD) - max_score = int(time.mktime(unread_cutoff.timetuple()))-1 + max_score = int(time.mktime(unread_cutoff.timetuple())) - 1 + + story_ids = byscorefunc( + unread_ranked_stories_key, min_score, max_score, start=offset, num=limit, withscores=withscores + ) - story_ids = byscorefunc(unread_ranked_stories_key, min_score, - max_score, start=offset, num=limit, - withscores=withscores) - if withscores: story_ids = [(s[0], int(s[1])) for s in story_ids] - - r.expire(unread_ranked_stories_key, 1*60*60) + + r.expire(unread_ranked_stories_key, 1 * 60 * 60) if not ignore_user_stories: r.delete(unread_stories_key) return story_ids - + @classmethod - def feed_stories(cls, user_id, social_user_ids, offset=0, limit=6, - order='newest', read_filter='all', relative_user_id=None, cache=True, - socialsubs=None, cutoff_date=None, dashboard_global=False): + def feed_stories( + cls, + user_id, + social_user_ids, + offset=0, + limit=6, + order="newest", + read_filter="all", + relative_user_id=None, + cache=True, + socialsubs=None, + cutoff_date=None, + dashboard_global=False, + ): rt = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_TEMP_POOL) - + if not relative_user_id: relative_user_id = user_id - - if order == 'oldest': + + if order == "oldest": range_func = rt.zrange else: range_func = rt.zrevrange - + if not isinstance(social_user_ids, list): social_user_ids = [social_user_ids] - ranked_stories_keys = 'zU:%s:social' % (user_id) - unread_ranked_stories_keys = 'zhU:%s:social' % (user_id) - if ((offset or dashboard_global) and cache and - rt.exists(ranked_stories_keys) and - rt.exists(unread_ranked_stories_keys)): + ranked_stories_keys = "zU:%s:social" % 
(user_id) + unread_ranked_stories_keys = "zhU:%s:social" % (user_id) + if ( + (offset or dashboard_global) + and cache + and rt.exists(ranked_stories_keys) + and rt.exists(unread_ranked_stories_keys) + ): story_hashes_and_dates = range_func(ranked_stories_keys, offset, limit, withscores=True) if not story_hashes_and_dates: return [], [], [] @@ -1137,22 +1258,26 @@ class MSocialSubscription(mongo.Document): if read_filter == "unread": unread_story_hashes = story_hashes else: - unread_story_hashes = range_func(unread_ranked_stories_keys, 0, offset+limit) + unread_story_hashes = range_func(unread_ranked_stories_keys, 0, offset + limit) return story_hashes, story_dates, unread_story_hashes else: rt.delete(ranked_stories_keys) rt.delete(unread_ranked_stories_keys) - - story_hashes = cls.story_hashes(user_id, relative_user_id, - subscription_user_ids=social_user_ids, - read_filter=read_filter, order=order, - include_timestamps=True, - group_by_user=False, - socialsubs=socialsubs, - cutoff_date=cutoff_date) + + story_hashes = cls.story_hashes( + user_id, + relative_user_id, + subscription_user_ids=social_user_ids, + read_filter=read_filter, + order=order, + include_timestamps=True, + group_by_user=False, + socialsubs=socialsubs, + cutoff_date=cutoff_date, + ) if not story_hashes: return [], [], [] - + pipeline = rt.pipeline() for story_hash_group in chunks(story_hashes, 100): pipeline.zadd(ranked_stories_keys, dict(story_hash_group)) @@ -1166,85 +1291,100 @@ class MSocialSubscription(mongo.Document): unread_feed_story_hashes = story_hashes rt.zunionstore(unread_ranked_stories_keys, [ranked_stories_keys]) else: - unread_story_hashes = cls.story_hashes(user_id, relative_user_id, - subscription_user_ids=social_user_ids, - read_filter="unread", order=order, - include_timestamps=True, - group_by_user=False, - socialsubs=socialsubs, - cutoff_date=cutoff_date) + unread_story_hashes = cls.story_hashes( + user_id, + relative_user_id, + subscription_user_ids=social_user_ids, + read_filter="unread", + order=order, + include_timestamps=True, + group_by_user=False, + socialsubs=socialsubs, + cutoff_date=cutoff_date, + ) if unread_story_hashes: pipeline = rt.pipeline() for unread_story_hash_group in chunks(unread_story_hashes, 100): pipeline.zadd(unread_ranked_stories_keys, dict(unread_story_hash_group)) pipeline.execute() unread_feed_story_hashes = range_func(unread_ranked_stories_keys, offset, limit) - - rt.expire(ranked_stories_keys, 60*60) - rt.expire(unread_ranked_stories_keys, 60*60) - + + rt.expire(ranked_stories_keys, 60 * 60) + rt.expire(unread_ranked_stories_keys, 60 * 60) + return story_hashes, story_dates, unread_feed_story_hashes def mark_newer_stories_read(self, cutoff_date): - if (self.unread_count_negative == 0 + if ( + self.unread_count_negative == 0 and self.unread_count_neutral == 0 and self.unread_count_positive == 0 - and not self.needs_unread_recalc): + and not self.needs_unread_recalc + ): return - + cutoff_date = cutoff_date - datetime.timedelta(seconds=1) - story_hashes = self.get_stories(limit=500, order="newest", cutoff_date=cutoff_date, - read_filter="unread", hashes_only=True) + story_hashes = self.get_stories( + limit=500, order="newest", cutoff_date=cutoff_date, read_filter="unread", hashes_only=True + ) data = self.mark_story_ids_as_read(story_hashes, aggregated=True) return data - - def mark_story_ids_as_read(self, story_hashes, feed_id=None, mark_all_read=False, request=None, aggregated=False): + + def mark_story_ids_as_read( + self, story_hashes, feed_id=None, 
mark_all_read=False, request=None, aggregated=False + ): data = dict(code=0, payload=story_hashes) r = redis.Redis(connection_pool=settings.REDIS_POOL) - + if not request: request = User.objects.get(pk=self.user_id) - + if not self.needs_unread_recalc and not mark_all_read: self.needs_unread_recalc = True self.save() - + sub_username = User.objects.get(pk=self.subscription_user_id).username - + if len(story_hashes) > 1: - logging.user(request, "~FYRead %s stories in social subscription: %s" % (len(story_hashes), sub_username)) + logging.user( + request, "~FYRead %s stories in social subscription: %s" % (len(story_hashes), sub_username) + ) else: logging.user(request, "~FYRead story in social subscription: %s" % (sub_username)) - - + for story_hash in set(story_hashes): if feed_id is not None: story_hash = MStory.ensure_story_hash(story_hash, story_feed_id=feed_id) if feed_id is None: feed_id, _ = MStory.split_story_hash(story_hash) - + if len(story_hashes) == 1: RUserStory.aggregate_mark_read(feed_id) - + # Find other social feeds with this story to update their counts friend_key = "F:%s:F" % (self.user_id) share_key = "S:%s" % (story_hash) friends_with_shares = [int(f) for f in r.sinter(share_key, friend_key)] - - RUserStory.mark_read(self.user_id, feed_id, story_hash, social_user_ids=friends_with_shares, - aggregated=(mark_all_read or aggregated)) - + + RUserStory.mark_read( + self.user_id, + feed_id, + story_hash, + social_user_ids=friends_with_shares, + aggregated=(mark_all_read or aggregated), + ) + if self.user_id in friends_with_shares: friends_with_shares.remove(self.user_id) if friends_with_shares: socialsubs = MSocialSubscription.objects.filter( - user_id=self.user_id, - subscription_user_id__in=friends_with_shares) + user_id=self.user_id, subscription_user_id__in=friends_with_shares + ) for socialsub in socialsubs: if not socialsub.needs_unread_recalc and not mark_all_read: socialsub.needs_unread_recalc = True socialsub.save() - + # Also count on original subscription usersubs = UserSubscription.objects.filter(user=self.user_id, feed=feed_id) if usersubs: @@ -1252,38 +1392,37 @@ class MSocialSubscription(mongo.Document): if not usersub.needs_unread_recalc: usersub.needs_unread_recalc = True usersub.save() - + return data - + @classmethod - def mark_unsub_story_ids_as_read(cls, user_id, social_user_id, story_ids, feed_id=None, - request=None): + def mark_unsub_story_ids_as_read(cls, user_id, social_user_id, story_ids, feed_id=None, request=None): data = dict(code=0, payload=story_ids) r = redis.Redis(connection_pool=settings.REDIS_POOL) if not request: request = User.objects.get(pk=user_id) - + if len(story_ids) > 1: logging.user(request, "~FYRead %s social stories from global" % (len(story_ids))) else: logging.user(request, "~FYRead social story from global") - + for story_id in set(story_ids): try: - story = MSharedStory.objects.get(user_id=social_user_id, - story_guid=story_id) + story = MSharedStory.objects.get(user_id=social_user_id, story_guid=story_id) except MSharedStory.DoesNotExist: continue - + # Find other social feeds with this story to update their counts friend_key = "F:%s:F" % (user_id) share_key = "S:%s" % (story.story_hash) friends_with_shares = [int(f) for f in r.sinter(share_key, friend_key)] - - RUserStory.mark_read(user_id, story.story_feed_id, story.story_hash, - social_user_ids=friends_with_shares) - + + RUserStory.mark_read( + user_id, story.story_feed_id, story.story_hash, social_user_ids=friends_with_shares + ) + # Also count on original subscription 
usersubs = UserSubscription.objects.filter(user=user_id, feed=story.story_feed_id) if usersubs: @@ -1293,26 +1432,32 @@ class MSocialSubscription(mongo.Document): usersub.save() # XXX TODO: Real-time notification, just for this user return data - + def mark_feed_read(self, cutoff_date=None): user_profile = Profile.objects.get(user_id=self.user_id) recount = True - + if cutoff_date: cutoff_date = cutoff_date + datetime.timedelta(seconds=1) else: # Use the latest story to get last read time. now = datetime.datetime.now() - latest_shared_story = MSharedStory.objects(user_id=self.subscription_user_id, - shared_date__gte=user_profile.unread_cutoff, - story_date__lte=now - ).order_by('-shared_date').only('shared_date').first() + latest_shared_story = ( + MSharedStory.objects( + user_id=self.subscription_user_id, + shared_date__gte=user_profile.unread_cutoff, + story_date__lte=now, + ) + .order_by("-shared_date") + .only("shared_date") + .first() + ) if latest_shared_story: - cutoff_date = latest_shared_story['shared_date'] + datetime.timedelta(seconds=1) + cutoff_date = latest_shared_story["shared_date"] + datetime.timedelta(seconds=1) else: cutoff_date = datetime.datetime.utcnow() recount = False - + self.last_read_date = cutoff_date self.mark_read_date = cutoff_date self.oldest_unread_story_date = cutoff_date @@ -1324,18 +1469,19 @@ class MSocialSubscription(mongo.Document): self.needs_unread_recalc = False else: self.needs_unread_recalc = True - + # Manually mark all shared stories as read. - unread_story_hashes = self.get_stories(read_filter='unread', limit=500, hashes_only=True, - mark_read_complement=True) + unread_story_hashes = self.get_stories( + read_filter="unread", limit=500, hashes_only=True, mark_read_complement=True + ) self.mark_story_ids_as_read(unread_story_hashes, mark_all_read=True) - + self.save() - + def calculate_feed_scores(self, force=False, silent=False): if not self.needs_unread_recalc and not force: return self - + now = datetime.datetime.now() user_profile = Profile.objects.get(user_id=self.user_id) @@ -1343,9 +1489,9 @@ class MSocialSubscription(mongo.Document): # if not silent: # logging.info(' ---> [%s] SKIPPING Computing scores: %s (1 week+)' % (self.user, self.feed)) return self - + feed_scores = dict(negative=0, neutral=0, positive=0) - + # Two weeks in age. If mark_read_date is older, mark old stories as read. 
date_delta = user_profile.unread_cutoff if date_delta < self.mark_read_date: @@ -1353,95 +1499,117 @@ class MSocialSubscription(mongo.Document): else: self.mark_read_date = date_delta - unread_story_hashes = self.get_stories(read_filter='unread', limit=500, hashes_only=True, - cutoff_date=date_delta) - stories_db = MSharedStory.objects(user_id=self.subscription_user_id, - story_hash__in=unread_story_hashes) + unread_story_hashes = self.get_stories( + read_filter="unread", limit=500, hashes_only=True, cutoff_date=date_delta + ) + stories_db = MSharedStory.objects( + user_id=self.subscription_user_id, story_hash__in=unread_story_hashes + ) story_feed_ids = set() for s in stories_db: - story_feed_ids.add(s['story_feed_id']) + story_feed_ids.add(s["story_feed_id"]) story_feed_ids = list(story_feed_ids) usersubs = UserSubscription.objects.filter(user__pk=self.user_id, feed__pk__in=story_feed_ids) usersubs_map = dict((sub.feed_id, sub) for sub in usersubs) - + oldest_unread_story_date = now unread_stories_db = [] for story in stories_db: - if story['story_hash'] not in unread_story_hashes: + if story["story_hash"] not in unread_story_hashes: continue feed_id = story.story_feed_id if usersubs_map.get(feed_id) and story.shared_date < usersubs_map[feed_id].mark_read_date: continue - + unread_stories_db.append(story) if story.shared_date < oldest_unread_story_date: oldest_unread_story_date = story.shared_date stories = Feed.format_stories(unread_stories_db) - classifier_feeds = list(MClassifierFeed.objects(user_id=self.user_id, social_user_id=self.subscription_user_id)) - classifier_authors = list(MClassifierAuthor.objects(user_id=self.user_id, social_user_id=self.subscription_user_id)) - classifier_titles = list(MClassifierTitle.objects(user_id=self.user_id, social_user_id=self.subscription_user_id)) - classifier_tags = list(MClassifierTag.objects(user_id=self.user_id, social_user_id=self.subscription_user_id)) + classifier_feeds = list( + MClassifierFeed.objects(user_id=self.user_id, social_user_id=self.subscription_user_id) + ) + classifier_authors = list( + MClassifierAuthor.objects(user_id=self.user_id, social_user_id=self.subscription_user_id) + ) + classifier_titles = list( + MClassifierTitle.objects(user_id=self.user_id, social_user_id=self.subscription_user_id) + ) + classifier_tags = list( + MClassifierTag.objects(user_id=self.user_id, social_user_id=self.subscription_user_id) + ) # Merge with feed specific classifiers if story_feed_ids: - classifier_feeds = classifier_feeds + list(MClassifierFeed.objects(user_id=self.user_id, - feed_id__in=story_feed_ids)) - classifier_authors = classifier_authors + list(MClassifierAuthor.objects(user_id=self.user_id, - feed_id__in=story_feed_ids)) - classifier_titles = classifier_titles + list(MClassifierTitle.objects(user_id=self.user_id, - feed_id__in=story_feed_ids)) - classifier_tags = classifier_tags + list(MClassifierTag.objects(user_id=self.user_id, - feed_id__in=story_feed_ids)) + classifier_feeds = classifier_feeds + list( + MClassifierFeed.objects(user_id=self.user_id, feed_id__in=story_feed_ids) + ) + classifier_authors = classifier_authors + list( + MClassifierAuthor.objects(user_id=self.user_id, feed_id__in=story_feed_ids) + ) + classifier_titles = classifier_titles + list( + MClassifierTitle.objects(user_id=self.user_id, feed_id__in=story_feed_ids) + ) + classifier_tags = classifier_tags + list( + MClassifierTag.objects(user_id=self.user_id, feed_id__in=story_feed_ids) + ) for story in stories: scores = { - 'feed' : 
apply_classifier_feeds(classifier_feeds, story['story_feed_id'], - social_user_ids=self.subscription_user_id), - 'author' : apply_classifier_authors(classifier_authors, story), - 'tags' : apply_classifier_tags(classifier_tags, story), - 'title' : apply_classifier_titles(classifier_titles, story), + "feed": apply_classifier_feeds( + classifier_feeds, story["story_feed_id"], social_user_ids=self.subscription_user_id + ), + "author": apply_classifier_authors(classifier_authors, story), + "tags": apply_classifier_tags(classifier_tags, story), + "title": apply_classifier_titles(classifier_titles, story), } - - max_score = max(scores['author'], scores['tags'], scores['title']) - min_score = min(scores['author'], scores['tags'], scores['title']) - + + max_score = max(scores["author"], scores["tags"], scores["title"]) + min_score = min(scores["author"], scores["tags"], scores["title"]) + if max_score > 0: - feed_scores['positive'] += 1 + feed_scores["positive"] += 1 elif min_score < 0: - feed_scores['negative'] += 1 + feed_scores["negative"] += 1 else: - if scores['feed'] > 0: - feed_scores['positive'] += 1 - elif scores['feed'] < 0: - feed_scores['negative'] += 1 + if scores["feed"] > 0: + feed_scores["positive"] += 1 + elif scores["feed"] < 0: + feed_scores["negative"] += 1 else: - feed_scores['neutral'] += 1 - - - self.unread_count_positive = feed_scores['positive'] - self.unread_count_neutral = feed_scores['neutral'] - self.unread_count_negative = feed_scores['negative'] + feed_scores["neutral"] += 1 + + self.unread_count_positive = feed_scores["positive"] + self.unread_count_neutral = feed_scores["neutral"] + self.unread_count_negative = feed_scores["negative"] self.unread_count_updated = datetime.datetime.now() self.oldest_unread_story_date = oldest_unread_story_date self.needs_unread_recalc = False - + self.save() - if (self.unread_count_positive == 0 and - self.unread_count_neutral == 0): + if self.unread_count_positive == 0 and self.unread_count_neutral == 0: self.mark_feed_read() - + if not silent: - logging.info(' ---> [%s] Computing social scores: %s (%s/%s/%s)' % (user_profile, self.subscription_user_id, feed_scores['negative'], feed_scores['neutral'], feed_scores['positive'])) - + logging.info( + " ---> [%s] Computing social scores: %s (%s/%s/%s)" + % ( + user_profile, + self.subscription_user_id, + feed_scores["negative"], + feed_scores["neutral"], + feed_scores["positive"], + ) + ) + return self - + @classmethod def mark_dirty_sharing_story(cls, user_id, story_feed_id, story_guid_hash): r = redis.Redis(connection_pool=settings.REDIS_POOL) - + friends_key = "F:%s:F" % (user_id) share_key = "S:%s:%s" % (story_feed_id, story_guid_hash) following_user_ids = r.sinter(friends_key, share_key) @@ -1455,90 +1623,99 @@ class MSocialSubscription(mongo.Document): social_sub.save() return social_subs + class MCommentReply(mongo.EmbeddedDocument): - reply_id = mongo.ObjectIdField() - user_id = mongo.IntField() - publish_date = mongo.DateTimeField() - comments = mongo.StringField() - email_sent = mongo.BooleanField(default=False) - liking_users = mongo.ListField(mongo.IntField()) - + reply_id = mongo.ObjectIdField() + user_id = mongo.IntField() + publish_date = mongo.DateTimeField() + comments = mongo.StringField() + email_sent = mongo.BooleanField(default=False) + liking_users = mongo.ListField(mongo.IntField()) + def canonical(self): reply = { - 'reply_id': self.reply_id, - 'user_id': self.user_id, - 'publish_date': relative_timesince(self.publish_date), - 'date': self.publish_date, - 
'comments': self.comments, + "reply_id": self.reply_id, + "user_id": self.user_id, + "publish_date": relative_timesince(self.publish_date), + "date": self.publish_date, + "comments": self.comments, } return reply - + meta = { - 'ordering': ['publish_date'], - 'id_field': 'reply_id', - 'allow_inheritance': False, - 'strict': False, + "ordering": ["publish_date"], + "id_field": "reply_id", + "allow_inheritance": False, + "strict": False, } class MSharedStory(mongo.DynamicDocument): - user_id = mongo.IntField() - shared_date = mongo.DateTimeField() - comments = mongo.StringField() - has_comments = mongo.BooleanField(default=False) - has_replies = mongo.BooleanField(default=False) - replies = mongo.ListField(mongo.EmbeddedDocumentField(MCommentReply)) - source_user_id = mongo.IntField() - story_hash = mongo.StringField() - story_feed_id = mongo.IntField() - story_date = mongo.DateTimeField() - story_title = mongo.StringField(max_length=1024) - story_content = mongo.StringField() - story_content_z = mongo.BinaryField() - story_original_content = mongo.StringField() + user_id = mongo.IntField() + shared_date = mongo.DateTimeField() + comments = mongo.StringField() + has_comments = mongo.BooleanField(default=False) + has_replies = mongo.BooleanField(default=False) + replies = mongo.ListField(mongo.EmbeddedDocumentField(MCommentReply)) + source_user_id = mongo.IntField() + story_hash = mongo.StringField() + story_feed_id = mongo.IntField() + story_date = mongo.DateTimeField() + story_title = mongo.StringField(max_length=1024) + story_content = mongo.StringField() + story_content_z = mongo.BinaryField() + story_original_content = mongo.StringField() story_original_content_z = mongo.BinaryField() - original_text_z = mongo.BinaryField() - original_page_z = mongo.BinaryField() - story_content_type = mongo.StringField(max_length=255) - story_author_name = mongo.StringField() - story_permalink = mongo.StringField() - story_guid = mongo.StringField(unique_with=('user_id',)) - story_guid_hash = mongo.StringField(max_length=6) - image_urls = mongo.ListField(mongo.StringField(max_length=1024)) - story_tags = mongo.ListField(mongo.StringField(max_length=250)) - posted_to_services = mongo.ListField(mongo.StringField(max_length=20)) - mute_email_users = mongo.ListField(mongo.IntField()) - liking_users = mongo.ListField(mongo.IntField()) - emailed_reshare = mongo.BooleanField(default=False) - emailed_replies = mongo.ListField(mongo.ObjectIdField()) - image_count = mongo.IntField() - image_sizes = mongo.ListField(mongo.DictField()) - + original_text_z = mongo.BinaryField() + original_page_z = mongo.BinaryField() + story_content_type = mongo.StringField(max_length=255) + story_author_name = mongo.StringField() + story_permalink = mongo.StringField() + story_guid = mongo.StringField(unique_with=("user_id",)) + story_guid_hash = mongo.StringField(max_length=6) + image_urls = mongo.ListField(mongo.StringField(max_length=1024)) + story_tags = mongo.ListField(mongo.StringField(max_length=250)) + posted_to_services = mongo.ListField(mongo.StringField(max_length=20)) + mute_email_users = mongo.ListField(mongo.IntField()) + liking_users = mongo.ListField(mongo.IntField()) + emailed_reshare = mongo.BooleanField(default=False) + emailed_replies = mongo.ListField(mongo.ObjectIdField()) + image_count = mongo.IntField() + image_sizes = mongo.ListField(mongo.DictField()) + meta = { - 'collection': 'shared_stories', - 'indexes': [('user_id', '-shared_date'), ('user_id', 'story_feed_id'), - 'shared_date', 'story_guid', 
'story_feed_id', 'story_hash'], - 'ordering': ['-shared_date'], - 'allow_inheritance': False, - 'strict': False, + "collection": "shared_stories", + "indexes": [ + ("user_id", "-shared_date"), + ("user_id", "story_feed_id"), + "shared_date", + "story_guid", + "story_feed_id", + "story_hash", + ], + "ordering": ["-shared_date"], + "allow_inheritance": False, + "strict": False, } def __str__(self): user = User.objects.get(pk=self.user_id) - return "%s: %s (%s)%s%s" % (user.username, - self.decoded_story_title[:20], - self.story_feed_id, - ': ' if self.has_comments else '', - self.comments[:20]) + return "%s: %s (%s)%s%s" % ( + user.username, + self.decoded_story_title[:20], + self.story_feed_id, + ": " if self.has_comments else "", + self.comments[:20], + ) @property def guid_hash(self): - return hashlib.sha1(self.story_guid.encode('utf-8')).hexdigest()[:6] - + return hashlib.sha1(self.story_guid.encode("utf-8")).hexdigest()[:6] + @property def feed_guid_hash(self): return "%s:%s" % (self.story_feed_id or "0", self.guid_hash) - + @property def decoded_story_title(self): return pyhtml.unescape(self.story_title) @@ -1550,7 +1727,7 @@ class MSharedStory(mongo.DynamicDocument): story_content = smart_str(zlib.decompress(self.story_content_z)) else: story_content = smart_str(story_content) - + return story_content def canonical(self): @@ -1561,7 +1738,7 @@ class MSharedStory(mongo.DynamicDocument): "story_content": self.story_content_z and zlib.decompress(self.story_content_z), "comments": self.comments, } - + def save(self, *args, **kwargs): scrubber = SelectiveScriptScrubber() @@ -1583,23 +1760,29 @@ class MSharedStory(mongo.DynamicDocument): self.shared_date = self.shared_date or datetime.datetime.utcnow() self.has_replies = bool(len(self.replies)) - + super(MSharedStory, self).save(*args, **kwargs) - + author = MSocialProfile.get_user(self.user_id) author.count_follows() - + self.sync_redis() - - MActivity.new_shared_story(user_id=self.user_id, source_user_id=self.source_user_id, - story_title=self.story_title, - comments=self.comments, story_feed_id=self.story_feed_id, - story_id=self.story_guid, share_date=self.shared_date) + + MActivity.new_shared_story( + user_id=self.user_id, + source_user_id=self.source_user_id, + story_title=self.story_title, + comments=self.comments, + story_feed_id=self.story_feed_id, + story_id=self.story_guid, + share_date=self.shared_date, + ) return self - + def delete(self, *args, **kwargs): - MActivity.remove_shared_story(user_id=self.user_id, story_feed_id=self.story_feed_id, - story_id=self.story_guid) + MActivity.remove_shared_story( + user_id=self.user_id, story_feed_id=self.story_feed_id, story_id=self.story_guid + ) self.remove_from_redis() @@ -1608,48 +1791,52 @@ class MSharedStory(mongo.DynamicDocument): @classmethod def trim_old_stories(cls, stories=10, days=90, dryrun=False): print(" ---> Fetching shared story counts...") - stats = settings.MONGODB.newsblur.shared_stories.aggregate([{ - "$group": { - "_id": "$user_id", - "stories": {"$sum": 1}, - }, - }, { - "$match": { - "stories": {"$gte": stories} - }, - }]) + stats = settings.MONGODB.newsblur.shared_stories.aggregate( + [ + { + "$group": { + "_id": "$user_id", + "stories": {"$sum": 1}, + }, + }, + { + "$match": {"stories": {"$gte": stories}}, + }, + ] + ) month_ago = datetime.datetime.now() - datetime.timedelta(days=days) user_ids = list(stats) - user_ids = sorted(user_ids, key=lambda x:x['stories'], reverse=True) + user_ids = sorted(user_ids, key=lambda x: x["stories"], reverse=True) print(" ---> 
Found %s users with more than %s starred stories" % (len(user_ids), stories)) total = 0 for stat in user_ids: try: - user = User.objects.select_related('profile').get(pk=stat['_id']) + user = User.objects.select_related("profile").get(pk=stat["_id"]) except User.DoesNotExist: user = None - + if user and (user.profile.is_premium or user.profile.last_seen_on > month_ago): continue - - total += stat['stories'] - username = "%s (%s)" % (user and user.username or " - ", stat['_id']) - print(" ---> %19.19s: %-20.20s %s stories" % (user and user.profile.last_seen_on or "Deleted", - username, - stat['stories'])) - if not dryrun and stat['_id']: - cls.objects.filter(user_id=stat['_id']).delete() - elif not dryrun and stat['_id'] == 0: + + total += stat["stories"] + username = "%s (%s)" % (user and user.username or " - ", stat["_id"]) + print( + " ---> %19.19s: %-20.20s %s stories" + % (user and user.profile.last_seen_on or "Deleted", username, stat["stories"]) + ) + if not dryrun and stat["_id"]: + cls.objects.filter(user_id=stat["_id"]).delete() + elif not dryrun and stat["_id"] == 0: print(" ---> Deleting unshared stories (user_id = 0)") - cls.objects.filter(user_id=stat['_id']).delete() - - + cls.objects.filter(user_id=stat["_id"]).delete() + print(" ---> Deleted %s stories in total." % total) - + def unshare_story(self): - socialsubs = MSocialSubscription.objects.filter(subscription_user_id=self.user_id, - needs_unread_recalc=False) + socialsubs = MSocialSubscription.objects.filter( + subscription_user_id=self.user_id, needs_unread_recalc=False + ) for socialsub in socialsubs: socialsub.needs_unread_recalc = True socialsub.save() @@ -1660,23 +1847,30 @@ class MSharedStory(mongo.DynamicDocument): feed = Feed.get_by_id(self.story_feed_id) try: r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL) - r.publish("social:%s:story" % (self.user_id), '%s,%s' % (self.story_hash, self.shared_date.strftime('%s'))) - logging.debug(" ***> [%-30s] ~BMPublishing to Redis for real-time." % (feed.title[:30] if feed else "NO FEED")) + r.publish( + "social:%s:story" % (self.user_id), + "%s,%s" % (self.story_hash, self.shared_date.strftime("%s")), + ) + logging.debug( + " ***> [%-30s] ~BMPublishing to Redis for real-time." + % (feed.title[:30] if feed else "NO FEED") + ) except redis.ConnectionError: - logging.debug(" ***> [%-30s] ~BMRedis is unavailable for real-time." % (feed.title[:30] if feed else "NO FEED")) - + logging.debug( + " ***> [%-30s] ~BMRedis is unavailable for real-time." 
+ % (feed.title[:30] if feed else "NO FEED") + ) + @classmethod def feed_quota(cls, user_id, story_hash, feed_id=None, days=1, quota=1): - day_ago = datetime.datetime.now()-datetime.timedelta(days=days) - params = dict(user_id=user_id, - shared_date__gte=day_ago, - story_hash__nin=[story_hash]) + day_ago = datetime.datetime.now() - datetime.timedelta(days=days) + params = dict(user_id=user_id, shared_date__gte=day_ago, story_hash__nin=[story_hash]) if feed_id: - params['story_feed_id'] = feed_id + params["story_feed_id"] = feed_id shared_count = cls.objects.filter(**params).count() return shared_count >= quota - + @classmethod def count_potential_spammers(cls, days=1, destroy=False): try: @@ -1684,50 +1878,59 @@ class MSharedStory(mongo.DynamicDocument): except NameError: logging.debug(" ---> ~FR~SNMissing ~SBspam.py~SN") guaranteed_spammers = [] - + return guaranteed_spammers - + @classmethod def get_shared_stories_from_site(cls, feed_id, user_id, story_url, limit=3): - your_story = cls.objects.filter(story_feed_id=feed_id, - story_permalink=story_url, - user_id=user_id).limit(1).first() - same_stories = cls.objects.filter(story_feed_id=feed_id, - story_permalink=story_url, - user_id__ne=user_id - ).order_by('-shared_date') + your_story = ( + cls.objects.filter(story_feed_id=feed_id, story_permalink=story_url, user_id=user_id) + .limit(1) + .first() + ) + same_stories = cls.objects.filter( + story_feed_id=feed_id, story_permalink=story_url, user_id__ne=user_id + ).order_by("-shared_date") - same_stories = [{ - "user_id": story.user_id, - "comments": story.comments, - "relative_date": relative_timesince(story.shared_date), - "blurblog_permalink": story.blurblog_permalink(), - } for story in same_stories] - - other_stories = [] - if feed_id: - other_stories = cls.objects.filter(story_feed_id=feed_id, - story_permalink__ne=story_url - ).order_by('-shared_date').limit(limit) - other_stories = [{ + same_stories = [ + { "user_id": story.user_id, - "story_title": story.story_title, - "story_permalink": story.story_permalink, "comments": story.comments, "relative_date": relative_timesince(story.shared_date), "blurblog_permalink": story.blurblog_permalink(), - } for story in other_stories] - + } + for story in same_stories + ] + + other_stories = [] + if feed_id: + other_stories = ( + cls.objects.filter(story_feed_id=feed_id, story_permalink__ne=story_url) + .order_by("-shared_date") + .limit(limit) + ) + other_stories = [ + { + "user_id": story.user_id, + "story_title": story.story_title, + "story_permalink": story.story_permalink, + "comments": story.comments, + "relative_date": relative_timesince(story.shared_date), + "blurblog_permalink": story.blurblog_permalink(), + } + for story in other_stories + ] + return your_story, same_stories, other_stories - + def set_source_user_id(self, source_user_id): if source_user_id == self.user_id: return - + def find_source(source_user_id, seen_user_ids): - parent_shared_story = MSharedStory.objects.filter(user_id=source_user_id, - story_guid=self.story_guid, - story_feed_id=self.story_feed_id).limit(1) + parent_shared_story = MSharedStory.objects.filter( + user_id=source_user_id, story_guid=self.story_guid, story_feed_id=self.story_feed_id + ).limit(1) if parent_shared_story and parent_shared_story[0].source_user_id: user_id = parent_shared_story[0].source_user_id if user_id in seen_user_ids: @@ -1737,7 +1940,7 @@ class MSharedStory(mongo.DynamicDocument): return find_source(user_id, seen_user_ids) else: return source_user_id - + if source_user_id: 
source_user_id = find_source(source_user_id, []) if source_user_id == self.user_id: @@ -1746,19 +1949,21 @@ class MSharedStory(mongo.DynamicDocument): self.source_user_id = source_user_id logging.debug(" ---> Re-share from %s." % source_user_id) self.save() - - MInteraction.new_reshared_story(user_id=self.source_user_id, - reshare_user_id=self.user_id, - comments=self.comments, - story_title=self.story_title, - story_feed_id=self.story_feed_id, - story_id=self.story_guid) - + + MInteraction.new_reshared_story( + user_id=self.source_user_id, + reshare_user_id=self.user_id, + comments=self.comments, + story_title=self.story_title, + story_feed_id=self.story_feed_id, + story_id=self.story_guid, + ) + def mute_for_user(self, user_id): if user_id not in self.mute_email_users: self.mute_email_users.append(user_id) self.save() - + @classmethod def switch_feed(cls, original_feed_id, duplicate_feed_id): shared_stories = cls.objects.filter(story_feed_id=duplicate_feed_id) @@ -1766,7 +1971,7 @@ class MSharedStory(mongo.DynamicDocument): for story in shared_stories: story.story_feed_id = original_feed_id story.save() - + @classmethod def collect_popular_stories(cls, cutoff=None, days=None, shared_feed_ids=None): if not days: @@ -1778,7 +1983,7 @@ class MSharedStory(mongo.DynamicDocument): # shared_stories_count = sum(json.decode(MStatistics.get('stories_shared'))) # cutoff = cutoff or max(math.floor(.025 * shared_stories_count), 3) today = datetime.datetime.now() - datetime.timedelta(days=days) - + map_f = """ function() { emit(this.story_hash, { @@ -1809,74 +2014,82 @@ class MSharedStory(mongo.DynamicDocument): return value; } } - """ % {'cutoff': cutoff, 'shared_feed_ids': ', '.join(shared_feed_ids)} - res = cls.objects(shared_date__gte=today).map_reduce(map_f, reduce_f, - finalize_f=finalize_f, - output='inline') + """ % { + "cutoff": cutoff, + "shared_feed_ids": ", ".join(shared_feed_ids), + } + res = cls.objects(shared_date__gte=today).map_reduce( + map_f, reduce_f, finalize_f=finalize_f, output="inline" + ) stories = dict([(r.key, r.value) for r in res if r.value]) return stories, cutoff - + @classmethod def share_popular_stories(cls, cutoff=None, days=None, interactive=True): publish_new_stories = False - popular_profile = MSocialProfile.objects.get(user_id=User.objects.get(username='popular').pk) + popular_profile = MSocialProfile.objects.get(user_id=User.objects.get(username="popular").pk) popular_user = User.objects.get(pk=popular_profile.user_id) week_ago = datetime.datetime.now() - datetime.timedelta(days=7) - shared_feed_ids = [str(s.story_feed_id) - for s in MSharedStory.objects(user_id=popular_profile.user_id, - shared_date__gte=week_ago).only('story_feed_id')] - shared_stories_today, cutoff = cls.collect_popular_stories(cutoff=cutoff, days=days, - shared_feed_ids=shared_feed_ids) + shared_feed_ids = [ + str(s.story_feed_id) + for s in MSharedStory.objects(user_id=popular_profile.user_id, shared_date__gte=week_ago).only( + "story_feed_id" + ) + ] + shared_stories_today, cutoff = cls.collect_popular_stories( + cutoff=cutoff, days=days, shared_feed_ids=shared_feed_ids + ) shared = 0 - + for story_hash, story_info in list(shared_stories_today.items()): - story, _ = MStory.find_story(story_info['feed_id'], story_info['story_hash']) + story, _ = MStory.find_story(story_info["feed_id"], story_info["story_hash"]) if not story: logging.user(popular_user, "~FRPopular stories, story not found: %s" % story_info) continue if story.story_feed_id in shared_feed_ids: logging.user(popular_user, 
"~FRPopular stories, story feed just shared: %s" % story_info) continue - + if interactive: feed = Feed.get_by_id(story.story_feed_id) accept_story = eval(input("%s / %s [Y/n]: " % (story.decoded_story_title, feed.title))) - if accept_story in ['n', 'N']: continue - - story_db = dict([(k, v) for k, v in list(story._data.items()) - if k is not None and v is not None]) - story_db.pop('user_id', None) - story_db.pop('id', None) - story_db.pop('comments', None) - story_db.pop('replies', None) - story_db['has_comments'] = False - story_db['has_replies'] = False - story_db['shared_date'] = datetime.datetime.now() + if accept_story in ["n", "N"]: + continue + + story_db = dict([(k, v) for k, v in list(story._data.items()) if k is not None and v is not None]) + story_db.pop("user_id", None) + story_db.pop("id", None) + story_db.pop("comments", None) + story_db.pop("replies", None) + story_db["has_comments"] = False + story_db["has_replies"] = False + story_db["shared_date"] = datetime.datetime.now() story_values = { - 'user_id': popular_profile.user_id, - 'story_guid': story_db['story_guid'], + "user_id": popular_profile.user_id, + "story_guid": story_db["story_guid"], } try: shared_story = MSharedStory.objects.get(**story_values) except MSharedStory.DoesNotExist: story_values.update(story_db) shared_story = MSharedStory.objects.create(**story_values) - shared_story.post_to_service('twitter') + shared_story.post_to_service("twitter") shared += 1 shared_feed_ids.append(story.story_feed_id) publish_new_stories = True - logging.user(popular_user, "~FCSharing: ~SB~FM%s (%s shares, %s min)" % ( - story.decoded_story_title[:50], - story_info['count'], - cutoff)) - + logging.user( + popular_user, + "~FCSharing: ~SB~FM%s (%s shares, %s min)" + % (story.decoded_story_title[:50], story_info["count"], cutoff), + ) + if publish_new_stories: socialsubs = MSocialSubscription.objects.filter(subscription_user_id=popular_user.pk) for socialsub in socialsubs: socialsub.needs_unread_recalc = True socialsub.save() shared_story.publish_update_to_subscribers() - + return shared @staticmethod @@ -1884,13 +2097,13 @@ class MSharedStory(mongo.DynamicDocument): if not r: r = redis.Redis(connection_pool=settings.REDIS_POOL) pipeline = r.pipeline() - + for story_hash in story_hashes: feed_id, guid_hash = MStory.split_story_hash(story_hash) share_key = "S:%s:%s" % (feed_id, guid_hash) pipeline.sismember(share_key, user_id) shared_hashes = pipeline.execute() - + return [story_hash for s, story_hash in enumerate(story_hashes) if shared_hashes[s]] @classmethod @@ -1907,7 +2120,7 @@ class MSharedStory(mongo.DynamicDocument): for story in cls.objects.all(): story.sync_redis_shares(r=r) story.sync_redis_story(r=h) - + def sync_redis(self): self.sync_redis_shares() self.sync_redis_story() @@ -1915,8 +2128,8 @@ class MSharedStory(mongo.DynamicDocument): def sync_redis_shares(self, r=None): if not r: r = redis.Redis(connection_pool=settings.REDIS_POOL) - - share_key = "S:%s:%s" % (self.story_feed_id, self.guid_hash) + + share_key = "S:%s:%s" % (self.story_feed_id, self.guid_hash) comment_key = "C:%s:%s" % (self.story_feed_id, self.guid_hash) r.sadd(share_key, self.user_id) if self.has_comments: @@ -1929,20 +2142,18 @@ class MSharedStory(mongo.DynamicDocument): r = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL) # if not r2: # r2 = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL2) - - r.sadd('B:%s' % self.user_id, self.feed_guid_hash) + + r.sadd("B:%s" % self.user_id, self.feed_guid_hash) # r2.sadd('B:%s' % 
self.user_id, self.feed_guid_hash) - redis_data = { - self.feed_guid_hash : time.mktime(self.shared_date.timetuple()) - } - r.zadd('zB:%s' % self.user_id, redis_data) + redis_data = {self.feed_guid_hash: time.mktime(self.shared_date.timetuple())} + r.zadd("zB:%s" % self.user_id, redis_data) # r2.zadd('zB:%s' % self.user_id, {self.feed_guid_hash: # time.mktime(self.shared_date.timetuple())}) - r.expire('B:%s' % self.user_id, settings.DAYS_OF_STORY_HASHES*24*60*60) + r.expire("B:%s" % self.user_id, settings.DAYS_OF_STORY_HASHES * 24 * 60 * 60) # r2.expire('B:%s' % self.user_id, settings.DAYS_OF_STORY_HASHES*24*60*60) - r.expire('zB:%s' % self.user_id, settings.DAYS_OF_STORY_HASHES*24*60*60) + r.expire("zB:%s" % self.user_id, settings.DAYS_OF_STORY_HASHES * 24 * 60 * 60) # r2.expire('zB:%s' % self.user_id, settings.DAYS_OF_STORY_HASHES*24*60*60) - + def remove_from_redis(self): r = redis.Redis(connection_pool=settings.REDIS_POOL) share_key = "S:%s:%s" % (self.story_feed_id, self.guid_hash) @@ -1953,16 +2164,16 @@ class MSharedStory(mongo.DynamicDocument): h = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL) # h2 = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL2) - h.srem('B:%s' % self.user_id, self.feed_guid_hash) + h.srem("B:%s" % self.user_id, self.feed_guid_hash) # h2.srem('B:%s' % self.user_id, self.feed_guid_hash) - h.zrem('zB:%s' % self.user_id, self.feed_guid_hash) + h.zrem("zB:%s" % self.user_id, self.feed_guid_hash) # h2.zrem('zB:%s' % self.user_id, self.feed_guid_hash) def publish_update_to_subscribers(self): try: r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL) feed_id = "social:%s" % self.user_id - listeners_count = r.publish("%s:story" % feed_id, 'story:new:%s' % self.story_hash) + listeners_count = r.publish("%s:story" % feed_id, "story:new:%s" % self.story_hash) if listeners_count: logging.debug(" ---> ~FMPublished to %s subscribers" % (listeners_count)) except redis.ConnectionError: @@ -1970,178 +2181,191 @@ class MSharedStory(mongo.DynamicDocument): def comments_with_author(self): comments = { - 'id': self.id, - 'user_id': self.user_id, - 'comments': self.comments, - 'shared_date': relative_timesince(self.shared_date), - 'date': self.shared_date, - 'replies': [reply.canonical() for reply in self.replies], - 'liking_users': self.liking_users and list(self.liking_users), - 'source_user_id': self.source_user_id, + "id": self.id, + "user_id": self.user_id, + "comments": self.comments, + "shared_date": relative_timesince(self.shared_date), + "date": self.shared_date, + "replies": [reply.canonical() for reply in self.replies], + "liking_users": self.liking_users and list(self.liking_users), + "source_user_id": self.source_user_id, } return comments - + def comment_with_author_and_profiles(self): comment = self.comments_with_author() - profile_user_ids = set([comment['user_id']]) - reply_user_ids = [reply['user_id'] for reply in comment['replies']] + profile_user_ids = set([comment["user_id"]]) + reply_user_ids = [reply["user_id"] for reply in comment["replies"]] profile_user_ids = profile_user_ids.union(reply_user_ids) - profile_user_ids = profile_user_ids.union(comment['liking_users']) - if comment['source_user_id']: - profile_user_ids.add(comment['source_user_id']) + profile_user_ids = profile_user_ids.union(comment["liking_users"]) + if comment["source_user_id"]: + profile_user_ids.add(comment["source_user_id"]) profiles = MSocialProfile.objects.filter(user_id__in=list(profile_user_ids)) profiles = [profile.canonical(compact=True) for profile in 
profiles] return comment, profiles - + @classmethod def stories_with_comments_and_profiles(cls, stories, user_id, check_all=False): r = redis.Redis(connection_pool=settings.REDIS_POOL) friend_key = "F:%s:F" % (user_id) profile_user_ids = set() - for story in stories: - story['friend_comments'] = [] - story['friend_shares'] = [] - story['public_comments'] = [] - story['reply_count'] = 0 - if check_all or story['comment_count']: - comment_key = "C:%s:%s" % (story['story_feed_id'], story['guid_hash']) - story['comment_count'] = r.scard(comment_key) + for story in stories: + story["friend_comments"] = [] + story["friend_shares"] = [] + story["public_comments"] = [] + story["reply_count"] = 0 + if check_all or story["comment_count"]: + comment_key = "C:%s:%s" % (story["story_feed_id"], story["guid_hash"]) + story["comment_count"] = r.scard(comment_key) friends_with_comments = [int(f) for f in r.sinter(comment_key, friend_key)] sharer_user_ids = [int(f) for f in r.smembers(comment_key)] shared_stories = [] if sharer_user_ids: params = { - 'story_hash': story['story_hash'], - 'user_id__in': sharer_user_ids, + "story_hash": story["story_hash"], + "user_id__in": sharer_user_ids, } - if 'story_db_id' in params: - params.pop('story_db_id') - shared_stories = cls.objects.filter(**params)\ - .hint([('story_hash', 1)]) + if "story_db_id" in params: + params.pop("story_db_id") + shared_stories = cls.objects.filter(**params).hint([("story_hash", 1)]) for shared_story in shared_stories: comments = shared_story.comments_with_author() - story['reply_count'] += len(comments['replies']) + story["reply_count"] += len(comments["replies"]) if shared_story.user_id in friends_with_comments: - story['friend_comments'].append(comments) + story["friend_comments"].append(comments) else: - story['public_comments'].append(comments) - if comments.get('source_user_id'): - profile_user_ids.add(comments['source_user_id']) - if comments.get('liking_users'): - profile_user_ids = profile_user_ids.union(comments['liking_users']) - all_comments = story['friend_comments'] + story['public_comments'] - profile_user_ids = profile_user_ids.union([reply['user_id'] - for c in all_comments - for reply in c['replies']]) - if story.get('source_user_id'): - profile_user_ids.add(story['source_user_id']) - story['comment_count_friends'] = len(friends_with_comments) - story['comment_count_public'] = story['comment_count'] - len(friends_with_comments) - - if check_all or story['share_count']: - share_key = "S:%s:%s" % (story['story_feed_id'], story['guid_hash']) - story['share_count'] = r.scard(share_key) + story["public_comments"].append(comments) + if comments.get("source_user_id"): + profile_user_ids.add(comments["source_user_id"]) + if comments.get("liking_users"): + profile_user_ids = profile_user_ids.union(comments["liking_users"]) + all_comments = story["friend_comments"] + story["public_comments"] + profile_user_ids = profile_user_ids.union( + [reply["user_id"] for c in all_comments for reply in c["replies"]] + ) + if story.get("source_user_id"): + profile_user_ids.add(story["source_user_id"]) + story["comment_count_friends"] = len(friends_with_comments) + story["comment_count_public"] = story["comment_count"] - len(friends_with_comments) + + if check_all or story["share_count"]: + share_key = "S:%s:%s" % (story["story_feed_id"], story["guid_hash"]) + story["share_count"] = r.scard(share_key) friends_with_shares = [int(f) for f in r.sinter(share_key, friend_key)] nonfriend_user_ids = [int(f) for f in r.sdiff(share_key, friend_key)] 
profile_user_ids.update(nonfriend_user_ids) profile_user_ids.update(friends_with_shares) - story['commented_by_public'] = [c['user_id'] for c in story['public_comments']] - story['commented_by_friends'] = [c['user_id'] for c in story['friend_comments']] - story['shared_by_public'] = list(set(nonfriend_user_ids) - - set(story['commented_by_public'])) - story['shared_by_friends'] = list(set(friends_with_shares) - - set(story['commented_by_friends'])) - story['share_count_public'] = story['share_count'] - len(friends_with_shares) - story['share_count_friends'] = len(friends_with_shares) - story['friend_user_ids'] = list(set(story['commented_by_friends'] + story['shared_by_friends'])) - story['public_user_ids'] = list(set(story['commented_by_public'] + story['shared_by_public'])) - if not story['share_user_ids']: - story['share_user_ids'] = story['friend_user_ids'] + story['public_user_ids'] - if story.get('source_user_id'): - profile_user_ids.add(story['source_user_id']) + story["commented_by_public"] = [c["user_id"] for c in story["public_comments"]] + story["commented_by_friends"] = [c["user_id"] for c in story["friend_comments"]] + story["shared_by_public"] = list(set(nonfriend_user_ids) - set(story["commented_by_public"])) + story["shared_by_friends"] = list( + set(friends_with_shares) - set(story["commented_by_friends"]) + ) + story["share_count_public"] = story["share_count"] - len(friends_with_shares) + story["share_count_friends"] = len(friends_with_shares) + story["friend_user_ids"] = list( + set(story["commented_by_friends"] + story["shared_by_friends"]) + ) + story["public_user_ids"] = list(set(story["commented_by_public"] + story["shared_by_public"])) + if not story["share_user_ids"]: + story["share_user_ids"] = story["friend_user_ids"] + story["public_user_ids"] + if story.get("source_user_id"): + profile_user_ids.add(story["source_user_id"]) shared_stories = [] - if story['shared_by_friends']: + if story["shared_by_friends"]: params = { - 'story_hash': story['story_hash'], - 'user_id__in': story['shared_by_friends'], + "story_hash": story["story_hash"], + "user_id__in": story["shared_by_friends"], } - shared_stories = cls.objects.filter(**params)\ - .hint([('story_hash', 1)]) + shared_stories = cls.objects.filter(**params).hint([("story_hash", 1)]) for shared_story in shared_stories: comments = shared_story.comments_with_author() - story['reply_count'] += len(comments['replies']) - story['friend_shares'].append(comments) - profile_user_ids = profile_user_ids.union([reply['user_id'] - for reply in comments['replies']]) - if comments.get('source_user_id'): - profile_user_ids.add(comments['source_user_id']) - if comments.get('liking_users'): - profile_user_ids = profile_user_ids.union(comments['liking_users']) - + story["reply_count"] += len(comments["replies"]) + story["friend_shares"].append(comments) + profile_user_ids = profile_user_ids.union( + [reply["user_id"] for reply in comments["replies"]] + ) + if comments.get("source_user_id"): + profile_user_ids.add(comments["source_user_id"]) + if comments.get("liking_users"): + profile_user_ids = profile_user_ids.union(comments["liking_users"]) + profiles = MSocialProfile.objects.filter(user_id__in=list(profile_user_ids)) - + # Toss public comments by private profiles and muted users - profiles_dict = dict((profile['user_id'], profile) for profile in profiles) + profiles_dict = dict((profile["user_id"], profile) for profile in profiles) for story in stories: - commented_by_public = story.get('commented_by_public') or [c['user_id'] 
for c in story['public_comments']] + commented_by_public = story.get("commented_by_public") or [ + c["user_id"] for c in story["public_comments"] + ] for comment_user_id in commented_by_public: private = profiles_dict[comment_user_id].private muted = user_id in profiles_dict[comment_user_id].muted_by_user_ids if private or muted: - story['public_comments'] = [c for c in story['public_comments'] if c['user_id'] != comment_user_id] - story['comment_count_public'] -= 1 + story["public_comments"] = [ + c for c in story["public_comments"] if c["user_id"] != comment_user_id + ] + story["comment_count_public"] -= 1 profiles = [profile.canonical(compact=True) for profile in profiles] - + return stories, profiles - + @staticmethod def attach_users_to_stories(stories, profiles): - profiles = dict([(p['user_id'], p) for p in profiles]) + profiles = dict([(p["user_id"], p) for p in profiles]) for s, story in enumerate(stories): - for u, user_id in enumerate(story['shared_by_friends']): - if user_id not in profiles: continue - stories[s]['shared_by_friends'][u] = profiles[user_id] - for u, user_id in enumerate(story['shared_by_public']): - if user_id not in profiles: continue - stories[s]['shared_by_public'][u] = profiles[user_id] - for comment_set in ['friend_comments', 'public_comments', 'friend_shares']: + for u, user_id in enumerate(story["shared_by_friends"]): + if user_id not in profiles: + continue + stories[s]["shared_by_friends"][u] = profiles[user_id] + for u, user_id in enumerate(story["shared_by_public"]): + if user_id not in profiles: + continue + stories[s]["shared_by_public"][u] = profiles[user_id] + for comment_set in ["friend_comments", "public_comments", "friend_shares"]: for c, comment in enumerate(story[comment_set]): - if comment['user_id'] not in profiles: continue - stories[s][comment_set][c]['user'] = profiles[comment['user_id']] - if comment['source_user_id'] and comment['source_user_id'] in profiles: - stories[s][comment_set][c]['source_user'] = profiles[comment['source_user_id']] - for r, reply in enumerate(comment['replies']): - if reply['user_id'] not in profiles: continue - stories[s][comment_set][c]['replies'][r]['user'] = profiles[reply['user_id']] - stories[s][comment_set][c]['liking_user_ids'] = list(comment['liking_users']) - for u, user_id in enumerate(comment['liking_users']): - if user_id not in profiles: continue - stories[s][comment_set][c]['liking_users'][u] = profiles[user_id] + if comment["user_id"] not in profiles: + continue + stories[s][comment_set][c]["user"] = profiles[comment["user_id"]] + if comment["source_user_id"] and comment["source_user_id"] in profiles: + stories[s][comment_set][c]["source_user"] = profiles[comment["source_user_id"]] + for r, reply in enumerate(comment["replies"]): + if reply["user_id"] not in profiles: + continue + stories[s][comment_set][c]["replies"][r]["user"] = profiles[reply["user_id"]] + stories[s][comment_set][c]["liking_user_ids"] = list(comment["liking_users"]) + for u, user_id in enumerate(comment["liking_users"]): + if user_id not in profiles: + continue + stories[s][comment_set][c]["liking_users"][u] = profiles[user_id] return stories - + @staticmethod def attach_users_to_comment(comment, profiles): - profiles = dict([(p['user_id'], p) for p in profiles]) + profiles = dict([(p["user_id"], p) for p in profiles]) - if comment['user_id'] not in profiles: return comment - comment['user'] = profiles[comment['user_id']] + if comment["user_id"] not in profiles: + return comment + comment["user"] = 
profiles[comment["user_id"]] - if comment['source_user_id']: - comment['source_user'] = profiles[comment['source_user_id']] + if comment["source_user_id"]: + comment["source_user"] = profiles[comment["source_user_id"]] - for r, reply in enumerate(comment['replies']): - if reply['user_id'] not in profiles: continue - comment['replies'][r]['user'] = profiles[reply['user_id']] - comment['liking_user_ids'] = list(comment['liking_users']) - for u, user_id in enumerate(comment['liking_users']): - if user_id not in profiles: continue - comment['liking_users'][u] = profiles[user_id] + for r, reply in enumerate(comment["replies"]): + if reply["user_id"] not in profiles: + continue + comment["replies"][r]["user"] = profiles[reply["user_id"]] + comment["liking_user_ids"] = list(comment["liking_users"]) + for u, user_id in enumerate(comment["liking_users"]): + if user_id not in profiles: + continue + comment["liking_users"][u] = profiles[user_id] return comment - + def add_liking_user(self, user_id): if user_id not in self.liking_users: self.liking_users.append(user_id) @@ -2151,15 +2375,11 @@ class MSharedStory(mongo.DynamicDocument): if user_id in self.liking_users: self.liking_users.remove(user_id) self.save() - + def blurblog_permalink(self): profile = MSocialProfile.get_user(self.user_id) - return "%sstory/%s/%s" % ( - profile.blurblog_url, - slugify(self.story_title)[:20], - self.story_hash - ) - + return "%sstory/%s/%s" % (profile.blurblog_url, slugify(self.story_title)[:20], self.story_hash) + def generate_post_to_service_message(self, truncate=None, include_url=True): message = strip_tags(self.comments) if not message or len(message) < 1: @@ -2178,55 +2398,56 @@ class MSharedStory(mongo.DynamicDocument): if truncate: message = truncate_chars(message, truncate - 24) message += " " + self.blurblog_permalink() - + return message - + def post_to_service(self, service): user = User.objects.get(pk=self.user_id) - + if service in self.posted_to_services: logging.user(user, "~BM~FRAlready posted to %s." 
% (service)) return - - posts_last_hour = MSharedStory.objects.filter(user_id=self.user_id, - posted_to_services__contains=service, - shared_date__gte=datetime.datetime.now() - - datetime.timedelta(hours=1)).count() + + posts_last_hour = MSharedStory.objects.filter( + user_id=self.user_id, + posted_to_services__contains=service, + shared_date__gte=datetime.datetime.now() - datetime.timedelta(hours=1), + ).count() if posts_last_hour >= 3: logging.user(user, "~BM~FRPosted to %s > 3 times in past hour" % service) return - + posted = False social_service = MSocialServices.objects.get(user_id=self.user_id) - + message = self.generate_post_to_service_message() logging.user(user, "~BM~FGPosting to %s: ~SB%s" % (service, message)) - - if service == 'twitter': + + if service == "twitter": posted = social_service.post_to_twitter(self) - elif service == 'facebook': + elif service == "facebook": posted = social_service.post_to_facebook(self) - + if posted: self.posted_to_services.append(service) self.save() - + def notify_user_ids(self, include_parent=True): user_ids = set() for reply in self.replies: if reply.user_id not in self.mute_email_users: user_ids.add(reply.user_id) - + if include_parent and self.user_id not in self.mute_email_users: user_ids.add(self.user_id) - + return list(user_ids) - + def reply_for_id(self, reply_id): for reply in self.replies: if reply.reply_id == reply_id: return reply - + def send_emails_for_new_reply(self, reply_id): if reply_id in self.emailed_replies: logging.debug(" ***> Already sent reply email: %s on %s" % (reply_id, self)) @@ -2236,7 +2457,7 @@ class MSharedStory(mongo.DynamicDocument): if not reply: logging.debug(" ***> Reply doesn't exist: %s on %s" % (reply_id, self)) return - + notify_user_ids = self.notify_user_ids() if reply.user_id in notify_user_ids: notify_user_ids.remove(reply.user_id) @@ -2246,15 +2467,15 @@ class MSharedStory(mongo.DynamicDocument): story_feed = Feed.get_by_id(self.story_feed_id) comment = self.comments_with_author() - profile_user_ids = set([comment['user_id']]) - reply_user_ids = list(r['user_id'] for r in comment['replies']) + profile_user_ids = set([comment["user_id"]]) + reply_user_ids = list(r["user_id"] for r in comment["replies"]) profile_user_ids = profile_user_ids.union(reply_user_ids) if self.source_user_id: profile_user_ids.add(self.source_user_id) profiles = MSocialProfile.objects.filter(user_id__in=list(profile_user_ids)) profiles = [profile.canonical(compact=True) for profile in profiles] comment = MSharedStory.attach_users_to_comment(comment, profiles) - + for user_id in notify_user_ids: user = User.objects.get(pk=user_id) @@ -2264,170 +2485,190 @@ class MSharedStory(mongo.DynamicDocument): elif not user.profile.send_emails: logging.user(user, "~FMDisabled emails, skipping.") continue - + mute_url = "http://%s%s" % ( Site.objects.get_current().domain, - reverse('social-mute-story', kwargs={ - 'secret_token': user.profile.secret_token, - 'shared_story_id': self.id, - }) + reverse( + "social-mute-story", + kwargs={ + "secret_token": user.profile.secret_token, + "shared_story_id": self.id, + }, + ), ) data = { - 'reply_user_profile': reply_user_profile, - 'comment': comment, - 'shared_story': self, - 'story_feed': story_feed, - 'mute_url': mute_url, + "reply_user_profile": reply_user_profile, + "comment": comment, + "shared_story": self, + "story_feed": story_feed, + "mute_url": mute_url, } - story_title = self.decoded_story_title.replace('\n', ' ') - - text = render_to_string('mail/email_reply.txt', data) - html = 
pynliner.fromString(render_to_string('mail/email_reply.xhtml', data)) - subject = "%s replied to you on \"%s\" on NewsBlur" % (reply_user.username, story_title) - msg = EmailMultiAlternatives(subject, text, - from_email='NewsBlur <%s>' % settings.HELLO_EMAIL, - to=['%s <%s>' % (user.username, user.email)]) + story_title = self.decoded_story_title.replace("\n", " ") + + text = render_to_string("mail/email_reply.txt", data) + html = pynliner.fromString(render_to_string("mail/email_reply.xhtml", data)) + subject = '%s replied to you on "%s" on NewsBlur' % (reply_user.username, story_title) + msg = EmailMultiAlternatives( + subject, + text, + from_email="NewsBlur <%s>" % settings.HELLO_EMAIL, + to=["%s <%s>" % (user.username, user.email)], + ) msg.attach_alternative(html, "text/html") msg.send() sent_emails += 1 - - logging.user(reply_user, "~BB~FM~SBSending %s/%s email%s for new reply: %s" % ( - sent_emails, len(notify_user_ids), - '' if len(notify_user_ids) == 1 else 's', - self.decoded_story_title[:30])) - + + logging.user( + reply_user, + "~BB~FM~SBSending %s/%s email%s for new reply: %s" + % ( + sent_emails, + len(notify_user_ids), + "" if len(notify_user_ids) == 1 else "s", + self.decoded_story_title[:30], + ), + ) + self.emailed_replies.append(reply.reply_id) self.save() - + def send_email_for_reshare(self): if self.emailed_reshare: logging.debug(" ***> Already sent reply email: %s" % self) return - + reshare_user = User.objects.get(pk=self.user_id) reshare_user_profile = MSocialProfile.get_user(self.user_id) original_user = User.objects.get(pk=self.source_user_id) - original_shared_story = MSharedStory.objects.get(user_id=self.source_user_id, - story_guid=self.story_guid) - + original_shared_story = MSharedStory.objects.get( + user_id=self.source_user_id, story_guid=self.story_guid + ) + if not original_user.email or not original_user.profile.send_emails: if not original_user.email: logging.user(original_user, "~FMNo email to send to, skipping.") elif not original_user.profile.send_emails: logging.user(original_user, "~FMDisabled emails, skipping.") return - + story_feed = Feed.get_by_id(self.story_feed_id) comment = self.comments_with_author() - profile_user_ids = set([comment['user_id']]) - reply_user_ids = [reply['user_id'] for reply in comment['replies']] + profile_user_ids = set([comment["user_id"]]) + reply_user_ids = [reply["user_id"] for reply in comment["replies"]] profile_user_ids = profile_user_ids.union(reply_user_ids) if self.source_user_id: profile_user_ids.add(self.source_user_id) profiles = MSocialProfile.objects.filter(user_id__in=list(profile_user_ids)) profiles = [profile.canonical(compact=True) for profile in profiles] comment = MSharedStory.attach_users_to_comment(comment, profiles) - + mute_url = "http://%s%s" % ( Site.objects.get_current().domain, - reverse('social-mute-story', kwargs={ - 'secret_token': original_user.profile.secret_token, - 'shared_story_id': original_shared_story.id, - }) + reverse( + "social-mute-story", + kwargs={ + "secret_token": original_user.profile.secret_token, + "shared_story_id": original_shared_story.id, + }, + ), ) data = { - 'comment': comment, - 'shared_story': self, - 'reshare_user_profile': reshare_user_profile, - 'original_shared_story': original_shared_story, - 'story_feed': story_feed, - 'mute_url': mute_url, + "comment": comment, + "shared_story": self, + "reshare_user_profile": reshare_user_profile, + "original_shared_story": original_shared_story, + "story_feed": story_feed, + "mute_url": mute_url, } - story_title = 
self.decoded_story_title.replace('\n', ' ') - - text = render_to_string('mail/email_reshare.txt', data) - html = pynliner.fromString(render_to_string('mail/email_reshare.xhtml', data)) - subject = "%s re-shared \"%s\" from you on NewsBlur" % (reshare_user.username, story_title) - msg = EmailMultiAlternatives(subject, text, - from_email='NewsBlur <%s>' % settings.HELLO_EMAIL, - to=['%s <%s>' % (original_user.username, original_user.email)]) + story_title = self.decoded_story_title.replace("\n", " ") + + text = render_to_string("mail/email_reshare.txt", data) + html = pynliner.fromString(render_to_string("mail/email_reshare.xhtml", data)) + subject = '%s re-shared "%s" from you on NewsBlur' % (reshare_user.username, story_title) + msg = EmailMultiAlternatives( + subject, + text, + from_email="NewsBlur <%s>" % settings.HELLO_EMAIL, + to=["%s <%s>" % (original_user.username, original_user.email)], + ) msg.attach_alternative(html, "text/html") msg.send() - + self.emailed_reshare = True self.save() - - logging.user(reshare_user, "~BB~FM~SBSending %s email for story re-share: %s" % ( - original_user.username, - self.decoded_story_title[:30])) - + + logging.user( + reshare_user, + "~BB~FM~SBSending %s email for story re-share: %s" + % (original_user.username, self.decoded_story_title[:30]), + ) + def extract_image_urls(self, force=False): if not self.story_content_z: return if self.image_urls and not force: return - + soup = BeautifulSoup(zlib.decompress(self.story_content_z), features="lxml") - image_sources = [img.get('src') for img in soup.findAll('img') if img and img.get('src')] + image_sources = [img.get("src") for img in soup.findAll("img") if img and img.get("src")] if len(image_sources) > 0: self.image_urls = image_sources max_length = MSharedStory.image_urls.field.max_length - while len(''.join(self.image_urls)) > max_length: + while len("".join(self.image_urls)) > max_length: if len(self.image_urls) <= 1: - self.image_urls[0] = self.image_urls[0][:max_length-1] + self.image_urls[0] = self.image_urls[0][: max_length - 1] break else: self.image_urls.pop() self.save() - + def calculate_image_sizes(self, force=False): if not self.story_content_z: return - + if not force and self.image_count: return self.image_sizes - + headers = { - 'User-Agent': 'NewsBlur Image Fetcher - %s' % ( - settings.NEWSBLUR_URL - ), + "User-Agent": "NewsBlur Image Fetcher - %s" % (settings.NEWSBLUR_URL), } - + self.extract_image_urls() image_sizes = [] - + for image_source in self.image_urls[:10]: if any(ignore in image_source for ignore in IGNORE_IMAGE_SOURCES): continue width, height = ImageOps.image_size(image_source, headers=headers) # if width <= 16 or height <= 16: # continue - image_sizes.append({'src': image_source, 'size': (width, height)}) - + image_sizes.append({"src": image_source, "size": (width, height)}) + if image_sizes: - image_sizes = sorted(image_sizes, key=lambda i: i['size'][0] * i['size'][1], - reverse=True) + image_sizes = sorted(image_sizes, key=lambda i: i["size"][0] * i["size"][1], reverse=True) self.image_sizes = image_sizes self.image_count = len(image_sizes) self.save() - - logging.debug(" ---> ~SN~FGFetched image sizes on shared story: ~SB%s/%s images" % - (self.image_count, len(self.image_urls))) - + + logging.debug( + " ---> ~SN~FGFetched image sizes on shared story: ~SB%s/%s images" + % (self.image_count, len(self.image_urls)) + ) + return image_sizes - + def fetch_original_text(self, force=False, request=None, debug=False): original_text_z = self.original_text_z feed = 
Feed.get_by_id(self.story_feed_id) - + if not original_text_z or force: ti = TextImporter(self, feed=feed, request=request, debug=False) original_text = ti.fetch() else: logging.user(request, "~FYFetching ~FGoriginal~FY story text, ~SBfound.") original_text = zlib.decompress(original_text_z) - + return original_text def fetch_original_page(self, force=False, request=None, debug=False): @@ -2438,62 +2679,61 @@ class MSharedStory(mongo.DynamicDocument): else: logging.user(request, "~FYFetching ~FGoriginal~FY story page, ~SBfound.") original_page = zlib.decompress(self.original_page_z) - + return original_page + class MSocialServices(mongo.Document): - user_id = mongo.IntField() - autofollow = mongo.BooleanField(default=True) - twitter_uid = mongo.StringField() - twitter_access_key = mongo.StringField() + user_id = mongo.IntField() + autofollow = mongo.BooleanField(default=True) + twitter_uid = mongo.StringField() + twitter_access_key = mongo.StringField() twitter_access_secret = mongo.StringField() - twitter_friend_ids = mongo.ListField(mongo.StringField()) - twitter_picture_url = mongo.StringField() - twitter_username = mongo.StringField() - twitter_refresh_date = mongo.DateTimeField() - facebook_uid = mongo.StringField() + twitter_friend_ids = mongo.ListField(mongo.StringField()) + twitter_picture_url = mongo.StringField() + twitter_username = mongo.StringField() + twitter_refresh_date = mongo.DateTimeField() + facebook_uid = mongo.StringField() facebook_access_token = mongo.StringField() - facebook_friend_ids = mongo.ListField(mongo.StringField()) - facebook_picture_url = mongo.StringField() + facebook_friend_ids = mongo.ListField(mongo.StringField()) + facebook_picture_url = mongo.StringField() facebook_refresh_date = mongo.DateTimeField() - upload_picture_url = mongo.StringField() - syncing_twitter = mongo.BooleanField(default=False) - syncing_facebook = mongo.BooleanField(default=False) - + upload_picture_url = mongo.StringField() + syncing_twitter = mongo.BooleanField(default=False) + syncing_facebook = mongo.BooleanField(default=False) + meta = { - 'collection': 'social_services', - 'indexes': ['user_id', 'twitter_friend_ids', 'facebook_friend_ids', 'twitter_uid', 'facebook_uid'], - 'allow_inheritance': False, - 'strict': False, + "collection": "social_services", + "indexes": ["user_id", "twitter_friend_ids", "facebook_friend_ids", "twitter_uid", "facebook_uid"], + "allow_inheritance": False, + "strict": False, } - + def __str__(self): user = User.objects.get(pk=self.user_id) return "%s (Twitter: %s, FB: %s)" % (user.username, self.twitter_uid, self.facebook_uid) - + def canonical(self): user = User.objects.get(pk=self.user_id) return { - 'twitter': { - 'twitter_username': self.twitter_username, - 'twitter_picture_url': self.twitter_picture_url, - 'twitter_uid': self.twitter_uid, - 'syncing': self.syncing_twitter, + "twitter": { + "twitter_username": self.twitter_username, + "twitter_picture_url": self.twitter_picture_url, + "twitter_uid": self.twitter_uid, + "syncing": self.syncing_twitter, }, - 'facebook': { - 'facebook_uid': self.facebook_uid, - 'facebook_picture_url': self.facebook_picture_url, - 'syncing': self.syncing_facebook, + "facebook": { + "facebook_uid": self.facebook_uid, + "facebook_picture_url": self.facebook_picture_url, + "syncing": self.syncing_facebook, }, - 'gravatar': { - 'gravatar_picture_url': "https://www.gravatar.com/avatar/" + \ - hashlib.md5(user.email.lower().encode('utf-8')).hexdigest() + "gravatar": { + "gravatar_picture_url": 
"https://www.gravatar.com/avatar/" + + hashlib.md5(user.email.lower().encode("utf-8")).hexdigest() }, - 'upload': { - 'upload_picture_url': self.upload_picture_url - } + "upload": {"upload_picture_url": self.upload_picture_url}, } - + @classmethod def get_user(cls, user_id): try: @@ -2513,26 +2753,26 @@ class MSocialServices(mongo.Document): if created: profile.save() return profile - + @classmethod def profile(cls, user_id): profile = cls.get_user(user_id=user_id) return profile.canonical() - + def save_uploaded_photo(self, photo): photo_body = photo.read() filename = photo.name s3 = s3_utils.S3Store() image_name = s3.save_profile_picture(self.user_id, filename, photo_body) - if image_name: + if image_name: self.upload_picture_url = "https://s3.amazonaws.com/%s/avatars/%s/thumbnail_%s" % ( settings.S3_AVATARS_BUCKET_NAME, self.user_id, image_name, ) self.save() - + return image_name and self.upload_picture_url def twitter_api(self): @@ -2542,42 +2782,42 @@ class MSocialServices(mongo.Document): auth.set_access_token(self.twitter_access_key, self.twitter_access_secret) api = tweepy.API(auth) return api - + def facebook_api(self): graph = facebook.GraphAPI(access_token=self.facebook_access_token, version="3.1") return graph - + def sync_twitter_friends(self): user = User.objects.get(pk=self.user_id) logging.user(user, "~BG~FMTwitter import starting...") - + api = self.twitter_api() try: twitter_user = api.me() except tweepy.TweepError as e: api = None - + if not api: logging.user(user, "~BG~FMTwitter import ~SBfailed~SN: no api access.") self.syncing_twitter = False self.save() return - + self.twitter_picture_url = twitter_user.profile_image_url_https self.twitter_username = twitter_user.screen_name self.twitter_refreshed_date = datetime.datetime.utcnow() self.syncing_twitter = False self.save() - + profile = MSocialProfile.get_user(self.user_id) profile.location = profile.location or twitter_user.location profile.bio = profile.bio or twitter_user.description profile.website = profile.website or twitter_user.url profile.save() profile.count_follows() - + if not profile.photo_url or not profile.photo_service: - self.set_photo('twitter') + self.set_photo("twitter") try: friend_ids = list(str(friend.id) for friend in list(tweepy.Cursor(api.friends).items())) @@ -2588,17 +2828,17 @@ class MSocialServices(mongo.Document): logging.user(user, "~BG~FMTwitter import ~SBfailed~SN: no friend_ids.") self.twitter_friend_ids = friend_ids self.save() - + following = self.follow_twitter_friends() - + if not following: logging.user(user, "~BG~FMTwitter import finished.") - + def follow_twitter_friends(self): social_profile = MSocialProfile.get_user(self.user_id) following = [] followers = 0 - + if not self.autofollow: return following @@ -2609,7 +2849,7 @@ class MSocialServices(mongo.Document): socialsub = social_profile.follow_user(followee_user_id) if socialsub: following.append(followee_user_id) - + # Friends already on NewsBlur should follow back # following_users = MSocialServices.objects.filter(twitter_friend_ids__contains=self.twitter_uid) # for following_user in following_users: @@ -2617,16 +2857,20 @@ class MSocialServices(mongo.Document): # following_user_profile = MSocialProfile.get_user(following_user.user_id) # following_user_profile.follow_user(self.user_id, check_unfollowed=True) # followers += 1 - + user = User.objects.get(pk=self.user_id) - logging.user(user, "~BG~FMTwitter import: %s users, now following ~SB%s~SN with ~SB%s~SN follower-backs" % (len(self.twitter_friend_ids), 
len(following), followers)) - + logging.user( + user, + "~BG~FMTwitter import: %s users, now following ~SB%s~SN with ~SB%s~SN follower-backs" + % (len(self.twitter_friend_ids), len(following), followers), + ) + return following - + def sync_facebook_friends(self): user = User.objects.get(pk=self.user_id) logging.user(user, "~BG~FMFacebook import starting...") - + graph = self.facebook_api() if not graph: logging.user(user, "~BG~FMFacebook import ~SBfailed~SN: no api access.") @@ -2647,25 +2891,27 @@ class MSocialServices(mongo.Document): self.facebook_picture_url = "https://graph.facebook.com/%s/picture" % self.facebook_uid self.syncing_facebook = False self.save() - - facebook_user = graph.request('me', args={'fields':'website,about,location'}) + + facebook_user = graph.request("me", args={"fields": "website,about,location"}) profile = MSocialProfile.get_user(self.user_id) - profile.location = profile.location or (facebook_user.get('location') and facebook_user['location']['name']) - profile.bio = profile.bio or facebook_user.get('about') - if not profile.website and facebook_user.get('website'): - profile.website = facebook_user.get('website').split()[0] + profile.location = profile.location or ( + facebook_user.get("location") and facebook_user["location"]["name"] + ) + profile.bio = profile.bio or facebook_user.get("about") + if not profile.website and facebook_user.get("website"): + profile.website = facebook_user.get("website").split()[0] profile.save() profile.count_follows() if not profile.photo_url or not profile.photo_service: - self.set_photo('facebook') - + self.set_photo("facebook") + self.follow_facebook_friends() - + def follow_facebook_friends(self): social_profile = MSocialProfile.get_user(self.user_id) following = [] followers = 0 - + if not self.autofollow: return following @@ -2676,7 +2922,7 @@ class MSocialServices(mongo.Document): socialsub = social_profile.follow_user(followee_user_id) if socialsub: following.append(followee_user_id) - + # Friends already on NewsBlur should follow back # following_users = MSocialServices.objects.filter(facebook_friend_ids__contains=self.facebook_uid) # for following_user in following_users: @@ -2684,47 +2930,52 @@ class MSocialServices(mongo.Document): # following_user_profile = MSocialProfile.get_user(following_user.user_id) # following_user_profile.follow_user(self.user_id, check_unfollowed=True) # followers += 1 - + user = User.objects.get(pk=self.user_id) - logging.user(user, "~BG~FMFacebook import: %s users, now following ~SB%s~SN with ~SB%s~SN follower-backs" % (len(self.facebook_friend_ids), len(following), followers)) - + logging.user( + user, + "~BG~FMFacebook import: %s users, now following ~SB%s~SN with ~SB%s~SN follower-backs" + % (len(self.facebook_friend_ids), len(following), followers), + ) + return following - + def disconnect_twitter(self): self.syncing_twitter = False self.twitter_uid = None self.save() - + def disconnect_facebook(self): self.syncing_facebook = False self.facebook_uid = None self.save() - + def set_photo(self, service): profile = MSocialProfile.get_user(self.user_id) - if service == 'nothing': + if service == "nothing": service = None profile.photo_service = service if not service: profile.photo_url = None - elif service == 'twitter': + elif service == "twitter": profile.photo_url = self.twitter_picture_url - elif service == 'facebook': + elif service == "facebook": profile.photo_url = self.facebook_picture_url - elif service == 'upload': + elif service == "upload": profile.photo_url = 
self.upload_picture_url - elif service == 'gravatar': + elif service == "gravatar": user = User.objects.get(pk=self.user_id) - profile.photo_url = "https://www.gravatar.com/avatar/" + \ - hashlib.md5(user.email.encode('utf-8')).hexdigest() + profile.photo_url = ( + "https://www.gravatar.com/avatar/" + hashlib.md5(user.email.encode("utf-8")).hexdigest() + ) profile.save() return profile - + @classmethod def sync_all_twitter_photos(cls, days=14, everybody=False): if everybody: - sharers = [ss.user_id for ss in MSocialServices.objects.all().only('user_id')] + sharers = [ss.user_id for ss in MSocialServices.objects.all().only("user_id")] elif days: week_ago = datetime.datetime.now() - datetime.timedelta(days=days) shares = MSharedStory.objects.filter(shared_date__gte=week_ago) @@ -2736,7 +2987,8 @@ class MSocialServices(mongo.Document): profile = MSocialProfile.objects.get(user_id=user_id) except MSocialProfile.DoesNotExist: continue - if not profile.photo_service == 'twitter': continue + if not profile.photo_service == "twitter": + continue ss = MSocialServices.objects.get(user_id=user_id) try: ss.sync_twitter_photo() @@ -2749,10 +3001,10 @@ class MSocialServices(mongo.Document): if profile.photo_service != "twitter": return - + user = User.objects.get(pk=self.user_id) logging.user(user, "~FCSyncing Twitter profile photo...") - + try: api = self.twitter_api() me = api.me() @@ -2764,12 +3016,12 @@ class MSocialServices(mongo.Document): self.twitter_picture_url = me.profile_image_url_https self.save() - self.set_photo('twitter') - + self.set_photo("twitter") + def post_to_twitter(self, shared_story): message = shared_story.generate_post_to_service_message(truncate=280) shared_story.calculate_image_sizes() - + try: api = self.twitter_api() filename = self.fetch_image_file_for_twitter(shared_story) @@ -2782,93 +3034,101 @@ class MSocialServices(mongo.Document): user = User.objects.get(pk=self.user_id) logging.user(user, "~FRTwitter error: ~SB%s" % e) return - + return True - + def fetch_image_file_for_twitter(self, shared_story): - if not shared_story.image_urls: return + if not shared_story.image_urls: + return user = User.objects.get(pk=self.user_id) logging.user(user, "~FCFetching image for twitter: ~SB%s" % shared_story.image_urls[0]) - + url = shared_story.image_urls[0] image_filename = os.path.basename(urllib.parse.urlparse(url).path) req = requests.get(url, stream=True, timeout=10) filename = "/tmp/%s-%s" % (shared_story.story_hash, image_filename) - + if req.status_code == 200: f = open(filename, "wb") for chunk in req: f.write(chunk) f.close() - + return filename - + def post_to_facebook(self, shared_story): message = shared_story.generate_post_to_service_message(include_url=False) shared_story.calculate_image_sizes() content = zlib.decompress(shared_story.story_content_z)[:1024] - + try: api = self.facebook_api() # api.put_wall_post(message=message) - api.put_object('me', '%s:share' % settings.FACEBOOK_NAMESPACE, - link=shared_story.blurblog_permalink(), - type="link", - name=shared_story.decoded_story_title, - description=content, - website=shared_story.blurblog_permalink(), - message=message, - ) + api.put_object( + "me", + "%s:share" % settings.FACEBOOK_NAMESPACE, + link=shared_story.blurblog_permalink(), + type="link", + name=shared_story.decoded_story_title, + description=content, + website=shared_story.blurblog_permalink(), + message=message, + ) except facebook.GraphAPIError as e: logging.debug("---> ~SN~FMFacebook posting error, disconnecting: ~SB~FR%s" % e) 
self.disconnect_facebook() return - + return True - + class MInteraction(mongo.Document): - user_id = mongo.IntField() - date = mongo.DateTimeField(default=datetime.datetime.now) - category = mongo.StringField() - title = mongo.StringField() - content = mongo.StringField() + user_id = mongo.IntField() + date = mongo.DateTimeField(default=datetime.datetime.now) + category = mongo.StringField() + title = mongo.StringField() + content = mongo.StringField() with_user_id = mongo.IntField() - feed_id = mongo.DynamicField() - story_feed_id= mongo.IntField() - content_id = mongo.StringField() - + feed_id = mongo.DynamicField() + story_feed_id = mongo.IntField() + content_id = mongo.StringField() + meta = { - 'collection': 'interactions', - 'indexes': [('user_id', '-date'), 'category', 'with_user_id'], - 'allow_inheritance': False, - 'ordering': ['-date'], + "collection": "interactions", + "indexes": [("user_id", "-date"), "category", "with_user_id"], + "allow_inheritance": False, + "ordering": ["-date"], } - + def __str__(self): user = User.objects.get(pk=self.user_id) with_user = self.with_user_id and User.objects.get(pk=self.with_user_id) - return "<%s> %s on %s: %s - %s" % (user.username, with_user and with_user.username, self.date, - self.category, self.content and self.content[:20]) - + return "<%s> %s on %s: %s - %s" % ( + user.username, + with_user and with_user.username, + self.date, + self.category, + self.content and self.content[:20], + ) + def canonical(self): story_hash = None if self.story_feed_id: story_hash = MStory.ensure_story_hash(self.content_id, story_feed_id=self.story_feed_id) return { - 'date': self.date, - 'category': self.category, - 'title': self.title, - 'content': self.content, - 'with_user_id': self.with_user_id, - 'feed_id': self.feed_id, - 'story_feed_id': self.story_feed_id, - 'content_id': self.content_id, - 'story_hash': story_hash, + "date": self.date, + "category": self.category, + "title": self.title, + "content": self.content, + "with_user_id": self.with_user_id, + "feed_id": self.feed_id, + "story_feed_id": self.story_feed_id, + "content_id": self.content_id, + "story_hash": story_hash, } - + @classmethod def trim(cls, user_id, limit=100): user = User.objects.get(pk=user_id) @@ -2877,22 +3137,24 @@ class MInteraction(mongo.Document): if interaction_count == 0: interaction_count = cls.objects.filter(user_id=user_id).count() - logging.user(user, "~FBNot trimming interactions, only ~SB%s~SN interactions found" % interaction_count) + logging.user( + user, "~FBNot trimming interactions, only ~SB%s~SN interactions found" % interaction_count + ) return - + logging.user(user, "~FBTrimming ~SB%s~SN interactions..." 
% interaction_count) for interaction in interactions: interaction.delete() logging.user(user, "~FBDone trimming ~SB%s~SN interactions" % interaction_count) - + @classmethod def publish_update_to_subscribers(self, user_id): user = User.objects.get(pk=user_id) try: r = redis.Redis(connection_pool=settings.REDIS_POOL) - listeners_count = r.publish(user.username, 'interaction:new') + listeners_count = r.publish(user.username, "interaction:new") if listeners_count: logging.debug(" ---> ~FMPublished to %s subscribers" % (listeners_count)) except redis.ConnectionError: @@ -2904,74 +3166,85 @@ class MInteraction(mongo.Document): dashboard_date = user_profile.dashboard_date or user_profile.last_seen_on page = max(1, page) limit = int(limit) if limit else 4 - offset = (page-1) * limit - + offset = (page - 1) * limit + interactions_db = cls.objects.filter(user_id=user_id) if categories: interactions_db = interactions_db.filter(category__in=categories) - interactions_db = interactions_db[offset:offset+limit+1] - + interactions_db = interactions_db[offset : offset + limit + 1] + has_next_page = len(interactions_db) > limit - interactions_db = interactions_db[offset:offset+limit] + interactions_db = interactions_db[offset : offset + limit] with_user_ids = [i.with_user_id for i in interactions_db if i.with_user_id] - social_profiles = dict((p.user_id, p) for p in MSocialProfile.objects.filter(user_id__in=with_user_ids)) - + social_profiles = dict( + (p.user_id, p) for p in MSocialProfile.objects.filter(user_id__in=with_user_ids) + ) + interactions = [] for interaction_db in interactions_db: interaction = interaction_db.canonical() social_profile = social_profiles.get(interaction_db.with_user_id) if social_profile: - interaction['photo_url'] = social_profile.profile_photo_url - interaction['with_user'] = social_profiles.get(interaction_db.with_user_id) - interaction['time_since'] = relative_timesince(interaction_db.date) - interaction['date'] = interaction_db.date - interaction['is_new'] = interaction_db.date > dashboard_date + interaction["photo_url"] = social_profile.profile_photo_url + interaction["with_user"] = social_profiles.get(interaction_db.with_user_id) + interaction["time_since"] = relative_timesince(interaction_db.date) + interaction["date"] = interaction_db.date + interaction["is_new"] = interaction_db.date > dashboard_date interactions.append(interaction) return interactions, has_next_page - + @classmethod def user_unread_count(cls, user_id): user_profile = Profile.objects.get(user=user_id) dashboard_date = user_profile.dashboard_date or user_profile.last_seen_on - + interactions_count = cls.objects.filter(user_id=user_id, date__gte=dashboard_date).count() - + return interactions_count - + @classmethod def new_follow(cls, follower_user_id, followee_user_id): params = { - 'user_id': followee_user_id, - 'with_user_id': follower_user_id, - 'category': 'follow', + "user_id": followee_user_id, + "with_user_id": follower_user_id, + "category": "follow", } try: cls.objects.get(**params) except cls.DoesNotExist: cls.objects.create(**params) except cls.MultipleObjectsReturned: - dupes = cls.objects.filter(**params).order_by('-date') + dupes = cls.objects.filter(**params).order_by("-date") logging.debug(" ---> ~FRDeleting dupe follow interactions. %s found." 
% dupes.count()) for dupe in dupes[1:]: dupe.delete() - + cls.publish_update_to_subscribers(followee_user_id) - + @classmethod - def new_comment_reply(cls, user_id, reply_user_id, reply_content, story_id, story_feed_id, story_title=None, original_message=None): + def new_comment_reply( + cls, + user_id, + reply_user_id, + reply_content, + story_id, + story_feed_id, + story_title=None, + original_message=None, + ): params = { - 'user_id': user_id, - 'with_user_id': reply_user_id, - 'category': 'comment_reply', - 'content': linkify(strip_tags(reply_content)), - 'feed_id': "social:%s" % user_id, - 'story_feed_id': story_feed_id, - 'title': story_title, - 'content_id': story_id, + "user_id": user_id, + "with_user_id": reply_user_id, + "category": "comment_reply", + "content": linkify(strip_tags(reply_content)), + "feed_id": "social:%s" % user_id, + "story_feed_id": story_feed_id, + "title": story_title, + "content_id": story_id, } if original_message: - params['content'] = original_message + params["content"] = original_message original = cls.objects.filter(**params).limit(1) if original: original = original[0] @@ -2982,55 +3255,69 @@ class MInteraction(mongo.Document): if not original_message: cls.objects.create(**params) - + cls.publish_update_to_subscribers(user_id) - + @classmethod def remove_comment_reply(cls, user_id, reply_user_id, reply_content, story_id, story_feed_id): params = { - 'user_id': user_id, - 'with_user_id': reply_user_id, - 'category': 'comment_reply', - 'content': linkify(strip_tags(reply_content)), - 'feed_id': "social:%s" % user_id, - 'story_feed_id': story_feed_id, - 'content_id': story_id, + "user_id": user_id, + "with_user_id": reply_user_id, + "category": "comment_reply", + "content": linkify(strip_tags(reply_content)), + "feed_id": "social:%s" % user_id, + "story_feed_id": story_feed_id, + "content_id": story_id, } original = cls.objects.filter(**params) original.delete() - + cls.publish_update_to_subscribers(user_id) - + @classmethod - def new_comment_like(cls, liking_user_id, comment_user_id, story_id, story_feed_id, story_title, comments): - params = dict(user_id=comment_user_id, - with_user_id=liking_user_id, - category="comment_like", - feed_id="social:%s" % comment_user_id, - story_feed_id=story_feed_id, - content_id=story_id) + def new_comment_like( + cls, liking_user_id, comment_user_id, story_id, story_feed_id, story_title, comments + ): + params = dict( + user_id=comment_user_id, + with_user_id=liking_user_id, + category="comment_like", + feed_id="social:%s" % comment_user_id, + story_feed_id=story_feed_id, + content_id=story_id, + ) try: cls.objects.get(**params) except cls.DoesNotExist: params.update(dict(title=story_title, content=comments)) cls.objects.create(**params) - + cls.publish_update_to_subscribers(comment_user_id) @classmethod - def new_reply_reply(cls, user_id, comment_user_id, reply_user_id, reply_content, story_id, story_feed_id, story_title=None, original_message=None): + def new_reply_reply( + cls, + user_id, + comment_user_id, + reply_user_id, + reply_content, + story_id, + story_feed_id, + story_title=None, + original_message=None, + ): params = { - 'user_id': user_id, - 'with_user_id': reply_user_id, - 'category': 'reply_reply', - 'content': linkify(strip_tags(reply_content)), - 'feed_id': "social:%s" % comment_user_id, - 'story_feed_id': story_feed_id, - 'title': story_title, - 'content_id': story_id, + "user_id": user_id, + "with_user_id": reply_user_id, + "category": "reply_reply", + "content": linkify(strip_tags(reply_content)), 
+ "feed_id": "social:%s" % comment_user_id, + "story_feed_id": story_feed_id, + "title": story_title, + "content_id": story_id, } if original_message: - params['content'] = original_message + params["content"] = original_message original = cls.objects.filter(**params).limit(1) if original: original = original[0] @@ -3041,39 +3328,43 @@ class MInteraction(mongo.Document): if not original_message: cls.objects.create(**params) - + cls.publish_update_to_subscribers(user_id) - + @classmethod - def remove_reply_reply(cls, user_id, comment_user_id, reply_user_id, reply_content, story_id, story_feed_id): + def remove_reply_reply( + cls, user_id, comment_user_id, reply_user_id, reply_content, story_id, story_feed_id + ): params = { - 'user_id': user_id, - 'with_user_id': reply_user_id, - 'category': 'reply_reply', - 'content': linkify(strip_tags(reply_content)), - 'feed_id': "social:%s" % comment_user_id, - 'story_feed_id': story_feed_id, - 'content_id': story_id, + "user_id": user_id, + "with_user_id": reply_user_id, + "category": "reply_reply", + "content": linkify(strip_tags(reply_content)), + "feed_id": "social:%s" % comment_user_id, + "story_feed_id": story_feed_id, + "content_id": story_id, } original = cls.objects.filter(**params) original.delete() - + cls.publish_update_to_subscribers(user_id) - + @classmethod - def new_reshared_story(cls, user_id, reshare_user_id, comments, story_title, story_feed_id, story_id, original_comments=None): + def new_reshared_story( + cls, user_id, reshare_user_id, comments, story_title, story_feed_id, story_id, original_comments=None + ): params = { - 'user_id': user_id, - 'with_user_id': reshare_user_id, - 'category': 'story_reshare', - 'content': comments, - 'title': story_title, - 'feed_id': "social:%s" % reshare_user_id, - 'story_feed_id': story_feed_id, - 'content_id': story_id, + "user_id": user_id, + "with_user_id": reshare_user_id, + "category": "story_reshare", + "content": comments, + "title": story_title, + "feed_id": "social:%s" % reshare_user_id, + "story_feed_id": story_feed_id, + "content_id": story_id, } if original_comments: - params['content'] = original_comments + params["content"] = original_comments original = cls.objects.filter(**params).limit(1) if original: interaction = original[0] @@ -3084,49 +3375,50 @@ class MInteraction(mongo.Document): if not original_comments: cls.objects.create(**params) - + cls.publish_update_to_subscribers(user_id) + class MActivity(mongo.Document): - user_id = mongo.IntField() - date = mongo.DateTimeField(default=datetime.datetime.now) - category = mongo.StringField() - title = mongo.StringField() - content = mongo.StringField() + user_id = mongo.IntField() + date = mongo.DateTimeField(default=datetime.datetime.now) + category = mongo.StringField() + title = mongo.StringField() + content = mongo.StringField() with_user_id = mongo.IntField() - feed_id = mongo.DynamicField() - story_feed_id= mongo.IntField() - content_id = mongo.StringField() - + feed_id = mongo.DynamicField() + story_feed_id = mongo.IntField() + content_id = mongo.StringField() + meta = { - 'collection': 'activities', - 'indexes': [('user_id', '-date'), 'category', 'with_user_id'], - 'allow_inheritance': False, - 'ordering': ['-date'], + "collection": "activities", + "indexes": [("user_id", "-date"), "category", "with_user_id"], + "allow_inheritance": False, + "ordering": ["-date"], } - + def __str__(self): user = User.objects.get(pk=self.user_id) return "<%s> %s - %s" % (user.username, self.category, self.content and self.content[:20]) - + 
def canonical(self): story_hash = None if self.story_feed_id: story_hash = MStory.ensure_story_hash(self.content_id, story_feed_id=self.story_feed_id) return { - 'date': self.date, - 'category': self.category, - 'title': self.title, - 'content': self.content, - 'user_id': self.user_id, - 'with_user_id': self.with_user_id or self.user_id, - 'feed_id': self.feed_id or self.story_feed_id, - 'story_feed_id': self.story_feed_id or self.feed_id, - 'content_id': self.content_id, - 'story_hash': story_hash, + "date": self.date, + "category": self.category, + "title": self.title, + "content": self.content, + "user_id": self.user_id, + "with_user_id": self.with_user_id or self.user_id, + "feed_id": self.feed_id or self.story_feed_id, + "story_feed_id": self.story_feed_id or self.feed_id, + "content_id": self.content_id, + "story_hash": story_hash, } - + @classmethod def trim(cls, user_id, limit=100): user = User.objects.get(pk=user_id) @@ -3137,53 +3429,54 @@ class MActivity(mongo.Document): activity_count = cls.objects.filter(user_id=user_id).count() logging.user(user, "~FBNot trimming activities, only ~SB%s~SN activities found" % activity_count) return - + logging.user(user, "~FBTrimming ~SB%s~SN activities..." % activity_count) for activity in activities: activity.delete() logging.user(user, "~FBDone trimming ~SB%s~SN activities" % activity_count) - + @classmethod def user(cls, user_id, page=1, limit=4, public=False, categories=None): user_profile = Profile.objects.get(user=user_id) dashboard_date = user_profile.dashboard_date or user_profile.last_seen_on page = max(1, page) limit = int(limit) - offset = (page-1) * limit - + offset = (page - 1) * limit + activities_db = cls.objects.filter(user_id=user_id) if categories: activities_db = activities_db.filter(category__in=categories) if public: - activities_db = activities_db.filter(category__nin=['star', 'feedsub', 'opml_import', 'opml_export']) - activities_db = activities_db[offset:offset+limit+1] - + activities_db = activities_db.filter( + category__nin=["star", "feedsub", "opml_import", "opml_export"] + ) + activities_db = activities_db[offset : offset + limit + 1] + has_next_page = len(activities_db) > limit - activities_db = activities_db[offset:offset+limit] + activities_db = activities_db[offset : offset + limit] with_user_ids = [a.with_user_id for a in activities_db if a.with_user_id] - social_profiles = dict((p.user_id, p) for p in MSocialProfile.objects.filter(user_id__in=with_user_ids)) + social_profiles = dict( + (p.user_id, p) for p in MSocialProfile.objects.filter(user_id__in=with_user_ids) + ) activities = [] for activity_db in activities_db: activity = activity_db.canonical() - activity['date'] = activity_db.date - activity['time_since'] = relative_timesince(activity_db.date) + activity["date"] = activity_db.date + activity["time_since"] = relative_timesince(activity_db.date) social_profile = social_profiles.get(activity_db.with_user_id) if social_profile: - activity['photo_url'] = social_profile.profile_photo_url - activity['is_new'] = activity_db.date > dashboard_date - activity['with_user'] = social_profiles.get(activity_db.with_user_id or activity_db.user_id) + activity["photo_url"] = social_profile.profile_photo_url + activity["is_new"] = activity_db.date > dashboard_date + activity["with_user"] = social_profiles.get(activity_db.with_user_id or activity_db.user_id) activities.append(activity) - + return activities, has_next_page - + @classmethod def new_starred_story(cls, user_id, story_title, story_feed_id, story_id): - 
params = dict(user_id=user_id, - category='star', - story_feed_id=story_feed_id, - content_id=story_id) + params = dict(user_id=user_id, category="star", story_feed_id=story_feed_id, content_id=story_id) try: cls.objects.get(**params) except cls.DoesNotExist: @@ -3193,19 +3486,19 @@ class MActivity(mongo.Document): @classmethod def remove_starred_story(cls, user_id, story_feed_id, story_id): params = { - 'user_id': user_id, - 'category': 'star', - 'story_feed_id': story_feed_id, - 'content_id': story_id, + "user_id": user_id, + "category": "star", + "story_feed_id": story_feed_id, + "content_id": story_id, } original = cls.objects.filter(**params) original.delete() - + @classmethod def new_feed_subscription(cls, user_id, feed_id, feed_title): params = { "user_id": user_id, - "category": 'feedsub', + "category": "feedsub", "feed_id": feed_id, } try: @@ -3214,7 +3507,7 @@ class MActivity(mongo.Document): params.update(dict(content=feed_title)) cls.objects.create(**params) except cls.MultipleObjectsReturned: - dupes = cls.objects.filter(**params).order_by('-date') + dupes = cls.objects.filter(**params).order_by("-date") logging.debug(" ---> ~FRDeleting dupe feed subscription activities. %s found." % dupes.count()) for dupe in dupes[1:]: dupe.delete() @@ -3223,11 +3516,11 @@ class MActivity(mongo.Document): def new_opml_import(cls, user_id, count): if count <= 0: return - + params = { "user_id": user_id, - "category": 'opml_import', - 'content': f"You imported an OPML file with {count} sites" + "category": "opml_import", + "content": f"You imported an OPML file with {count} sites", } cls.objects.create(**params) @@ -3235,44 +3528,53 @@ class MActivity(mongo.Document): def new_opml_export(cls, user_id, count, automated=False): params = { "user_id": user_id, - "category": 'opml_export', - 'content': f"You exported an OPML backup of {count} subscriptions" + "category": "opml_export", + "content": f"You exported an OPML backup of {count} subscriptions", } if automated: - params['content'] = f"An automatic OPML backup of {count} subscriptions was emailed to you" + params["content"] = f"An automatic OPML backup of {count} subscriptions was emailed to you" cls.objects.create(**params) - + @classmethod def new_follow(cls, follower_user_id, followee_user_id): params = { - 'user_id': follower_user_id, - 'with_user_id': followee_user_id, - 'category': 'follow', + "user_id": follower_user_id, + "with_user_id": followee_user_id, + "category": "follow", } try: cls.objects.get(**params) except cls.DoesNotExist: cls.objects.create(**params) except cls.MultipleObjectsReturned: - dupes = cls.objects.filter(**params).order_by('-date') + dupes = cls.objects.filter(**params).order_by("-date") logging.debug(" ---> ~FRDeleting dupe follow activities. %s found." 
% dupes.count()) for dupe in dupes[1:]: dupe.delete() - + @classmethod - def new_comment_reply(cls, user_id, comment_user_id, reply_content, story_id, story_feed_id, story_title=None, original_message=None): + def new_comment_reply( + cls, + user_id, + comment_user_id, + reply_content, + story_id, + story_feed_id, + story_title=None, + original_message=None, + ): params = { - 'user_id': user_id, - 'with_user_id': comment_user_id, - 'category': 'comment_reply', - 'content': linkify(strip_tags(reply_content)), - 'feed_id': "social:%s" % comment_user_id, - 'story_feed_id': story_feed_id, - 'title': story_title, - 'content_id': story_id, + "user_id": user_id, + "with_user_id": comment_user_id, + "category": "comment_reply", + "content": linkify(strip_tags(reply_content)), + "feed_id": "social:%s" % comment_user_id, + "story_feed_id": story_feed_id, + "title": story_title, + "content_id": story_id, } if original_message: - params['content'] = original_message + params["content"] = original_message original = cls.objects.filter(**params).limit(1) if original: original = original[0] @@ -3283,47 +3585,51 @@ class MActivity(mongo.Document): if not original_message: cls.objects.create(**params) - + @classmethod def remove_comment_reply(cls, user_id, comment_user_id, reply_content, story_id, story_feed_id): params = { - 'user_id': user_id, - 'with_user_id': comment_user_id, - 'category': 'comment_reply', - 'content': linkify(strip_tags(reply_content)), - 'feed_id': "social:%s" % comment_user_id, - 'story_feed_id': story_feed_id, - 'content_id': story_id, + "user_id": user_id, + "with_user_id": comment_user_id, + "category": "comment_reply", + "content": linkify(strip_tags(reply_content)), + "feed_id": "social:%s" % comment_user_id, + "story_feed_id": story_feed_id, + "content_id": story_id, } original = cls.objects.filter(**params) original.delete() - + @classmethod - def new_comment_like(cls, liking_user_id, comment_user_id, story_id, story_feed_id, story_title, comments): - params = dict(user_id=liking_user_id, - with_user_id=comment_user_id, - category="comment_like", - feed_id="social:%s" % comment_user_id, - story_feed_id=story_feed_id, - content_id=story_id) + def new_comment_like( + cls, liking_user_id, comment_user_id, story_id, story_feed_id, story_title, comments + ): + params = dict( + user_id=liking_user_id, + with_user_id=comment_user_id, + category="comment_like", + feed_id="social:%s" % comment_user_id, + story_feed_id=story_feed_id, + content_id=story_id, + ) try: cls.objects.get(**params) except cls.DoesNotExist: params.update(dict(title=story_title, content=comments)) cls.objects.create(**params) - + @classmethod - def new_shared_story(cls, user_id, source_user_id, story_title, comments, story_feed_id, story_id, share_date=None): + def new_shared_story( + cls, user_id, source_user_id, story_title, comments, story_feed_id, story_id, share_date=None + ): data = { "user_id": user_id, - "category": 'sharedstory', + "category": "sharedstory", "feed_id": "social:%s" % user_id, "story_feed_id": story_feed_id, "content_id": story_id, } - extradata = {'with_user_id': source_user_id, - 'title': story_title, - 'content': comments} + extradata = {"with_user_id": source_user_id, "title": story_title, "content": comments} try: a = cls.objects.get(**data) @@ -3351,20 +3657,22 @@ class MActivity(mongo.Document): @classmethod def remove_shared_story(cls, user_id, story_feed_id, story_id): - params = dict(user_id=user_id, - category='sharedstory', - feed_id="social:%s" % user_id, - 
story_feed_id=story_feed_id, - content_id=story_id) + params = dict( + user_id=user_id, + category="sharedstory", + feed_id="social:%s" % user_id, + story_feed_id=story_feed_id, + content_id=story_id, + ) try: a = cls.objects.get(**params) except cls.DoesNotExist: return except cls.MultipleObjectsReturned: a = cls.objects.filter(**params) - + a.delete() - + @classmethod def new_signup(cls, user_id): params = dict(user_id=user_id, with_user_id=user_id, category="signup") @@ -3375,17 +3683,17 @@ class MActivity(mongo.Document): class MFollowRequest(mongo.Document): - follower_user_id = mongo.IntField(unique_with='followee_user_id') - followee_user_id = mongo.IntField() - date = mongo.DateTimeField(default=datetime.datetime.now) - + follower_user_id = mongo.IntField(unique_with="followee_user_id") + followee_user_id = mongo.IntField() + date = mongo.DateTimeField(default=datetime.datetime.now) + meta = { - 'collection': 'follow_request', - 'indexes': ['follower_user_id', 'followee_user_id'], - 'ordering': ['-date'], - 'allow_inheritance': False, + "collection": "follow_request", + "indexes": ["follower_user_id", "followee_user_id"], + "ordering": ["-date"], + "allow_inheritance": False, } - + @classmethod def add(cls, follower_user_id, followee_user_id): params = dict(follower_user_id=follower_user_id, followee_user_id=followee_user_id) @@ -3393,9 +3701,7 @@ class MFollowRequest(mongo.Document): cls.objects.get(**params) except cls.DoesNotExist: cls.objects.create(**params) - + @classmethod def remove(cls, follower_user_id, followee_user_id): - cls.objects.filter(follower_user_id=follower_user_id, - followee_user_id=followee_user_id).delete() - + cls.objects.filter(follower_user_id=follower_user_id, followee_user_id=followee_user_id).delete() diff --git a/apps/social/tasks.py b/apps/social/tasks.py index d16f5c67a..476450ad5 100644 --- a/apps/social/tasks.py +++ b/apps/social/tasks.py @@ -12,52 +12,61 @@ def PostToService(shared_story_id, service): shared_story.post_to_service(service) except MSharedStory.DoesNotExist: logging.debug(" ---> Shared story not found (%s). 
Can't post to: %s" % (shared_story_id, service)) - + + @app.task() def EmailNewFollower(follower_user_id, followee_user_id): user_profile = MSocialProfile.get_user(followee_user_id) user_profile.send_email_for_new_follower(follower_user_id) - + + @app.task() def EmailFollowRequest(follower_user_id, followee_user_id): user_profile = MSocialProfile.get_user(followee_user_id) user_profile.send_email_for_follow_request(follower_user_id) - + + @app.task() def EmailFirstShare(user_id): user = User.objects.get(pk=user_id) user.profile.send_first_share_to_blurblog_email() - + + @app.task() def EmailCommentReplies(shared_story_id, reply_id): shared_story = MSharedStory.objects.get(id=ObjectId(shared_story_id)) shared_story.send_emails_for_new_reply(ObjectId(reply_id)) - + + @app.task() def EmailStoryReshares(shared_story_id): shared_story = MSharedStory.objects.get(id=ObjectId(shared_story_id)) shared_story.send_email_for_reshare() - + + @app.task() def SyncTwitterFriends(user_id): social_services = MSocialServices.objects.get(user_id=user_id) social_services.sync_twitter_friends() + @app.task() def SyncFacebookFriends(user_id): social_services = MSocialServices.objects.get(user_id=user_id) social_services.sync_facebook_friends() - + + @app.task(name="share-popular-stories") def SharePopularStories(): logging.debug(" ---> Sharing popular stories...") MSharedStory.share_popular_stories(interactive=False) - -@app.task(name='clean-social-spam') + + +@app.task(name="clean-social-spam") def CleanSocialSpam(): logging.debug(" ---> Finding social spammers...") MSharedStory.count_potential_spammers(destroy=True) - + @app.task() def UpdateRecalcForSubscription(subscription_user_id, shared_story_id): @@ -68,12 +77,12 @@ def UpdateRecalcForSubscription(subscription_user_id, shared_story_id): except MSharedStory.DoesNotExist: return - logging.debug(" ---> ~FM~SNFlipping unread recalc for ~SB%s~SN subscriptions to ~SB%s's blurblog~SN" % ( - socialsubs.count(), - user.username - )) + logging.debug( + " ---> ~FM~SNFlipping unread recalc for ~SB%s~SN subscriptions to ~SB%s's blurblog~SN" + % (socialsubs.count(), user.username) + ) for socialsub in socialsubs: socialsub.needs_unread_recalc = True socialsub.save() - + shared_story.publish_update_to_subscribers() diff --git a/apps/social/templatetags/social_tags.py b/apps/social/templatetags/social_tags.py index d07711381..169995160 100644 --- a/apps/social/templatetags/social_tags.py +++ b/apps/social/templatetags/social_tags.py @@ -4,63 +4,69 @@ from apps.social.models import MSocialProfile register = template.Library() -@register.inclusion_tag('social/social_story.xhtml', takes_context=True) + +@register.inclusion_tag("social/social_story.xhtml", takes_context=True) def render_social_story(context, story, has_next_story=False): - user = context['user'] - user_social_profile = context['user_social_profile'] - + user = context["user"] + user_social_profile = context["user_social_profile"] + return { - 'story': story, - 'has_next_story': has_next_story, - 'user': user, - 'user_social_profile': user_social_profile, + "story": story, + "has_next_story": has_next_story, + "user": user, + "user_social_profile": user_social_profile, } -@register.inclusion_tag('social/story_share.xhtml', takes_context=True) + +@register.inclusion_tag("social/story_share.xhtml", takes_context=True) def render_story_share(context, story): - user = context['user'] + user = context["user"] return { - 'user': user, - 'story': story, + "user": user, + "story": story, } - 
-@register.inclusion_tag('social/story_comments.xhtml', takes_context=True) + + +@register.inclusion_tag("social/story_comments.xhtml", takes_context=True) def render_story_comments(context, story): - user = context['user'] - user_social_profile = context.get('user_social_profile') + user = context["user"] + user_social_profile = context.get("user_social_profile") MEDIA_URL = settings.MEDIA_URL if not user_social_profile and user.is_authenticated: user_social_profile = MSocialProfile.objects.get(user_id=user.pk) - + return { - 'user': user, - 'user_social_profile': user_social_profile, - 'story': story, - 'MEDIA_URL': MEDIA_URL, + "user": user, + "user_social_profile": user_social_profile, + "story": story, + "MEDIA_URL": MEDIA_URL, } -@register.inclusion_tag('social/story_comment.xhtml', takes_context=True) + +@register.inclusion_tag("social/story_comment.xhtml", takes_context=True) def render_story_comment(context, story, comment): - user = context['user'] + user = context["user"] MEDIA_URL = settings.MEDIA_URL - + return { - 'user': user, - 'story': story, - 'comment': comment, - 'MEDIA_URL': MEDIA_URL, + "user": user, + "story": story, + "comment": comment, + "MEDIA_URL": MEDIA_URL, } -@register.inclusion_tag('mail/email_story_comment.xhtml') + +@register.inclusion_tag("mail/email_story_comment.xhtml") def render_email_comment(comment): return { - 'comment': comment, + "comment": comment, } - -@register.inclusion_tag('social/avatars.xhtml') + + +@register.inclusion_tag("social/avatars.xhtml") def render_avatars(avatars): if not isinstance(avatars, list): avatars = [avatars] return { - 'users': avatars, + "users": avatars, } diff --git a/apps/social/urls.py b/apps/social/urls.py index 2b386b217..1b5934d17 100644 --- a/apps/social/urls.py +++ b/apps/social/urls.py @@ -2,42 +2,68 @@ from django.conf.urls import url from apps.social import views urlpatterns = [ - url(r'^river_stories/?$', views.load_river_blurblog, name='social-river-blurblog'), - url(r'^share_story/?$', views.mark_story_as_shared, name='mark-story-as-shared'), - url(r'^unshare_story/?$', views.mark_story_as_unshared, name='mark-story-as-unshared'), - url(r'^load_user_friends/?$', views.load_user_friends, name='load-user-friends'), - url(r'^load_follow_requests/?$', views.load_follow_requests, name='load-follow-requests'), - url(r'^profile/?$', views.profile, name='profile'), - url(r'^load_user_profile/?$', views.load_user_profile, name='load-user-profile'), - url(r'^save_user_profile/?$', views.save_user_profile, name='save-user-profile'), - url(r'^upload_avatar/?', views.upload_avatar, name='upload-avatar'), - url(r'^save_blurblog_settings/?$', views.save_blurblog_settings, name='save-blurblog-settings'), - url(r'^interactions/?$', views.load_interactions, name='social-interactions'), - url(r'^activities/?$', views.load_activities, name='social-activities'), - url(r'^follow/?$', views.follow, name='social-follow'), - url(r'^unfollow/?$', views.unfollow, name='social-unfollow'), - url(r'^approve_follower/?$', views.approve_follower, name='social-approve-follower'), - url(r'^ignore_follower/?$', views.ignore_follower, name='social-ignore-follower'), - url(r'^mute_user/?$', views.mute_user, name='social-mute-user'), - url(r'^unmute_user/?$', views.unmute_user, name='social-unmute-user'), - url(r'^feed_trainer', views.social_feed_trainer, name='social-feed-trainer'), - url(r'^public_comments/?$', views.story_public_comments, name='story-public-comments'), - url(r'^save_comment_reply/?$', views.save_comment_reply, 
name='social-save-comment-reply'),
-    url(r'^remove_comment_reply/?$', views.remove_comment_reply, name='social-remove-comment-reply'),
-    url(r'^find_friends/?$', views.find_friends, name='social-find-friends'),
-    url(r'^like_comment/?$', views.like_comment, name='social-like-comment'),
-    url(r'^remove_like_comment/?$', views.remove_like_comment, name='social-remove-like-comment'),
+    url(r"^river_stories/?$", views.load_river_blurblog, name="social-river-blurblog"),
+    url(r"^share_story/?$", views.mark_story_as_shared, name="mark-story-as-shared"),
+    url(r"^unshare_story/?$", views.mark_story_as_unshared, name="mark-story-as-unshared"),
+    url(r"^load_user_friends/?$", views.load_user_friends, name="load-user-friends"),
+    url(r"^load_follow_requests/?$", views.load_follow_requests, name="load-follow-requests"),
+    url(r"^profile/?$", views.profile, name="profile"),
+    url(r"^load_user_profile/?$", views.load_user_profile, name="load-user-profile"),
+    url(r"^save_user_profile/?$", views.save_user_profile, name="save-user-profile"),
+    url(r"^upload_avatar/?", views.upload_avatar, name="upload-avatar"),
+    url(r"^save_blurblog_settings/?$", views.save_blurblog_settings, name="save-blurblog-settings"),
+    url(r"^interactions/?$", views.load_interactions, name="social-interactions"),
+    url(r"^activities/?$", views.load_activities, name="social-activities"),
+    url(r"^follow/?$", views.follow, name="social-follow"),
+    url(r"^unfollow/?$", views.unfollow, name="social-unfollow"),
+    url(r"^approve_follower/?$", views.approve_follower, name="social-approve-follower"),
+    url(r"^ignore_follower/?$", views.ignore_follower, name="social-ignore-follower"),
+    url(r"^mute_user/?$", views.mute_user, name="social-mute-user"),
+    url(r"^unmute_user/?$", views.unmute_user, name="social-unmute-user"),
+    url(r"^feed_trainer", views.social_feed_trainer, name="social-feed-trainer"),
+    url(r"^public_comments/?$", views.story_public_comments, name="story-public-comments"),
+    url(r"^save_comment_reply/?$", views.save_comment_reply, name="social-save-comment-reply"),
+    url(r"^remove_comment_reply/?$", views.remove_comment_reply, name="social-remove-comment-reply"),
+    url(r"^find_friends/?$", views.find_friends, name="social-find-friends"),
+    url(r"^like_comment/?$", views.like_comment, name="social-like-comment"),
+    url(r"^remove_like_comment/?$", views.remove_like_comment, name="social-remove-like-comment"),
     # url(r'^like_reply/?$', views.like_reply, name='social-like-reply'),
     # url(r'^remove_like_reply/?$', views.remove_like_reply, name='social-remove-like-reply'),
-    url(r'^comment/(?P<comment_id>\w+)/reply/(?P<reply_id>\w+)/?$', views.comment_reply, name='social-comment-reply'),
-    url(r'^comment/(?P<comment_id>\w+)/?$', views.comment, name='social-comment'),
-    url(r'^rss/(?P<user_id>\d+)/?$', views.shared_stories_rss_feed, name='shared-stories-rss-feed'),
-    url(r'^rss/(?P<user_id>\d+)/(?P<username>[-\w]+)?$', views.shared_stories_rss_feed, name='shared-stories-rss-feed'),
-    url(r'^stories/(?P<user_id>\w+)/(?P<username>[-\w]+)?/?$', views.load_social_stories, name='load-social-stories'),
-    url(r'^page/(?P<user_id>\w+)/(?P<username>[-\w]+)?/?$', views.load_social_page, name='load-social-page'),
-    url(r'^settings/(?P<user_id>\w+)/(?P<username>[-\w]+)?/?$', views.load_social_settings, name='load-social-settings'),
-    url(r'^statistics/(?P<user_id>\w+)/(?P<username>[-\w]+)/?$', views.load_social_statistics, name='load-social-statistics'),
-    url(r'^statistics/(?P<user_id>\w+)/?$', views.load_social_statistics, name='load-social-statistics'),
-    url(r'^mute_story/(?P<secret_token>\w+)/(?P<shared_story_id>\w+)?$', views.mute_story, name='social-mute-story'),
-    url(r'^(?P<username>[-\w]+)/?$',
views.shared_stories_public, name='shared-stories-public'),
+    url(
+        r"^comment/(?P<comment_id>\w+)/reply/(?P<reply_id>\w+)/?$",
+        views.comment_reply,
+        name="social-comment-reply",
+    ),
+    url(r"^comment/(?P<comment_id>\w+)/?$", views.comment, name="social-comment"),
+    url(r"^rss/(?P<user_id>\d+)/?$", views.shared_stories_rss_feed, name="shared-stories-rss-feed"),
+    url(
+        r"^rss/(?P<user_id>\d+)/(?P<username>[-\w]+)?$",
+        views.shared_stories_rss_feed,
+        name="shared-stories-rss-feed",
+    ),
+    url(
+        r"^stories/(?P<user_id>\w+)/(?P<username>[-\w]+)?/?$",
+        views.load_social_stories,
+        name="load-social-stories",
+    ),
+    url(r"^page/(?P<user_id>\w+)/(?P<username>[-\w]+)?/?$", views.load_social_page, name="load-social-page"),
+    url(
+        r"^settings/(?P<user_id>\w+)/(?P<username>[-\w]+)?/?$",
+        views.load_social_settings,
+        name="load-social-settings",
+    ),
+    url(
+        r"^statistics/(?P<user_id>\w+)/(?P<username>[-\w]+)/?$",
+        views.load_social_statistics,
+        name="load-social-statistics",
+    ),
+    url(
+        r"^statistics/(?P<user_id>\w+)/?$", views.load_social_statistics, name="load-social-statistics"
+    ),
+    url(
+        r"^mute_story/(?P<secret_token>\w+)/(?P<shared_story_id>\w+)?$",
+        views.mute_story,
+        name="social-mute-story",
+    ),
+    url(r"^(?P<username>[-\w]+)/?$", views.shared_stories_public, name="shared-stories-public"),
 ]
diff --git a/apps/social/views.py b/apps/social/views.py
index ab8797b35..513129020 100644
--- a/apps/social/views.py
+++ b/apps/social/views.py
@@ -13,12 +13,23 @@ from django.http import HttpResponse, HttpResponseRedirect, Http404, HttpRespons
 from django.conf import settings
 from django.utils import feedgenerator
 from apps.rss_feeds.models import MStory, Feed, MStarredStory
-from apps.social.models import MSharedStory, MSocialServices, MSocialProfile, MSocialSubscription, MCommentReply
+from apps.social.models import (
+    MSharedStory,
+    MSocialServices,
+    MSocialProfile,
+    MSocialSubscription,
+    MCommentReply,
+)
 from apps.social.models import MInteraction, MActivity, MFollowRequest
 from apps.social.tasks import PostToService, EmailCommentReplies, EmailStoryReshares
 from apps.social.tasks import UpdateRecalcForSubscription, EmailFirstShare
 from apps.analyzer.models import MClassifierTitle, MClassifierAuthor, MClassifierFeed, MClassifierTag
-from apps.analyzer.models import apply_classifier_titles, apply_classifier_feeds, apply_classifier_authors, apply_classifier_tags
+from apps.analyzer.models import (
+    apply_classifier_titles,
+    apply_classifier_feeds,
+    apply_classifier_authors,
+    apply_classifier_tags,
+)
 from apps.analyzer.models import get_classifiers_for_user, sort_classifiers_by_feed
 from apps.reader.models import UserSubscription
 from apps.profile.models import Profile
@@ -37,56 +48,67 @@ from vendor.timezones.utilities import localtime_for_timezone

 @json.json_view
 def load_social_stories(request, user_id, username=None):
-    user = get_user(request)
+    user = get_user(request)
     social_user_id = int(user_id)
-    social_user = get_object_or_404(User, pk=social_user_id)
-    offset = int(request.GET.get('offset', 0))
-    limit = int(request.GET.get('limit', 6))
-    page = int(request.GET.get('page', 1))
-    order = request.GET.get('order', 'newest')
-    read_filter = request.GET.get('read_filter', 'all')
-    query = request.GET.get('query', '').strip()
-    include_story_content = is_true(request.GET.get('include_story_content', True))
-    stories = []
-    message = None
-
-    if page: offset = limit * (int(page) - 1)
+    social_user = get_object_or_404(User, pk=social_user_id)
+    offset = int(request.GET.get("offset", 0))
+    limit = int(request.GET.get("limit", 6))
+    page = int(request.GET.get("page", 1))
+    order = request.GET.get("order", "newest")
+    read_filter =
request.GET.get("read_filter", "all") + query = request.GET.get("query", "").strip() + include_story_content = is_true(request.GET.get("include_story_content", True)) + stories = [] + message = None + + if page: + offset = limit * (int(page) - 1) now = localtime_for_timezone(datetime.datetime.now(), user.profile.timezone) - + social_profile = MSocialProfile.get_user(social_user.pk) try: socialsub = MSocialSubscription.objects.get(user_id=user.pk, subscription_user_id=social_user_id) except MSocialSubscription.DoesNotExist: socialsub = None - + if social_profile.private and not social_profile.is_followed_by_user(user.pk): - message = "%s has a private blurblog and you must be following them in order to read it." % social_profile.username + message = ( + "%s has a private blurblog and you must be following them in order to read it." + % social_profile.username + ) elif query: if user.profile.is_premium: stories = social_profile.find_stories(query, offset=offset, limit=limit) else: stories = [] message = "You must be a premium subscriber to search." - elif socialsub and (read_filter == 'unread' or order == 'oldest'): + elif socialsub and (read_filter == "unread" or order == "oldest"): cutoff_date = max(socialsub.mark_read_date, user.profile.unread_cutoff) - story_hashes = socialsub.get_stories(order=order, read_filter=read_filter, offset=offset, limit=limit, cutoff_date=cutoff_date) - story_date_order = "%sshared_date" % ('' if order == 'oldest' else '-') + story_hashes = socialsub.get_stories( + order=order, read_filter=read_filter, offset=offset, limit=limit, cutoff_date=cutoff_date + ) + story_date_order = "%sshared_date" % ("" if order == "oldest" else "-") if story_hashes: - mstories = MSharedStory.objects(user_id=social_user.pk, - story_hash__in=story_hashes).order_by(story_date_order) - for story in mstories: story.extract_image_urls() + mstories = MSharedStory.objects(user_id=social_user.pk, story_hash__in=story_hashes).order_by( + story_date_order + ) + for story in mstories: + story.extract_image_urls() stories = Feed.format_stories(mstories) else: - mstories = MSharedStory.objects(user_id=social_user.pk).order_by('-shared_date')[offset:offset+limit] - for story in mstories: story.extract_image_urls() + mstories = MSharedStory.objects(user_id=social_user.pk).order_by("-shared_date")[ + offset : offset + limit + ] + for story in mstories: + story.extract_image_urls() stories = Feed.format_stories(mstories) - if not stories or False: # False is to force a recount even if 0 stories + if not stories or False: # False is to force a recount even if 0 stories return dict(stories=[], message=message) - + stories, user_profiles = MSharedStory.stories_with_comments_and_profiles(stories, user.pk, check_all=True) - story_feed_ids = list(set(s['story_feed_id'] for s in stories)) + story_feed_ids = list(set(s["story_feed_id"] for s in stories)) usersubs = UserSubscription.objects.filter(user__pk=user.pk, feed__pk__in=story_feed_ids) usersubs_map = dict((sub.feed_id, sub) for sub in usersubs) unsub_feed_ids = list(set(story_feed_ids).difference(set(usersubs_map.keys()))) @@ -95,115 +117,140 @@ def load_social_stories(request, user_id, username=None): date_delta = user.profile.unread_cutoff if socialsub and date_delta < socialsub.mark_read_date: date_delta = socialsub.mark_read_date - + # Get intelligence classifier for user - classifier_feeds = list(MClassifierFeed.objects(user_id=user.pk, social_user_id=social_user_id)) + classifier_feeds = list(MClassifierFeed.objects(user_id=user.pk, 
social_user_id=social_user_id)) classifier_authors = list(MClassifierAuthor.objects(user_id=user.pk, social_user_id=social_user_id)) - classifier_titles = list(MClassifierTitle.objects(user_id=user.pk, social_user_id=social_user_id)) - classifier_tags = list(MClassifierTag.objects(user_id=user.pk, social_user_id=social_user_id)) + classifier_titles = list(MClassifierTitle.objects(user_id=user.pk, social_user_id=social_user_id)) + classifier_tags = list(MClassifierTag.objects(user_id=user.pk, social_user_id=social_user_id)) # Merge with feed specific classifiers - classifier_feeds = classifier_feeds + list(MClassifierFeed.objects(user_id=user.pk, feed_id__in=story_feed_ids)) - classifier_authors = classifier_authors + list(MClassifierAuthor.objects(user_id=user.pk, feed_id__in=story_feed_ids)) - classifier_titles = classifier_titles + list(MClassifierTitle.objects(user_id=user.pk, feed_id__in=story_feed_ids)) - classifier_tags = classifier_tags + list(MClassifierTag.objects(user_id=user.pk, feed_id__in=story_feed_ids)) + classifier_feeds = classifier_feeds + list( + MClassifierFeed.objects(user_id=user.pk, feed_id__in=story_feed_ids) + ) + classifier_authors = classifier_authors + list( + MClassifierAuthor.objects(user_id=user.pk, feed_id__in=story_feed_ids) + ) + classifier_titles = classifier_titles + list( + MClassifierTitle.objects(user_id=user.pk, feed_id__in=story_feed_ids) + ) + classifier_tags = classifier_tags + list( + MClassifierTag.objects(user_id=user.pk, feed_id__in=story_feed_ids) + ) unread_story_hashes = [] - if (read_filter == 'all' or query) and socialsub: - unread_story_hashes = socialsub.get_stories(read_filter='unread', limit=500, cutoff_date=user.profile.unread_cutoff) - story_hashes = [story['story_hash'] for story in stories] + if (read_filter == "all" or query) and socialsub: + unread_story_hashes = socialsub.get_stories( + read_filter="unread", limit=500, cutoff_date=user.profile.unread_cutoff + ) + story_hashes = [story["story_hash"] for story in stories] + + starred_stories = MStarredStory.objects(user_id=user.pk, story_hash__in=story_hashes).only( + "story_hash", "starred_date", "user_tags" + ) + shared_stories = ( + MSharedStory.objects(user_id=user.pk, story_hash__in=story_hashes) + .hint([("story_hash", 1)]) + .only("story_hash", "shared_date", "comments") + ) + starred_stories = dict( + [ + (story.story_hash, dict(starred_date=story.starred_date, user_tags=story.user_tags)) + for story in starred_stories + ] + ) + shared_stories = dict( + [ + (story.story_hash, dict(shared_date=story.shared_date, comments=story.comments)) + for story in shared_stories + ] + ) - starred_stories = MStarredStory.objects(user_id=user.pk, - story_hash__in=story_hashes)\ - .only('story_hash', 'starred_date', 'user_tags') - shared_stories = MSharedStory.objects(user_id=user.pk, - story_hash__in=story_hashes)\ - .hint([('story_hash', 1)])\ - .only('story_hash', 'shared_date', 'comments') - starred_stories = dict([(story.story_hash, dict(starred_date=story.starred_date, - user_tags=story.user_tags)) - for story in starred_stories]) - shared_stories = dict([(story.story_hash, dict(shared_date=story.shared_date, - comments=story.comments)) - for story in shared_stories]) - nowtz = localtime_for_timezone(now, user.profile.timezone) for story in stories: if not include_story_content: - del story['story_content'] - story['social_user_id'] = social_user_id + del story["story_content"] + story["social_user_id"] = social_user_id # story_date = localtime_for_timezone(story['story_date'], 
user.profile.timezone) - shared_date = localtime_for_timezone(story['shared_date'], user.profile.timezone) - story['short_parsed_date'] = format_story_link_date__short(shared_date, nowtz) - story['long_parsed_date'] = format_story_link_date__long(shared_date, nowtz) - - story['read_status'] = 1 - if story['story_date'] < user.profile.unread_cutoff: - story['read_status'] = 1 - elif (read_filter == 'all' or query) and socialsub: - story['read_status'] = 1 if story['story_hash'] not in unread_story_hashes else 0 - elif read_filter == 'unread' and socialsub: - story['read_status'] = 0 + shared_date = localtime_for_timezone(story["shared_date"], user.profile.timezone) + story["short_parsed_date"] = format_story_link_date__short(shared_date, nowtz) + story["long_parsed_date"] = format_story_link_date__long(shared_date, nowtz) - if story['story_hash'] in starred_stories: - story['starred'] = True - starred_date = localtime_for_timezone(starred_stories[story['story_hash']]['starred_date'], - user.profile.timezone) - story['starred_date'] = format_story_link_date__long(starred_date, now) - story['user_tags'] = starred_stories[story['story_hash']]['user_tags'] - if story['story_hash'] in shared_stories: - story['shared'] = True - story['shared_comments'] = strip_tags(shared_stories[story['story_hash']]['comments']) + story["read_status"] = 1 + if story["story_date"] < user.profile.unread_cutoff: + story["read_status"] = 1 + elif (read_filter == "all" or query) and socialsub: + story["read_status"] = 1 if story["story_hash"] not in unread_story_hashes else 0 + elif read_filter == "unread" and socialsub: + story["read_status"] = 0 - story['intelligence'] = { - 'feed': apply_classifier_feeds(classifier_feeds, story['story_feed_id'], - social_user_ids=social_user_id), - 'author': apply_classifier_authors(classifier_authors, story), - 'tags': apply_classifier_tags(classifier_tags, story), - 'title': apply_classifier_titles(classifier_titles, story), + if story["story_hash"] in starred_stories: + story["starred"] = True + starred_date = localtime_for_timezone( + starred_stories[story["story_hash"]]["starred_date"], user.profile.timezone + ) + story["starred_date"] = format_story_link_date__long(starred_date, now) + story["user_tags"] = starred_stories[story["story_hash"]]["user_tags"] + if story["story_hash"] in shared_stories: + story["shared"] = True + story["shared_comments"] = strip_tags(shared_stories[story["story_hash"]]["comments"]) + + story["intelligence"] = { + "feed": apply_classifier_feeds( + classifier_feeds, story["story_feed_id"], social_user_ids=social_user_id + ), + "author": apply_classifier_authors(classifier_authors, story), + "tags": apply_classifier_tags(classifier_tags, story), + "title": apply_classifier_titles(classifier_titles, story), } - - - classifiers = sort_classifiers_by_feed(user=user, feed_ids=story_feed_ids, - classifier_feeds=classifier_feeds, - classifier_authors=classifier_authors, - classifier_titles=classifier_titles, - classifier_tags=classifier_tags) + + classifiers = sort_classifiers_by_feed( + user=user, + feed_ids=story_feed_ids, + classifier_feeds=classifier_feeds, + classifier_authors=classifier_authors, + classifier_titles=classifier_titles, + classifier_tags=classifier_tags, + ) if socialsub: socialsub.feed_opens += 1 socialsub.needs_unread_recalc = True socialsub.save() - + search_log = "~SN~FG(~SB%s~SN)" % query if query else "" - logging.user(request, "~FYLoading ~FMshared stories~FY: ~SB%s%s %s" % ( - social_profile.title[:22], ('~SN/p%s' % page) if 
page > 1 else '', search_log)) + logging.user( + request, + "~FYLoading ~FMshared stories~FY: ~SB%s%s %s" + % (social_profile.title[:22], ("~SN/p%s" % page) if page > 1 else "", search_log), + ) return { - "stories": stories, - "user_profiles": user_profiles, - "feeds": unsub_feeds, + "stories": stories, + "user_profiles": user_profiles, + "feeds": unsub_feeds, "classifiers": classifiers, } - + + @json.json_view def load_river_blurblog(request): - limit = int(request.GET.get('limit', 10)) - start = time.time() - user = get_user(request) - social_user_ids = request.GET.getlist('social_user_ids') or request.GET.getlist('social_user_ids[]') - social_user_ids = [int(uid) for uid in social_user_ids if uid] + limit = int(request.GET.get("limit", 10)) + start = time.time() + user = get_user(request) + social_user_ids = request.GET.getlist("social_user_ids") or request.GET.getlist("social_user_ids[]") + social_user_ids = [int(uid) for uid in social_user_ids if uid] original_user_ids = list(social_user_ids) - page = int(request.GET.get('page', 1)) - order = request.GET.get('order', 'newest') - read_filter = request.GET.get('read_filter', 'unread') - relative_user_id = request.GET.get('relative_user_id', None) - global_feed = request.GET.get('global_feed', None) - on_dashboard = is_true(request.GET.get('dashboard', False)) - now = localtime_for_timezone(datetime.datetime.now(), user.profile.timezone) + page = int(request.GET.get("page", 1)) + order = request.GET.get("order", "newest") + read_filter = request.GET.get("read_filter", "unread") + relative_user_id = request.GET.get("relative_user_id", None) + global_feed = request.GET.get("global_feed", None) + on_dashboard = is_true(request.GET.get("dashboard", False)) + now = localtime_for_timezone(datetime.datetime.now(), user.profile.timezone) if global_feed: - global_user = User.objects.get(username='popular') + global_user = User.objects.get(username="popular") relative_user_id = global_user.pk - + if not relative_user_id: relative_user_id = user.pk @@ -213,147 +260,172 @@ def load_river_blurblog(request): if not social_user_ids: social_user_ids = [s.subscription_user_id for s in socialsubs] - - offset = (page-1) * limit + + offset = (page - 1) * limit limit = page * limit - 1 - + story_hashes, story_dates, unread_feed_story_hashes = MSocialSubscription.feed_stories( - user.pk, social_user_ids, - offset=offset, limit=limit, - order=order, read_filter=read_filter, - relative_user_id=relative_user_id, - socialsubs=socialsubs, - cutoff_date=user.profile.unread_cutoff, - dashboard_global=on_dashboard and global_feed) + user.pk, + social_user_ids, + offset=offset, + limit=limit, + order=order, + read_filter=read_filter, + relative_user_id=relative_user_id, + socialsubs=socialsubs, + cutoff_date=user.profile.unread_cutoff, + dashboard_global=on_dashboard and global_feed, + ) mstories = MStory.find_by_story_hashes(story_hashes) story_hashes_to_dates = dict(list(zip(story_hashes, story_dates))) sorted_mstories = reversed(sorted(mstories, key=lambda x: int(story_hashes_to_dates[str(x.story_hash)]))) stories = Feed.format_stories(sorted_mstories) for s, story in enumerate(stories): - timestamp = story_hashes_to_dates[story['story_hash']] - story['story_date'] = datetime.datetime.fromtimestamp(timestamp) + timestamp = story_hashes_to_dates[story["story_hash"]] + story["story_date"] = datetime.datetime.fromtimestamp(timestamp) share_relative_user_id = relative_user_id if global_feed: share_relative_user_id = user.pk - - stories, user_profiles = 
MSharedStory.stories_with_comments_and_profiles(stories, - share_relative_user_id, - check_all=True) - story_feed_ids = list(set(s['story_feed_id'] for s in stories)) + stories, user_profiles = MSharedStory.stories_with_comments_and_profiles( + stories, share_relative_user_id, check_all=True + ) + + story_feed_ids = list(set(s["story_feed_id"] for s in stories)) usersubs = UserSubscription.objects.filter(user__pk=user.pk, feed__pk__in=story_feed_ids) usersubs_map = dict((sub.feed_id, sub) for sub in usersubs) unsub_feed_ids = list(set(story_feed_ids).difference(set(usersubs_map.keys()))) unsub_feeds = Feed.objects.filter(pk__in=unsub_feed_ids) unsub_feeds = [feed.canonical(include_favicon=False) for feed in unsub_feeds] - + if story_feed_ids: - story_hashes = [story['story_hash'] for story in stories] - starred_stories = MStarredStory.objects( - user_id=user.pk, - story_hash__in=story_hashes - ).only('story_hash', 'starred_date', 'user_tags') - starred_stories = dict([(story.story_hash, dict(starred_date=story.starred_date, - user_tags=story.user_tags)) - for story in starred_stories]) - shared_stories = MSharedStory.objects(user_id=user.pk, - story_hash__in=story_hashes)\ - .hint([('story_hash', 1)])\ - .only('story_hash', 'shared_date', 'comments') - shared_stories = dict([(story.story_hash, dict(shared_date=story.shared_date, - comments=story.comments)) - for story in shared_stories]) + story_hashes = [story["story_hash"] for story in stories] + starred_stories = MStarredStory.objects(user_id=user.pk, story_hash__in=story_hashes).only( + "story_hash", "starred_date", "user_tags" + ) + starred_stories = dict( + [ + (story.story_hash, dict(starred_date=story.starred_date, user_tags=story.user_tags)) + for story in starred_stories + ] + ) + shared_stories = ( + MSharedStory.objects(user_id=user.pk, story_hash__in=story_hashes) + .hint([("story_hash", 1)]) + .only("story_hash", "shared_date", "comments") + ) + shared_stories = dict( + [ + (story.story_hash, dict(shared_date=story.shared_date, comments=story.comments)) + for story in shared_stories + ] + ) else: starred_stories = {} shared_stories = {} - + # Intelligence classifiers for all feeds involved if story_feed_ids: - classifier_feeds = list(MClassifierFeed.objects(user_id=user.pk, - social_user_id__in=social_user_ids)) - classifier_feeds = classifier_feeds + list(MClassifierFeed.objects(user_id=user.pk, - feed_id__in=story_feed_ids)) - classifier_authors = list(MClassifierAuthor.objects(user_id=user.pk, - feed_id__in=story_feed_ids)) - classifier_titles = list(MClassifierTitle.objects(user_id=user.pk, - feed_id__in=story_feed_ids)) - classifier_tags = list(MClassifierTag.objects(user_id=user.pk, - feed_id__in=story_feed_ids)) + classifier_feeds = list(MClassifierFeed.objects(user_id=user.pk, social_user_id__in=social_user_ids)) + classifier_feeds = classifier_feeds + list( + MClassifierFeed.objects(user_id=user.pk, feed_id__in=story_feed_ids) + ) + classifier_authors = list(MClassifierAuthor.objects(user_id=user.pk, feed_id__in=story_feed_ids)) + classifier_titles = list(MClassifierTitle.objects(user_id=user.pk, feed_id__in=story_feed_ids)) + classifier_tags = list(MClassifierTag.objects(user_id=user.pk, feed_id__in=story_feed_ids)) else: classifier_feeds = [] classifier_authors = [] classifier_titles = [] classifier_tags = [] - + # Just need to format stories nowtz = localtime_for_timezone(now, user.profile.timezone) for story in stories: - story['read_status'] = 0 - if story['story_hash'] not in unread_feed_story_hashes: - 
story['read_status'] = 1 - story_date = localtime_for_timezone(story['story_date'], user.profile.timezone) - story['short_parsed_date'] = format_story_link_date__short(story_date, nowtz) - story['long_parsed_date'] = format_story_link_date__long(story_date, nowtz) - if story['story_hash'] in starred_stories: - story['starred'] = True - starred_date = localtime_for_timezone(starred_stories[story['story_hash']]['starred_date'], user.profile.timezone) - story['starred_date'] = format_story_link_date__long(starred_date, now) - story['user_tags'] = starred_stories[story['story_hash']]['user_tags'] - story['intelligence'] = { - 'feed': apply_classifier_feeds(classifier_feeds, story['story_feed_id'], - social_user_ids=story['friend_user_ids']), - 'author': apply_classifier_authors(classifier_authors, story), - 'tags': apply_classifier_tags(classifier_tags, story), - 'title': apply_classifier_titles(classifier_titles, story), + story["read_status"] = 0 + if story["story_hash"] not in unread_feed_story_hashes: + story["read_status"] = 1 + story_date = localtime_for_timezone(story["story_date"], user.profile.timezone) + story["short_parsed_date"] = format_story_link_date__short(story_date, nowtz) + story["long_parsed_date"] = format_story_link_date__long(story_date, nowtz) + if story["story_hash"] in starred_stories: + story["starred"] = True + starred_date = localtime_for_timezone( + starred_stories[story["story_hash"]]["starred_date"], user.profile.timezone + ) + story["starred_date"] = format_story_link_date__long(starred_date, now) + story["user_tags"] = starred_stories[story["story_hash"]]["user_tags"] + story["intelligence"] = { + "feed": apply_classifier_feeds( + classifier_feeds, story["story_feed_id"], social_user_ids=story["friend_user_ids"] + ), + "author": apply_classifier_authors(classifier_authors, story), + "tags": apply_classifier_tags(classifier_tags, story), + "title": apply_classifier_titles(classifier_titles, story), } - if story['story_hash'] in shared_stories: - story['shared'] = True - shared_date = localtime_for_timezone(shared_stories[story['story_hash']]['shared_date'], - user.profile.timezone) - story['shared_date'] = format_story_link_date__long(shared_date, now) - story['shared_comments'] = strip_tags(shared_stories[story['story_hash']]['comments']) - if (shared_stories[story['story_hash']]['shared_date'] < user.profile.unread_cutoff or - story['story_hash'] not in unread_feed_story_hashes): - story['read_status'] = 1 + if story["story_hash"] in shared_stories: + story["shared"] = True + shared_date = localtime_for_timezone( + shared_stories[story["story_hash"]]["shared_date"], user.profile.timezone + ) + story["shared_date"] = format_story_link_date__long(shared_date, now) + story["shared_comments"] = strip_tags(shared_stories[story["story_hash"]]["comments"]) + if ( + shared_stories[story["story_hash"]]["shared_date"] < user.profile.unread_cutoff + or story["story_hash"] not in unread_feed_story_hashes + ): + story["read_status"] = 1 - classifiers = sort_classifiers_by_feed(user=user, feed_ids=story_feed_ids, - classifier_feeds=classifier_feeds, - classifier_authors=classifier_authors, - classifier_titles=classifier_titles, - classifier_tags=classifier_tags) + classifiers = sort_classifiers_by_feed( + user=user, + feed_ids=story_feed_ids, + classifier_feeds=classifier_feeds, + classifier_authors=classifier_authors, + classifier_titles=classifier_titles, + classifier_tags=classifier_tags, + ) diff = time.time() - start timediff = round(float(diff), 2) - 
logging.user(request, "~FY%sLoading ~FCriver ~FMblurblogs~FC stories~FY: ~SBp%s~SN (%s/%s " - "stories, ~SN%s/%s/%s feeds)" % - ("~FCAuto-" if on_dashboard else "", - page, len(stories), len(mstories), len(story_feed_ids), - len(social_user_ids), len(original_user_ids))) - - + logging.user( + request, + "~FY%sLoading ~FCriver ~FMblurblogs~FC stories~FY: ~SBp%s~SN (%s/%s " + "stories, ~SN%s/%s/%s feeds)" + % ( + "~FCAuto-" if on_dashboard else "", + page, + len(stories), + len(mstories), + len(story_feed_ids), + len(social_user_ids), + len(original_user_ids), + ), + ) + return { - "stories": stories, - "user_profiles": user_profiles, - "feeds": unsub_feeds, + "stories": stories, + "user_profiles": user_profiles, + "feeds": unsub_feeds, "classifiers": classifiers, "elapsed_time": timediff, } - + + def load_social_page(request, user_id, username=None, **kwargs): user = get_user(request.user) social_user_id = int(user_id) social_user = get_object_or_404(User, pk=social_user_id) - offset = int(request.GET.get('offset', 0)) - limit = int(request.GET.get('limit', 6)) + offset = int(request.GET.get("offset", 0)) + limit = int(request.GET.get("limit", 6)) try: - page = int(request.GET.get('page', 1)) + page = int(request.GET.get("page", 1)) except ValueError: page = 1 - format = request.GET.get('format', None) + format = request.GET.get("format", None) has_next_page = False - feed_id = kwargs.get('feed_id') or request.GET.get('feed_id') - if page: - offset = limit * (page-1) + feed_id = kwargs.get("feed_id") or request.GET.get("feed_id") + if page: + offset = limit * (page - 1) social_services = None user_social_profile = None user_social_services = None @@ -364,9 +436,9 @@ def load_social_page(request, user_id, username=None, **kwargs): user_social_services = MSocialServices.get_user(user.pk) user_following_social_profile = user_social_profile.is_following_user(social_user_id) social_profile = MSocialProfile.get_user(social_user_id) - - if username and '.dev' in username: - username = username.replace('.dev', '') + + if username and ".dev" in username: + username = username.replace(".dev", "") current_tab = "blurblogs" global_feed = False if username == "popular": @@ -374,39 +446,46 @@ def load_social_page(request, user_id, username=None, **kwargs): elif username == "popular.global": current_tab = "global" global_feed = True - - if social_profile.private and (not user.is_authenticated or - not social_profile.is_followed_by_user(user.pk)): + + if social_profile.private and ( + not user.is_authenticated or not social_profile.is_followed_by_user(user.pk) + ): stories = [] elif global_feed: - socialsubs = MSocialSubscription.objects.filter(user_id=relative_user_id) + socialsubs = MSocialSubscription.objects.filter(user_id=relative_user_id) social_user_ids = [s.subscription_user_id for s in socialsubs] - story_ids, story_dates, _ = MSocialSubscription.feed_stories(user.pk, social_user_ids, - offset=offset, limit=limit+1, - # order=order, read_filter=read_filter, - relative_user_id=relative_user_id, - cache=request.user.is_authenticated, - cutoff_date=user.profile.unread_cutoff) + story_ids, story_dates, _ = MSocialSubscription.feed_stories( + user.pk, + social_user_ids, + offset=offset, + limit=limit + 1, + # order=order, read_filter=read_filter, + relative_user_id=relative_user_id, + cache=request.user.is_authenticated, + cutoff_date=user.profile.unread_cutoff, + ) if len(story_ids) > limit: has_next_page = True story_ids = story_ids[:-1] mstories = MStory.find_by_story_hashes(story_ids) 
story_id_to_dates = dict(list(zip(story_ids, story_dates))) + def sort_stories_by_id(a, b): return int(story_id_to_dates[str(b.story_hash)]) - int(story_id_to_dates[str(a.story_hash)]) + sorted_mstories = sorted(mstories, key=sort_stories_by_id) stories = Feed.format_stories(sorted_mstories) for story in stories: - story['shared_date'] = story['story_date'] + story["shared_date"] = story["story_date"] else: params = dict(user_id=social_user.pk) if feed_id: - params['story_feed_id'] = feed_id - if 'story_db_id' in params: - params.pop('story_db_id') - mstories = MSharedStory.objects(**params).order_by('-shared_date')[offset:offset+limit+1] + params["story_feed_id"] = feed_id + if "story_db_id" in params: + params.pop("story_db_id") + mstories = MSharedStory.objects(**params).order_by("-shared_date")[offset : offset + limit + 1] stories = Feed.format_stories(mstories, include_permalinks=True) - + if len(stories) > limit: has_next_page = True stories = stories[:-1] @@ -419,181 +498,210 @@ def load_social_page(request, user_id, username=None, **kwargs): "social_user": social_user, "social_profile": social_profile, "user_social_services": user_social_services, - 'user_social_profile' : json.encode(user_social_profile and user_social_profile.page()), - 'user_following_social_profile': user_following_social_profile, + "user_social_profile": json.encode(user_social_profile and user_social_profile.page()), + "user_following_social_profile": user_following_social_profile, } - template = 'social/social_page.xhtml' + template = "social/social_page.xhtml" return render(request, template, params) - story_feed_ids = list(set(s['story_feed_id'] for s in stories)) + story_feed_ids = list(set(s["story_feed_id"] for s in stories)) feeds = Feed.objects.filter(pk__in=story_feed_ids) feeds = dict((feed.pk, feed.canonical(include_favicon=False)) for feed in feeds) for story in stories: - if story['story_feed_id'] in feeds: + if story["story_feed_id"] in feeds: # Feed could have been deleted. 
- story['feed'] = feeds[story['story_feed_id']] - shared_date = localtime_for_timezone(story['shared_date'], user.profile.timezone) - story['shared_date'] = shared_date - - stories, profiles = MSharedStory.stories_with_comments_and_profiles(stories, social_user.pk, - check_all=True) + story["feed"] = feeds[story["story_feed_id"]] + shared_date = localtime_for_timezone(story["shared_date"], user.profile.timezone) + story["shared_date"] = shared_date + + stories, profiles = MSharedStory.stories_with_comments_and_profiles( + stories, social_user.pk, check_all=True + ) if user.is_authenticated: for story in stories: - if user.pk in story['share_user_ids']: - story['shared_by_user'] = True - shared_story = MSharedStory.objects.hint([('story_hash', 1)])\ - .get(user_id=user.pk, - story_feed_id=story['story_feed_id'], - story_hash=story['story_hash']) - story['user_comments'] = shared_story.comments + if user.pk in story["share_user_ids"]: + story["shared_by_user"] = True + shared_story = MSharedStory.objects.hint([("story_hash", 1)]).get( + user_id=user.pk, story_feed_id=story["story_feed_id"], story_hash=story["story_hash"] + ) + story["user_comments"] = shared_story.comments stories = MSharedStory.attach_users_to_stories(stories, profiles) - + active_story = None - path = request.META['PATH_INFO'] - if '/story/' in path and format != 'html': + path = request.META["PATH_INFO"] + if "/story/" in path and format != "html": story_id = re.sub(r"^/story/.*?/(.*?)/?", "", path) - if not story_id or '/story' in story_id: - story_id = path.replace('/story/', '') + if not story_id or "/story" in story_id: + story_id = path.replace("/story/", "") social_services = MSocialServices.get_user(social_user.pk) - active_story_db = MSharedStory.objects.filter(user_id=social_user.pk, - story_hash=story_id)\ - .hint([('story_hash', 1)])\ - .limit(1) + active_story_db = ( + MSharedStory.objects.filter(user_id=social_user.pk, story_hash=story_id) + .hint([("story_hash", 1)]) + .limit(1) + ) if active_story_db: active_story_db = active_story_db[0] if user_social_profile.bb_permalink_direct: return HttpResponseRedirect(active_story_db.story_permalink) active_story = Feed.format_story(active_story_db) if active_story_db.image_count: - active_story['image_url'] = active_story_db.image_sizes[0]['src'] - active_story['tags'] = ', '.join(active_story_db.story_tags) - active_story['blurblog_permalink'] = active_story_db.blurblog_permalink() - active_story['iso8601'] = active_story_db.story_date.isoformat() - if active_story['story_feed_id']: - feed = Feed.get_by_id(active_story['story_feed_id']) + active_story["image_url"] = active_story_db.image_sizes[0]["src"] + active_story["tags"] = ", ".join(active_story_db.story_tags) + active_story["blurblog_permalink"] = active_story_db.blurblog_permalink() + active_story["iso8601"] = active_story_db.story_date.isoformat() + if active_story["story_feed_id"]: + feed = Feed.get_by_id(active_story["story_feed_id"]) if feed: - active_story['feed'] = feed.canonical() - + active_story["feed"] = feed.canonical() + params = { - 'social_user' : social_user, - 'stories' : stories, - 'user_social_profile' : user_social_profile, - 'user_social_profile_page' : json.encode(user_social_profile and user_social_profile.page()), - 'user_social_services' : user_social_services, - 'user_social_services_page' : json.encode(user_social_services and user_social_services.canonical()), - 'user_following_social_profile': user_following_social_profile, - 'social_profile': social_profile, - 'feeds' : feeds, - 
'user_profile' : hasattr(user, 'profile') and user.profile, - 'has_next_page' : has_next_page, - 'holzer_truism' : random.choice(jennyholzer.TRUISMS), #if not has_next_page else None - 'facebook_app_id': settings.FACEBOOK_APP_ID, - 'active_story' : active_story, - 'current_tab' : current_tab, - 'social_services': social_services, + "social_user": social_user, + "stories": stories, + "user_social_profile": user_social_profile, + "user_social_profile_page": json.encode(user_social_profile and user_social_profile.page()), + "user_social_services": user_social_services, + "user_social_services_page": json.encode(user_social_services and user_social_services.canonical()), + "user_following_social_profile": user_following_social_profile, + "social_profile": social_profile, + "feeds": feeds, + "user_profile": hasattr(user, "profile") and user.profile, + "has_next_page": has_next_page, + "holzer_truism": random.choice(jennyholzer.TRUISMS), # if not has_next_page else None + "facebook_app_id": settings.FACEBOOK_APP_ID, + "active_story": active_story, + "current_tab": current_tab, + "social_services": social_services, } - logging.user(request, "~FYLoading ~FMsocial page~FY: ~SB%s%s ~FM%s/%s" % ( - social_profile.title[:22], ('~SN/p%s' % page) if page > 1 else '', - request.META.get('HTTP_USER_AGENT', "")[:40], - request.META.get('HTTP_X_FORWARDED_FOR', ""))) - if format == 'html': - template = 'social/social_stories.xhtml' + logging.user( + request, + "~FYLoading ~FMsocial page~FY: ~SB%s%s ~FM%s/%s" + % ( + social_profile.title[:22], + ("~SN/p%s" % page) if page > 1 else "", + request.META.get("HTTP_USER_AGENT", "")[:40], + request.META.get("HTTP_X_FORWARDED_FOR", ""), + ), + ) + if format == "html": + template = "social/social_stories.xhtml" else: - template = 'social/social_page.xhtml' - + template = "social/social_page.xhtml" + return render(request, template, params) -@required_params('story_id', feed_id=int, method="GET") + +@required_params("story_id", feed_id=int, method="GET") def story_public_comments(request): - format = request.GET.get('format', 'json') - relative_user_id = request.GET.get('user_id', None) - feed_id = int(request.GET.get('feed_id')) - story_id = request.GET.get('story_id') - + format = request.GET.get("format", "json") + relative_user_id = request.GET.get("user_id", None) + feed_id = int(request.GET.get("feed_id")) + story_id = request.GET.get("story_id") + if not relative_user_id: relative_user_id = get_user(request).pk - + story, _ = MStory.find_story(story_feed_id=feed_id, story_id=story_id) if not story: - return json.json_response(request, { - 'message': "Story not found.", - 'code': -1, - }) - + return json.json_response( + request, + { + "message": "Story not found.", + "code": -1, + }, + ) + story = Feed.format_story(story) - stories, profiles = MSharedStory.stories_with_comments_and_profiles([story], - relative_user_id, - check_all=True) - - if format == 'html': + stories, profiles = MSharedStory.stories_with_comments_and_profiles( + [story], relative_user_id, check_all=True + ) + + if format == "html": stories = MSharedStory.attach_users_to_stories(stories, profiles) - return render(request, 'social/story_comments.xhtml', { - 'story': stories[0], - }) + return render( + request, + "social/story_comments.xhtml", + { + "story": stories[0], + }, + ) else: - return json.json_response(request, { - 'comments': stories[0]['public_comments'], - 'user_profiles': profiles, - }) + return json.json_response( + request, + { + "comments": stories[0]["public_comments"], + 
"user_profiles": profiles, + }, + ) + @ajax_login_required def mark_story_as_shared(request): - code = 1 - feed_id = int(request.POST['feed_id']) - story_id = request.POST['story_id'] - comments = request.POST.get('comments', '') - source_user_id = request.POST.get('source_user_id') - relative_user_id = request.POST.get('relative_user_id') or request.user.pk - post_to_services = request.POST.getlist('post_to_services') or request.POST.getlist('post_to_services[]') - format = request.POST.get('format', 'json') + code = 1 + feed_id = int(request.POST["feed_id"]) + story_id = request.POST["story_id"] + comments = request.POST.get("comments", "") + source_user_id = request.POST.get("source_user_id") + relative_user_id = request.POST.get("relative_user_id") or request.user.pk + post_to_services = request.POST.getlist("post_to_services") or request.POST.getlist("post_to_services[]") + format = request.POST.get("format", "json") now = datetime.datetime.now() nowtz = localtime_for_timezone(now, request.user.profile.timezone) - + MSocialProfile.get_user(request.user.pk) - + story, original_story_found = MStory.find_story(feed_id, story_id) if not story: - return json.json_response(request, { - 'code': -1, - 'message': 'Could not find the original story and no copies could be found.' - }) - + return json.json_response( + request, + {"code": -1, "message": "Could not find the original story and no copies could be found."}, + ) + feed = Feed.get_by_id(feed_id) if feed and feed.is_newsletter: - return json.json_response(request, { - 'code': -1, - 'message': 'You cannot share newsletters. Somebody could unsubscribe you!' - }) - - if not request.user.profile.is_premium and MSharedStory.feed_quota(request.user.pk, story.story_hash, feed_id=feed_id): - return json.json_response(request, { - 'code': -1, - 'message': 'Only premium users can share multiple stories per day from the same site.' - }) - + return json.json_response( + request, {"code": -1, "message": "You cannot share newsletters. Somebody could unsubscribe you!"} + ) + + if not request.user.profile.is_premium and MSharedStory.feed_quota( + request.user.pk, story.story_hash, feed_id=feed_id + ): + return json.json_response( + request, + { + "code": -1, + "message": "Only premium users can share multiple stories per day from the same site.", + }, + ) + quota = 100 if not request.user.profile.is_premium: quota = 3 if MSharedStory.feed_quota(request.user.pk, story.story_hash, quota=quota): - logging.user(request, "~FRNOT ~FCSharing ~FM%s~FC, over quota: ~SB~FB%s" % (story.story_title[:20], comments[:30])) - message = 'You can only share up to %s stories per day.' % quota + logging.user( + request, + "~FRNOT ~FCSharing ~FM%s~FC, over quota: ~SB~FB%s" % (story.story_title[:20], comments[:30]), + ) + message = "You can only share up to %s stories per day." % quota if not request.user.profile.is_premium: - message = 'You can only share up to %s stories per day as a free user. Upgrade to premium to share more.' % quota - return json.json_response(request, { - 'code': -1, - 'message': message - }) - - shared_story = MSharedStory.objects.filter(user_id=request.user.pk, - story_feed_id=feed_id, - story_hash=story['story_hash'])\ - .hint([('story_hash', 1)])\ - .limit(1).first() + message = ( + "You can only share up to %s stories per day as a free user. Upgrade to premium to share more." 
+ % quota + ) + return json.json_response(request, {"code": -1, "message": message}) + + shared_story = ( + MSharedStory.objects.filter( + user_id=request.user.pk, story_feed_id=feed_id, story_hash=story["story_hash"] + ) + .hint([("story_hash", 1)]) + .limit(1) + .first() + ) if not shared_story: story_db = { "story_guid": story.story_guid, @@ -601,7 +709,7 @@ def mark_story_as_shared(request): "story_permalink": story.story_permalink, "story_title": story.story_title, "story_feed_id": story.story_feed_id, - "story_content_z": getattr(story, 'story_latest_content_z', None) or story.story_content_z, + "story_content_z": getattr(story, "story_latest_content_z", None) or story.story_content_z, "story_author_name": story.story_author_name, "story_tags": story.story_tags, "story_date": story.story_date, @@ -613,160 +721,198 @@ def mark_story_as_shared(request): shared_story = MSharedStory.objects.create(**story_db) shared_story.publish_to_subscribers() except NotUniqueError: - shared_story = MSharedStory.objects.get(story_guid=story_db['story_guid'], - user_id=story_db['user_id']) + shared_story = MSharedStory.objects.get( + story_guid=story_db["story_guid"], user_id=story_db["user_id"] + ) except MSharedStory.DoesNotExist: - return json.json_response(request, { - 'code': -1, - 'message': 'Story already shared but then not shared. I don\'t really know. Did you submit this twice very quickly?' - }) + return json.json_response( + request, + { + "code": -1, + "message": "Story already shared but then not shared. I don't really know. Did you submit this twice very quickly?", + }, + ) if source_user_id: shared_story.set_source_user_id(int(source_user_id)) - UpdateRecalcForSubscription.delay(subscription_user_id=request.user.pk, - shared_story_id=str(shared_story.id)) + UpdateRecalcForSubscription.delay( + subscription_user_id=request.user.pk, shared_story_id=str(shared_story.id) + ) logging.user(request, "~FCSharing ~FM%s: ~SB~FB%s" % (story.story_title[:20], comments[:30])) else: shared_story.comments = comments shared_story.has_comments = bool(comments) shared_story.save() - logging.user(request, "~FCUpdating shared story ~FM%s: ~SB~FB%s" % ( - story.story_title[:20], comments[:30])) - + logging.user( + request, "~FCUpdating shared story ~FM%s: ~SB~FB%s" % (story.story_title[:20], comments[:30]) + ) + if original_story_found: story.count_comments() - + story = Feed.format_story(story) check_all = not original_story_found - stories, profiles = MSharedStory.stories_with_comments_and_profiles([story], relative_user_id, - check_all=check_all) + stories, profiles = MSharedStory.stories_with_comments_and_profiles( + [story], relative_user_id, check_all=check_all + ) story = stories[0] - starred_stories = MStarredStory.objects(user_id=request.user.pk, - story_feed_id=story['story_feed_id'], - story_hash=story['story_hash'])\ - .only('story_hash', 'starred_date', 'user_tags').limit(1) + starred_stories = ( + MStarredStory.objects( + user_id=request.user.pk, story_feed_id=story["story_feed_id"], story_hash=story["story_hash"] + ) + .only("story_hash", "starred_date", "user_tags") + .limit(1) + ) if starred_stories: - story['user_tags'] = starred_stories[0]['user_tags'] - story['starred'] = True - starred_date = localtime_for_timezone(starred_stories[0]['starred_date'], - request.user.profile.timezone) - story['starred_date'] = format_story_link_date__long(starred_date, now) - story['shared_comments'] = strip_tags(shared_story['comments'] or "") - story['shared_by_user'] = True - story['shared'] = True - 
shared_date = localtime_for_timezone(shared_story['shared_date'], request.user.profile.timezone) - story['short_parsed_date'] = format_story_link_date__short(shared_date, nowtz) - story['long_parsed_date'] = format_story_link_date__long(shared_date, nowtz) - + story["user_tags"] = starred_stories[0]["user_tags"] + story["starred"] = True + starred_date = localtime_for_timezone( + starred_stories[0]["starred_date"], request.user.profile.timezone + ) + story["starred_date"] = format_story_link_date__long(starred_date, now) + story["shared_comments"] = strip_tags(shared_story["comments"] or "") + story["shared_by_user"] = True + story["shared"] = True + shared_date = localtime_for_timezone(shared_story["shared_date"], request.user.profile.timezone) + story["short_parsed_date"] = format_story_link_date__short(shared_date, nowtz) + story["long_parsed_date"] = format_story_link_date__long(shared_date, nowtz) + if post_to_services: for service in post_to_services: if service not in shared_story.posted_to_services: PostToService.delay(shared_story_id=str(shared_story.id), service=service) - - if shared_story.source_user_id and shared_story.comments: - EmailStoryReshares.apply_async(kwargs=dict(shared_story_id=str(shared_story.id)), - countdown=settings.SECONDS_TO_DELAY_CELERY_EMAILS) - - EmailFirstShare.apply_async(kwargs=dict(user_id=request.user.pk)) - - if format == 'html': + if shared_story.source_user_id and shared_story.comments: + EmailStoryReshares.apply_async( + kwargs=dict(shared_story_id=str(shared_story.id)), + countdown=settings.SECONDS_TO_DELAY_CELERY_EMAILS, + ) + + EmailFirstShare.apply_async(kwargs=dict(user_id=request.user.pk)) + + if format == "html": stories = MSharedStory.attach_users_to_stories(stories, profiles) - return render(request, 'social/social_story.xhtml', { - 'story': story, - }) + return render( + request, + "social/social_story.xhtml", + { + "story": story, + }, + ) else: - return json.json_response(request, { - 'code': code, - 'story': story, - 'user_profiles': profiles, - }) + return json.json_response( + request, + { + "code": code, + "story": story, + "user_profiles": profiles, + }, + ) + @ajax_login_required def mark_story_as_unshared(request): - feed_id = int(request.POST['feed_id']) - story_id = request.POST['story_id'] - relative_user_id = request.POST.get('relative_user_id') or request.user.pk - format = request.POST.get('format', 'json') + feed_id = int(request.POST["feed_id"]) + story_id = request.POST["story_id"] + relative_user_id = request.POST.get("relative_user_id") or request.user.pk + format = request.POST.get("format", "json") original_story_found = True - - story, original_story_found = MStory.find_story(story_feed_id=feed_id, - story_id=story_id) - - shared_story = MSharedStory.objects(user_id=request.user.pk, - story_feed_id=feed_id, - story_hash=story['story_hash']).limit(1).first() + + story, original_story_found = MStory.find_story(story_feed_id=feed_id, story_id=story_id) + + shared_story = ( + MSharedStory.objects(user_id=request.user.pk, story_feed_id=feed_id, story_hash=story["story_hash"]) + .limit(1) + .first() + ) if not shared_story: - return json.json_response(request, {'code': -1, 'message': 'Shared story not found.'}) - + return json.json_response(request, {"code": -1, "message": "Shared story not found."}) + shared_story.unshare_story() - + if original_story_found: story.count_comments() else: story = shared_story - - story = Feed.format_story(story) - stories, profiles = 
MSharedStory.stories_with_comments_and_profiles([story], - relative_user_id, - check_all=True) - if format == 'html': + story = Feed.format_story(story) + stories, profiles = MSharedStory.stories_with_comments_and_profiles( + [story], relative_user_id, check_all=True + ) + + if format == "html": stories = MSharedStory.attach_users_to_stories(stories, profiles) - return render(request, 'social/social_story.xhtml', { - 'story': stories[0], - }) + return render( + request, + "social/social_story.xhtml", + { + "story": stories[0], + }, + ) else: - return json.json_response(request, { - 'code': 1, - 'message': "Story unshared.", - 'story': stories[0], - 'user_profiles': profiles, - }) - + return json.json_response( + request, + { + "code": 1, + "message": "Story unshared.", + "story": stories[0], + "user_profiles": profiles, + }, + ) + + @ajax_login_required def save_comment_reply(request): - code = 1 - feed_id = int(request.POST['story_feed_id']) - story_id = request.POST['story_id'] - comment_user_id = request.POST['comment_user_id'] - reply_comments = request.POST.get('reply_comments') - reply_id = request.POST.get('reply_id') - format = request.POST.get('format', 'json') + code = 1 + feed_id = int(request.POST["story_feed_id"]) + story_id = request.POST["story_id"] + comment_user_id = request.POST["comment_user_id"] + reply_comments = request.POST.get("reply_comments") + reply_id = request.POST.get("reply_id") + format = request.POST.get("format", "json") original_message = None - + if not reply_comments: - return json.json_response(request, { - 'code': -1, - 'message': 'Reply comments cannot be empty.', - }) - + return json.json_response( + request, + { + "code": -1, + "message": "Reply comments cannot be empty.", + }, + ) + commenter_profile = MSocialProfile.get_user(comment_user_id) if commenter_profile.protected and not commenter_profile.is_followed_by_user(request.user.pk): - return json.json_response(request, { - 'code': -1, - 'message': 'You must be following %s to reply to them.' % (commenter_profile.user.username if commenter_profile.user else "[deleted]"), - }) - + return json.json_response( + request, + { + "code": -1, + "message": "You must be following %s to reply to them." 
+ % (commenter_profile.user.username if commenter_profile.user else "[deleted]"), + }, + ) + try: - shared_story = MSharedStory.objects.get(user_id=comment_user_id, - story_feed_id=feed_id, - story_guid=story_id) + shared_story = MSharedStory.objects.get( + user_id=comment_user_id, story_feed_id=feed_id, story_guid=story_id + ) except MSharedStory.DoesNotExist: - return json.json_response(request, { - 'code': -1, - 'message': 'Shared story cannot be found.', - }) - + return json.json_response( + request, + { + "code": -1, + "message": "Shared story cannot be found.", + }, + ) + reply = MCommentReply() reply.user_id = request.user.pk reply.publish_date = datetime.datetime.now() reply.comments = reply_comments - + if reply_id: replies = [] for story_reply in shared_story.replies: - if (story_reply.user_id == reply.user_id and - story_reply.reply_id == ObjectId(reply_id)): + if story_reply.user_id == reply.user_id and story_reply.reply_id == ObjectId(reply_id): reply.publish_date = story_reply.publish_date reply.reply_id = story_reply.reply_id original_message = story_reply.comments @@ -774,80 +920,96 @@ def save_comment_reply(request): else: replies.append(story_reply) shared_story.replies = replies - logging.user(request, "~FCUpdating comment reply in ~FM%s: ~SB~FB%s~FM" % ( - shared_story.story_title[:20], reply_comments[:30])) + logging.user( + request, + "~FCUpdating comment reply in ~FM%s: ~SB~FB%s~FM" + % (shared_story.story_title[:20], reply_comments[:30]), + ) else: reply.reply_id = ObjectId() - logging.user(request, "~FCReplying to comment in: ~FM%s: ~SB~FB%s~FM" % ( - shared_story.story_title[:20], reply_comments[:30])) + logging.user( + request, + "~FCReplying to comment in: ~FM%s: ~SB~FB%s~FM" + % (shared_story.story_title[:20], reply_comments[:30]), + ) shared_story.replies.append(reply) shared_story.save() - + comment, profiles = shared_story.comment_with_author_and_profiles() - + # Interaction for every other replier and original commenter - MActivity.new_comment_reply(user_id=request.user.pk, - comment_user_id=comment['user_id'], - reply_content=reply_comments, - original_message=original_message, - story_id=story_id, - story_feed_id=feed_id, - story_title=shared_story.story_title) - if comment['user_id'] != request.user.pk: - MInteraction.new_comment_reply(user_id=comment['user_id'], - reply_user_id=request.user.pk, - reply_content=reply_comments, - original_message=original_message, - story_id=story_id, - story_feed_id=feed_id, - story_title=shared_story.story_title) + MActivity.new_comment_reply( + user_id=request.user.pk, + comment_user_id=comment["user_id"], + reply_content=reply_comments, + original_message=original_message, + story_id=story_id, + story_feed_id=feed_id, + story_title=shared_story.story_title, + ) + if comment["user_id"] != request.user.pk: + MInteraction.new_comment_reply( + user_id=comment["user_id"], + reply_user_id=request.user.pk, + reply_content=reply_comments, + original_message=original_message, + story_id=story_id, + story_feed_id=feed_id, + story_title=shared_story.story_title, + ) - reply_user_ids = list(r['user_id'] for r in comment['replies']) - for user_id in set(reply_user_ids).difference([comment['user_id']]): + reply_user_ids = list(r["user_id"] for r in comment["replies"]) + for user_id in set(reply_user_ids).difference([comment["user_id"]]): if request.user.pk != user_id: - MInteraction.new_reply_reply(user_id=user_id, - comment_user_id=comment['user_id'], - reply_user_id=request.user.pk, - reply_content=reply_comments, - 
original_message=original_message, - story_id=story_id, - story_feed_id=feed_id, - story_title=shared_story.story_title) + MInteraction.new_reply_reply( + user_id=user_id, + comment_user_id=comment["user_id"], + reply_user_id=request.user.pk, + reply_content=reply_comments, + original_message=original_message, + story_id=story_id, + story_feed_id=feed_id, + story_title=shared_story.story_title, + ) - EmailCommentReplies.apply_async(kwargs=dict(shared_story_id=str(shared_story.id), - reply_id=str(reply.reply_id)), - countdown=settings.SECONDS_TO_DELAY_CELERY_EMAILS) - - if format == 'html': + EmailCommentReplies.apply_async( + kwargs=dict(shared_story_id=str(shared_story.id), reply_id=str(reply.reply_id)), + countdown=settings.SECONDS_TO_DELAY_CELERY_EMAILS, + ) + + if format == "html": comment = MSharedStory.attach_users_to_comment(comment, profiles) - return render(request, 'social/story_comment.xhtml', { - 'comment': comment, - }) + return render( + request, + "social/story_comment.xhtml", + { + "comment": comment, + }, + ) else: - return json.json_response(request, { - 'code': code, - 'comment': comment, - 'reply_id': reply.reply_id, - 'user_profiles': profiles - }) + return json.json_response( + request, {"code": code, "comment": comment, "reply_id": reply.reply_id, "user_profiles": profiles} + ) + @ajax_login_required def remove_comment_reply(request): - code = 1 - feed_id = int(request.POST['story_feed_id']) - story_id = request.POST['story_id'] - comment_user_id = request.POST['comment_user_id'] - reply_id = request.POST.get('reply_id') - format = request.POST.get('format', 'json') + code = 1 + feed_id = int(request.POST["story_feed_id"]) + story_id = request.POST["story_id"] + comment_user_id = request.POST["comment_user_id"] + reply_id = request.POST.get("reply_id") + format = request.POST.get("format", "json") original_message = None - - shared_story = MSharedStory.objects.get(user_id=comment_user_id, - story_feed_id=feed_id, - story_guid=story_id) + + shared_story = MSharedStory.objects.get( + user_id=comment_user_id, story_feed_id=feed_id, story_guid=story_id + ) replies = [] for story_reply in shared_story.replies: - if ((story_reply.user_id == request.user.pk or request.user.is_staff) and - story_reply.reply_id == ObjectId(reply_id)): + if ( + story_reply.user_id == request.user.pk or request.user.is_staff + ) and story_reply.reply_id == ObjectId(reply_id): original_message = story_reply.comments # Skip reply else: @@ -855,53 +1017,64 @@ def remove_comment_reply(request): shared_story.replies = replies shared_story.save() - logging.user(request, "~FCRemoving comment reply in ~FM%s: ~SB~FB%s~FM" % ( - shared_story.story_title[:20], original_message and original_message[:30])) - + logging.user( + request, + "~FCRemoving comment reply in ~FM%s: ~SB~FB%s~FM" + % (shared_story.story_title[:20], original_message and original_message[:30]), + ) + comment, profiles = shared_story.comment_with_author_and_profiles() # Interaction for every other replier and original commenter - MActivity.remove_comment_reply(user_id=request.user.pk, - comment_user_id=comment['user_id'], - reply_content=original_message, - story_id=story_id, - story_feed_id=feed_id) - MInteraction.remove_comment_reply(user_id=comment['user_id'], - reply_user_id=request.user.pk, - reply_content=original_message, - story_id=story_id, - story_feed_id=feed_id) - - reply_user_ids = [reply['user_id'] for reply in comment['replies']] - for user_id in set(reply_user_ids).difference([comment['user_id']]): + 
MActivity.remove_comment_reply( + user_id=request.user.pk, + comment_user_id=comment["user_id"], + reply_content=original_message, + story_id=story_id, + story_feed_id=feed_id, + ) + MInteraction.remove_comment_reply( + user_id=comment["user_id"], + reply_user_id=request.user.pk, + reply_content=original_message, + story_id=story_id, + story_feed_id=feed_id, + ) + + reply_user_ids = [reply["user_id"] for reply in comment["replies"]] + for user_id in set(reply_user_ids).difference([comment["user_id"]]): if request.user.pk != user_id: - MInteraction.remove_reply_reply(user_id=user_id, - comment_user_id=comment['user_id'], - reply_user_id=request.user.pk, - reply_content=original_message, - story_id=story_id, - story_feed_id=feed_id) - - if format == 'html': + MInteraction.remove_reply_reply( + user_id=user_id, + comment_user_id=comment["user_id"], + reply_user_id=request.user.pk, + reply_content=original_message, + story_id=story_id, + story_feed_id=feed_id, + ) + + if format == "html": comment = MSharedStory.attach_users_to_comment(comment, profiles) - return render(request, 'social/story_comment.xhtml', { - 'comment': comment, - }) + return render( + request, + "social/story_comment.xhtml", + { + "comment": comment, + }, + ) else: - return json.json_response(request, { - 'code': code, - 'comment': comment, - 'user_profiles': profiles - }) - -@render_to('social/mute_story.xhtml') + return json.json_response(request, {"code": code, "comment": comment, "user_profiles": profiles}) + + +@render_to("social/mute_story.xhtml") def mute_story(request, secret_token, shared_story_id): user_profile = Profile.objects.get(secret_token=secret_token) shared_story = MSharedStory.objects.get(id=shared_story_id) shared_story.mute_for_user(user_profile.user_id) - + return {} - + + def shared_stories_public(request, username): try: user = User.objects.get(username=username) @@ -909,50 +1082,59 @@ def shared_stories_public(request, username): raise Http404 shared_stories = MSharedStory.objects.filter(user_id=user.pk) - + return HttpResponse("There are %s stories shared by %s." 
% (shared_stories.count(), username)) - + + @json.json_view def profile(request): user = get_user(request.user) - user_id = int(request.GET.get('user_id', user.pk)) - categories = request.GET.getlist('category') or request.GET.getlist('category[]') - include_activities_html = request.GET.get('include_activities_html', None) + user_id = int(request.GET.get("user_id", user.pk)) + categories = request.GET.getlist("category") or request.GET.getlist("category[]") + include_activities_html = request.GET.get("include_activities_html", None) user_profile = MSocialProfile.get_user(user_id) user_profile.count_follows() - + activities = [] if not user_profile.private or user_profile.is_followed_by_user(user.pk): activities, _ = MActivity.user(user_id, page=1, public=True, categories=categories) user_profile = user_profile.canonical(include_follows=True, common_follows_with_user=user.pk) - profile_ids = set(user_profile['followers_youknow'] + user_profile['followers_everybody'] + - user_profile['following_youknow'] + user_profile['following_everybody']) + profile_ids = set( + user_profile["followers_youknow"] + + user_profile["followers_everybody"] + + user_profile["following_youknow"] + + user_profile["following_everybody"] + ) profiles = MSocialProfile.profiles(profile_ids) - logging.user(request, "~BB~FRLoading social profile: %s" % user_profile['username']) - + logging.user(request, "~BB~FRLoading social profile: %s" % user_profile["username"]) + payload = { - 'user_profile': user_profile, - 'followers_youknow': user_profile['followers_youknow'], - 'followers_everybody': user_profile['followers_everybody'], - 'following_youknow': user_profile['following_youknow'], - 'following_everybody': user_profile['following_everybody'], - 'requested_follow': user_profile['requested_follow'], - 'profiles': dict([(p.user_id, p.canonical(compact=True)) for p in profiles]), - 'activities': activities, + "user_profile": user_profile, + "followers_youknow": user_profile["followers_youknow"], + "followers_everybody": user_profile["followers_everybody"], + "following_youknow": user_profile["following_youknow"], + "following_everybody": user_profile["following_everybody"], + "requested_follow": user_profile["requested_follow"], + "profiles": dict([(p.user_id, p.canonical(compact=True)) for p in profiles]), + "activities": activities, } if include_activities_html: - payload['activities_html'] = render_to_string('reader/activities_module.xhtml', { - 'activities': activities, - 'username': user_profile['username'], - 'public': True, - }) - + payload["activities_html"] = render_to_string( + "reader/activities_module.xhtml", + { + "activities": activities, + "username": user_profile["username"], + "public": True, + }, + ) + return payload + @ajax_login_required @json.json_view def load_user_profile(request): @@ -961,43 +1143,44 @@ def load_user_profile(request): social_services = MSocialServices.objects.get(user_id=request.user.pk) except MSocialServices.DoesNotExist: social_services = MSocialServices.objects.create(user_id=request.user.pk) - + logging.user(request, "~BB~FRLoading social profile and blurblog settings") - + return { - 'services': social_services, - 'user_profile': social_profile.canonical(include_follows=True, include_settings=True), + "services": social_services, + "user_profile": social_profile.canonical(include_follows=True, include_settings=True), } - + + @ajax_login_required @json.json_view def save_user_profile(request): data = request.POST - website = data['website'] - - if website and not 
website.startswith('http'): - website = 'http://' + website - + website = data["website"] + + if website and not website.startswith("http"): + website = "http://" + website + profile = MSocialProfile.get_user(request.user.pk) - profile.location = data['location'] - profile.bio = data['bio'] + profile.location = data["location"] + profile.bio = data["bio"] profile.website = website - profile.protected = is_true(data.get('protected', False)) - profile.private = is_true(data.get('private', False)) + profile.protected = is_true(data.get("protected", False)) + profile.private = is_true(data.get("private", False)) profile.save() social_services = MSocialServices.get_user(user_id=request.user.pk) - profile = social_services.set_photo(data['photo_service']) - + profile = social_services.set_photo(data["photo_service"]) + logging.user(request, "~BB~FRSaving social profile") - + return dict(code=1, user_profile=profile.canonical(include_follows=True)) @ajax_login_required @json.json_view def upload_avatar(request): - photo = request.FILES['photo'] + photo = request.FILES["photo"] profile = MSocialProfile.get_user(request.user.pk) social_services = MSocialServices.objects.get(user_id=request.user.pk) @@ -1005,7 +1188,7 @@ def upload_avatar(request): image_url = social_services.save_uploaded_photo(photo) if image_url: - profile = social_services.set_photo('upload') + profile = social_services.set_photo("upload") return { "code": 1 if image_url else -1, @@ -1014,22 +1197,24 @@ def upload_avatar(request): "user_profile": profile.canonical(include_follows=True), } + @ajax_login_required @json.json_view def save_blurblog_settings(request): data = request.POST profile = MSocialProfile.get_user(request.user.pk) - profile.custom_css = strip_tags(data.get('custom_css', None)) - profile.custom_bgcolor = strip_tags(data.get('custom_bgcolor', None)) - profile.blurblog_title = strip_tags(data.get('blurblog_title', None)) - profile.bb_permalink_direct = is_true(data.get('bb_permalink_direct', False)) + profile.custom_css = strip_tags(data.get("custom_css", None)) + profile.custom_bgcolor = strip_tags(data.get("custom_bgcolor", None)) + profile.blurblog_title = strip_tags(data.get("blurblog_title", None)) + profile.bb_permalink_direct = is_true(data.get("bb_permalink_direct", False)) profile.save() logging.user(request, "~BB~FRSaving blurblog settings") - + return dict(code=1, user_profile=profile.canonical(include_follows=True, include_settings=True)) + @json.json_view def load_follow_requests(request): user = get_user(request.user) @@ -1039,53 +1224,57 @@ def load_follow_requests(request): request_profiles = [p.canonical(include_following_user=user.pk) for p in request_profiles] if len(request_profiles): - logging.user(request, "~BB~FRLoading Follow Requests (%s requests)" % ( - len(request_profiles), - )) + logging.user(request, "~BB~FRLoading Follow Requests (%s requests)" % (len(request_profiles),)) return { - 'request_profiles': request_profiles, + "request_profiles": request_profiles, } + @ratelimit(minutes=1, requests=100) @json.json_view def load_user_friends(request): user = get_user(request.user) - social_profile = MSocialProfile.get_user(user_id=user.pk) - social_services = MSocialServices.get_user(user_id=user.pk) + social_profile = MSocialProfile.get_user(user_id=user.pk) + social_services = MSocialServices.get_user(user_id=user.pk) following_profiles = MSocialProfile.profiles(social_profile.following_user_ids) - follower_profiles = MSocialProfile.profiles(social_profile.follower_user_ids) - 
recommended_users = social_profile.recommended_users() + follower_profiles = MSocialProfile.profiles(social_profile.follower_user_ids) + recommended_users = social_profile.recommended_users() following_profiles = [p.canonical(include_following_user=user.pk) for p in following_profiles] - follower_profiles = [p.canonical(include_following_user=user.pk) for p in follower_profiles] - - logging.user(request, "~BB~FRLoading Friends (%s following, %s followers)" % ( - social_profile.following_count, - social_profile.follower_count, - )) + follower_profiles = [p.canonical(include_following_user=user.pk) for p in follower_profiles] + + logging.user( + request, + "~BB~FRLoading Friends (%s following, %s followers)" + % ( + social_profile.following_count, + social_profile.follower_count, + ), + ) return { - 'services': social_services, - 'autofollow': social_services.autofollow, - 'user_profile': social_profile.canonical(include_follows=True), - 'following_profiles': following_profiles, - 'follower_profiles': follower_profiles, - 'recommended_users': recommended_users, + "services": social_services, + "autofollow": social_services.autofollow, + "user_profile": social_profile.canonical(include_follows=True), + "following_profiles": following_profiles, + "follower_profiles": follower_profiles, + "recommended_users": recommended_users, } + @ajax_login_required @json.json_view def follow(request): profile = MSocialProfile.get_user(request.user.pk) - user_id = request.POST['user_id'] + user_id = request.POST["user_id"] try: follow_user_id = int(user_id) except ValueError: try: - follow_user_id = int(user_id.replace('social:', '')) + follow_user_id = int(user_id.replace("social:", "")) follow_profile = MSocialProfile.get_user(follow_user_id) except (ValueError, MSocialProfile.DoesNotExist): - follow_username = user_id.replace('social:', '') + follow_username = user_id.replace("social:", "") try: follow_profile = MSocialProfile.objects.get(username=follow_username) except MSocialProfile.DoesNotExist: @@ -1094,54 +1283,55 @@ def follow(request): profile.follow_user(follow_user_id) follow_profile = MSocialProfile.get_user(follow_user_id) - + social_params = { - 'user_id': request.user.pk, - 'subscription_user_id': follow_user_id, - 'include_favicon': True, - 'update_counts': True, + "user_id": request.user.pk, + "subscription_user_id": follow_user_id, + "include_favicon": True, + "update_counts": True, } follow_subscription = MSocialSubscription.feeds(calculate_all_scores=True, **social_params) - + if follow_profile.user: if follow_profile.protected: logging.user(request, "~BB~FR~SBRequested~SN follow from: ~SB%s" % follow_profile.user.username) else: logging.user(request, "~BB~FRFollowing: ~SB%s" % follow_profile.user.username) - + return { - "user_profile": profile.canonical(include_follows=True), + "user_profile": profile.canonical(include_follows=True), "follow_profile": follow_profile.canonical(common_follows_with_user=request.user.pk), "follow_subscription": follow_subscription, } - + + @ajax_login_required @json.json_view def unfollow(request): profile = MSocialProfile.get_user(request.user.pk) - user_id = request.POST['user_id'] + user_id = request.POST["user_id"] try: unfollow_user_id = int(user_id) except ValueError: try: - unfollow_user_id = int(user_id.replace('social:', '')) + unfollow_user_id = int(user_id.replace("social:", "")) unfollow_profile = MSocialProfile.get_user(unfollow_user_id) except (ValueError, MSocialProfile.DoesNotExist): - unfollow_username = user_id.replace('social:', '') + 
unfollow_username = user_id.replace("social:", "") try: unfollow_profile = MSocialProfile.objects.get(username=unfollow_username) except MSocialProfile.DoesNotExist: raise Http404 unfollow_user_id = unfollow_profile.user_id - + profile.unfollow_user(unfollow_user_id) unfollow_profile = MSocialProfile.get_user(unfollow_user_id) - + logging.user(request, "~BB~FRUnfollowing: ~SB%s" % unfollow_profile.username) - + return { - 'user_profile': profile.canonical(include_follows=True), - 'unfollow_profile': unfollow_profile.canonical(common_follows_with_user=request.user.pk), + "user_profile": profile.canonical(include_follows=True), + "unfollow_profile": unfollow_profile.canonical(common_follows_with_user=request.user.pk), } @@ -1149,80 +1339,84 @@ def unfollow(request): @json.json_view def approve_follower(request): profile = MSocialProfile.get_user(request.user.pk) - user_id = int(request.POST['user_id']) + user_id = int(request.POST["user_id"]) follower_profile = MSocialProfile.get_user(user_id) code = -1 - + logging.user(request, "~BB~FRApproving follow: ~SB%s" % follower_profile.username) - + if user_id in profile.requested_follow_user_ids: follower_profile.follow_user(request.user.pk, force=True) code = 1 - - return {'code': code} + + return {"code": code} + @ajax_login_required @json.json_view def ignore_follower(request): profile = MSocialProfile.get_user(request.user.pk) - user_id = int(request.POST['user_id']) + user_id = int(request.POST["user_id"]) follower_profile = MSocialProfile.get_user(user_id) code = -1 - + logging.user(request, "~BB~FR~SK~SBNOT~SN approving follow: ~SB%s" % follower_profile.username) - + if user_id in profile.requested_follow_user_ids: follower_profile.unfollow_user(request.user.pk) code = 1 - - return {'code': code} + + return {"code": code} + @ajax_login_required -@required_params('user_id', method="POST") +@required_params("user_id", method="POST") @json.json_view def mute_user(request): profile = MSocialProfile.get_user(request.user.pk) - muting_user_id = int(request.POST['user_id']) + muting_user_id = int(request.POST["user_id"]) social_profile = MSocialProfile.get_user(request.user.pk) muting_profile = MSocialProfile.get_user(muting_user_id) code = 1 - + logging.user(request, "~FMMuting user ~SB%s" % muting_profile.username) - + social_profile.mute_user(muting_user_id) - + return { - 'code': code, - 'user_profile': social_profile.canonical(), + "code": code, + "user_profile": social_profile.canonical(), } + @ajax_login_required -@required_params('user_id', method="POST") +@required_params("user_id", method="POST") @json.json_view def unmute_user(request): profile = MSocialProfile.get_user(request.user.pk) - muting_user_id = int(request.POST['user_id']) + muting_user_id = int(request.POST["user_id"]) muting_profile = MSocialProfile.get_user(muting_user_id) code = 1 - + logging.user(request, "~FM~SBUn-~SN~FMMuting user ~SB%s" % muting_profile.username) - + profile.unmute_user(muting_user_id) - + return { - 'code': code, - 'user_profile': profile.canonical(), + "code": code, + "user_profile": profile.canonical(), } -@required_params('query', method="GET") + +@required_params("query", method="GET") @json.json_view def find_friends(request): - query = request.GET['query'] - limit = int(request.GET.get('limit', 3)) + query = request.GET["query"] + limit = int(request.GET.get("limit", 3)) profiles = [] - - if '@' in query: - results = re.search(r'[\w\.-]+@[\w\.-]+', query) + + if "@" in query: + results = re.search(r"[\w\.-]+@[\w\.-]+", query) if results: email 
= results.group(0) profiles = MSocialProfile.objects.filter(email__iexact=email)[:limit] @@ -1238,110 +1432,141 @@ def find_friends(request): profiles = MSocialProfile.objects.filter(blurblog_title__icontains=query)[:limit] if not profiles: profiles = MSocialProfile.objects.filter(location__icontains=query)[:limit] - + profiles = [p.canonical(include_following_user=request.user.pk) for p in profiles] - profiles = sorted(profiles, key=lambda p: -1 * p['shared_stories_count']) + profiles = sorted(profiles, key=lambda p: -1 * p["shared_stories_count"]) return dict(profiles=profiles) + @ajax_login_required def like_comment(request): - code = 1 - feed_id = int(request.POST['story_feed_id']) - story_id = request.POST['story_id'] - comment_user_id = int(request.POST['comment_user_id']) - format = request.POST.get('format', 'json') - + code = 1 + feed_id = int(request.POST["story_feed_id"]) + story_id = request.POST["story_id"] + comment_user_id = int(request.POST["comment_user_id"]) + format = request.POST.get("format", "json") + if comment_user_id == request.user.pk: - return json.json_response(request, {'code': -1, 'message': 'You cannot favorite your own shared story comment.'}) + return json.json_response( + request, {"code": -1, "message": "You cannot favorite your own shared story comment."} + ) try: - shared_story = MSharedStory.objects.get(user_id=comment_user_id, - story_feed_id=feed_id, - story_guid=story_id) + shared_story = MSharedStory.objects.get( + user_id=comment_user_id, story_feed_id=feed_id, story_guid=story_id + ) except MSharedStory.DoesNotExist: - return json.json_response(request, {'code': -1, 'message': 'The shared comment cannot be found.'}) - + return json.json_response(request, {"code": -1, "message": "The shared comment cannot be found."}) + shared_story.add_liking_user(request.user.pk) comment, profiles = shared_story.comment_with_author_and_profiles() comment_user = User.objects.get(pk=shared_story.user_id) - logging.user(request, "~BB~FMLiking comment by ~SB%s~SN: %s" % ( - comment_user.username, - shared_story.comments[:30], - )) + logging.user( + request, + "~BB~FMLiking comment by ~SB%s~SN: %s" + % ( + comment_user.username, + shared_story.comments[:30], + ), + ) - MActivity.new_comment_like(liking_user_id=request.user.pk, - comment_user_id=comment['user_id'], - story_id=story_id, - story_feed_id=feed_id, - story_title=shared_story.story_title, - comments=shared_story.comments) - MInteraction.new_comment_like(liking_user_id=request.user.pk, - comment_user_id=comment['user_id'], - story_id=story_id, - story_feed_id=feed_id, - story_title=shared_story.story_title, - comments=shared_story.comments) - - if format == 'html': + MActivity.new_comment_like( + liking_user_id=request.user.pk, + comment_user_id=comment["user_id"], + story_id=story_id, + story_feed_id=feed_id, + story_title=shared_story.story_title, + comments=shared_story.comments, + ) + MInteraction.new_comment_like( + liking_user_id=request.user.pk, + comment_user_id=comment["user_id"], + story_id=story_id, + story_feed_id=feed_id, + story_title=shared_story.story_title, + comments=shared_story.comments, + ) + + if format == "html": comment = MSharedStory.attach_users_to_comment(comment, profiles) - return render(request, 'social/story_comment.xhtml', { - 'comment': comment, - }) + return render( + request, + "social/story_comment.xhtml", + { + "comment": comment, + }, + ) else: - return json.json_response(request, { - 'code': code, - 'comment': comment, - 'user_profiles': profiles, - }) - + return 
json.json_response( + request, + { + "code": code, + "comment": comment, + "user_profiles": profiles, + }, + ) + + @ajax_login_required def remove_like_comment(request): - code = 1 - feed_id = int(request.POST['story_feed_id']) - story_id = request.POST['story_id'] - comment_user_id = request.POST['comment_user_id'] - format = request.POST.get('format', 'json') - - shared_story = MSharedStory.objects.get(user_id=comment_user_id, - story_feed_id=feed_id, - story_guid=story_id) + code = 1 + feed_id = int(request.POST["story_feed_id"]) + story_id = request.POST["story_id"] + comment_user_id = request.POST["comment_user_id"] + format = request.POST.get("format", "json") + + shared_story = MSharedStory.objects.get( + user_id=comment_user_id, story_feed_id=feed_id, story_guid=story_id + ) shared_story.remove_liking_user(request.user.pk) comment, profiles = shared_story.comment_with_author_and_profiles() comment_user = User.objects.get(pk=shared_story.user_id) - logging.user(request, "~BB~FMRemoving like on comment by ~SB%s~SN: %s" % ( - comment_user.username, - shared_story.comments[:30], - )) - - if format == 'html': + logging.user( + request, + "~BB~FMRemoving like on comment by ~SB%s~SN: %s" + % ( + comment_user.username, + shared_story.comments[:30], + ), + ) + + if format == "html": comment = MSharedStory.attach_users_to_comment(comment, profiles) - return render(request, 'social/story_comment.xhtml', { - 'comment': comment, - }) + return render( + request, + "social/story_comment.xhtml", + { + "comment": comment, + }, + ) else: - return json.json_response(request, { - 'code': code, - 'comment': comment, - 'user_profiles': profiles, - }) + return json.json_response( + request, + { + "code": code, + "comment": comment, + "user_profiles": profiles, + }, + ) + + def get_subdomain(request): - host = request.META.get('HTTP_HOST') + host = request.META.get("HTTP_HOST") if host.count(".") == 2: return host.split(".")[0] else: return None + def shared_stories_rss_feed_noid(request): - index = HttpResponseRedirect('http://%s%s' % ( - Site.objects.get_current().domain, - reverse('index'))) + index = HttpResponseRedirect("http://%s%s" % (Site.objects.get_current().domain, reverse("index"))) if get_subdomain(request): username = get_subdomain(request) try: - if '.' in username: - username = username.split('.')[0] + if "." 
in username: + username = username.split(".")[0] user = User.objects.get(username__iexact=username) except User.DoesNotExist: return index @@ -1349,6 +1574,7 @@ def shared_stories_rss_feed_noid(request): return index + @ratelimit(minutes=1, requests=5) def shared_stories_rss_feed(request, user_id, username=None): try: @@ -1357,81 +1583,89 @@ def shared_stories_rss_feed(request, user_id, username=None): raise Http404 limit = 25 - offset = request.GET.get('page', 0) * limit + offset = request.GET.get("page", 0) * limit username = username and username.lower() profile = MSocialProfile.get_user(user.pk) - params = {'username': profile.username_slug, 'user_id': user.pk} + params = {"username": profile.username_slug, "user_id": user.pk} if not username or profile.username_slug.lower() != username: - return HttpResponseRedirect(reverse('shared-stories-rss-feed', kwargs=params)) + return HttpResponseRedirect(reverse("shared-stories-rss-feed", kwargs=params)) social_profile = MSocialProfile.get_user(user_id) current_site = Site.objects.get_current() current_site = current_site and current_site.domain - + if social_profile.private: return HttpResponseForbidden() - + data = {} - data['title'] = social_profile.title - data['link'] = social_profile.blurblog_url - data['description'] = "Stories shared by %s on NewsBlur." % user.username - data['lastBuildDate'] = datetime.datetime.utcnow() - data['generator'] = 'NewsBlur - %s' % settings.NEWSBLUR_URL - data['docs'] = None - data['author_name'] = user.username - data['feed_url'] = "http://%s%s" % ( + data["title"] = social_profile.title + data["link"] = social_profile.blurblog_url + data["description"] = "Stories shared by %s on NewsBlur." % user.username + data["lastBuildDate"] = datetime.datetime.utcnow() + data["generator"] = "NewsBlur - %s" % settings.NEWSBLUR_URL + data["docs"] = None + data["author_name"] = user.username + data["feed_url"] = "http://%s%s" % ( current_site, - reverse('shared-stories-rss-feed', kwargs=params), + reverse("shared-stories-rss-feed", kwargs=params), ) rss = feedgenerator.Atom1Feed(**data) - shared_stories = MSharedStory.objects.filter(user_id=user.pk).order_by('-shared_date')[offset:offset+limit] + shared_stories = MSharedStory.objects.filter(user_id=user.pk).order_by("-shared_date")[ + offset : offset + limit + ] for shared_story in shared_stories: feed = Feed.get_by_id(shared_story.story_feed_id) - content = render_to_string('social/rss_story.xhtml', { - 'feed': feed, - 'user': user, - 'social_profile': social_profile, - 'shared_story': shared_story, - 'content': shared_story.story_content_str, - }) + content = render_to_string( + "social/rss_story.xhtml", + { + "feed": feed, + "user": user, + "social_profile": social_profile, + "shared_story": shared_story, + "content": shared_story.story_content_str, + }, + ) story_data = { - 'title': shared_story.story_title, - 'link': shared_story.story_permalink, - 'description': content, - 'author_name': shared_story.story_author_name, - 'categories': shared_story.story_tags, - 'unique_id': shared_story.story_permalink, - 'pubdate': shared_story.shared_date, + "title": shared_story.story_title, + "link": shared_story.story_permalink, + "description": content, + "author_name": shared_story.story_author_name, + "categories": shared_story.story_tags, + "unique_id": shared_story.story_permalink, + "pubdate": shared_story.shared_date, } rss.add_item(**story_data) - - logging.user(request, "~FBGenerating ~SB%s~SN's RSS feed: ~FM%s" % ( - user.username, - 
request.META.get('HTTP_USER_AGENT', "")[:24] - )) - return HttpResponse(rss.writeString('utf-8'), content_type='application/rss+xml') -@required_params('user_id', method="GET") + logging.user( + request, + "~FBGenerating ~SB%s~SN's RSS feed: ~FM%s" + % (user.username, request.META.get("HTTP_USER_AGENT", "")[:24]), + ) + return HttpResponse(rss.writeString("utf-8"), content_type="application/rss+xml") + + +@required_params("user_id", method="GET") @json.json_view def social_feed_trainer(request): - social_user_id = request.GET['user_id'] + social_user_id = request.GET["user_id"] social_profile = MSocialProfile.get_user(social_user_id) social_user = get_object_or_404(User, pk=social_user_id) user = get_user(request) - + social_profile.count_stories() classifier = social_profile.canonical() - classifier['classifiers'] = get_classifiers_for_user(user, social_user_id=classifier['id']) - classifier['num_subscribers'] = social_profile.follower_count - classifier['feed_tags'] = [] - classifier['feed_authors'] = [] - - logging.user(user, "~FGLoading social trainer on ~SB%s: %s" % ( - social_user.username, social_profile.title)) - + classifier["classifiers"] = get_classifiers_for_user(user, social_user_id=classifier["id"]) + classifier["num_subscribers"] = social_profile.follower_count + classifier["feed_tags"] = [] + classifier["feed_authors"] = [] + + logging.user( + user, "~FGLoading social trainer on ~SB%s: %s" % (social_user.username, social_profile.title) + ) + return [classifier] - + @json.json_view def load_social_statistics(request, social_user_id, username=None): @@ -1439,96 +1673,101 @@ def load_social_statistics(request, social_user_id, username=None): social_profile = MSocialProfile.get_user(social_user_id) social_profile.save_feed_story_history_statistics() social_profile.save_classifier_counts() - + # Stories per month - average and month-by-month breakout - stats['average_stories_per_month'] = social_profile.average_stories_per_month - stats['story_count_history'] = social_profile.story_count_history - stats['story_hours_history'] = social_profile.story_hours_history - stats['story_days_history'] = social_profile.story_days_history - + stats["average_stories_per_month"] = social_profile.average_stories_per_month + stats["story_count_history"] = social_profile.story_count_history + stats["story_hours_history"] = social_profile.story_hours_history + stats["story_days_history"] = social_profile.story_days_history + # Subscribers - stats['subscriber_count'] = social_profile.follower_count - stats['num_subscribers'] = social_profile.follower_count - + stats["subscriber_count"] = social_profile.follower_count + stats["num_subscribers"] = social_profile.follower_count + # Classifier counts - stats['classifier_counts'] = social_profile.feed_classifier_counts - + stats["classifier_counts"] = social_profile.feed_classifier_counts + # Feeds - feed_ids = [c['feed_id'] for c in stats['classifier_counts'].get('feed', [])] - feeds = Feed.objects.filter(pk__in=feed_ids).only('feed_title') + feed_ids = [c["feed_id"] for c in stats["classifier_counts"].get("feed", [])] + feeds = Feed.objects.filter(pk__in=feed_ids).only("feed_title") titles = dict([(f.pk, f.feed_title) for f in feeds]) - for stat in stats['classifier_counts'].get('feed', []): - stat['feed_title'] = titles.get(stat['feed_id'], "") - - logging.user(request, "~FBStatistics social: ~SB%s ~FG(%s subs)" % ( - social_profile.user_id, social_profile.follower_count)) + for stat in stats["classifier_counts"].get("feed", []): + 
stat["feed_title"] = titles.get(stat["feed_id"], "") + + logging.user( + request, + "~FBStatistics social: ~SB%s ~FG(%s subs)" % (social_profile.user_id, social_profile.follower_count), + ) return stats + @json.json_view def load_social_settings(request, social_user_id, username=None): social_profile = MSocialProfile.get_user(social_user_id) - + return social_profile.canonical() + @ajax_login_required def load_interactions(request): - user_id = request.GET.get('user_id', None) - categories = request.GET.getlist('category') or request.GET.getlist('category[]') - if not user_id or 'null' in user_id: + user_id = request.GET.get("user_id", None) + categories = request.GET.getlist("category") or request.GET.getlist("category[]") + if not user_id or "null" in user_id: user_id = get_user(request).pk - page = max(1, int(request.GET.get('page', 1))) - limit = request.GET.get('limit') - interactions, has_next_page = MInteraction.user(user_id, page=page, limit=limit, - categories=categories) - format = request.GET.get('format', None) - - data = { - 'interactions': interactions, - 'page': page, - 'has_next_page': has_next_page - } - + page = max(1, int(request.GET.get("page", 1))) + limit = request.GET.get("limit") + interactions, has_next_page = MInteraction.user(user_id, page=page, limit=limit, categories=categories) + format = request.GET.get("format", None) + + data = {"interactions": interactions, "page": page, "has_next_page": has_next_page} + logging.user(request, "~FBLoading interactions ~SBp/%s" % page) - - if format == 'html': - return render(request, 'reader/interactions_module.xhtml', data) + + if format == "html": + return render(request, "reader/interactions_module.xhtml", data) else: return json.json_response(request, data) + @ajax_login_required def load_activities(request): - user_id = request.GET.get('user_id', None) - categories = request.GET.getlist('category') or request.GET.getlist('category[]') - if user_id and 'null' not in user_id: + user_id = request.GET.get("user_id", None) + categories = request.GET.getlist("category") or request.GET.getlist("category[]") + if user_id and "null" not in user_id: user_id = int(user_id) user = User.objects.get(pk=user_id) else: user = get_user(request) user_id = user.pk - + public = user_id != request.user.pk - page = max(1, int(request.GET.get('page', 1))) - limit = request.GET.get('limit', 4) - activities, has_next_page = MActivity.user(user_id, page=page, limit=limit, public=public, - categories=categories) - format = request.GET.get('format', None) - + page = max(1, int(request.GET.get("page", 1))) + limit = request.GET.get("limit", 4) + activities, has_next_page = MActivity.user( + user_id, page=page, limit=limit, public=public, categories=categories + ) + format = request.GET.get("format", None) + data = { - 'activities': activities, - 'page': page, - 'has_next_page': has_next_page, - 'username': (user.username if public else 'You'), + "activities": activities, + "page": page, + "has_next_page": has_next_page, + "username": (user.username if public else "You"), } - + logging.user(request, "~FBLoading activities ~SBp/%s" % page) - - if format == 'html': - return render(request, 'reader/activities_module.xhtml', data, - ) + + if format == "html": + return render( + request, + "reader/activities_module.xhtml", + data, + ) else: return json.json_response(request, data) + @json.json_view def comment(request, comment_id): try: @@ -1537,13 +1776,14 @@ def comment(request, comment_id): raise Http404 return shared_story.comments_with_author() + 
@json.json_view
def comment_reply(request, comment_id, reply_id):
    try:
        shared_story = MSharedStory.objects.get(id=comment_id)
    except MSharedStory.DoesNotExist:
        raise Http404
-
+
    for story_reply in shared_story.replies:
        if story_reply.reply_id == ObjectId(reply_id):
            return story_reply
diff --git a/apps/static/tests.py b/apps/static/tests.py
index 2247054b3..3748f41ba 100644
--- a/apps/static/tests.py
+++ b/apps/static/tests.py
@@ -7,6 +7,7 @@ Replace these with more appropriate tests for your application.
from django.test import TestCase
+
class SimpleTest(TestCase):
    def test_basic_addition(self):
        """
@@ -14,10 +15,12 @@ class SimpleTest(TestCase):
        """
        self.failUnlessEqual(1 + 1, 2)
-__test__ = {"doctest": """
+
+__test__ = {
+    "doctest": """
Another way to test that 1 + 1 is equal to 2.
>>> 1 + 1 == 2
True
-"""}
-
+"""
+}
diff --git a/apps/static/views.py b/apps/static/views.py
index c5c98f05c..934a38ef9 100644
--- a/apps/static/views.py
+++ b/apps/static/views.py
@@ -8,102 +8,123 @@ from apps.rss_feeds.models import Feed, MStory
from apps.search.models import SearchFeed
from utils import log as logging
-def about(request):
-    return render(request, 'static/about.xhtml')
-
-def faq(request):
-    return render(request, 'static/faq.xhtml')
-
-def api(request):
-    filename = settings.TEMPLATES[0]['DIRS'][0] + '/static/api.yml'
-    api_yml_file = open(filename).read()
-    data = yaml.load(api_yml_file)
-    return render(request, 'static/api.xhtml', {'data': data})
-
+def about(request):
+    return render(request, "static/about.xhtml")
+
+
+def faq(request):
+    return render(request, "static/faq.xhtml")
+
+
+def api(request):
+    filename = settings.TEMPLATES[0]["DIRS"][0] + "/static/api.yml"
+    api_yml_file = open(filename).read()
+    data = yaml.load(api_yml_file)
+
+    return render(request, "static/api.xhtml", {"data": data})
+
+
def press(request):
-    return render(request, 'static/press.xhtml')
+    return render(request, "static/press.xhtml")
+
def privacy(request):
-    return render(request, 'static/privacy.xhtml')
+    return render(request, "static/privacy.xhtml")
+
def tos(request):
-    return render(request, 'static/tos.xhtml')
+    return render(request, "static/tos.xhtml")
+
def webmanifest(request):
-    filename = settings.MEDIA_ROOT + '/extensions/edge/manifest.json'
+    filename = settings.MEDIA_ROOT + "/extensions/edge/manifest.json"
    manifest = open(filename).read()
-
-    return HttpResponse(manifest, content_type='application/manifest+json')
+
+    return HttpResponse(manifest, content_type="application/manifest+json")
+
def apple_app_site_assoc(request):
-    return render(request, 'static/apple_app_site_assoc.xhtml')
-
+    return render(request, "static/apple_app_site_assoc.xhtml")
+
+
def apple_developer_merchantid(request):
-    return render(request, 'static/apple_developer_merchantid.xhtml')
+    return render(request, "static/apple_developer_merchantid.xhtml")
+
def feedback(request):
-    return render(request, 'static/feedback.xhtml')
+    return render(request, "static/feedback.xhtml")
+
def firefox(request):
-    filename = settings.MEDIA_ROOT + '/extensions/firefox/manifest.json'
+    filename = settings.MEDIA_ROOT + "/extensions/firefox/manifest.json"
    manifest = open(filename).read()
-
-    return HttpResponse(manifest, content_type='application/x-web-app-manifest+json')
+
+    return HttpResponse(manifest, content_type="application/x-web-app-manifest+json")
+
def ios(request):
-    return render(request, 'static/ios.xhtml')
-
+    return render(request, "static/ios.xhtml")
+
+
def android(request):
-    return render(request,
'static/android.xhtml') - + return render(request, "static/android.xhtml") + + def ios_download(request): - return render(request, 'static/ios_download.xhtml') - + return render(request, "static/ios_download.xhtml") + + def ios_plist(request): - filename = os.path.join(settings.NEWSBLUR_DIR, 'clients/ios/NewsBlur.plist') + filename = os.path.join(settings.NEWSBLUR_DIR, "clients/ios/NewsBlur.plist") manifest = open(filename).read() - + logging.user(request, "~SK~FR~BBDownloading NewsBlur.plist...") - return HttpResponse(manifest, content_type='text/xml') - + return HttpResponse(manifest, content_type="text/xml") + + def ios_ipa(request): - filename = os.path.join(settings.NEWSBLUR_DIR, 'clients/ios/NewsBlur.ipa') + filename = os.path.join(settings.NEWSBLUR_DIR, "clients/ios/NewsBlur.ipa") manifest = open(filename).read() - + logging.user(request, "~SK~FR~BBDownloading NewsBlur.ipa...") - return HttpResponse(manifest, content_type='application/octet-stream') + return HttpResponse(manifest, content_type="application/octet-stream") + def haproxy_check(request): return HttpResponse("OK") + def postgres_check(request): - feed = Feed.objects.latest('pk').pk + feed = Feed.objects.latest("pk").pk if feed: return HttpResponse(unicode(feed)) assert False, "Cannot read from postgres database" + def mongo_check(request): stories = MStory.objects.count() if stories: return HttpResponse(unicode(stories)) assert False, "Cannot read from mongo database" + def elasticsearch_check(request): client = SearchFeed.ES() if client.indices.exists_index(SearchFeed.index_name()): return HttpResponse(SearchFeed.index_name()) assert False, "Cannot read from elasticsearch database" + def redis_check(request): - pool = request.GET['pool'] - if pool == 'main': + pool = request.GET["pool"] + if pool == "main": r = redis.Redis(connection_pool=settings.REDIS_POOL) - elif pool == 'story': + elif pool == "story": r = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL) - elif pool == 'sessions': + elif pool == "sessions": r = redis.Redis(connection_pool=settings.REDIS_SESSION_POOL) - + key = r.randomkey() if key: return HttpResponse(unicode(key)) diff --git a/apps/statistics/management/commands/collect_feedback.py b/apps/statistics/management/commands/collect_feedback.py index 38f0e9378..64ffa8d51 100644 --- a/apps/statistics/management/commands/collect_feedback.py +++ b/apps/statistics/management/commands/collect_feedback.py @@ -1,7 +1,7 @@ from django.core.management.base import BaseCommand from apps.statistics.models import MFeedback -class Command(BaseCommand): +class Command(BaseCommand): def handle(self, *args, **options): - MFeedback.collect_feedback() \ No newline at end of file + MFeedback.collect_feedback() diff --git a/apps/statistics/management/commands/collect_stats.py b/apps/statistics/management/commands/collect_stats.py index eea39564e..d664faf69 100644 --- a/apps/statistics/management/commands/collect_stats.py +++ b/apps/statistics/management/commands/collect_stats.py @@ -1,8 +1,7 @@ from django.core.management.base import BaseCommand from apps.statistics.models import MStatistics -class Command(BaseCommand): +class Command(BaseCommand): def handle(self, *args, **options): MStatistics.collect_statistics() - \ No newline at end of file diff --git a/apps/statistics/models.py b/apps/statistics/models.py index 9d4e5fa5e..9d3df0934 100644 --- a/apps/statistics/models.py +++ b/apps/statistics/models.py @@ -13,20 +13,21 @@ from utils import json_functions as json from utils import db_functions from utils 
import log as logging + class MStatistics(mongo.Document): - key = mongo.StringField(unique=True) + key = mongo.StringField(unique=True) value = mongo.DynamicField() expiration_date = mongo.DateTimeField() - + meta = { - 'collection': 'statistics', - 'allow_inheritance': False, - 'indexes': ['key'], + "collection": "statistics", + "allow_inheritance": False, + "indexes": ["key"], } - + def __str__(self): return "%s: %s" % (self.key, self.value) - + @classmethod def get(cls, key, default=None, set_default=False, expiration_sec=None): obj = cls.objects.filter(key=key).first() @@ -53,25 +54,31 @@ class MStatistics(mongo.Document): if expiration_sec: obj.expiration_date = datetime.datetime.now() + datetime.timedelta(seconds=expiration_sec) obj.save() - + @classmethod def all(cls): stats = cls.objects.all() values = dict([(stat.key, stat.value) for stat in stats]) for key, value in list(values.items()): - if key in ('avg_time_taken', 'sites_loaded', 'stories_shared'): + if key in ("avg_time_taken", "sites_loaded", "stories_shared"): values[key] = json.decode(value) - elif key in ('feeds_fetched', 'premium_users', 'standard_users', 'latest_sites_loaded', - 'max_sites_loaded', 'max_stories_shared'): + elif key in ( + "feeds_fetched", + "premium_users", + "standard_users", + "latest_sites_loaded", + "max_sites_loaded", + "max_stories_shared", + ): values[key] = int(value) - elif key in ('latest_avg_time_taken', 'max_avg_time_taken', 'last_1_min_time_taken'): + elif key in ("latest_avg_time_taken", "max_avg_time_taken", "last_1_min_time_taken"): values[key] = float(value) - - values['total_sites_loaded'] = sum(values['sites_loaded']) if 'sites_loaded' in values else 0 - values['total_stories_shared'] = sum(values['stories_shared']) if 'stories_shared' in values else 0 + + values["total_sites_loaded"] = sum(values["sites_loaded"]) if "sites_loaded" in values else 0 + values["total_stories_shared"] = sum(values["stories_shared"]) if "stories_shared" in values else 0 return values - + @classmethod def collect_statistics(cls): now = datetime.datetime.now() @@ -93,34 +100,38 @@ class MStatistics(mongo.Document): cls.collect_statistics_feeds_fetched() # if settings.DEBUG: # print("Feeds Fetched: %s" % (datetime.datetime.now() - now)) - + @classmethod def collect_statistics_feeds_fetched(cls): - feeds_fetched = RStats.count('feed_fetch', hours=24) - cls.objects(key='feeds_fetched').update_one(upsert=True, - set__key='feeds_fetched', - set__value=feeds_fetched) - + feeds_fetched = RStats.count("feed_fetch", hours=24) + cls.objects(key="feeds_fetched").update_one( + upsert=True, set__key="feeds_fetched", set__value=feeds_fetched + ) + return feeds_fetched - + @classmethod def collect_statistics_premium_users(cls): last_day = datetime.datetime.now() - datetime.timedelta(hours=24) - + premium_users = Profile.objects.filter(last_seen_on__gte=last_day, is_premium=True).count() - cls.objects(key='premium_users').update_one(upsert=True, set__key='premium_users', set__value=premium_users) - + cls.objects(key="premium_users").update_one( + upsert=True, set__key="premium_users", set__value=premium_users + ) + return premium_users - + @classmethod def collect_statistics_standard_users(cls): last_day = datetime.datetime.now() - datetime.timedelta(hours=24) - + standard_users = Profile.objects.filter(last_seen_on__gte=last_day, is_premium=False).count() - cls.objects(key='standard_users').update_one(upsert=True, set__key='standard_users', set__value=standard_users) - + cls.objects(key="standard_users").update_one( + 
upsert=True, set__key="standard_users", set__value=standard_users + ) + return standard_users - + @classmethod def collect_statistics_sites_loaded(cls): now = round_time(datetime.datetime.now(), round_to=60) @@ -130,23 +141,23 @@ class MStatistics(mongo.Document): r = redis.Redis(connection_pool=settings.REDIS_STATISTICS_POOL) for hours_ago in range(24): - start_hours_ago = now - datetime.timedelta(hours=hours_ago+1) - + start_hours_ago = now - datetime.timedelta(hours=hours_ago + 1) + pipe = r.pipeline() for m in range(60): minute = start_hours_ago + datetime.timedelta(minutes=m) - key = "%s:%s" % (RStats.stats_type('page_load'), minute.strftime('%s')) + key = "%s:%s" % (RStats.stats_type("page_load"), minute.strftime("%s")) pipe.get("%s:s" % key) pipe.get("%s:a" % key) - + times = pipe.execute() - + counts = [int(c) for c in times[::2] if c] avgs = [float(a) for a in times[1::2] if a] - + if hours_ago == 0: last_1_min_time_taken = round(sum(avgs[:1]) / max(1, sum(counts[:1])), 2) - + if counts and avgs: count = max(1, sum(counts)) avg = round(sum(avgs) / count, 3) @@ -161,81 +172,81 @@ class MStatistics(mongo.Document): avg_time_taken.reverse() values = ( - ('sites_loaded', json.encode(sites_loaded)), - ('avg_time_taken', json.encode(avg_time_taken)), - ('latest_sites_loaded', sites_loaded[-1]), - ('latest_avg_time_taken', avg_time_taken[-1]), - ('max_sites_loaded', max(sites_loaded)), - ('max_avg_time_taken', max(1, max(avg_time_taken))), - ('last_1_min_time_taken', last_1_min_time_taken), + ("sites_loaded", json.encode(sites_loaded)), + ("avg_time_taken", json.encode(avg_time_taken)), + ("latest_sites_loaded", sites_loaded[-1]), + ("latest_avg_time_taken", avg_time_taken[-1]), + ("max_sites_loaded", max(sites_loaded)), + ("max_avg_time_taken", max(1, max(avg_time_taken))), + ("last_1_min_time_taken", last_1_min_time_taken), ) for key, value in values: cls.objects(key=key).update_one(upsert=True, set__key=key, set__value=value) - + @classmethod def collect_statistics_stories_shared(cls): now = datetime.datetime.now() stories_shared = [] - + for hour in range(24): start_hours_ago = now - datetime.timedelta(hours=hour) - end_hours_ago = now - datetime.timedelta(hours=hour+1) + end_hours_ago = now - datetime.timedelta(hours=hour + 1) shares = MSharedStory.objects.filter( - shared_date__lte=start_hours_ago, - shared_date__gte=end_hours_ago + shared_date__lte=start_hours_ago, shared_date__gte=end_hours_ago ).count() stories_shared.append(shares) stories_shared.reverse() - + values = ( - ('stories_shared', json.encode(stories_shared)), - ('latest_stories_shared', stories_shared[-1]), - ('max_stories_shared', max(stories_shared)), + ("stories_shared", json.encode(stories_shared)), + ("latest_stories_shared", stories_shared[-1]), + ("max_stories_shared", max(stories_shared)), ) for key, value in values: cls.objects(key=key).update_one(upsert=True, set__key=key, set__value=value) - + @classmethod def collect_statistics_for_db(cls, debug=False): lag = db_functions.mongo_max_replication_lag(settings.MONGODB) - cls.set('mongodb_replication_lag', lag) - + cls.set("mongodb_replication_lag", lag) + now = round_time(datetime.datetime.now(), round_to=60) r = redis.Redis(connection_pool=settings.REDIS_STATISTICS_POOL) db_times = {} latest_db_times = {} - for db in ['sql', - 'mongo', - 'redis', - 'redis_user', - 'redis_story', - 'redis_session', - 'redis_pubsub', - 'task_sql', - 'task_mongo', - 'task_redis', - 'task_redis_user', - 'task_redis_story', - 'task_redis_session', - 'task_redis_pubsub', - ]: + 
for db in [ + "sql", + "mongo", + "redis", + "redis_user", + "redis_story", + "redis_session", + "redis_pubsub", + "task_sql", + "task_mongo", + "task_redis", + "task_redis_user", + "task_redis_story", + "task_redis_session", + "task_redis_pubsub", + ]: db_times[db] = [] for hour in range(24): - start_hours_ago = now - datetime.timedelta(hours=hour+1) + start_hours_ago = now - datetime.timedelta(hours=hour + 1) pipe = r.pipeline() for m in range(60): minute = start_hours_ago + datetime.timedelta(minutes=m) - key = "DB:%s:%s" % (db, minute.strftime('%s')) + key = "DB:%s:%s" % (db, minute.strftime("%s")) if debug: print(" -> %s:c" % key) pipe.get("%s:c" % key) pipe.get("%s:t" % key) - + times = pipe.execute() - + counts = [int(c or 0) for c in times[::2]] avgs = [float(a or 0) for a in times[1::2]] if counts and avgs: @@ -244,7 +255,7 @@ class MStatistics(mongo.Document): else: count = 0 avg = 0 - + if hour == 0: latest_count = float(counts[-1]) if len(counts) else 0 latest_avg = float(avgs[-1]) if len(avgs) else 0 @@ -254,85 +265,91 @@ class MStatistics(mongo.Document): db_times[db].reverse() values = ( - ('avg_sql_times', json.encode(db_times['sql'])), - ('avg_mongo_times', json.encode(db_times['mongo'])), - ('avg_redis_times', json.encode(db_times['redis'])), - ('latest_sql_avg', latest_db_times['sql']), - ('latest_mongo_avg', latest_db_times['mongo']), - ('latest_redis_user_avg', latest_db_times['redis_user']), - ('latest_redis_story_avg', latest_db_times['redis_story']), - ('latest_redis_session_avg',latest_db_times['redis_session']), - ('latest_redis_pubsub_avg', latest_db_times['redis_pubsub']), - ('latest_task_sql_avg', latest_db_times['task_sql']), - ('latest_task_mongo_avg', latest_db_times['task_mongo']), - ('latest_task_redis_user_avg', latest_db_times['task_redis_user']), - ('latest_task_redis_story_avg', latest_db_times['task_redis_story']), - ('latest_task_redis_session_avg',latest_db_times['task_redis_session']), - ('latest_task_redis_pubsub_avg', latest_db_times['task_redis_pubsub']), + ("avg_sql_times", json.encode(db_times["sql"])), + ("avg_mongo_times", json.encode(db_times["mongo"])), + ("avg_redis_times", json.encode(db_times["redis"])), + ("latest_sql_avg", latest_db_times["sql"]), + ("latest_mongo_avg", latest_db_times["mongo"]), + ("latest_redis_user_avg", latest_db_times["redis_user"]), + ("latest_redis_story_avg", latest_db_times["redis_story"]), + ("latest_redis_session_avg", latest_db_times["redis_session"]), + ("latest_redis_pubsub_avg", latest_db_times["redis_pubsub"]), + ("latest_task_sql_avg", latest_db_times["task_sql"]), + ("latest_task_mongo_avg", latest_db_times["task_mongo"]), + ("latest_task_redis_user_avg", latest_db_times["task_redis_user"]), + ("latest_task_redis_story_avg", latest_db_times["task_redis_story"]), + ("latest_task_redis_session_avg", latest_db_times["task_redis_session"]), + ("latest_task_redis_pubsub_avg", latest_db_times["task_redis_pubsub"]), ) for key, value in values: cls.objects(key=key).update_one(upsert=True, set__key=key, set__value=value) class MFeedback(mongo.Document): - date = mongo.DateTimeField() + date = mongo.DateTimeField() date_short = mongo.StringField() subject = mongo.StringField() - url = mongo.StringField() - style = mongo.StringField() - order = mongo.IntField() - + url = mongo.StringField() + style = mongo.StringField() + order = mongo.IntField() + meta = { - 'collection': 'feedback', - 'allow_inheritance': False, - 'indexes': ['style'], - 'ordering': ['order'], + "collection": "feedback", + 
"allow_inheritance": False, + "indexes": ["style"], + "ordering": ["order"], } - + CATEGORIES = { - 5: 'idea', - 6: 'problem', - 7: 'praise', - 8: 'question', - 9: 'admin', - 10: 'updates', + 5: "idea", + 6: "problem", + 7: "praise", + 8: "question", + 9: "admin", + 10: "updates", } - + def __str__(self): return "%s: (%s) %s" % (self.style, self.date, self.subject) - + @classmethod def collect_feedback(cls): seen_posts = set() try: - data = requests.get('https://forum.newsblur.com/posts.json', timeout=3).content + data = requests.get("https://forum.newsblur.com/posts.json", timeout=3).content except (urllib.error.HTTPError, requests.exceptions.ConnectTimeout) as e: logging.debug(" ***> Failed to collect feedback: %s" % e) return - data = json.decode(data).get('latest_posts', "") + data = json.decode(data).get("latest_posts", "") if not len(data): print("No data!") return - + cls.objects.delete() post_count = 0 for post in data: - if post['topic_id'] in seen_posts: continue - seen_posts.add(post['topic_id']) + if post["topic_id"] in seen_posts: + continue + seen_posts.add(post["topic_id"]) feedback = {} - feedback['order'] = post_count + feedback["order"] = post_count post_count += 1 - feedback['date'] = dateutil.parser.parse(post['created_at']).replace(tzinfo=None) - feedback['date_short'] = relative_date(feedback['date']) - feedback['subject'] = post['topic_title'] - feedback['url'] = "https://forum.newsblur.com/t/%s/%s/%s" % (post['topic_slug'], post['topic_id'], post['post_number']) - feedback['style'] = cls.CATEGORIES[post['category_id']] + feedback["date"] = dateutil.parser.parse(post["created_at"]).replace(tzinfo=None) + feedback["date_short"] = relative_date(feedback["date"]) + feedback["subject"] = post["topic_title"] + feedback["url"] = "https://forum.newsblur.com/t/%s/%s/%s" % ( + post["topic_slug"], + post["topic_id"], + post["post_number"], + ) + feedback["style"] = cls.CATEGORIES[post["category_id"]] cls.objects.create(**feedback) # if settings.DEBUG: # print("%s: %s (%s)" % (feedback['style'], feedback['subject'], feedback['date_short'])) - if post_count >= 4: break - + if post_count >= 4: + break + @classmethod def all(cls): feedbacks = cls.objects.all()[:4] @@ -350,28 +367,31 @@ class MAnalyticsFetcher(mongo.Document): total = mongo.FloatField() server = mongo.StringField() feed_code = mongo.IntField() - + meta = { - 'db_alias': 'nbanalytics', - 'collection': 'feed_fetches', - 'allow_inheritance': False, - 'indexes': ['date', 'feed_id', 'server', 'feed_code'], - 'ordering': ['date'], + "db_alias": "nbanalytics", + "collection": "feed_fetches", + "allow_inheritance": False, + "indexes": ["date", "feed_id", "server", "feed_code"], + "ordering": ["date"], } - + def __str__(self): - return "%s: %.4s+%.4s+%.4s+%.4s = %.4ss" % (self.feed_id, self.feed_fetch, - self.feed_process, - self.page, - self.icon, - self.total) - + return "%s: %.4s+%.4s+%.4s+%.4s = %.4ss" % ( + self.feed_id, + self.feed_fetch, + self.feed_process, + self.page, + self.icon, + self.total, + ) + @classmethod - def add(cls, feed_id, feed_fetch, feed_process, - page, icon, total, feed_code): + def add(cls, feed_id, feed_fetch, feed_process, page, icon, total, feed_code): server_name = settings.SERVER_NAME - if 'app' in server_name: return - + if "app" in server_name: + return + if icon and page: icon -= page if page and feed_process: @@ -380,12 +400,18 @@ class MAnalyticsFetcher(mongo.Document): page -= feed_fetch if feed_process and feed_fetch: feed_process -= feed_fetch - - cls.objects.create(feed_id=feed_id, 
feed_fetch=feed_fetch, - feed_process=feed_process, - page=page, icon=icon, total=total, - server=server_name, feed_code=feed_code) - + + cls.objects.create( + feed_id=feed_id, + feed_fetch=feed_fetch, + feed_process=feed_process, + page=page, + icon=icon, + total=total, + server=server_name, + feed_code=feed_code, + ) + @classmethod def calculate_stats(cls, stats): return cls.aggregate(**stats) @@ -395,24 +421,24 @@ class MAnalyticsLoader(mongo.Document): date = mongo.DateTimeField(default=datetime.datetime.now) page_load = mongo.FloatField() server = mongo.StringField() - + meta = { - 'db_alias': 'nbanalytics', - 'collection': 'page_loads', - 'allow_inheritance': False, - 'indexes': ['date', 'server'], - 'ordering': ['date'], + "db_alias": "nbanalytics", + "collection": "page_loads", + "allow_inheritance": False, + "indexes": ["date", "server"], + "ordering": ["date"], } - + def __str__(self): return "%s: %.4ss" % (self.server, self.page_load) - + @classmethod def add(cls, page_load): server_name = settings.SERVER_NAME cls.objects.create(page_load=page_load, server=server_name) - + @classmethod def calculate_stats(cls, stats): return cls.aggregate(**stats) diff --git a/apps/statistics/rstats.py b/apps/statistics/rstats.py index e25a61f16..3b8785d75 100644 --- a/apps/statistics/rstats.py +++ b/apps/statistics/rstats.py @@ -6,87 +6,86 @@ from django.conf import settings class RStats: - STATS_TYPE = { - 'page_load': 'PLT', - 'feed_fetch': 'FFH', + "page_load": "PLT", + "feed_fetch": "FFH", } - + @classmethod def stats_type(cls, name): return cls.STATS_TYPE[name] - + @classmethod def add(cls, name, duration=None): r = redis.Redis(connection_pool=settings.REDIS_STATISTICS_POOL) pipe = r.pipeline() minute = round_time(round_to=60) - key = "%s:%s" % (cls.stats_type(name), minute.strftime('%s')) + key = "%s:%s" % (cls.stats_type(name), minute.strftime("%s")) pipe.incr("%s:s" % key) if duration: pipe.incrbyfloat("%s:a" % key, duration) pipe.expireat("%s:a" % key, (minute + datetime.timedelta(days=2)).strftime("%s")) pipe.expireat("%s:s" % key, (minute + datetime.timedelta(days=2)).strftime("%s")) pipe.execute() - + @classmethod def clean_path(cls, path): if not path: return - - if path.startswith('/reader/feed/'): - path = '/reader/feed/' - elif path.startswith('/social/stories'): - path = '/social/stories/' - elif path.startswith('/reader/river_stories'): - path = '/reader/river_stories/' - elif path.startswith('/social/river_stories'): - path = '/social/river_stories/' - elif path.startswith('/reader/page/'): - path = '/reader/page/' - elif path.startswith('/api/check_share_on_site'): - path = '/api/check_share_on_site/' - + + if path.startswith("/reader/feed/"): + path = "/reader/feed/" + elif path.startswith("/social/stories"): + path = "/social/stories/" + elif path.startswith("/reader/river_stories"): + path = "/reader/river_stories/" + elif path.startswith("/social/river_stories"): + path = "/social/river_stories/" + elif path.startswith("/reader/page/"): + path = "/reader/page/" + elif path.startswith("/api/check_share_on_site"): + path = "/api/check_share_on_site/" + return path - + @classmethod def count(cls, name, hours=24): r = redis.Redis(connection_pool=settings.REDIS_STATISTICS_POOL) stats_type = cls.stats_type(name) now = datetime.datetime.now() pipe = r.pipeline() - for minutes_ago in range(60*hours): + for minutes_ago in range(60 * hours): dt_min_ago = now - datetime.timedelta(minutes=minutes_ago) minute = round_time(dt=dt_min_ago, round_to=60) - key = "%s:%s" % (stats_type, 
minute.strftime('%s')) + key = "%s:%s" % (stats_type, minute.strftime("%s")) pipe.get("%s:s" % key) values = pipe.execute() total = sum(int(v) for v in values if v) return total - + @classmethod def sample(cls, sample=1000, pool=None): if not pool: pool = settings.REDIS_STORY_HASH_POOL - r = redis.Redis(connection_pool=pool) - keys = set() - errors = set() - prefixes = defaultdict(set) - sizes = defaultdict(int) + r = redis.Redis(connection_pool=pool) + keys = set() + errors = set() + prefixes = defaultdict(set) + sizes = defaultdict(int) prefixes_ttls = defaultdict(lambda: defaultdict(int)) - prefix_re = re.compile(r"(\w+):(.*)") + prefix_re = re.compile(r"(\w+):(.*)") - p = r.pipeline() + p = r.pipeline() [p.randomkey() for _ in range(sample)] - keys = set(p.execute()) + keys = set(p.execute()) - p = r.pipeline() + p = r.pipeline() [p.ttl(key) for key in keys] - ttls = p.execute() + ttls = p.execute() + + dump = [r.execute_command("dump", key) for key in keys] - dump = [r.execute_command('dump', key) for key in keys] - for k, key in enumerate(keys): match = prefix_re.match(key) if not match or dump[k] is None: @@ -96,39 +95,49 @@ class RStats: prefixes[prefix].add(rest) sizes[prefix] += len(dump[k]) ttl = ttls[k] - if ttl < 0: # Never expire - prefixes_ttls[prefix]['-'] += 1 + if ttl < 0: # Never expire + prefixes_ttls[prefix]["-"] += 1 elif ttl == 0: - prefixes_ttls[prefix]['X'] += 1 - elif ttl < 60*60: # 1 hour - prefixes_ttls[prefix]['1h'] += 1 - elif ttl < 60*60*24: - prefixes_ttls[prefix]['1d'] += 1 - elif ttl < 60*60*24*7: - prefixes_ttls[prefix]['1w'] += 1 - elif ttl < 60*60*24*14: - prefixes_ttls[prefix]['2w'] += 1 - elif ttl < 60*60*24*30: - prefixes_ttls[prefix]['4w'] += 1 + prefixes_ttls[prefix]["X"] += 1 + elif ttl < 60 * 60: # 1 hour + prefixes_ttls[prefix]["1h"] += 1 + elif ttl < 60 * 60 * 24: + prefixes_ttls[prefix]["1d"] += 1 + elif ttl < 60 * 60 * 24 * 7: + prefixes_ttls[prefix]["1w"] += 1 + elif ttl < 60 * 60 * 24 * 14: + prefixes_ttls[prefix]["2w"] += 1 + elif ttl < 60 * 60 * 24 * 30: + prefixes_ttls[prefix]["4w"] += 1 else: - prefixes_ttls[prefix]['4w+'] += 1 - + prefixes_ttls[prefix]["4w+"] += 1 + keys_count = len(keys) total_size = float(sum([k for k in sizes.values()])) print(" ---> %s total keys" % keys_count) for prefix, rest in prefixes.items(): total_expiring = sum([k for p, k in dict(prefixes_ttls[prefix]).items() if p != "-"]) - print(" ---> %s: (%s keys - %s space) %s keys (%s expiring: %s)" % (str(prefix, 100. * (len(rest) / float(keys_count)))[:4], str(100 * (sizes[prefix] / total_size))[:4], str(len(rest))[:4], total_expiring, dict(prefixes_ttls[prefix]))) + print( + " ---> %s: (%s keys - %s space) %s keys (%s expiring: %s)" + % ( + str(prefix, 100.0 * (len(rest) / float(keys_count)))[:4], + str(100 * (sizes[prefix] / total_size))[:4], + str(len(rest))[:4], + total_expiring, + dict(prefixes_ttls[prefix]), + ) + ) print(" ---> %s errors: %s" % (len(errors), errors)) + def round_time(dt=None, round_to=60): - """Round a datetime object to any time laps in seconds - dt : datetime.datetime object, default now. - round_to : Closest number of seconds to round to, default 1 minute. - Author: Thierry Husson 2012 - Use it as you want but don't blame me. - """ - if dt == None : dt = datetime.datetime.now() - seconds = (dt - dt.min).seconds - rounding = (seconds+round_to/2) // round_to * round_to - return dt + datetime.timedelta(0,rounding-seconds,-dt.microsecond) - + """Round a datetime object to any time laps in seconds + dt : datetime.datetime object, default now. 
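
RStats.add() and RStats.count() above bucket counters per minute and read them back through a single Redis pipeline. A minimal standalone sketch of the same pattern, assuming a local Redis on the default port and an illustrative "PLT" prefix rather than NewsBlur's configured connection pools:

import datetime
import redis

r = redis.Redis()

def bump(prefix, duration=None, now=None):
    # One counter key per minute bucket; counts live under ":s", summed durations under ":a".
    now = now or datetime.datetime.now()
    minute = now.replace(second=0, microsecond=0)
    key = "%s:%s" % (prefix, minute.strftime("%s"))
    pipe = r.pipeline()
    pipe.incr("%s:s" % key)
    if duration:
        pipe.incrbyfloat("%s:a" % key, duration)
    # Let stale buckets fall out of Redis after two days, as the real code does.
    pipe.expireat("%s:s" % key, int((minute + datetime.timedelta(days=2)).strftime("%s")))
    pipe.execute()

def count(prefix, hours=24):
    # Batch one GET per minute bucket into a single round trip.
    now = datetime.datetime.now()
    pipe = r.pipeline()
    for minutes_ago in range(60 * hours):
        minute = (now - datetime.timedelta(minutes=minutes_ago)).replace(second=0, microsecond=0)
        key = "%s:%s" % (prefix, minute.strftime("%s"))
        pipe.get("%s:s" % key)
    return sum(int(v) for v in pipe.execute() if v)

bump("PLT", duration=0.25)
print(count("PLT", hours=1))

Batching the 1,440 GETs of a 24-hour window into one pipeline keeps count() to a single network round trip.
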
+ round_to : Closest number of seconds to round to, default 1 minute. + Author: Thierry Husson 2012 - Use it as you want but don't blame me. + """ + if dt == None: + dt = datetime.datetime.now() + seconds = (dt - dt.min).seconds + rounding = (seconds + round_to / 2) // round_to * round_to + return dt + datetime.timedelta(0, rounding - seconds, -dt.microsecond) diff --git a/apps/statistics/tasks.py b/apps/statistics/tasks.py index b05a5108b..79eb3ada9 100644 --- a/apps/statistics/tasks.py +++ b/apps/statistics/tasks.py @@ -4,14 +4,13 @@ from apps.statistics.models import MFeedback from utils import log as logging - -@app.task(name='collect-stats') +@app.task(name="collect-stats") def CollectStats(): logging.debug(" ---> ~FBCollecting stats...") MStatistics.collect_statistics() - - -@app.task(name='collect-feedback') + + +@app.task(name="collect-feedback") def CollectFeedback(): logging.debug(" ---> ~FBCollecting feedback...") MFeedback.collect_feedback() diff --git a/apps/statistics/templatetags/statistics_tags.py b/apps/statistics/templatetags/statistics_tags.py index 70015a842..9961ef9c1 100644 --- a/apps/statistics/templatetags/statistics_tags.py +++ b/apps/statistics/templatetags/statistics_tags.py @@ -3,19 +3,22 @@ from apps.statistics.models import MFeedback register = template.Library() -@register.inclusion_tag('statistics/render_statistics_graphs.xhtml') + +@register.inclusion_tag("statistics/render_statistics_graphs.xhtml") def render_statistics_graphs(statistics): return { - 'statistics': statistics, + "statistics": statistics, } - + + @register.filter def format_graph(n, max_value, height=30): if n == 0 or max_value == 0: return 1 - return max(1, height * (n/float(max_value))) - -@register.inclusion_tag('statistics/render_feedback_table.xhtml') + return max(1, height * (n / float(max_value))) + + +@register.inclusion_tag("statistics/render_feedback_table.xhtml") def render_feedback_table(): feedbacks = MFeedback.all() - return dict(feedbacks=feedbacks) \ No newline at end of file + return dict(feedbacks=feedbacks) diff --git a/apps/statistics/tests.py b/apps/statistics/tests.py index c7c4668e1..f51d798ff 100644 --- a/apps/statistics/tests.py +++ b/apps/statistics/tests.py @@ -7,6 +7,7 @@ Replace these with more appropriate tests for your application. from django.test import TestCase + class SimpleTest(TestCase): def test_basic_addition(self): """ @@ -14,10 +15,12 @@ class SimpleTest(TestCase): """ self.assertEqual(1 + 1, 2) -__test__ = {"doctest": """ + +__test__ = { + "doctest": """ Another way to test that 1 + 1 is equal to 2. 
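
round_time() above rounds to the nearest bucket rather than truncating. A quick worked check of that arithmetic (the dates are arbitrary, and the function is reproduced here only to keep the sketch self-contained):

import datetime

def round_time(dt=None, round_to=60):
    # Copy of round_time() from apps/statistics/rstats.py, for the example below.
    if dt is None:
        dt = datetime.datetime.now()
    seconds = (dt - dt.min).seconds
    rounding = (seconds + round_to / 2) // round_to * round_to
    return dt + datetime.timedelta(0, rounding - seconds, -dt.microsecond)

dt = datetime.datetime(2024, 1, 1, 12, 34, 56, 789000)
# 56s is past the 30s midpoint, so the value rounds up to the next minute.
assert round_time(dt, round_to=60) == datetime.datetime(2024, 1, 1, 12, 35)
# With 5-minute buckets, 14:22:10 is closer to 14:20 than to 14:25.
assert round_time(datetime.datetime(2024, 1, 1, 14, 22, 10), round_to=300) == datetime.datetime(2024, 1, 1, 14, 20)
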
>>> 1 + 1 == 2 True -"""} - +""" +} diff --git a/apps/statistics/urls.py b/apps/statistics/urls.py index ee2ede961..d0dc66881 100644 --- a/apps/statistics/urls.py +++ b/apps/statistics/urls.py @@ -2,8 +2,8 @@ from django.conf.urls import * from apps.statistics import views urlpatterns = [ - url(r'^dashboard_graphs', views.dashboard_graphs, name='statistics-graphs'), - url(r'^feedback_table', views.feedback_table, name='feedback-table'), - url(r'^revenue', views.revenue, name='revenue'), - url(r'^slow', views.slow, name='slow'), + url(r"^dashboard_graphs", views.dashboard_graphs, name="statistics-graphs"), + url(r"^feedback_table", views.feedback_table, name="feedback-table"), + url(r"^revenue", views.revenue, name="revenue"), + url(r"^slow", views.slow, name="slow"), ] diff --git a/apps/statistics/views.py b/apps/statistics/views.py index 8d769c07a..0ce091b6b 100644 --- a/apps/statistics/views.py +++ b/apps/statistics/views.py @@ -17,48 +17,47 @@ from apps.statistics.rstats import round_time from apps.profile.models import PaymentHistory from utils import log as logging + def dashboard_graphs(request): statistics = MStatistics.all() - return render( - request, - 'statistics/render_statistics_graphs.xhtml', - {'statistics': statistics} - ) + return render(request, "statistics/render_statistics_graphs.xhtml", {"statistics": statistics}) + def feedback_table(request): feedbacks = MFeedback.all() - return render( - request, - 'statistics/render_feedback_table.xhtml', - {'feedbacks': feedbacks} - ) + return render(request, "statistics/render_feedback_table.xhtml", {"feedbacks": feedbacks}) + def revenue(request): data = {} - data['title'] = "NewsBlur Revenue" - data['link'] = "https://www.newsblur.com" - data['description'] = "Revenue" - data['lastBuildDate'] = datetime.datetime.utcnow() - data['generator'] = 'NewsBlur Revenue Writer' - data['docs'] = None + data["title"] = "NewsBlur Revenue" + data["link"] = "https://www.newsblur.com" + data["description"] = "Revenue" + data["lastBuildDate"] = datetime.datetime.utcnow() + data["generator"] = "NewsBlur Revenue Writer" + data["docs"] = None rss = feedgenerator.Atom1Feed(**data) - + report = PaymentHistory.report() - content = "%s revenue: $%s
%s" % (datetime.datetime.now().strftime('%Y'), report['annual'], report['output'].replace('\n', '
')) - + content = "%s revenue: $%s
%s" % ( + datetime.datetime.now().strftime("%Y"), + report["annual"], + report["output"].replace("\n", "
"), + ) + story = { - 'title': "Daily snapshot: %s" % (datetime.datetime.now().strftime('%a %b %-d, %Y')), - 'link': 'https://www.newsblur.com', - 'description': content, - 'unique_id': datetime.datetime.now().strftime('%a %b %-d, %Y'), - 'pubdate': datetime.datetime.now(), + "title": "Daily snapshot: %s" % (datetime.datetime.now().strftime("%a %b %-d, %Y")), + "link": "https://www.newsblur.com", + "description": content, + "unique_id": datetime.datetime.now().strftime("%a %b %-d, %Y"), + "pubdate": datetime.datetime.now(), } rss.add_item(**story) - - logging.user(request, "~FBGenerating Revenue RSS feed: ~FM%s" % ( - request.META.get('HTTP_USER_AGENT', "")[:24] - )) - return HttpResponse(rss.writeString('utf-8'), content_type='application/rss+xml') + + logging.user( + request, "~FBGenerating Revenue RSS feed: ~FM%s" % (request.META.get("HTTP_USER_AGENT", "")[:24]) + ) + return HttpResponse(rss.writeString("utf-8"), content_type="application/rss+xml") @login_required @@ -74,8 +73,8 @@ def slow(request): user_id_counts = {} path_counts = {} users = {} - - for minutes_ago in range(60*6): + + for minutes_ago in range(60 * 6): dt_ago = now - datetime.timedelta(minutes=minutes_ago) minute = round_time(dt_ago, round_to=60) dt_ago_str = minute.strftime("%a %b %-d, %Y %H:%M") @@ -83,7 +82,7 @@ def slow(request): minute_queries = r.lrange(name, 0, -1) for query_raw in minute_queries: query = pickle.loads(base64.b64decode(query_raw)) - user_id = query['user_id'] + user_id = query["user_id"] if dt_ago_str not in all_queries: all_queries[dt_ago_str] = [] if user_id in users: @@ -97,22 +96,26 @@ def slow(request): else: user = AnonymousUser() users[user_id] = user - query['user'] = user - query['datetime'] = minute + query["user"] = user + query["datetime"] = minute all_queries[dt_ago_str].append(query) if user_id not in user_id_counts: user_id_counts[user_id] = 0 user_id_counts[user_id] += 1 - if query['path'] not in path_counts: - path_counts[query['path']] = 0 - path_counts[query['path']] += 1 + if query["path"] not in path_counts: + path_counts[query["path"]] = 0 + path_counts[query["path"]] += 1 user_counts = [] for user_id, count in user_id_counts.items(): - user_counts.append({'user': users[user_id], 'count': count}) - - return render(request, 'statistics/slow.xhtml', { - 'all_queries': all_queries, - 'user_counts': user_counts, - 'path_counts': path_counts, - }) + user_counts.append({"user": users[user_id], "count": count}) + + return render( + request, + "statistics/slow.xhtml", + { + "all_queries": all_queries, + "user_counts": user_counts, + "path_counts": path_counts, + }, + ) diff --git a/archive/ansible/do_inventory.py b/archive/ansible/do_inventory.py index 3cfa63e33..1766fb19f 100755 --- a/archive/ansible/do_inventory.py +++ b/archive/ansible/do_inventory.py @@ -121,7 +121,8 @@ optional arguments: # # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) -from __future__ import (absolute_import, division, print_function) +from __future__ import absolute_import, division, print_function + __metaclass__ = type ###################################################################### @@ -145,21 +146,23 @@ import json class DoManager: def __init__(self, api_token): self.api_token = api_token - self.api_endpoint = 'https://api.digitalocean.com/v2' - self.headers = {'Authorization': 'Bearer {0}'.format(self.api_token), - 'Content-type': 'application/json'} + self.api_endpoint = "https://api.digitalocean.com/v2" + self.headers = { + "Authorization": "Bearer 
{0}".format(self.api_token), + "Content-type": "application/json", + } self.timeout = 60 def _url_builder(self, path): - if path[0] == '/': + if path[0] == "/": path = path[1:] - return '%s/%s' % (self.api_endpoint, path) + return "%s/%s" % (self.api_endpoint, path) - def send(self, url, method='GET', data=None): + def send(self, url, method="GET", data=None): url = self._url_builder(url) data = json.dumps(data) try: - if method == 'GET': + if method == "GET": resp_data = {} incomplete = True while incomplete: @@ -173,7 +176,7 @@ class DoManager: resp_data[key] = value try: - url = json_resp['links']['pages']['next'] + url = json_resp["links"]["pages"]["next"] except KeyError: incomplete = False @@ -182,54 +185,53 @@ class DoManager: return resp_data def all_active_droplets(self): - resp = self.send('droplets/') - return resp['droplets'] + resp = self.send("droplets/") + return resp["droplets"] def all_regions(self): - resp = self.send('regions/') - return resp['regions'] + resp = self.send("regions/") + return resp["regions"] - def all_images(self, filter_name='global'): - params = {'filter': filter_name} - resp = self.send('images/', data=params) - return resp['images'] + def all_images(self, filter_name="global"): + params = {"filter": filter_name} + resp = self.send("images/", data=params) + return resp["images"] def sizes(self): - resp = self.send('sizes/') - return resp['sizes'] + resp = self.send("sizes/") + return resp["sizes"] def all_ssh_keys(self): - resp = self.send('account/keys') - return resp['ssh_keys'] + resp = self.send("account/keys") + return resp["ssh_keys"] def all_domains(self): - resp = self.send('domains/') - return resp['domains'] + resp = self.send("domains/") + return resp["domains"] def show_droplet(self, droplet_id): - resp = self.send('droplets/%s' % droplet_id) - return resp['droplet'] + resp = self.send("droplets/%s" % droplet_id) + return resp["droplet"] def all_tags(self): - resp = self.send('tags') - return resp['tags'] + resp = self.send("tags") + return resp["tags"] class DigitalOceanInventory(object): - ########################################################################### # Main execution path ########################################################################### def __init__(self): - """Main execution path """ + """Main execution path""" # DigitalOceanInventory data self.data = {} # All DigitalOcean data self.inventory = {} # Ansible Inventory # Define defaults - self.cache_path = '.' + self.cache_path = "." self.cache_max_age = 0 self.use_private_network = False self.group_variables = {} @@ -240,9 +242,11 @@ class DigitalOceanInventory(object): self.read_cli_args() # Verify credentials were set - if not hasattr(self, 'api_token'): - msg = 'Could not find values for DigitalOcean api_token. They must be specified via either ini file, ' \ - 'command line argument (--api-token), or environment variables (DO_API_TOKEN)\n' + if not hasattr(self, "api_token"): + msg = ( + "Could not find values for DigitalOcean api_token. 
They must be specified via either ini file, " + "command line argument (--api-token), or environment variables (DO_API_TOKEN)\n" + ) sys.stderr.write(msg) sys.exit(-1) @@ -259,40 +263,40 @@ class DigitalOceanInventory(object): self.load_from_cache() if len(self.data) == 0: if self.args.force_cache: - sys.stderr.write('Cache is empty and --force-cache was specified\n') + sys.stderr.write("Cache is empty and --force-cache was specified\n") sys.exit(-1) self.manager = DoManager(self.api_token) # Pick the json_data to print based on the CLI command if self.args.droplets: - self.load_from_digital_ocean('droplets') - json_data = {'droplets': self.data['droplets']} + self.load_from_digital_ocean("droplets") + json_data = {"droplets": self.data["droplets"]} elif self.args.regions: - self.load_from_digital_ocean('regions') - json_data = {'regions': self.data['regions']} + self.load_from_digital_ocean("regions") + json_data = {"regions": self.data["regions"]} elif self.args.images: - self.load_from_digital_ocean('images') - json_data = {'images': self.data['images']} + self.load_from_digital_ocean("images") + json_data = {"images": self.data["images"]} elif self.args.sizes: - self.load_from_digital_ocean('sizes') - json_data = {'sizes': self.data['sizes']} + self.load_from_digital_ocean("sizes") + json_data = {"sizes": self.data["sizes"]} elif self.args.ssh_keys: - self.load_from_digital_ocean('ssh_keys') - json_data = {'ssh_keys': self.data['ssh_keys']} + self.load_from_digital_ocean("ssh_keys") + json_data = {"ssh_keys": self.data["ssh_keys"]} elif self.args.domains: - self.load_from_digital_ocean('domains') - json_data = {'domains': self.data['domains']} + self.load_from_digital_ocean("domains") + json_data = {"domains": self.data["domains"]} elif self.args.tags: - self.load_from_digital_ocean('tags') - json_data = {'tags': self.data['tags']} + self.load_from_digital_ocean("tags") + json_data = {"tags": self.data["tags"]} elif self.args.all: self.load_from_digital_ocean() json_data = self.data elif self.args.host: json_data = self.load_droplet_variables_for_host() - else: # '--list' this is last to make it default - self.load_from_digital_ocean('droplets') + else: # '--list' this is last to make it default + self.load_from_digital_ocean("droplets") self.build_inventory() json_data = self.inventory @@ -309,31 +313,31 @@ class DigitalOceanInventory(object): ########################################################################### def read_settings(self): - """ Reads the settings from the digital_ocean.ini file """ + """Reads the settings from the digital_ocean.ini file""" config = ConfigParser.ConfigParser() - config_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'digital_ocean.ini') + config_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), "digital_ocean.ini") config.read(config_path) # Credentials - if config.has_option('digital_ocean', 'api_token'): - self.api_token = config.get('digital_ocean', 'api_token') + if config.has_option("digital_ocean", "api_token"): + self.api_token = config.get("digital_ocean", "api_token") # Cache related - if config.has_option('digital_ocean', 'cache_path'): - self.cache_path = config.get('digital_ocean', 'cache_path') - if config.has_option('digital_ocean', 'cache_max_age'): - self.cache_max_age = config.getint('digital_ocean', 'cache_max_age') + if config.has_option("digital_ocean", "cache_path"): + self.cache_path = config.get("digital_ocean", "cache_path") + if config.has_option("digital_ocean", "cache_max_age"): + 
self.cache_max_age = config.getint("digital_ocean", "cache_max_age") # Private IP Address - if config.has_option('digital_ocean', 'use_private_network'): - self.use_private_network = config.getboolean('digital_ocean', 'use_private_network') + if config.has_option("digital_ocean", "use_private_network"): + self.use_private_network = config.getboolean("digital_ocean", "use_private_network") # Group variables - if config.has_option('digital_ocean', 'group_variables'): - self.group_variables = ast.literal_eval(config.get('digital_ocean', 'group_variables')) + if config.has_option("digital_ocean", "group_variables"): + self.group_variables = ast.literal_eval(config.get("digital_ocean", "group_variables")) def read_environment(self): - """ Reads the settings from environment variables """ + """Reads the settings from environment variables""" # Setup credentials if os.getenv("DO_API_TOKEN"): self.api_token = os.getenv("DO_API_TOKEN") @@ -341,31 +345,48 @@ class DigitalOceanInventory(object): self.api_token = os.getenv("DO_API_KEY") def read_cli_args(self): - """ Command line argument processing """ - parser = argparse.ArgumentParser(description='Produce an Ansible Inventory file based on DigitalOcean credentials') + """Command line argument processing""" + parser = argparse.ArgumentParser( + description="Produce an Ansible Inventory file based on DigitalOcean credentials" + ) - parser.add_argument('--list', action='store_true', help='List all active Droplets as Ansible inventory (default: True)') - parser.add_argument('--host', action='store', help='Get all Ansible inventory variables about a specific Droplet') + parser.add_argument( + "--list", + action="store_true", + help="List all active Droplets as Ansible inventory (default: True)", + ) + parser.add_argument( + "--host", action="store", help="Get all Ansible inventory variables about a specific Droplet" + ) - parser.add_argument('--all', action='store_true', help='List all DigitalOcean information as JSON') - parser.add_argument('--droplets', '-d', action='store_true', help='List Droplets as JSON') - parser.add_argument('--regions', action='store_true', help='List Regions as JSON') - parser.add_argument('--images', action='store_true', help='List Images as JSON') - parser.add_argument('--sizes', action='store_true', help='List Sizes as JSON') - parser.add_argument('--ssh-keys', action='store_true', help='List SSH keys as JSON') - parser.add_argument('--domains', action='store_true', help='List Domains as JSON') - parser.add_argument('--tags', action='store_true', help='List Tags as JSON') + parser.add_argument("--all", action="store_true", help="List all DigitalOcean information as JSON") + parser.add_argument("--droplets", "-d", action="store_true", help="List Droplets as JSON") + parser.add_argument("--regions", action="store_true", help="List Regions as JSON") + parser.add_argument("--images", action="store_true", help="List Images as JSON") + parser.add_argument("--sizes", action="store_true", help="List Sizes as JSON") + parser.add_argument("--ssh-keys", action="store_true", help="List SSH keys as JSON") + parser.add_argument("--domains", action="store_true", help="List Domains as JSON") + parser.add_argument("--tags", action="store_true", help="List Tags as JSON") - parser.add_argument('--pretty', '-p', action='store_true', help='Pretty-print results') + parser.add_argument("--pretty", "-p", action="store_true", help="Pretty-print results") - parser.add_argument('--cache-path', action='store', help='Path to the cache files (default: 
.)') - parser.add_argument('--cache-max_age', action='store', help='Maximum age of the cached items (default: 0)') - parser.add_argument('--force-cache', action='store_true', default=False, help='Only use data from the cache') - parser.add_argument('--refresh-cache', '-r', action='store_true', default=False, - help='Force refresh of cache by making API requests to DigitalOcean (default: False - use cache files)') + parser.add_argument("--cache-path", action="store", help="Path to the cache files (default: .)") + parser.add_argument( + "--cache-max_age", action="store", help="Maximum age of the cached items (default: 0)" + ) + parser.add_argument( + "--force-cache", action="store_true", default=False, help="Only use data from the cache" + ) + parser.add_argument( + "--refresh-cache", + "-r", + action="store_true", + default=False, + help="Force refresh of cache by making API requests to DigitalOcean (default: False - use cache files)", + ) - parser.add_argument('--env', '-e', action='store_true', help='Display DO_API_TOKEN') - parser.add_argument('--api-token', '-a', action='store', help='DigitalOcean API Token') + parser.add_argument("--env", "-e", action="store_true", help="Display DO_API_TOKEN") + parser.add_argument("--api-token", "-a", action="store", help="DigitalOcean API Token") self.args = parser.parse_args() @@ -373,11 +394,17 @@ class DigitalOceanInventory(object): self.api_token = self.args.api_token # Make --list default if none of the other commands are specified - if (not self.args.droplets and not self.args.regions and - not self.args.images and not self.args.sizes and - not self.args.ssh_keys and not self.args.domains and - not self.args.tags and - not self.args.all and not self.args.host): + if ( + not self.args.droplets + and not self.args.regions + and not self.args.images + and not self.args.sizes + and not self.args.ssh_keys + and not self.args.domains + and not self.args.tags + and not self.args.all + and not self.args.host + ): self.args.list = True ########################################################################### @@ -385,117 +412,112 @@ class DigitalOceanInventory(object): ########################################################################### def load_from_digital_ocean(self, resource=None): - """Get JSON from DigitalOcean API """ + """Get JSON from DigitalOcean API""" if self.args.force_cache and os.path.isfile(self.cache_filename): return # We always get fresh droplets - if self.is_cache_valid() and not (resource == 'droplets' or resource is None): + if self.is_cache_valid() and not (resource == "droplets" or resource is None): return if self.args.refresh_cache: resource = None - if resource == 'droplets' or resource is None: - self.data['droplets'] = self.manager.all_active_droplets() + if resource == "droplets" or resource is None: + self.data["droplets"] = self.manager.all_active_droplets() self.cache_refreshed = True - if resource == 'regions' or resource is None: - self.data['regions'] = self.manager.all_regions() + if resource == "regions" or resource is None: + self.data["regions"] = self.manager.all_regions() self.cache_refreshed = True - if resource == 'images' or resource is None: - self.data['images'] = self.manager.all_images() + if resource == "images" or resource is None: + self.data["images"] = self.manager.all_images() self.cache_refreshed = True - if resource == 'sizes' or resource is None: - self.data['sizes'] = self.manager.sizes() + if resource == "sizes" or resource is None: + self.data["sizes"] = self.manager.sizes() 
self.cache_refreshed = True - if resource == 'ssh_keys' or resource is None: - self.data['ssh_keys'] = self.manager.all_ssh_keys() + if resource == "ssh_keys" or resource is None: + self.data["ssh_keys"] = self.manager.all_ssh_keys() self.cache_refreshed = True - if resource == 'domains' or resource is None: - self.data['domains'] = self.manager.all_domains() + if resource == "domains" or resource is None: + self.data["domains"] = self.manager.all_domains() self.cache_refreshed = True - if resource == 'tags' or resource is None: - self.data['tags'] = self.manager.all_tags() + if resource == "tags" or resource is None: + self.data["tags"] = self.manager.all_tags() self.cache_refreshed = True def add_inventory_group(self, key): - """ Method to create group dict """ - host_dict = {'hosts': [], 'vars': {}} + """Method to create group dict""" + host_dict = {"hosts": [], "vars": {}} self.inventory[key] = host_dict return def add_host(self, group, host): - """ Helper method to reduce host duplication """ + """Helper method to reduce host duplication""" if group not in self.inventory: self.add_inventory_group(group) - if host not in self.inventory[group]['hosts']: - self.inventory[group]['hosts'].append(host) + if host not in self.inventory[group]["hosts"]: + self.inventory[group]["hosts"].append(host) return def build_inventory(self): - """ Build Ansible inventory of droplets """ - self.inventory = { - 'all': { - 'hosts': [], - 'vars': self.group_variables - }, - '_meta': {'hostvars': {}} - } + """Build Ansible inventory of droplets""" + self.inventory = {"all": {"hosts": [], "vars": self.group_variables}, "_meta": {"hostvars": {}}} # add all droplets by id and name - for droplet in self.data['droplets']: - for net in droplet['networks']['v4']: - if net['type'] == 'public': - dest = net['ip_address'] + for droplet in self.data["droplets"]: + for net in droplet["networks"]["v4"]: + if net["type"] == "public": + dest = net["ip_address"] else: continue - self.inventory['all']['hosts'].append(dest) + self.inventory["all"]["hosts"].append(dest) - self.add_host(droplet['id'], dest) + self.add_host(droplet["id"], dest) - self.add_host(droplet['name'], dest) + self.add_host(droplet["name"], dest) - roledef = re.split(r"([0-9]+)", droplet['name'])[0] + roledef = re.split(r"([0-9]+)", droplet["name"])[0] self.add_host(roledef, dest) # groups that are always present - for group in ('digital_ocean', - 'region_' + droplet['region']['slug'], - 'image_' + str(droplet['image']['id']), - 'size_' + droplet['size']['slug'], - 'distro_' + DigitalOceanInventory.to_safe(droplet['image']['distribution']), - 'status_' + droplet['status']): + for group in ( + "digital_ocean", + "region_" + droplet["region"]["slug"], + "image_" + str(droplet["image"]["id"]), + "size_" + droplet["size"]["slug"], + "distro_" + DigitalOceanInventory.to_safe(droplet["image"]["distribution"]), + "status_" + droplet["status"], + ): # self.add_host(group, dest) pass # groups that are not always present - for group in (droplet['image']['slug'], - droplet['image']['name']): + for group in (droplet["image"]["slug"], droplet["image"]["name"]): if group: - image = 'image_' + DigitalOceanInventory.to_safe(group) + image = "image_" + DigitalOceanInventory.to_safe(group) # self.add_host(image, dest) - if droplet['tags']: - for tag in droplet['tags']: + if droplet["tags"]: + for tag in droplet["tags"]: self.add_host(tag, dest) # hostvars info = self.do_namespace(droplet) - self.inventory['_meta']['hostvars'][dest] = info + 
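
build_inventory() above emits the standard Ansible dynamic-inventory shape: group names keyed to host lists plus a "_meta.hostvars" map. A hand-written sketch of what --list would return for a single hypothetical droplet (the id, name, and IP below are made up):

# Hypothetical --list output for one droplet named "app01". The groups mirror what
# build_inventory() adds: "all", the droplet id, the droplet name, the role prefix
# split off the trailing digits, and any droplet tags.
inventory = {
    "all": {"hosts": ["203.0.113.10"], "vars": {}},
    "12345678": {"hosts": ["203.0.113.10"], "vars": {}},
    "app01": {"hosts": ["203.0.113.10"], "vars": {}},
    "app": {"hosts": ["203.0.113.10"], "vars": {}},
    "_meta": {
        "hostvars": {
            "203.0.113.10": {
                # do_namespace() prefixes every droplet attribute with "do_".
                "do_id": 12345678,
                "do_name": "app01",
                "do_status": "active",
            }
        }
    },
}
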
self.inventory["_meta"]["hostvars"][dest] = info def load_droplet_variables_for_host(self): - """ Generate a JSON response to a --host call """ + """Generate a JSON response to a --host call""" host = int(self.args.host) droplet = self.manager.show_droplet(host) info = self.do_namespace(droplet) - return {'droplet': info} + return {"droplet": info} ########################################################################### # Cache Management ########################################################################### def is_cache_valid(self): - """ Determines if the cache files have expired, or if it is still valid """ + """Determines if the cache files have expired, or if it is still valid""" if os.path.isfile(self.cache_filename): mod_time = os.path.getmtime(self.cache_filename) current_time = time() @@ -504,23 +526,23 @@ class DigitalOceanInventory(object): return False def load_from_cache(self): - """ Reads the data from the cache file and assigns it to member variables as Python Objects """ + """Reads the data from the cache file and assigns it to member variables as Python Objects""" try: - with open(self.cache_filename, 'r') as cache: + with open(self.cache_filename, "r") as cache: json_data = cache.read() data = json.loads(json_data) except IOError: - data = {'data': {}, 'inventory': {}} + data = {"data": {}, "inventory": {}} - self.data = data['data'] - self.inventory = data['inventory'] + self.data = data["data"] + self.inventory = data["inventory"] def write_to_cache(self): - """ Writes data in JSON format to a file """ - data = {'data': self.data, 'inventory': self.inventory} + """Writes data in JSON format to a file""" + data = {"data": self.data, "inventory": self.inventory} json_data = json.dumps(data, indent=2) - with open(self.cache_filename, 'w') as cache: + with open(self.cache_filename, "w") as cache: cache.write(json_data) ########################################################################### @@ -528,15 +550,15 @@ class DigitalOceanInventory(object): ########################################################################### @staticmethod def to_safe(word): - """ Converts 'bad' characters in a string to underscores so they can be used as Ansible groups """ + """Converts 'bad' characters in a string to underscores so they can be used as Ansible groups""" return re.sub(r"[^A-Za-z0-9\-.]", "_", word) @staticmethod def do_namespace(data): - """ Returns a copy of the dictionary with all the keys put in a 'do_' namespace """ + """Returns a copy of the dictionary with all the keys put in a 'do_' namespace""" info = {} for k, v in data.items(): - info['do_' + k] = v + info["do_" + k] = v return info diff --git a/archive/fabfile.py b/archive/fabfile.py index 1c35d76f9..e8415cbfd 100644 --- a/archive/fabfile.py +++ b/archive/fabfile.py @@ -4,6 +4,7 @@ from fabric.operations import prompt from fabric.contrib import django from fabric.contrib import files from fabric.state import connections + # from fabric.colors import red, green, blue, cyan, magenta, white, yellow from boto.s3.connection import S3Connection from boto.s3.key import Key @@ -25,7 +26,7 @@ except ImportError: print("Digital Ocean's API not loaded. 
Install python-digitalocean.") -django.settings_module('newsblur_web.settings') +django.settings_module("newsblur_web.settings") try: from django.conf import settings as django_settings except ImportError: @@ -37,10 +38,10 @@ except ImportError: # ============ env.NEWSBLUR_PATH = "/srv/newsblur" -env.SECRETS_PATH = "/srv/secrets-newsblur" -env.VENDOR_PATH = "/srv/code" -env.user = 'sclay' -env.key_filename = os.path.join(env.SECRETS_PATH, 'keys/newsblur.key') +env.SECRETS_PATH = "/srv/secrets-newsblur" +env.VENDOR_PATH = "/srv/code" +env.user = "sclay" +env.key_filename = os.path.join(env.SECRETS_PATH, "keys/newsblur.key") env.connection_attempts = 10 env.do_ip_to_hostname = {} env.colorize_errors = True @@ -50,7 +51,7 @@ env.colorize_errors = True # ========= try: - hosts_path = os.path.expanduser(os.path.join(env.SECRETS_PATH, 'configs/hosts.yml')) + hosts_path = os.path.expanduser(os.path.join(env.SECRETS_PATH, "configs/hosts.yml")) roles = yaml.load(open(hosts_path)) for role_name, hosts in list(roles.items()): if isinstance(hosts, dict): @@ -59,11 +60,12 @@ try: except: print(" ***> No role definitions found in %s. Using default roles." % hosts_path) env.roledefs = { - 'app' : ['app01.newsblur.com'], - 'db' : ['db01.newsblur.com'], - 'task' : ['task01.newsblur.com'], + "app": ["app01.newsblur.com"], + "db": ["db01.newsblur.com"], + "task": ["task01.newsblur.com"], } + def do_roledefs(split=False, debug=False): doapi = digitalocean.Manager(token=django_settings.DO_TOKEN_FABRIC) droplets = doapi.get_all_droplets() @@ -76,7 +78,7 @@ def do_roledefs(split=False, debug=False): if roledef not in hostnames: hostnames[roledef] = [] if droplet.ip_address not in hostnames[roledef]: - hostnames[roledef].append({'name': droplet.name, 'address': droplet.ip_address}) + hostnames[roledef].append({"name": droplet.name, "address": droplet.ip_address}) env.do_ip_to_hostname[droplet.ip_address] = droplet.name if droplet.ip_address not in env.roledefs[roledef]: env.roledefs[roledef].append(droplet.ip_address) @@ -85,6 +87,7 @@ def do_roledefs(split=False, debug=False): return hostnames return droplets + def list_do(): droplets = assign_digitalocean_roledefs(split=True) pprint(droplets) @@ -94,7 +97,7 @@ def list_do(): # for server in group: # if 'address' in server: # print(server['address']) - + doapi = digitalocean.Manager(token=django_settings.DO_TOKEN_FABRIC) droplets = doapi.get_all_droplets() sizes = doapi.get_all_sizes() @@ -103,31 +106,35 @@ def list_do(): total_cost = 0 for droplet in droplets: roledef = re.split(r"([0-9]+)", droplet.name)[0] - cost = droplet.size['price_monthly'] + cost = droplet.size["price_monthly"] role_costs[roledef] += cost total_cost += cost - + print("\n\n Costs:") pprint(dict(role_costs)) print(" ---> Total cost: $%s/month" % total_cost) - + + def host(*names): env.hosts = [] - env.doname = ','.join(names) + env.doname = ",".join(names) hostnames = assign_digitalocean_roledefs(split=True) for role, hosts in list(hostnames.items()): for host in hosts: - if isinstance(host, dict) and host['name'] in names: - env.hosts.append(host['address']) + if isinstance(host, dict) and host["name"] in names: + env.hosts.append(host["address"]) print(" ---> Using %s as hosts" % env.hosts) - + + # ================ # = Environments = # ================ + def server(): env.NEWSBLUR_PATH = "/srv/newsblur" - env.VENDOR_PATH = "/srv/code" + env.VENDOR_PATH = "/srv/code" + def assign_digitalocean_roledefs(split=False): server() @@ -136,66 +143,81 @@ def 
assign_digitalocean_roledefs(split=False): for roledef, hosts in list(env.roledefs.items()): if roledef not in droplets: droplets[roledef] = hosts - + return droplets + def app(): assign_digitalocean_roledefs() - env.roles = ['app'] + env.roles = ["app"] + def web(): assign_digitalocean_roledefs() - env.roles = ['app', 'push', 'work', 'search'] + env.roles = ["app", "push", "work", "search"] + def work(): assign_digitalocean_roledefs() - env.roles = ['work'] + env.roles = ["work"] + def www(): assign_digitalocean_roledefs() - env.roles = ['www'] + env.roles = ["www"] + def dev(): assign_digitalocean_roledefs() - env.roles = ['dev'] + env.roles = ["dev"] + def debug(): assign_digitalocean_roledefs() - env.roles = ['debug'] + env.roles = ["debug"] + def node(): assign_digitalocean_roledefs() - env.roles = ['node'] + env.roles = ["node"] + def push(): assign_digitalocean_roledefs() - env.roles = ['push'] + env.roles = ["push"] + def db(): assign_digitalocean_roledefs() - env.roles = ['db', 'search'] + env.roles = ["db", "search"] + def task(): assign_digitalocean_roledefs() - env.roles = ['task'] + env.roles = ["task"] + def ec2task(): ec2() - env.roles = ['ec2task'] + env.roles = ["ec2task"] + def ec2(): - env.user = 'ubuntu' - env.key_filename = ['/Users/sclay/.ec2/sclay.pem'] + env.user = "ubuntu" + env.key_filename = ["/Users/sclay/.ec2/sclay.pem"] assign_digitalocean_roledefs() + def all(): assign_digitalocean_roledefs() - env.roles = ['app', 'db', 'debug', 'node', 'push', 'work', 'www', 'search'] + env.roles = ["app", "db", "debug", "node", "push", "work", "www", "search"] + # ============= # = Bootstrap = # ============= + def setup_common(): setup_installs() change_shell() @@ -224,17 +246,19 @@ def setup_common(): setup_nginx() setup_munin() + def setup_all(): setup_common() setup_app(skip_common=True) setup_db(skip_common=True) setup_task(skip_common=True) + def setup_app_docker(skip_common=False): if not skip_common: setup_common() setup_app_firewall() - setup_motd('app') + setup_motd("app") change_shell() setup_user() @@ -248,13 +272,14 @@ def setup_app_docker(skip_common=False): setup_docker() done() - sudo('reboot') + sudo("reboot") + def setup_app(skip_common=False, node=False): if not skip_common: setup_common() setup_app_firewall() - setup_motd('app') + setup_motd("app") copy_app_settings() config_nginx() setup_gunicorn(supervisor=True) @@ -264,7 +289,8 @@ def setup_app(skip_common=False, node=False): config_monit_app() setup_usage_monitor() done() - sudo('reboot') + sudo("reboot") + def setup_app_image(): copy_app_settings() @@ -274,17 +300,19 @@ def setup_app_image(): pip() deploy_web() done() - sudo('reboot') + sudo("reboot") + def setup_node(): setup_node_app() config_node(full=True) - + + def setup_db(engine=None, skip_common=False, skip_benchmark=False): if not skip_common: setup_common() setup_db_firewall() - setup_motd('db') + setup_motd("db") copy_db_settings() if engine == "postgres": setup_postgres(standby=False) @@ -316,18 +344,20 @@ def setup_db(engine=None, skip_common=False, skip_benchmark=False): # if env.user == 'ubuntu': # setup_db_mdadm() + def setup_task(queue=None, skip_common=False): if not skip_common: setup_common() setup_task_firewall() - setup_motd('task') + setup_motd("task") copy_task_settings() enable_celery_supervisor(queue) setup_gunicorn(supervisor=False) config_monit_task() setup_usage_monitor() done() - sudo('reboot') + sudo("reboot") + def setup_task_image(): setup_installs() @@ -338,198 +368,229 @@ def setup_task_image(): pip() 
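
The role helpers above only populate env.roles and env.roledefs; the targeting itself happens when Fabric runs a task. A minimal Fabric 1.x sketch of how such roledefs drive where a task executes (hostnames here are placeholders, not real NewsBlur servers):

from fabric.api import env, roles, run

env.roledefs = {
    "app": ["app01.example.com", "app02.example.com"],
    "task": ["task01.example.com"],
}

@roles("app")
def uptime():
    # `fab uptime` runs this once per host in the "app" role.
    run("uptime")
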
deploy(reload=True) done() - sudo('reboot') + sudo("reboot") + # ================== # = Setup - Docker = # ================== + def setup_docker(): packages = [ - 'build-essential', + "build-essential", ] - sudo('DEBIAN_FRONTEND=noninteractive apt-get -y --force-yes -o Dpkg::Options::="--force-confdef" -o Dpkg::Options::="--force-confold" install %s' % ' '.join(packages)) + sudo( + 'DEBIAN_FRONTEND=noninteractive apt-get -y --force-yes -o Dpkg::Options::="--force-confdef" -o Dpkg::Options::="--force-confold" install %s' + % " ".join(packages) + ) - sudo('apt install -fy docker docker-compose') - sudo('usermod -aG docker ${USER}') - sudo('su - ${USER}') + sudo("apt install -fy docker docker-compose") + sudo("usermod -aG docker ${USER}") + sudo("su - ${USER}") copy_certificates() - + + # ================== # = Setup - Common = # ================== + def done(): print("\n\n\n\n-----------------------------------------------------") - print("\n\n %s / %s IS SUCCESSFULLY BOOTSTRAPPED" % (env.get('doname') or env.host_string, env.host_string)) + print( + "\n\n %s / %s IS SUCCESSFULLY BOOTSTRAPPED" + % (env.get("doname") or env.host_string, env.host_string) + ) print("\n\n-----------------------------------------------------\n\n\n\n") + def setup_installs(): packages = [ - 'build-essential', - 'gcc', - 'scons', - 'libreadline-dev', - 'sysstat', - 'iotop', - 'git', - 'python2', - 'python2.7-dev', - 'locate', - 'software-properties-common', - 'libpcre3-dev', - 'libncurses5-dev', - 'libdbd-pg-perl', - 'libssl-dev', - 'libffi-dev', - 'libevent-dev', - 'make', - 'postgresql-common', - 'ssl-cert', - 'python-setuptools', - 'libyaml-0-2', - 'pgbouncer', - 'python-yaml', - 'python-numpy', - 'curl', - 'monit', - 'ufw', - 'libjpeg8', - 'libjpeg62-dev', - 'libfreetype6', - 'libfreetype6-dev', - 'libmysqlclient-dev', - 'libblas-dev', - 'liblapack-dev', - 'libatlas-base-dev', - 'gfortran', - 'libpq-dev', + "build-essential", + "gcc", + "scons", + "libreadline-dev", + "sysstat", + "iotop", + "git", + "python2", + "python2.7-dev", + "locate", + "software-properties-common", + "libpcre3-dev", + "libncurses5-dev", + "libdbd-pg-perl", + "libssl-dev", + "libffi-dev", + "libevent-dev", + "make", + "postgresql-common", + "ssl-cert", + "python-setuptools", + "libyaml-0-2", + "pgbouncer", + "python-yaml", + "python-numpy", + "curl", + "monit", + "ufw", + "libjpeg8", + "libjpeg62-dev", + "libfreetype6", + "libfreetype6-dev", + "libmysqlclient-dev", + "libblas-dev", + "liblapack-dev", + "libatlas-base-dev", + "gfortran", + "libpq-dev", ] # sudo("sed -i -e 's/archive.ubuntu.com\|security.ubuntu.com/old-releases.ubuntu.com/g' /etc/apt/sources.list") put("config/apt_sources.conf", "/etc/apt/sources.list", use_sudo=True) - run('sleep 10') # Dies on a lock, so just delay - sudo('apt-get -y update') - run('sleep 10') # Dies on a lock, so just delay - sudo('DEBIAN_FRONTEND=noninteractive apt-get -y -o Dpkg::Options::="--force-confdef" -o Dpkg::Options::="--force-confold" dist-upgrade') - run('sleep 10') # Dies on a lock, so just delay - sudo('DEBIAN_FRONTEND=noninteractive apt-get -y -o Dpkg::Options::="--force-confdef" -o Dpkg::Options::="--force-confold" install %s' % ' '.join(packages)) - + run("sleep 10") # Dies on a lock, so just delay + sudo("apt-get -y update") + run("sleep 10") # Dies on a lock, so just delay + sudo( + 'DEBIAN_FRONTEND=noninteractive apt-get -y -o Dpkg::Options::="--force-confdef" -o Dpkg::Options::="--force-confold" dist-upgrade' + ) + run("sleep 10") # Dies on a lock, so just delay + sudo( + 
'DEBIAN_FRONTEND=noninteractive apt-get -y -o Dpkg::Options::="--force-confdef" -o Dpkg::Options::="--force-confold" install %s' + % " ".join(packages) + ) + with settings(warn_only=True): sudo("ln -s /usr/lib/x86_64-linux-gnu/libjpeg.so /usr/lib") sudo("ln -s /usr/lib/x86_64-linux-gnu/libfreetype.so /usr/lib") sudo("ln -s /usr/lib/x86_64-linux-gnu/libz.so /usr/lib") - + with settings(warn_only=True): - sudo('mkdir -p %s' % env.VENDOR_PATH) - sudo('chown %s.%s %s' % (env.user, env.user, env.VENDOR_PATH)) + sudo("mkdir -p %s" % env.VENDOR_PATH) + sudo("chown %s.%s %s" % (env.user, env.user, env.VENDOR_PATH)) + def change_shell(): - sudo('apt-get -fy install zsh') + sudo("apt-get -fy install zsh") with settings(warn_only=True): - run('git clone git://github.com/robbyrussell/oh-my-zsh.git ~/.oh-my-zsh') - run('git clone https://github.com/zsh-users/zsh-syntax-highlighting.git ${ZSH_CUSTOM:-~/.oh-my-zsh/custom}/plugins/zsh-syntax-highlighting') - sudo('chsh %s -s /bin/zsh' % env.user) + run("git clone git://github.com/robbyrussell/oh-my-zsh.git ~/.oh-my-zsh") + run( + "git clone https://github.com/zsh-users/zsh-syntax-highlighting.git ${ZSH_CUSTOM:-~/.oh-my-zsh/custom}/plugins/zsh-syntax-highlighting" + ) + sudo("chsh %s -s /bin/zsh" % env.user) + def setup_user(): # run('useradd -c "NewsBlur" -m newsblur -s /bin/zsh') # run('openssl rand -base64 8 | tee -a ~conesus/.password | passwd -stdin conesus') - run('mkdir -p ~/.ssh && chmod 700 ~/.ssh') - run('rm -fr ~/.ssh/id_dsa*') + run("mkdir -p ~/.ssh && chmod 700 ~/.ssh") + run("rm -fr ~/.ssh/id_dsa*") run('ssh-keygen -t dsa -f ~/.ssh/id_dsa -N ""') - run('touch ~/.ssh/authorized_keys') + run("touch ~/.ssh/authorized_keys") put("~/.ssh/id_dsa.pub", "authorized_keys") - run("echo \"\n\" >> ~sclay/.ssh/authorized_keys") - run('echo `cat authorized_keys` >> ~sclay/.ssh/authorized_keys') - run('rm authorized_keys') + run('echo "\n" >> ~sclay/.ssh/authorized_keys') + run("echo `cat authorized_keys` >> ~sclay/.ssh/authorized_keys") + run("rm authorized_keys") -def copy_ssh_keys(username='sclay', private=False): - sudo('mkdir -p ~%s/.ssh' % username) - - put(os.path.join(env.SECRETS_PATH, 'keys/newsblur.key.pub'), 'local.key.pub') - sudo('mv local.key.pub ~%s/.ssh/id_rsa.pub' % username) + +def copy_ssh_keys(username="sclay", private=False): + sudo("mkdir -p ~%s/.ssh" % username) + + put(os.path.join(env.SECRETS_PATH, "keys/newsblur.key.pub"), "local.key.pub") + sudo("mv local.key.pub ~%s/.ssh/id_rsa.pub" % username) if private: - put(os.path.join(env.SECRETS_PATH, 'keys/newsblur.key'), 'local.key') - sudo('mv local.key ~%s/.ssh/id_rsa' % username) - - sudo("echo \"\n\" >> ~%s/.ssh/authorized_keys" % username) + put(os.path.join(env.SECRETS_PATH, "keys/newsblur.key"), "local.key") + sudo("mv local.key ~%s/.ssh/id_rsa" % username) + + sudo('echo "\n" >> ~%s/.ssh/authorized_keys' % username) sudo("echo `cat ~%s/.ssh/id_rsa.pub` >> ~%s/.ssh/authorized_keys" % (username, username)) - sudo('chown -R %s.%s ~%s/.ssh' % (username, username, username)) - sudo('chmod 700 ~%s/.ssh' % username) - sudo('chmod 600 ~%s/.ssh/id_rsa*' % username) + sudo("chown -R %s.%s ~%s/.ssh" % (username, username, username)) + sudo("chmod 700 ~%s/.ssh" % username) + sudo("chmod 600 ~%s/.ssh/id_rsa*" % username) + def setup_repo(): - sudo('mkdir -p /srv') - sudo('chown -R %s.%s /srv' % (env.user, env.user)) + sudo("mkdir -p /srv") + sudo("chown -R %s.%s /srv" % (env.user, env.user)) with settings(warn_only=True): - run('git clone https://github.com/samuelclay/NewsBlur.git %s' % 
env.NEWSBLUR_PATH) + run("git clone https://github.com/samuelclay/NewsBlur.git %s" % env.NEWSBLUR_PATH) with settings(warn_only=True): - sudo('ln -sfn /srv/code /home/%s/code' % env.user) - sudo('ln -sfn /srv/newsblur /home/%s/newsblur' % env.user) + sudo("ln -sfn /srv/code /home/%s/code" % env.user) + sudo("ln -sfn /srv/newsblur /home/%s/newsblur" % env.user) + def setup_repo_local_settings(): with virtualenv(): - run('cp newsblur/local_settings.py.template newsblur/local_settings.py') - run('mkdir -p logs') - run('touch logs/newsblur.log') + run("cp newsblur/local_settings.py.template newsblur/local_settings.py") + run("mkdir -p logs") + run("touch logs/newsblur.log") + def setup_local_files(): - run('mkdir -p ~/.config/procps') + run("mkdir -p ~/.config/procps") put("config/toprc", "~/.config/procps/toprc") - run('rm -f ~/.toprc') + run("rm -f ~/.toprc") put("config/zshrc", "~/.zshrc") - put('config/gitconfig.txt', '~/.gitconfig') - put('config/ssh.conf', '~/.ssh/config') + put("config/gitconfig.txt", "~/.gitconfig") + put("config/ssh.conf", "~/.ssh/config") + def setup_psql_client(): - sudo('apt-get -y install postgresql-client') - sudo('mkdir -p /var/run/postgresql') + sudo("apt-get -y install postgresql-client") + sudo("mkdir -p /var/run/postgresql") with settings(warn_only=True): - sudo('chown postgres.postgres /var/run/postgresql') + sudo("chown postgres.postgres /var/run/postgresql") + def setup_libxml(): - sudo('apt-get -y install libxml2-dev libxslt1-dev python-lxml') + sudo("apt-get -y install libxml2-dev libxslt1-dev python-lxml") + def setup_libxml_code(): with cd(env.VENDOR_PATH): - run('git clone git://git.gnome.org/libxml2') - run('git clone git://git.gnome.org/libxslt') + run("git clone git://git.gnome.org/libxml2") + run("git clone git://git.gnome.org/libxslt") - with cd(os.path.join(env.VENDOR_PATH, 'libxml2')): - run('./configure && make && sudo make install') + with cd(os.path.join(env.VENDOR_PATH, "libxml2")): + run("./configure && make && sudo make install") + + with cd(os.path.join(env.VENDOR_PATH, "libxslt")): + run("./configure && make && sudo make install") - with cd(os.path.join(env.VENDOR_PATH, 'libxslt')): - run('./configure && make && sudo make install') def setup_psycopg(): - sudo('easy_install -U psycopg2') + sudo("easy_install -U psycopg2") + def setup_virtualenv(): - sudo('rm -fr ~/.cache') # Clean `sudo pip` - sudo('pip install --upgrade virtualenv') - sudo('pip install --upgrade virtualenvwrapper') + sudo("rm -fr ~/.cache") # Clean `sudo pip` + sudo("pip install --upgrade virtualenv") + sudo("pip install --upgrade virtualenvwrapper") setup_local_files() - with prefix('WORKON_HOME=%s' % os.path.join(env.NEWSBLUR_PATH, 'venv')): - with prefix('source /usr/local/bin/virtualenvwrapper.sh'): + with prefix("WORKON_HOME=%s" % os.path.join(env.NEWSBLUR_PATH, "venv")): + with prefix("source /usr/local/bin/virtualenvwrapper.sh"): with cd(env.NEWSBLUR_PATH): # sudo('rmvirtualenv newsblur') # sudo('rm -fr venv') with settings(warn_only=True): - run('mkvirtualenv newsblur') + run("mkvirtualenv newsblur") # run('echo "import sys; sys.setdefaultencoding(\'utf-8\')" | sudo tee venv/newsblur/lib/python2.7/sitecustomize.py') # run('echo "/srv/newsblur" | sudo tee venv/newsblur/lib/python2.7/site-packages/newsblur.pth') - + + @_contextmanager def virtualenv(): - with prefix('WORKON_HOME=%s' % os.path.join(env.NEWSBLUR_PATH, 'venv')): - with prefix('source /usr/local/bin/virtualenvwrapper.sh'): + with prefix("WORKON_HOME=%s" % os.path.join(env.NEWSBLUR_PATH, "venv")): + 
with prefix("source /usr/local/bin/virtualenvwrapper.sh"): with cd(env.NEWSBLUR_PATH): - with prefix('workon newsblur'): + with prefix("workon newsblur"): yield + def setup_pip(): with cd(env.VENDOR_PATH), settings(warn_only=True): - run('curl https://bootstrap.pypa.io/2.6/get-pip.py | sudo python2') + run("curl https://bootstrap.pypa.io/2.6/get-pip.py | sudo python2") # sudo('python2 get-pip.py') @@ -541,18 +602,19 @@ def pip(): with virtualenv(): if role == "task": with settings(warn_only=True): - sudo('fallocate -l 4G /swapfile') - sudo('chmod 600 /swapfile') - sudo('mkswap /swapfile') - sudo('swapon /swapfile') - sudo('chown %s.%s -R %s' % (env.user, env.user, os.path.join(env.NEWSBLUR_PATH, 'venv'))) + sudo("fallocate -l 4G /swapfile") + sudo("chmod 600 /swapfile") + sudo("mkswap /swapfile") + sudo("swapon /swapfile") + sudo("chown %s.%s -R %s" % (env.user, env.user, os.path.join(env.NEWSBLUR_PATH, "venv"))) # run('easy_install -U pip') # run('pip install --upgrade pip') # run('pip install --upgrade setuptools') - run('pip install -r requirements.txt') + run("pip install -r requirements.txt") if role == "task": with settings(warn_only=True): - sudo('swapoff /swapfile') + sudo("swapoff /swapfile") + def solo_pip(role): if role == "app": @@ -564,170 +626,195 @@ def solo_pip(role): copy_task_settings() pip() celery() - + + def setup_supervisor(): - sudo('apt-get update') - sudo('apt-get -y install supervisor') - put('config/supervisord.conf', '/etc/supervisor/supervisord.conf', use_sudo=True) - sudo('/etc/init.d/supervisor stop') - sudo('sleep 2') - sudo('ulimit -n 100000 && /etc/init.d/supervisor start') + sudo("apt-get update") + sudo("apt-get -y install supervisor") + put("config/supervisord.conf", "/etc/supervisor/supervisord.conf", use_sudo=True) + sudo("/etc/init.d/supervisor stop") + sudo("sleep 2") + sudo("ulimit -n 100000 && /etc/init.d/supervisor start") sudo("/usr/sbin/update-rc.d -f supervisor defaults") - sudo('systemctl enable supervisor') - sudo('systemctl start supervisor') + sudo("systemctl enable supervisor") + sudo("systemctl start supervisor") + @parallel def setup_hosts(): - put(os.path.join(env.SECRETS_PATH, 'configs/hosts'), '/etc/hosts', use_sudo=True) + put(os.path.join(env.SECRETS_PATH, "configs/hosts"), "/etc/hosts", use_sudo=True) sudo('echo "\n\n127.0.0.1 `hostname`" | sudo tee -a /etc/hosts') + def setup_pgbouncer(): - sudo('apt-get remove -y pgbouncer') - sudo('apt-get install -y libevent-dev pkg-config libc-ares2 libc-ares-dev') - PGBOUNCER_VERSION = '1.15.0' + sudo("apt-get remove -y pgbouncer") + sudo("apt-get install -y libevent-dev pkg-config libc-ares2 libc-ares-dev") + PGBOUNCER_VERSION = "1.15.0" with cd(env.VENDOR_PATH), settings(warn_only=True): - run('wget https://pgbouncer.github.io/downloads/files/%s/pgbouncer-%s.tar.gz' % (PGBOUNCER_VERSION, PGBOUNCER_VERSION)) - run('tar -xzf pgbouncer-%s.tar.gz' % PGBOUNCER_VERSION) - run('rm pgbouncer-%s.tar.gz' % PGBOUNCER_VERSION) - with cd('pgbouncer-%s' % PGBOUNCER_VERSION): - run('./configure --prefix=/usr/local') - run('make') - sudo('make install') - sudo('ln -s /usr/local/bin/pgbouncer /usr/sbin/pgbouncer') + run( + "wget https://pgbouncer.github.io/downloads/files/%s/pgbouncer-%s.tar.gz" + % (PGBOUNCER_VERSION, PGBOUNCER_VERSION) + ) + run("tar -xzf pgbouncer-%s.tar.gz" % PGBOUNCER_VERSION) + run("rm pgbouncer-%s.tar.gz" % PGBOUNCER_VERSION) + with cd("pgbouncer-%s" % PGBOUNCER_VERSION): + run("./configure --prefix=/usr/local") + run("make") + sudo("make install") + sudo("ln -s 
/usr/local/bin/pgbouncer /usr/sbin/pgbouncer") config_pgbouncer() - + + def config_pgbouncer(): - sudo('mkdir -p /etc/pgbouncer') - put('config/pgbouncer.conf', 'pgbouncer.conf') - sudo('mv pgbouncer.conf /etc/pgbouncer/pgbouncer.ini') - put(os.path.join(env.SECRETS_PATH, 'configs/pgbouncer_auth.conf'), 'userlist.txt') - sudo('mv userlist.txt /etc/pgbouncer/userlist.txt') + sudo("mkdir -p /etc/pgbouncer") + put("config/pgbouncer.conf", "pgbouncer.conf") + sudo("mv pgbouncer.conf /etc/pgbouncer/pgbouncer.ini") + put(os.path.join(env.SECRETS_PATH, "configs/pgbouncer_auth.conf"), "userlist.txt") + sudo("mv userlist.txt /etc/pgbouncer/userlist.txt") sudo('echo "START=1" | sudo tee /etc/default/pgbouncer') # sudo('su postgres -c "/etc/init.d/pgbouncer stop"', pty=False) with settings(warn_only=True): - sudo('/etc/init.d/pgbouncer stop') - sudo('pkill -9 pgbouncer -e') - run('sleep 2') - sudo('/etc/init.d/pgbouncer start', pty=False) + sudo("/etc/init.d/pgbouncer stop") + sudo("pkill -9 pgbouncer -e") + run("sleep 2") + sudo("/etc/init.d/pgbouncer start", pty=False) + @parallel def kill_pgbouncer(stop=False): # sudo('su postgres -c "/etc/init.d/pgbouncer stop"', pty=False) with settings(warn_only=True): - sudo('/etc/init.d/pgbouncer stop') - run('sleep 2') - sudo('rm /var/log/postgresql/pgbouncer.pid') + sudo("/etc/init.d/pgbouncer stop") + run("sleep 2") + sudo("rm /var/log/postgresql/pgbouncer.pid") with settings(warn_only=True): - sudo('pkill -9 pgbouncer') - run('sleep 2') + sudo("pkill -9 pgbouncer") + run("sleep 2") if not stop: - run('sudo /etc/init.d/pgbouncer start', pty=False) + run("sudo /etc/init.d/pgbouncer start", pty=False) + def config_monit_task(): - put('config/monit_task.conf', '/etc/monit/conf.d/celery.conf', use_sudo=True) + put("config/monit_task.conf", "/etc/monit/conf.d/celery.conf", use_sudo=True) sudo('echo "START=yes" | sudo tee /etc/default/monit') - sudo('/etc/init.d/monit restart') + sudo("/etc/init.d/monit restart") + def config_monit_node(): - put('config/monit_node.conf', '/etc/monit/conf.d/node.conf', use_sudo=True) + put("config/monit_node.conf", "/etc/monit/conf.d/node.conf", use_sudo=True) sudo('echo "START=yes" | sudo tee /etc/default/monit') - sudo('/etc/init.d/monit restart') + sudo("/etc/init.d/monit restart") + def config_monit_original(): - put('config/monit_original.conf', '/etc/monit/conf.d/node_original.conf', use_sudo=True) + put("config/monit_original.conf", "/etc/monit/conf.d/node_original.conf", use_sudo=True) sudo('echo "START=yes" | sudo tee /etc/default/monit') - sudo('/etc/init.d/monit restart') + sudo("/etc/init.d/monit restart") + def config_monit_app(): - put('config/monit_app.conf', '/etc/monit/conf.d/gunicorn.conf', use_sudo=True) + put("config/monit_app.conf", "/etc/monit/conf.d/gunicorn.conf", use_sudo=True) sudo('echo "START=yes" | sudo tee /etc/default/monit') - sudo('/etc/init.d/monit restart') + sudo("/etc/init.d/monit restart") + def config_monit_work(): - put('config/monit_work.conf', '/etc/monit/conf.d/work.conf', use_sudo=True) + put("config/monit_work.conf", "/etc/monit/conf.d/work.conf", use_sudo=True) sudo('echo "START=yes" | sudo tee /etc/default/monit') - sudo('/etc/init.d/monit restart') + sudo("/etc/init.d/monit restart") + def config_monit_redis(): - sudo('chown root.root /etc/init.d/redis') - sudo('chmod a+x /etc/init.d/redis') - put('config/monit_debug.sh', '/etc/monit/monit_debug.sh', use_sudo=True) - sudo('chmod a+x /etc/monit/monit_debug.sh') - put('config/monit_redis.conf', '/etc/monit/conf.d/redis.conf', 
use_sudo=True) + sudo("chown root.root /etc/init.d/redis") + sudo("chmod a+x /etc/init.d/redis") + put("config/monit_debug.sh", "/etc/monit/monit_debug.sh", use_sudo=True) + sudo("chmod a+x /etc/monit/monit_debug.sh") + put("config/monit_redis.conf", "/etc/monit/conf.d/redis.conf", use_sudo=True) sudo('echo "START=yes" | sudo tee /etc/default/monit') - sudo('/etc/init.d/monit restart') + sudo("/etc/init.d/monit restart") + def setup_mongoengine_repo(): with cd(env.VENDOR_PATH), settings(warn_only=True): - run('rm -fr mongoengine') - run('git clone https://github.com/MongoEngine/mongoengine.git') - sudo('rm -fr /usr/local/lib/python2.7/dist-packages/mongoengine') - sudo('rm -fr /usr/local/lib/python2.7/dist-packages/mongoengine-*') - sudo('ln -sfn %s /usr/local/lib/python2.7/dist-packages/mongoengine' % - os.path.join(env.VENDOR_PATH, 'mongoengine/mongoengine')) - with cd(os.path.join(env.VENDOR_PATH, 'mongoengine')), settings(warn_only=True): - run('git co v0.8.2') + run("rm -fr mongoengine") + run("git clone https://github.com/MongoEngine/mongoengine.git") + sudo("rm -fr /usr/local/lib/python2.7/dist-packages/mongoengine") + sudo("rm -fr /usr/local/lib/python2.7/dist-packages/mongoengine-*") + sudo( + "ln -sfn %s /usr/local/lib/python2.7/dist-packages/mongoengine" + % os.path.join(env.VENDOR_PATH, "mongoengine/mongoengine") + ) + with cd(os.path.join(env.VENDOR_PATH, "mongoengine")), settings(warn_only=True): + run("git co v0.8.2") + def clear_pymongo_repo(): - sudo('rm -fr /usr/local/lib/python2.7/dist-packages/pymongo*') - sudo('rm -fr /usr/local/lib/python2.7/dist-packages/bson*') - sudo('rm -fr /usr/local/lib/python2.7/dist-packages/gridfs*') - + sudo("rm -fr /usr/local/lib/python2.7/dist-packages/pymongo*") + sudo("rm -fr /usr/local/lib/python2.7/dist-packages/bson*") + sudo("rm -fr /usr/local/lib/python2.7/dist-packages/gridfs*") + + def setup_pymongo_repo(): with cd(env.VENDOR_PATH), settings(warn_only=True): - run('git clone git://github.com/mongodb/mongo-python-driver.git pymongo') + run("git clone git://github.com/mongodb/mongo-python-driver.git pymongo") # with cd(os.path.join(env.VENDOR_PATH, 'pymongo')): # sudo('python setup.py install') clear_pymongo_repo() - sudo('ln -sfn %s /usr/local/lib/python2.7/dist-packages/' % - os.path.join(env.VENDOR_PATH, 'pymongo/{pymongo,bson,gridfs}')) + sudo( + "ln -sfn %s /usr/local/lib/python2.7/dist-packages/" + % os.path.join(env.VENDOR_PATH, "pymongo/{pymongo,bson,gridfs}") + ) + def setup_forked_mongoengine(): - with cd(os.path.join(env.VENDOR_PATH, 'mongoengine')), settings(warn_only=True): - run('git remote add clay https://github.com/samuelclay/mongoengine.git') - run('git pull') - run('git fetch clay') - run('git checkout -b clay_master clay/master') + with cd(os.path.join(env.VENDOR_PATH, "mongoengine")), settings(warn_only=True): + run("git remote add clay https://github.com/samuelclay/mongoengine.git") + run("git pull") + run("git fetch clay") + run("git checkout -b clay_master clay/master") + def switch_forked_mongoengine(): - with cd(os.path.join(env.VENDOR_PATH, 'mongoengine')): - run('git co dev') - run('git pull %s dev --force' % env.user) + with cd(os.path.join(env.VENDOR_PATH, "mongoengine")): + run("git co dev") + run("git pull %s dev --force" % env.user) # run('git checkout .') # run('git checkout master') # run('get branch -D dev') # run('git checkout -b dev origin/dev') + def setup_logrotate(clear=True): if clear: - run('find /srv/newsblur/logs/*.log | xargs tee') + run("find /srv/newsblur/logs/*.log | xargs tee") with 
settings(warn_only=True): - sudo('find /var/log/mongodb/*.log | xargs tee') - put('config/logrotate.conf', '/etc/logrotate.d/newsblur', use_sudo=True) - put('config/logrotate.mongo.conf', '/etc/logrotate.d/mongodb', use_sudo=True) - put('config/logrotate.nginx.conf', '/etc/logrotate.d/nginx', use_sudo=True) - sudo('chown root.root /etc/logrotate.d/{newsblur,mongodb,nginx}') - sudo('chmod 644 /etc/logrotate.d/{newsblur,mongodb,nginx}') + sudo("find /var/log/mongodb/*.log | xargs tee") + put("config/logrotate.conf", "/etc/logrotate.d/newsblur", use_sudo=True) + put("config/logrotate.mongo.conf", "/etc/logrotate.d/mongodb", use_sudo=True) + put("config/logrotate.nginx.conf", "/etc/logrotate.d/nginx", use_sudo=True) + sudo("chown root.root /etc/logrotate.d/{newsblur,mongodb,nginx}") + sudo("chmod 644 /etc/logrotate.d/{newsblur,mongodb,nginx}") with settings(warn_only=True): - sudo('chown sclay.sclay /srv/newsblur/logs/*.log') - sudo('logrotate -f /etc/logrotate.d/newsblur') - sudo('logrotate -f /etc/logrotate.d/nginx') - sudo('logrotate -f /etc/logrotate.d/mongodb') + sudo("chown sclay.sclay /srv/newsblur/logs/*.log") + sudo("logrotate -f /etc/logrotate.d/newsblur") + sudo("logrotate -f /etc/logrotate.d/nginx") + sudo("logrotate -f /etc/logrotate.d/mongodb") + def setup_ulimit(): # Increase File Descriptor limits. - run('export FILEMAX=`sysctl -n fs.file-max`', pty=False) - sudo('mv /etc/security/limits.conf /etc/security/limits.conf.bak', pty=False) - sudo('touch /etc/security/limits.conf', pty=False) + run("export FILEMAX=`sysctl -n fs.file-max`", pty=False) + sudo("mv /etc/security/limits.conf /etc/security/limits.conf.bak", pty=False) + sudo("touch /etc/security/limits.conf", pty=False) run('echo "root soft nofile 100000\n" | sudo tee -a /etc/security/limits.conf', pty=False) run('echo "root hard nofile 100000\n" | sudo tee -a /etc/security/limits.conf', pty=False) run('echo "* soft nofile 100000\n" | sudo tee -a /etc/security/limits.conf', pty=False) run('echo "* hard nofile 100090\n" | sudo tee -a /etc/security/limits.conf', pty=False) run('echo "fs.file-max = 100000\n" | sudo tee -a /etc/sysctl.conf', pty=False) - sudo('sysctl -p') - sudo('ulimit -n 100000') + sudo("sysctl -p") + sudo("ulimit -n 100000") connections.connect(env.host_string) - + # run('touch /home/ubuntu/.bash_profile') # run('echo "ulimit -n $FILEMAX" >> /home/ubuntu/.bash_profile') @@ -736,67 +823,78 @@ def setup_ulimit(): # echo "net.ipv4.ip_local_port_range = 1024 65535" >> /etc/sysctl.conf # sudo chmod 644 /etc/sysctl.conf + def setup_do_monitoring(): - run('curl -sSL https://agent.digitalocean.com/install.sh | sh') - + run("curl -sSL https://agent.digitalocean.com/install.sh | sh") + + def setup_syncookies(): - sudo('echo 1 | sudo tee /proc/sys/net/ipv4/tcp_syncookies') - sudo('sudo /sbin/sysctl -w net.ipv4.tcp_syncookies=1') + sudo("echo 1 | sudo tee /proc/sys/net/ipv4/tcp_syncookies") + sudo("sudo /sbin/sysctl -w net.ipv4.tcp_syncookies=1") + def setup_sudoers(user=None): sudo('echo "%s ALL=(ALL) NOPASSWD: ALL" | sudo tee /etc/sudoers.d/sclay' % (user or env.user)) - sudo('chmod 0440 /etc/sudoers.d/sclay') + sudo("chmod 0440 /etc/sudoers.d/sclay") + def setup_nginx(): - NGINX_VERSION = '1.19.5' + NGINX_VERSION = "1.19.5" with cd(env.VENDOR_PATH), settings(warn_only=True): sudo("groupadd nginx") sudo("useradd -g nginx -d /var/www/htdocs -s /bin/false nginx") - run('wget http://nginx.org/download/nginx-%s.tar.gz' % NGINX_VERSION) - run('tar -xzf nginx-%s.tar.gz' % NGINX_VERSION) - run('rm nginx-%s.tar.gz' % 
NGINX_VERSION) - with cd('nginx-%s' % NGINX_VERSION): - run('./configure --with-http_ssl_module --with-http_stub_status_module --with-http_gzip_static_module --with-http_realip_module ') - run('make') - sudo('make install') + run("wget http://nginx.org/download/nginx-%s.tar.gz" % NGINX_VERSION) + run("tar -xzf nginx-%s.tar.gz" % NGINX_VERSION) + run("rm nginx-%s.tar.gz" % NGINX_VERSION) + with cd("nginx-%s" % NGINX_VERSION): + run( + "./configure --with-http_ssl_module --with-http_stub_status_module --with-http_gzip_static_module --with-http_realip_module " + ) + run("make") + sudo("make install") config_nginx() + def config_nginx(): put("config/nginx.conf", "/usr/local/nginx/conf/nginx.conf", use_sudo=True) sudo("mkdir -p /usr/local/nginx/conf/sites-enabled") sudo("mkdir -p /var/log/nginx") put("config/nginx.newsblur.conf", "/usr/local/nginx/conf/sites-enabled/newsblur.conf", use_sudo=True) put("config/nginx-init", "/etc/init.d/nginx", use_sudo=True) - sudo('sed -i -e s/nginx_none/`cat /etc/hostname`/g /usr/local/nginx/conf/sites-enabled/newsblur.conf') + sudo("sed -i -e s/nginx_none/`cat /etc/hostname`/g /usr/local/nginx/conf/sites-enabled/newsblur.conf") sudo("chmod 0755 /etc/init.d/nginx") sudo("/usr/sbin/update-rc.d -f nginx defaults") sudo("/etc/init.d/nginx restart") copy_certificates() + # =============== # = Setup - App = # =============== + def setup_app_firewall(): - sudo('ufw default deny') - sudo('ufw allow ssh') # ssh - sudo('ufw allow 80') # http - sudo('ufw allow 8000') # gunicorn - sudo('ufw allow 8888') # socket.io - sudo('ufw allow 8889') # socket.io ssl - sudo('ufw allow 443') # https - sudo('ufw --force enable') + sudo("ufw default deny") + sudo("ufw allow ssh") # ssh + sudo("ufw allow 80") # http + sudo("ufw allow 8000") # gunicorn + sudo("ufw allow 8888") # socket.io + sudo("ufw allow 8889") # socket.io ssl + sudo("ufw allow 443") # https + sudo("ufw --force enable") + def remove_gunicorn(): with cd(env.VENDOR_PATH): - sudo('rm -fr gunicorn') - + sudo("rm -fr gunicorn") + + def setup_gunicorn(supervisor=True, restart=True): if supervisor: - put('config/supervisor_gunicorn.conf', '/etc/supervisor/conf.d/gunicorn.conf', use_sudo=True) - sudo('supervisorctl reread') + put("config/supervisor_gunicorn.conf", "/etc/supervisor/conf.d/gunicorn.conf", use_sudo=True) + sudo("supervisorctl reread") if restart: - sudo('supervisorctl update') + sudo("supervisorctl update") # with cd(env.VENDOR_PATH): # sudo('rm -fr gunicorn') # run('git clone git://github.com/benoitc/gunicorn.git') @@ -806,265 +904,304 @@ def setup_gunicorn(supervisor=True, restart=True): def update_gunicorn(): - with cd(os.path.join(env.VENDOR_PATH, 'gunicorn')): - run('git pull') - sudo('python setup.py develop') + with cd(os.path.join(env.VENDOR_PATH, "gunicorn")): + run("git pull") + sudo("python setup.py develop") + def setup_staging(): - run('git clone https://github.com/samuelclay/NewsBlur.git staging') - with cd('~/staging'): - run('cp ../newsblur/local_settings.py local_settings.py') - run('mkdir -p logs') - run('touch logs/newsblur.log') + run("git clone https://github.com/samuelclay/NewsBlur.git staging") + with cd("~/staging"): + run("cp ../newsblur/local_settings.py local_settings.py") + run("mkdir -p logs") + run("touch logs/newsblur.log") + def setup_node_app(): - sudo('curl -sL https://deb.nodesource.com/setup_14.x | sudo bash -') - sudo('apt-get install -y nodejs') + sudo("curl -sL https://deb.nodesource.com/setup_14.x | sudo bash -") + sudo("apt-get install -y nodejs") # run('curl -L 
https://npmjs.org/install.sh | sudo sh') # sudo('apt-get install npm') - sudo('sudo npm install -g npm') - sudo('npm install -g supervisor') - sudo('ufw allow 8888') - sudo('ufw allow 4040') + sudo("sudo npm install -g npm") + sudo("npm install -g supervisor") + sudo("ufw allow 8888") + sudo("ufw allow 4040") + def config_node(full=False): - sudo('rm -f /etc/supervisor/conf.d/gunicorn.conf') - sudo('rm -f /etc/supervisor/conf.d/node.conf') - put('config/supervisor_node_unread.conf', '/etc/supervisor/conf.d/node_unread.conf', use_sudo=True) - put('config/supervisor_node_unread_ssl.conf', '/etc/supervisor/conf.d/node_unread_ssl.conf', use_sudo=True) - put('config/supervisor_node_favicons.conf', '/etc/supervisor/conf.d/node_favicons.conf', use_sudo=True) - put('config/supervisor_node_text.conf', '/etc/supervisor/conf.d/node_text.conf', use_sudo=True) - + sudo("rm -f /etc/supervisor/conf.d/gunicorn.conf") + sudo("rm -f /etc/supervisor/conf.d/node.conf") + put("config/supervisor_node_unread.conf", "/etc/supervisor/conf.d/node_unread.conf", use_sudo=True) + put( + "config/supervisor_node_unread_ssl.conf", "/etc/supervisor/conf.d/node_unread_ssl.conf", use_sudo=True + ) + put("config/supervisor_node_favicons.conf", "/etc/supervisor/conf.d/node_favicons.conf", use_sudo=True) + put("config/supervisor_node_text.conf", "/etc/supervisor/conf.d/node_text.conf", use_sudo=True) + if full: run("rm -fr /srv/newsblur/node/node_modules") with cd(os.path.join(env.NEWSBLUR_PATH, "node")): run("npm install") - - sudo('supervisorctl reload') + + sudo("supervisorctl reload") + @parallel def copy_app_settings(): - run('rm -f %s/local_settings.py' % env.NEWSBLUR_PATH) - put(os.path.join(env.SECRETS_PATH, 'settings/app_settings.py'), - '%s/newsblur/local_settings.py' % env.NEWSBLUR_PATH) + run("rm -f %s/local_settings.py" % env.NEWSBLUR_PATH) + put( + os.path.join(env.SECRETS_PATH, "settings/app_settings.py"), + "%s/newsblur/local_settings.py" % env.NEWSBLUR_PATH, + ) run('echo "\nSERVER_NAME = \\\\"`hostname`\\\\"" >> %s/newsblur/local_settings.py' % env.NEWSBLUR_PATH) + def assemble_certificates(): - with lcd(os.path.join(env.SECRETS_PATH, 'certificates/comodo')): - local('pwd') - local('cat STAR_newsblur_com.crt EssentialSSLCA_2.crt ComodoUTNSGCCA.crt UTNAddTrustSGCCA.crt AddTrustExternalCARoot.crt > newsblur.com.crt') - + with lcd(os.path.join(env.SECRETS_PATH, "certificates/comodo")): + local("pwd") + local( + "cat STAR_newsblur_com.crt EssentialSSLCA_2.crt ComodoUTNSGCCA.crt UTNAddTrustSGCCA.crt AddTrustExternalCARoot.crt > newsblur.com.crt" + ) + + def copy_certificates(copy=False): - cert_path = os.path.join(env.NEWSBLUR_PATH, 'config/certificates') - run('mkdir -p %s' % cert_path) + cert_path = os.path.join(env.NEWSBLUR_PATH, "config/certificates") + run("mkdir -p %s" % cert_path) fullchain_path = "/etc/letsencrypt/live/newsblur.com/fullchain.pem" privkey_path = "/etc/letsencrypt/live/newsblur.com/privkey.pem" if copy: - sudo('mkdir -p %s' % os.path.dirname(fullchain_path)) - put(os.path.join(env.SECRETS_PATH, 'certificates/newsblur.com.pem'), fullchain_path, use_sudo=True) - put(os.path.join(env.SECRETS_PATH, 'certificates/newsblur.com.key'), privkey_path, use_sudo=True) + sudo("mkdir -p %s" % os.path.dirname(fullchain_path)) + put(os.path.join(env.SECRETS_PATH, "certificates/newsblur.com.pem"), fullchain_path, use_sudo=True) + put(os.path.join(env.SECRETS_PATH, "certificates/newsblur.com.key"), privkey_path, use_sudo=True) - run('ln -fs %s %s' % (fullchain_path, os.path.join(cert_path, 
'newsblur.com.crt'))) - run('ln -fs %s %s' % (fullchain_path, os.path.join(cert_path, 'newsblur.com.pem'))) # For backwards compatibility with hard-coded nginx configs - run('ln -fs %s %s' % (privkey_path, os.path.join(cert_path, 'newsblur.com.key'))) - run('ln -fs %s %s' % (privkey_path, os.path.join(cert_path, 'newsblur.com.crt.key'))) # HAProxy - put(os.path.join(env.SECRETS_PATH, 'certificates/comodo/dhparams.pem'), cert_path) - put(os.path.join(env.SECRETS_PATH, 'certificates/ios/aps_development.pem'), cert_path) + run("ln -fs %s %s" % (fullchain_path, os.path.join(cert_path, "newsblur.com.crt"))) + run( + "ln -fs %s %s" % (fullchain_path, os.path.join(cert_path, "newsblur.com.pem")) + ) # For backwards compatibility with hard-coded nginx configs + run("ln -fs %s %s" % (privkey_path, os.path.join(cert_path, "newsblur.com.key"))) + run("ln -fs %s %s" % (privkey_path, os.path.join(cert_path, "newsblur.com.crt.key"))) # HAProxy + put(os.path.join(env.SECRETS_PATH, "certificates/comodo/dhparams.pem"), cert_path) + put(os.path.join(env.SECRETS_PATH, "certificates/ios/aps_development.pem"), cert_path) # Export aps.cer from Apple issued certificate using Keychain Assistant # openssl x509 -in aps.cer -inform DER -outform PEM -out aps.pem - put(os.path.join(env.SECRETS_PATH, 'certificates/ios/aps.pem'), cert_path) + put(os.path.join(env.SECRETS_PATH, "certificates/ios/aps.pem"), cert_path) # Export aps.p12 from aps.cer using Keychain Assistant # openssl pkcs12 -in aps.p12 -out aps.p12.pem -nodes - put(os.path.join(env.SECRETS_PATH, 'certificates/ios/aps.p12.pem'), cert_path) - + put(os.path.join(env.SECRETS_PATH, "certificates/ios/aps.p12.pem"), cert_path) + + def setup_certbot(): - sudo('snap install --classic certbot') - sudo('snap set certbot trust-plugin-with-root=ok') - sudo('snap install certbot-dns-dnsimple') - sudo('ln -fs /snap/bin/certbot /usr/bin/certbot') - put(os.path.join(env.SECRETS_PATH, 'configs/certbot.conf'), - os.path.join(env.NEWSBLUR_PATH, 'certbot.conf')) - sudo('chmod 0600 %s' % os.path.join(env.NEWSBLUR_PATH, 'certbot.conf')) - sudo('certbot certonly -n --agree-tos ' - ' --dns-dnsimple --dns-dnsimple-credentials %s' - ' --email samuel@newsblur.com --domains newsblur.com ' - ' -d "*.newsblur.com" -d "popular.global.newsblur.com"' % - (os.path.join(env.NEWSBLUR_PATH, 'certbot.conf'))) - sudo('chmod 0755 /etc/letsencrypt/{live,archive}') - sudo('chmod 0755 /etc/letsencrypt/archive/newsblur.com/privkey1.pem') - + sudo("snap install --classic certbot") + sudo("snap set certbot trust-plugin-with-root=ok") + sudo("snap install certbot-dns-dnsimple") + sudo("ln -fs /snap/bin/certbot /usr/bin/certbot") + put( + os.path.join(env.SECRETS_PATH, "configs/certbot.conf"), + os.path.join(env.NEWSBLUR_PATH, "certbot.conf"), + ) + sudo("chmod 0600 %s" % os.path.join(env.NEWSBLUR_PATH, "certbot.conf")) + sudo( + "certbot certonly -n --agree-tos " + " --dns-dnsimple --dns-dnsimple-credentials %s" + " --email samuel@newsblur.com --domains newsblur.com " + ' -d "*.newsblur.com" -d "popular.global.newsblur.com"' + % (os.path.join(env.NEWSBLUR_PATH, "certbot.conf")) + ) + sudo("chmod 0755 /etc/letsencrypt/{live,archive}") + sudo("chmod 0755 /etc/letsencrypt/archive/newsblur.com/privkey1.pem") + + # def setup_certbot_old(): # sudo('add-apt-repository -y universe') # sudo('add-apt-repository -y ppa:certbot/certbot') # sudo('apt-get update') # sudo('apt-get install -y certbot') # sudo('apt-get install -y python3-certbot-dns-dnsimple') -# put(os.path.join(env.SECRETS_PATH, 
'configs/certbot.conf'), +# put(os.path.join(env.SECRETS_PATH, 'configs/certbot.conf'), # os.path.join(env.NEWSBLUR_PATH, 'certbot.conf')) # sudo('chmod 0600 %s' % os.path.join(env.NEWSBLUR_PATH, 'certbot.conf')) # sudo('certbot certonly -n --agree-tos ' # ' --dns-dnsimple --dns-dnsimple-credentials %s' # ' --email samuel@newsblur.com --domains newsblur.com ' -# ' -d "*.newsblur.com" -d "global.popular.newsblur.com"' % +# ' -d "*.newsblur.com" -d "global.popular.newsblur.com"' % # (os.path.join(env.NEWSBLUR_PATH, 'certbot.conf'))) # sudo('chmod 0755 /etc/letsencrypt/{live,archive}') # sudo('chmod 0755 /etc/letsencrypt/archive/newsblur.com/privkey1.pem') - + + @parallel def maintenance_on(): role = role_for_host() - if role in ['work', 'search']: - sudo('supervisorctl stop all') + if role in ["work", "search"]: + sudo("supervisorctl stop all") else: - put('templates/maintenance_off.html', '%s/templates/maintenance_off.html' % env.NEWSBLUR_PATH) + put("templates/maintenance_off.html", "%s/templates/maintenance_off.html" % env.NEWSBLUR_PATH) with virtualenv(): - run('mv templates/maintenance_off.html templates/maintenance_on.html') + run("mv templates/maintenance_off.html templates/maintenance_on.html") + @parallel def maintenance_off(): role = role_for_host() - if role in ['work', 'search']: - sudo('supervisorctl start all') + if role in ["work", "search"]: + sudo("supervisorctl start all") else: with virtualenv(): - run('mv templates/maintenance_on.html templates/maintenance_off.html') - run('git checkout templates/maintenance_off.html') + run("mv templates/maintenance_on.html templates/maintenance_off.html") + run("git checkout templates/maintenance_off.html") + def setup_haproxy(debug=False): version = "2.3.3" - sudo('ufw allow 81') # nginx moved - sudo('ufw allow 1936') # haproxy stats + sudo("ufw allow 81") # nginx moved + sudo("ufw allow 1936") # haproxy stats # sudo('apt-get install -y haproxy') # sudo('apt-get remove -y haproxy') with cd(env.VENDOR_PATH): - run('wget http://www.haproxy.org/download/2.3/src/haproxy-%s.tar.gz' % version) - run('tar -xf haproxy-%s.tar.gz' % version) - with cd('haproxy-%s' % version): - run('make TARGET=linux-glibc USE_PCRE=1 USE_OPENSSL=1 USE_ZLIB=1') - sudo('make install') - put('config/haproxy-init', '/etc/init.d/haproxy', use_sudo=True) - sudo('chmod u+x /etc/init.d/haproxy') - sudo('mkdir -p /etc/haproxy') + run("wget http://www.haproxy.org/download/2.3/src/haproxy-%s.tar.gz" % version) + run("tar -xf haproxy-%s.tar.gz" % version) + with cd("haproxy-%s" % version): + run("make TARGET=linux-glibc USE_PCRE=1 USE_OPENSSL=1 USE_ZLIB=1") + sudo("make install") + put("config/haproxy-init", "/etc/init.d/haproxy", use_sudo=True) + sudo("chmod u+x /etc/init.d/haproxy") + sudo("mkdir -p /etc/haproxy") if debug: - put('config/debug_haproxy.conf', '/etc/haproxy/haproxy.cfg', use_sudo=True) + put("config/debug_haproxy.conf", "/etc/haproxy/haproxy.cfg", use_sudo=True) else: build_haproxy() - put(os.path.join(env.SECRETS_PATH, 'configs/haproxy.conf'), - '/etc/haproxy/haproxy.cfg', use_sudo=True) + put(os.path.join(env.SECRETS_PATH, "configs/haproxy.conf"), "/etc/haproxy/haproxy.cfg", use_sudo=True) sudo('echo "ENABLED=1" | sudo tee /etc/default/haproxy') cert_path = "%s/config/certificates" % env.NEWSBLUR_PATH - run('cat %s/newsblur.com.crt > %s/newsblur.pem' % (cert_path, cert_path)) - run('cat %s/newsblur.com.key >> %s/newsblur.pem' % (cert_path, cert_path)) - run('ln -s %s/newsblur.com.key %s/newsblur.pem.key' % (cert_path, cert_path)) - 
put('config/haproxy_rsyslog.conf', '/etc/rsyslog.d/49-haproxy.conf', use_sudo=True) + run("cat %s/newsblur.com.crt > %s/newsblur.pem" % (cert_path, cert_path)) + run("cat %s/newsblur.com.key >> %s/newsblur.pem" % (cert_path, cert_path)) + run("ln -s %s/newsblur.com.key %s/newsblur.pem.key" % (cert_path, cert_path)) + put("config/haproxy_rsyslog.conf", "/etc/rsyslog.d/49-haproxy.conf", use_sudo=True) # sudo('restart rsyslog') - sudo('update-rc.d -f haproxy defaults') + sudo("update-rc.d -f haproxy defaults") + + sudo("/etc/init.d/haproxy stop") + run("sleep 5") + sudo("/etc/init.d/haproxy start") - sudo('/etc/init.d/haproxy stop') - run('sleep 5') - sudo('/etc/init.d/haproxy start') def config_haproxy(debug=False): if debug: - put('config/debug_haproxy.conf', '/etc/haproxy/haproxy.cfg', use_sudo=True) + put("config/debug_haproxy.conf", "/etc/haproxy/haproxy.cfg", use_sudo=True) else: build_haproxy() - put(os.path.join(env.SECRETS_PATH, 'configs/haproxy.conf'), - '/etc/haproxy/haproxy.cfg', use_sudo=True) + put(os.path.join(env.SECRETS_PATH, "configs/haproxy.conf"), "/etc/haproxy/haproxy.cfg", use_sudo=True) - haproxy_check = run('haproxy -c -f /etc/haproxy/haproxy.cfg') + haproxy_check = run("haproxy -c -f /etc/haproxy/haproxy.cfg") if haproxy_check.return_code == 0: - sudo('/etc/init.d/haproxy reload') + sudo("/etc/init.d/haproxy reload") else: print(" !!!> Uh-oh, HAProxy config doesn't check out: %s" % haproxy_check.return_code) + def build_haproxy(): droplets = assign_digitalocean_roledefs(split=True) servers = defaultdict(list) - gunicorn_counts_servers = ['app22', 'app26'] - gunicorn_refresh_servers = ['app20', 'app21'] - maintenance_servers = ['app20'] - node_socket3_servers = ['node02', 'node03'] + gunicorn_counts_servers = ["app22", "app26"] + gunicorn_refresh_servers = ["app20", "app21"] + maintenance_servers = ["app20"] + node_socket3_servers = ["node02", "node03"] ignore_servers = [] - - for group_type in ['app', 'push', 'work', 'node_socket', 'node_socket3', 'node_favicon', 'node_text', 'www']: + + for group_type in [ + "app", + "push", + "work", + "node_socket", + "node_socket3", + "node_favicon", + "node_text", + "www", + ]: group_type_name = group_type - if 'node' in group_type: - group_type_name = 'node' + if "node" in group_type: + group_type_name = "node" for server in droplets[group_type_name]: - droplet_nums = re.findall(r'\d+', server['name']) - droplet_num = droplet_nums[0] if droplet_nums else '' + droplet_nums = re.findall(r"\d+", server["name"]) + droplet_num = droplet_nums[0] if droplet_nums else "" server_type = group_type port = 80 check_inter = 3000 - - if server['name'] in ignore_servers: - print(" ---> Ignoring %s" % server['name']) - continue - if server['name'] in node_socket3_servers and group_type != 'node_socket3': - continue - if server['name'] not in node_socket3_servers and group_type == 'node_socket3': - continue - if server_type == 'www': - port = 81 - if group_type == 'node_socket': - port = 8888 - if group_type == 'node_socket3': - port = 8888 - if group_type == 'node_text': - port = 4040 - if group_type in ['app', 'push']: - port = 8000 - address = "%s:%s" % (server['address'], port) - if server_type == 'app': - nginx_address = "%s:80" % (server['address']) - servers['nginx'].append(" server nginx%-15s %-22s check inter 3000ms" % (droplet_num, nginx_address)) - if server['name'] in maintenance_servers: - nginx_address = "%s:80" % (server['address']) - servers['maintenance'].append(" server nginx%-15s %-22s check inter 3000ms" % (droplet_num, 
nginx_address)) - - if server['name'] in gunicorn_counts_servers: - server_type = 'gunicorn_counts' + if server["name"] in ignore_servers: + print(" ---> Ignoring %s" % server["name"]) + continue + if server["name"] in node_socket3_servers and group_type != "node_socket3": + continue + if server["name"] not in node_socket3_servers and group_type == "node_socket3": + continue + if server_type == "www": + port = 81 + if group_type == "node_socket": + port = 8888 + if group_type == "node_socket3": + port = 8888 + if group_type == "node_text": + port = 4040 + if group_type in ["app", "push"]: + port = 8000 + address = "%s:%s" % (server["address"], port) + + if server_type == "app": + nginx_address = "%s:80" % (server["address"]) + servers["nginx"].append( + " server nginx%-15s %-22s check inter 3000ms" % (droplet_num, nginx_address) + ) + if server["name"] in maintenance_servers: + nginx_address = "%s:80" % (server["address"]) + servers["maintenance"].append( + " server nginx%-15s %-22s check inter 3000ms" % (droplet_num, nginx_address) + ) + + if server["name"] in gunicorn_counts_servers: + server_type = "gunicorn_counts" check_inter = 15000 - elif server['name'] in gunicorn_refresh_servers: - server_type = 'gunicorn_refresh' + elif server["name"] in gunicorn_refresh_servers: + server_type = "gunicorn_refresh" check_inter = 30000 - + server_name = "%s%s" % (server_type, droplet_num) - servers[server_type].append(" server %-20s %-22s check inter %sms" % (server_name, address, check_inter)) - - h = open(os.path.join(env.NEWSBLUR_PATH, 'config/haproxy.conf.template'), 'r') + servers[server_type].append( + " server %-20s %-22s check inter %sms" % (server_name, address, check_inter) + ) + + h = open(os.path.join(env.NEWSBLUR_PATH, "config/haproxy.conf.template"), "r") haproxy_template = h.read() for sub, server_list in list(servers.items()): - sorted_servers = '\n'.join(sorted(server_list)) + sorted_servers = "\n".join(sorted(server_list)) haproxy_template = haproxy_template.replace("{{ %s }}" % sub, sorted_servers) - f = open(os.path.join(env.SECRETS_PATH, 'configs/haproxy.conf'), 'w') + f = open(os.path.join(env.SECRETS_PATH, "configs/haproxy.conf"), "w") f.write(haproxy_template) f.close() + def upgrade_django(role=None): if not role: role = role_for_host() with virtualenv(), settings(warn_only=True): - sudo('sudo dpkg --configure -a') + sudo("sudo dpkg --configure -a") setup_supervisor() pull() - run('git co django1.11') + run("git co django1.11") if role == "task": - sudo('supervisorctl stop celery') - run('./utils/kill_celery.sh') + sudo("supervisorctl stop celery") + run("./utils/kill_celery.sh") copy_task_settings() enable_celery_supervisor(update=False) elif role == "work": copy_app_settings() enable_celerybeat() elif role == "web" or role == "app": - sudo('supervisorctl stop gunicorn') - run('./utils/kill_gunicorn.sh') + sudo("supervisorctl stop gunicorn") + run("./utils/kill_gunicorn.sh") copy_app_settings() setup_gunicorn(restart=False) elif role == "node": @@ -1078,90 +1215,97 @@ def upgrade_django(role=None): # sudo('reboot') + def clean(): with virtualenv(), settings(warn_only=True): run('find . 
-name "*.pyc" -exec rm -f {} \;') - + + def downgrade_django(role=None): with virtualenv(), settings(warn_only=True): pull() - run('git co master') + run("git co master") pip() - run('pip uninstall -y django-paypal') + run("pip uninstall -y django-paypal") if role == "task": copy_task_settings() enable_celery_supervisor() else: copy_app_settings() deploy() - + + def vendorize_paypal(): with virtualenv(), settings(warn_only=True): - run('pip uninstall -y django-paypal') + run("pip uninstall -y django-paypal") + def upgrade_pil(): with virtualenv(): pull() - run('pip install --upgrade pillow') + run("pip install --upgrade pillow") # celery_stop() - sudo('apt-get remove -y python-imaging') - sudo('supervisorctl reload') + sudo("apt-get remove -y python-imaging") + sudo("supervisorctl reload") # kill() + def downgrade_pil(): with virtualenv(): - sudo('apt-get install -y python-imaging') - sudo('rm -fr /usr/local/lib/python2.7/dist-packages/Pillow*') + sudo("apt-get install -y python-imaging") + sudo("rm -fr /usr/local/lib/python2.7/dist-packages/Pillow*") pull() - sudo('supervisorctl reload') + sudo("supervisorctl reload") # kill() + def setup_db_monitor(): pull() with virtualenv(): - sudo('apt-get install -y libpq-dev python2.7-dev') - run('pip install -r flask/requirements.txt') - put('flask/supervisor_db_monitor.conf', '/etc/supervisor/conf.d/db_monitor.conf', use_sudo=True) - sudo('supervisorctl reread') - sudo('supervisorctl update') - + sudo("apt-get install -y libpq-dev python2.7-dev") + run("pip install -r flask/requirements.txt") + put("flask/supervisor_db_monitor.conf", "/etc/supervisor/conf.d/db_monitor.conf", use_sudo=True) + sudo("supervisorctl reread") + sudo("supervisorctl update") + + # ============== # = Setup - DB = # ============== + @parallel def setup_db_firewall(): ports = [ - 5432, # PostgreSQL + 5432, # PostgreSQL 27017, # MongoDB 28017, # MongoDB web 27019, # MongoDB config - 6379, # Redis + 6379, # Redis # 11211, # Memcached - 3060, # Node original page server - 9200, # Elasticsearch - 5000, # DB Monitor + 3060, # Node original page server + 9200, # Elasticsearch + 5000, # DB Monitor ] - sudo('ufw --force reset') - sudo('ufw default deny') - sudo('ufw allow ssh') - sudo('ufw allow 80') - sudo('ufw allow 443') + sudo("ufw --force reset") + sudo("ufw default deny") + sudo("ufw allow ssh") + sudo("ufw allow 80") + sudo("ufw allow 443") # DigitalOcean - for ip in set(env.roledefs['app'] + - env.roledefs['db'] + - env.roledefs['debug'] + - env.roledefs['task'] + - env.roledefs['work'] + - env.roledefs['push'] + - env.roledefs['www'] + - env.roledefs['search'] + - env.roledefs['node']): - sudo('ufw allow proto tcp from %s to any port %s' % ( - ip, - ','.join(map(str, ports)) - )) + for ip in set( + env.roledefs["app"] + + env.roledefs["db"] + + env.roledefs["debug"] + + env.roledefs["task"] + + env.roledefs["work"] + + env.roledefs["push"] + + env.roledefs["www"] + + env.roledefs["search"] + + env.roledefs["node"] + ): + sudo("ufw allow proto tcp from %s to any port %s" % (ip, ",".join(map(str, ports)))) # EC2 # for host in set(env.roledefs['ec2task']): @@ -1171,67 +1315,77 @@ def setup_db_firewall(): # ','.join(map(str, ports)) # )) - sudo('ufw --force enable') + sudo("ufw --force enable") + def setup_rabbitmq(): sudo('echo "deb http://www.rabbitmq.com/debian/ testing main" | sudo tee -a /etc/apt/sources.list') - run('wget http://www.rabbitmq.com/rabbitmq-signing-key-public.asc') - sudo('apt-key add rabbitmq-signing-key-public.asc') - run('rm 
rabbitmq-signing-key-public.asc') - sudo('apt-get update') - sudo('apt-get install -y rabbitmq-server') - sudo('rabbitmqctl add_user newsblur newsblur') - sudo('rabbitmqctl add_vhost newsblurvhost') + run("wget http://www.rabbitmq.com/rabbitmq-signing-key-public.asc") + sudo("apt-key add rabbitmq-signing-key-public.asc") + run("rm rabbitmq-signing-key-public.asc") + sudo("apt-get update") + sudo("apt-get install -y rabbitmq-server") + sudo("rabbitmqctl add_user newsblur newsblur") + sudo("rabbitmqctl add_vhost newsblurvhost") sudo('rabbitmqctl set_permissions -p newsblurvhost newsblur ".*" ".*" ".*"') + # def setup_memcached(): # sudo('apt-get -y install memcached') + def setup_postgres(standby=False): shmmax = 17818362112 hugepages = 9000 - sudo('echo "deb http://apt.postgresql.org/pub/repos/apt/ `lsb_release -cs`-pgdg main" |sudo tee /etc/apt/sources.list.d/pgdg.list') - sudo('wget --quiet -O - https://www.postgresql.org/media/keys/ACCC4CF8.asc | sudo apt-key add -') - sudo('apt update') - sudo('apt install -y postgresql-13') - put('config/postgresql-13.conf', '/etc/postgresql/13/main/postgresql.conf', use_sudo=True) - put('config/postgres_hba-13.conf', '/etc/postgresql/13/main/pg_hba.conf', use_sudo=True) - sudo('mkdir -p /var/lib/postgresql/13/archive') - sudo('chown -R postgres.postgres /etc/postgresql/13/main') - sudo('chown -R postgres.postgres /var/lib/postgresql/13/main') - sudo('chown -R postgres.postgres /var/lib/postgresql/13/archive') + sudo( + 'echo "deb http://apt.postgresql.org/pub/repos/apt/ `lsb_release -cs`-pgdg main" |sudo tee /etc/apt/sources.list.d/pgdg.list' + ) + sudo("wget --quiet -O - https://www.postgresql.org/media/keys/ACCC4CF8.asc | sudo apt-key add -") + sudo("apt update") + sudo("apt install -y postgresql-13") + put("config/postgresql-13.conf", "/etc/postgresql/13/main/postgresql.conf", use_sudo=True) + put("config/postgres_hba-13.conf", "/etc/postgresql/13/main/pg_hba.conf", use_sudo=True) + sudo("mkdir -p /var/lib/postgresql/13/archive") + sudo("chown -R postgres.postgres /etc/postgresql/13/main") + sudo("chown -R postgres.postgres /var/lib/postgresql/13/main") + sudo("chown -R postgres.postgres /var/lib/postgresql/13/archive") sudo('echo "%s" | sudo tee /proc/sys/kernel/shmmax' % shmmax) sudo('echo "\nkernel.shmmax = %s" | sudo tee -a /etc/sysctl.conf' % shmmax) sudo('echo "\nvm.nr_hugepages = %s\n" | sudo tee -a /etc/sysctl.conf' % hugepages) run('echo "ulimit -n 100000" > postgresql.defaults') - sudo('mv postgresql.defaults /etc/default/postgresql') - sudo('sysctl -p') - sudo('rm -f /lib/systemd/system/postgresql.service') # Ubuntu 16 has wrong default - sudo('systemctl daemon-reload') - sudo('systemctl enable postgresql') + sudo("mv postgresql.defaults /etc/default/postgresql") + sudo("sysctl -p") + sudo("rm -f /lib/systemd/system/postgresql.service") # Ubuntu 16 has wrong default + sudo("systemctl daemon-reload") + sudo("systemctl enable postgresql") if standby: - put('config/postgresql_recovery.conf', '/var/lib/postgresql/13/recovery.conf', use_sudo=True) - sudo('chown -R postgres.postgres /var/lib/postgresql/13/recovery.conf') + put("config/postgresql_recovery.conf", "/var/lib/postgresql/13/recovery.conf", use_sudo=True) + sudo("chown -R postgres.postgres /var/lib/postgresql/13/recovery.conf") + + sudo("/etc/init.d/postgresql stop") + sudo("/etc/init.d/postgresql start") - sudo('/etc/init.d/postgresql stop') - sudo('/etc/init.d/postgresql start') def config_postgres(standby=False): - put('config/postgresql-13.conf', 
'/etc/postgresql/13/main/postgresql.conf', use_sudo=True) - put('config/postgres_hba.conf', '/etc/postgresql/13/main/pg_hba.conf', use_sudo=True) - sudo('chown postgres.postgres /etc/postgresql/13/main/postgresql.conf') + put("config/postgresql-13.conf", "/etc/postgresql/13/main/postgresql.conf", use_sudo=True) + put("config/postgres_hba.conf", "/etc/postgresql/13/main/pg_hba.conf", use_sudo=True) + sudo("chown postgres.postgres /etc/postgresql/13/main/postgresql.conf") run('echo "ulimit -n 100000" > postgresql.defaults') - sudo('mv postgresql.defaults /etc/default/postgresql') - - sudo('/etc/init.d/postgresql reload 13') + sudo("mv postgresql.defaults /etc/default/postgresql") + + sudo("/etc/init.d/postgresql reload 13") + def upgrade_postgres(): - sudo('su postgres -c "/usr/lib/postgresql/10/bin/pg_upgrade -b /usr/lib/postgresql/9.4/bin -B /usr/lib/postgresql/10/bin -d /var/lib/postgresql/9.4/main -D /var/lib/postgresql/10/main"') - -def copy_postgres_to_standby(master='db01'): + sudo( + 'su postgres -c "/usr/lib/postgresql/10/bin/pg_upgrade -b /usr/lib/postgresql/9.4/bin -B /usr/lib/postgresql/10/bin -d /var/lib/postgresql/9.4/main -D /var/lib/postgresql/10/main"' + ) + + +def copy_postgres_to_standby(master="db01"): # http://www.rassoc.com/gregr/weblog/2013/02/16/zero-to-postgresql-streaming-replication-in-10-mins/ - + # Make sure you can ssh from master to slave and back with the postgres user account. # Need to give postgres accounts keys in authroized_keys. @@ -1240,259 +1394,319 @@ def copy_postgres_to_standby(master='db01'): # new: sudo su postgres; ssh db_pgsql # old: sudo su postgres; ssh new # old: sudo su postgres -c "psql -c \"SELECT pg_start_backup('label', true)\"" - sudo('systemctl stop postgresql') - sudo('mkdir -p /var/lib/postgresql/9.4/archive') - sudo('chown postgres.postgres /var/lib/postgresql/9.4/archive') + sudo("systemctl stop postgresql") + sudo("mkdir -p /var/lib/postgresql/9.4/archive") + sudo("chown postgres.postgres /var/lib/postgresql/9.4/archive") with settings(warn_only=True): - sudo('su postgres -c "rsync -Pav -e \'ssh -i ~postgres/.ssh/newsblur.key\' --stats --progress postgres@%s:/var/lib/postgresql/9.4/main /var/lib/postgresql/9.4/ --exclude postmaster.pid"' % master) - put('config/postgresql_recovery.conf', '/var/lib/postgresql/9.4/main/recovery.conf', use_sudo=True) - sudo('systemctl start postgresql') + sudo( + "su postgres -c \"rsync -Pav -e 'ssh -i ~postgres/.ssh/newsblur.key' --stats --progress postgres@%s:/var/lib/postgresql/9.4/main /var/lib/postgresql/9.4/ --exclude postmaster.pid\"" + % master + ) + put("config/postgresql_recovery.conf", "/var/lib/postgresql/9.4/main/recovery.conf", use_sudo=True) + sudo("systemctl start postgresql") # old: sudo su postgres -c "psql -c \"SELECT pg_stop_backup()\"" - + # Don't forget to add 'setup_postgres_backups' to new - + def disable_thp(): - put('config/disable_transparent_hugepages.sh', '/etc/init.d/disable-transparent-hugepages', use_sudo=True) - sudo('chmod 755 /etc/init.d/disable-transparent-hugepages') - sudo('update-rc.d disable-transparent-hugepages defaults') - + put("config/disable_transparent_hugepages.sh", "/etc/init.d/disable-transparent-hugepages", use_sudo=True) + sudo("chmod 755 /etc/init.d/disable-transparent-hugepages") + sudo("update-rc.d disable-transparent-hugepages defaults") + + def setup_mongo(): MONGODB_VERSION = "3.4.24" pull() disable_thp() - sudo('systemctl enable rc-local.service') # Enable rc.local - sudo('echo "#!/bin/sh -e\n\nif test -f 
/sys/kernel/mm/transparent_hugepage/enabled; then\n\ + sudo("systemctl enable rc-local.service") # Enable rc.local + sudo( + 'echo "#!/bin/sh -e\n\nif test -f /sys/kernel/mm/transparent_hugepage/enabled; then\n\ echo never > /sys/kernel/mm/transparent_hugepage/enabled\n\ fi\n\ if test -f /sys/kernel/mm/transparent_hugepage/defrag; then\n\ echo never > /sys/kernel/mm/transparent_hugepage/defrag\n\ fi\n\n\ - exit 0" | sudo tee /etc/rc.local') - sudo('curl -fsSL https://www.mongodb.org/static/pgp/server-3.4.asc | sudo apt-key add -') + exit 0" | sudo tee /etc/rc.local' + ) + sudo("curl -fsSL https://www.mongodb.org/static/pgp/server-3.4.asc | sudo apt-key add -") # sudo('echo "deb http://downloads-distro.mongodb.org/repo/ubuntu-upstart dist 10gen" | sudo tee /etc/apt/sources.list.d/mongodb.list') # sudo('echo "\ndeb http://downloads-distro.mongodb.org/repo/debian-sysvinit dist 10gen" | sudo tee -a /etc/apt/sources.list') # sudo('echo "deb http://repo.mongodb.org/apt/ubuntu trusty/mongodb-org/3.2 multiverse" | sudo tee /etc/apt/sources.list.d/mongodb-org-3.2.list') - sudo('echo "deb http://repo.mongodb.org/apt/ubuntu xenial/mongodb-org/3.4 multiverse" | sudo tee /etc/apt/sources.list.d/mongodb-org-3.4.list') - sudo('apt-get update') - sudo('apt-get install -y mongodb-org=%s mongodb-org-server=%s mongodb-org-shell=%s mongodb-org-mongos=%s mongodb-org-tools=%s' % - (MONGODB_VERSION, MONGODB_VERSION, MONGODB_VERSION, MONGODB_VERSION, MONGODB_VERSION)) - put('config/mongodb.%s.conf' % ('prod' if env.user != 'ubuntu' else 'ec2'), - '/etc/mongodb.conf', use_sudo=True) - put('config/mongodb.service', '/etc/systemd/system/mongodb.service', use_sudo=True) + sudo( + 'echo "deb http://repo.mongodb.org/apt/ubuntu xenial/mongodb-org/3.4 multiverse" | sudo tee /etc/apt/sources.list.d/mongodb-org-3.4.list' + ) + sudo("apt-get update") + sudo( + "apt-get install -y mongodb-org=%s mongodb-org-server=%s mongodb-org-shell=%s mongodb-org-mongos=%s mongodb-org-tools=%s" + % (MONGODB_VERSION, MONGODB_VERSION, MONGODB_VERSION, MONGODB_VERSION, MONGODB_VERSION) + ) + put( + "config/mongodb.%s.conf" % ("prod" if env.user != "ubuntu" else "ec2"), + "/etc/mongodb.conf", + use_sudo=True, + ) + put("config/mongodb.service", "/etc/systemd/system/mongodb.service", use_sudo=True) run('echo "ulimit -n 100000" > mongodb.defaults') - sudo('mv mongodb.defaults /etc/default/mongod') - sudo('mkdir -p /var/log/mongodb') - sudo('chown mongodb /var/log/mongodb') - put('config/logrotate.mongo.conf', '/etc/logrotate.d/mongod', use_sudo=True) - sudo('systemctl enable mongodb') - + sudo("mv mongodb.defaults /etc/default/mongod") + sudo("mkdir -p /var/log/mongodb") + sudo("chown mongodb /var/log/mongodb") + put("config/logrotate.mongo.conf", "/etc/logrotate.d/mongod", use_sudo=True) + sudo("systemctl enable mongodb") + # Reclaim 5% disk space used for root logs. Set to 1%. 
with settings(warn_only=True): - sudo('tune2fs -m 1 /dev/vda1') + sudo("tune2fs -m 1 /dev/vda1") + def setup_mongo_configsvr(): - sudo('mkdir -p /var/lib/mongodb_configsvr') - sudo('chown mongodb.mongodb /var/lib/mongodb_configsvr') - put('config/mongodb.configsvr.conf', '/etc/mongodb.configsvr.conf', use_sudo=True) - put('config/mongodb.configsvr-init', '/etc/init.d/mongodb-configsvr', use_sudo=True) - sudo('chmod u+x /etc/init.d/mongodb-configsvr') + sudo("mkdir -p /var/lib/mongodb_configsvr") + sudo("chown mongodb.mongodb /var/lib/mongodb_configsvr") + put("config/mongodb.configsvr.conf", "/etc/mongodb.configsvr.conf", use_sudo=True) + put("config/mongodb.configsvr-init", "/etc/init.d/mongodb-configsvr", use_sudo=True) + sudo("chmod u+x /etc/init.d/mongodb-configsvr") run('echo "ulimit -n 100000" > mongodb_configsvr.defaults') - sudo('mv mongodb_configsvr.defaults /etc/default/mongodb_configsvr') - sudo('update-rc.d -f mongodb-configsvr defaults') - sudo('/etc/init.d/mongodb-configsvr start') + sudo("mv mongodb_configsvr.defaults /etc/default/mongodb_configsvr") + sudo("update-rc.d -f mongodb-configsvr defaults") + sudo("/etc/init.d/mongodb-configsvr start") + def setup_mongo_mongos(): - put('config/mongodb.mongos.conf', '/etc/mongodb.mongos.conf', use_sudo=True) - put('config/mongodb.mongos-init', '/etc/init.d/mongodb-mongos', use_sudo=True) - sudo('chmod u+x /etc/init.d/mongodb-mongos') + put("config/mongodb.mongos.conf", "/etc/mongodb.mongos.conf", use_sudo=True) + put("config/mongodb.mongos-init", "/etc/init.d/mongodb-mongos", use_sudo=True) + sudo("chmod u+x /etc/init.d/mongodb-mongos") run('echo "ulimit -n 100000" > mongodb_mongos.defaults') - sudo('mv mongodb_mongos.defaults /etc/default/mongodb_mongos') - sudo('update-rc.d -f mongodb-mongos defaults') - sudo('/etc/init.d/mongodb-mongos restart') + sudo("mv mongodb_mongos.defaults /etc/default/mongodb_mongos") + sudo("update-rc.d -f mongodb-mongos defaults") + sudo("/etc/init.d/mongodb-mongos restart") + def setup_mongo_mms(): pull() - sudo('rm -f /etc/supervisor/conf.d/mongomms.conf') - sudo('supervisorctl reread') - sudo('supervisorctl update') + sudo("rm -f /etc/supervisor/conf.d/mongomms.conf") + sudo("supervisorctl reread") + sudo("supervisorctl update") with cd(env.VENDOR_PATH): - sudo('apt-get remove -y mongodb-mms-monitoring-agent') - run('curl -OL https://mms.mongodb.com/download/agent/monitoring/mongodb-mms-monitoring-agent_2.2.0.70-1_amd64.deb') - sudo('dpkg -i mongodb-mms-monitoring-agent_2.2.0.70-1_amd64.deb') - run('rm mongodb-mms-monitoring-agent_2.2.0.70-1_amd64.deb') - put(os.path.join(env.SECRETS_PATH, 'settings/mongo_mms_config.txt'), - 'mongo_mms_config.txt') - sudo("echo \"\n\" | sudo tee -a /etc/mongodb-mms/monitoring-agent.config") - sudo('cat mongo_mms_config.txt | sudo tee -a /etc/mongodb-mms/monitoring-agent.config') - sudo('start mongodb-mms-monitoring-agent') + sudo("apt-get remove -y mongodb-mms-monitoring-agent") + run( + "curl -OL https://mms.mongodb.com/download/agent/monitoring/mongodb-mms-monitoring-agent_2.2.0.70-1_amd64.deb" + ) + sudo("dpkg -i mongodb-mms-monitoring-agent_2.2.0.70-1_amd64.deb") + run("rm mongodb-mms-monitoring-agent_2.2.0.70-1_amd64.deb") + put(os.path.join(env.SECRETS_PATH, "settings/mongo_mms_config.txt"), "mongo_mms_config.txt") + sudo('echo "\n" | sudo tee -a /etc/mongodb-mms/monitoring-agent.config') + sudo("cat mongo_mms_config.txt | sudo tee -a /etc/mongodb-mms/monitoring-agent.config") + sudo("start mongodb-mms-monitoring-agent") + def setup_redis(slave=False): - 
redis_version = '3.2.6' + redis_version = "3.2.6" with cd(env.VENDOR_PATH): - run('wget http://download.redis.io/releases/redis-%s.tar.gz' % redis_version) - run('tar -xzf redis-%s.tar.gz' % redis_version) - run('rm redis-%s.tar.gz' % redis_version) - with cd(os.path.join(env.VENDOR_PATH, 'redis-%s' % redis_version)): - sudo('make install') - put('config/redis-init', '/etc/init.d/redis', use_sudo=True) - sudo('chmod u+x /etc/init.d/redis') - put('config/redis.conf', '/etc/redis.conf', use_sudo=True) + run("wget http://download.redis.io/releases/redis-%s.tar.gz" % redis_version) + run("tar -xzf redis-%s.tar.gz" % redis_version) + run("rm redis-%s.tar.gz" % redis_version) + with cd(os.path.join(env.VENDOR_PATH, "redis-%s" % redis_version)): + sudo("make install") + put("config/redis-init", "/etc/init.d/redis", use_sudo=True) + sudo("chmod u+x /etc/init.d/redis") + put("config/redis.conf", "/etc/redis.conf", use_sudo=True) if slave: - put('config/redis_slave.conf', '/etc/redis_server.conf', use_sudo=True) + put("config/redis_slave.conf", "/etc/redis_server.conf", use_sudo=True) else: - put('config/redis_master.conf', '/etc/redis_server.conf', use_sudo=True) + put("config/redis_master.conf", "/etc/redis_server.conf", use_sudo=True) # sudo('chmod 666 /proc/sys/vm/overcommit_memory', pty=False) # run('echo "1" > /proc/sys/vm/overcommit_memory', pty=False) # sudo('chmod 644 /proc/sys/vm/overcommit_memory', pty=False) disable_thp() - sudo('systemctl enable rc-local.service') # Enable rc.local - sudo('echo "#!/bin/sh -e\n\nif test -f /sys/kernel/mm/transparent_hugepage/enabled; then\n\ + sudo("systemctl enable rc-local.service") # Enable rc.local + sudo( + 'echo "#!/bin/sh -e\n\nif test -f /sys/kernel/mm/transparent_hugepage/enabled; then\n\ echo never > /sys/kernel/mm/transparent_hugepage/enabled\n\ fi\n\ if test -f /sys/kernel/mm/transparent_hugepage/defrag; then\n\ echo never > /sys/kernel/mm/transparent_hugepage/defrag\n\ fi\n\n\ - exit 0" | sudo tee /etc/rc.local') + exit 0" | sudo tee /etc/rc.local' + ) sudo("echo 1 | sudo tee /proc/sys/vm/overcommit_memory") sudo('echo "vm.overcommit_memory = 1" | sudo tee -a /etc/sysctl.conf') sudo("sysctl vm.overcommit_memory=1") - put('config/redis_rclocal.txt', '/etc/rc.local', use_sudo=True) + put("config/redis_rclocal.txt", "/etc/rc.local", use_sudo=True) sudo("chown root.root /etc/rc.local") sudo("chmod a+x /etc/rc.local") sudo('echo "never" | sudo tee /sys/kernel/mm/transparent_hugepage/enabled') run('echo "\nnet.core.somaxconn=65535\n" | sudo tee -a /etc/sysctl.conf', pty=False) - sudo('mkdir -p /var/lib/redis') - sudo('update-rc.d redis defaults') - sudo('/etc/init.d/redis stop') - sudo('/etc/init.d/redis start') + sudo("mkdir -p /var/lib/redis") + sudo("update-rc.d redis defaults") + sudo("/etc/init.d/redis stop") + sudo("/etc/init.d/redis start") setup_syncookies() config_monit_redis() - + + def setup_munin(): - sudo('apt-get update') - sudo('apt-get install -y munin munin-node munin-plugins-extra spawn-fcgi') - put('config/munin.conf', '/etc/munin/munin.conf', use_sudo=True) # Only use on main munin - put('config/spawn_fcgi_munin_graph.conf', '/etc/init.d/spawn_fcgi_munin_graph', use_sudo=True) - put('config/spawn_fcgi_munin_html.conf', '/etc/init.d/spawn_fcgi_munin_html', use_sudo=True) - sudo('chmod u+x /etc/init.d/spawn_fcgi_munin_graph') - sudo('chmod u+x /etc/init.d/spawn_fcgi_munin_html') + sudo("apt-get update") + sudo("apt-get install -y munin munin-node munin-plugins-extra spawn-fcgi") + put("config/munin.conf", "/etc/munin/munin.conf", 
use_sudo=True) # Only use on main munin + put("config/spawn_fcgi_munin_graph.conf", "/etc/init.d/spawn_fcgi_munin_graph", use_sudo=True) + put("config/spawn_fcgi_munin_html.conf", "/etc/init.d/spawn_fcgi_munin_html", use_sudo=True) + sudo("chmod u+x /etc/init.d/spawn_fcgi_munin_graph") + sudo("chmod u+x /etc/init.d/spawn_fcgi_munin_html") with settings(warn_only=True): - sudo('chown nginx.www-data /var/log/munin/munin-cgi*') - sudo('chown nginx.www-data /usr/lib/cgi-bin/munin-cgi*') - sudo('chown nginx.www-data /usr/lib/munin/cgi/munin-cgi*') + sudo("chown nginx.www-data /var/log/munin/munin-cgi*") + sudo("chown nginx.www-data /usr/lib/cgi-bin/munin-cgi*") + sudo("chown nginx.www-data /usr/lib/munin/cgi/munin-cgi*") with settings(warn_only=True): - sudo('/etc/init.d/spawn_fcgi_munin_graph stop') - sudo('/etc/init.d/spawn_fcgi_munin_graph start') - sudo('update-rc.d spawn_fcgi_munin_graph defaults') - sudo('/etc/init.d/spawn_fcgi_munin_html stop') - sudo('/etc/init.d/spawn_fcgi_munin_html start') - sudo('update-rc.d spawn_fcgi_munin_html defaults') - sudo('/etc/init.d/munin-node stop') + sudo("/etc/init.d/spawn_fcgi_munin_graph stop") + sudo("/etc/init.d/spawn_fcgi_munin_graph start") + sudo("update-rc.d spawn_fcgi_munin_graph defaults") + sudo("/etc/init.d/spawn_fcgi_munin_html stop") + sudo("/etc/init.d/spawn_fcgi_munin_html start") + sudo("update-rc.d spawn_fcgi_munin_html defaults") + sudo("/etc/init.d/munin-node stop") time.sleep(2) - sudo('/etc/init.d/munin-node start') + sudo("/etc/init.d/munin-node start") with settings(warn_only=True): - sudo('chown nginx.www-data /var/log/munin/munin-cgi*') - sudo('chown nginx.www-data /usr/lib/cgi-bin/munin-cgi*') - sudo('chown nginx.www-data /usr/lib/munin/cgi/munin-cgi*') - sudo('chmod a+rw /var/log/munin/*') + sudo("chown nginx.www-data /var/log/munin/munin-cgi*") + sudo("chown nginx.www-data /usr/lib/cgi-bin/munin-cgi*") + sudo("chown nginx.www-data /usr/lib/munin/cgi/munin-cgi*") + sudo("chmod a+rw /var/log/munin/*") with settings(warn_only=True): - sudo('/etc/init.d/spawn_fcgi_munin_graph start') - sudo('/etc/init.d/spawn_fcgi_munin_html start') + sudo("/etc/init.d/spawn_fcgi_munin_graph start") + sudo("/etc/init.d/spawn_fcgi_munin_html start") + def copy_munin_data(from_server): - put(os.path.join(env.SECRETS_PATH, 'keys/newsblur.key'), '~/.ssh/newsblur.key') - put(os.path.join(env.SECRETS_PATH, 'keys/newsblur.key.pub'), '~/.ssh/newsblur.key.pub') - run('chmod 600 ~/.ssh/newsblur*') + put(os.path.join(env.SECRETS_PATH, "keys/newsblur.key"), "~/.ssh/newsblur.key") + put(os.path.join(env.SECRETS_PATH, "keys/newsblur.key.pub"), "~/.ssh/newsblur.key.pub") + run("chmod 600 ~/.ssh/newsblur*") # put("config/munin.nginx.conf", "/usr/local/nginx/conf/sites-enabled/munin.conf", use_sudo=True) - sudo('/etc/init.d/nginx reload') + sudo("/etc/init.d/nginx reload") - run("rsync -az -e \"ssh -i /home/sclay/.ssh/newsblur.key\" --stats --progress %s:/var/lib/munin/ /srv/munin" % from_server) - sudo('rm -fr /var/lib/bak-munin') + run( + 'rsync -az -e "ssh -i /home/sclay/.ssh/newsblur.key" --stats --progress %s:/var/lib/munin/ /srv/munin' + % from_server + ) + sudo("rm -fr /var/lib/bak-munin") sudo("mv /var/lib/munin /var/lib/bak-munin") sudo("mv /srv/munin /var/lib/") sudo("chown munin.munin -R /var/lib/munin") - run("sudo rsync -az -e \"ssh -i /home/sclay/.ssh/newsblur.key\" --stats --progress %s:/etc/munin/ /srv/munin-etc" % from_server) - sudo('rm -fr /etc/munin') + run( + 'sudo rsync -az -e "ssh -i /home/sclay/.ssh/newsblur.key" --stats --progress 
%s:/etc/munin/ /srv/munin-etc' + % from_server + ) + sudo("rm -fr /etc/munin") sudo("mv /srv/munin-etc /etc/munin") sudo("chown munin.munin -R /etc/munin") - run("sudo rsync -az -e \"ssh -i /home/sclay/.ssh/newsblur.key\" --stats --progress %s:/var/cache/munin/www/ /srv/munin-www" % from_server) - sudo('rm -fr /var/cache/munin/www') + run( + 'sudo rsync -az -e "ssh -i /home/sclay/.ssh/newsblur.key" --stats --progress %s:/var/cache/munin/www/ /srv/munin-www' + % from_server + ) + sudo("rm -fr /var/cache/munin/www") sudo("mv /srv/munin-www /var/cache/munin/www") sudo("chown munin.munin -R /var/cache/munin/www") sudo("/etc/init.d/munin restart") sudo("/etc/init.d/munin-node restart") - + def setup_db_munin(): - sudo('rm -f /etc/munin/plugins/mongo*') - sudo('rm -f /etc/munin/plugins/pg_*') - sudo('rm -f /etc/munin/plugins/redis_*') - sudo('cp -frs %s/config/munin/mongo* /etc/munin/plugins/' % env.NEWSBLUR_PATH) - sudo('cp -frs %s/config/munin/pg_* /etc/munin/plugins/' % env.NEWSBLUR_PATH) - sudo('cp -frs %s/config/munin/redis_* /etc/munin/plugins/' % env.NEWSBLUR_PATH) - sudo('/etc/init.d/munin-node stop') + sudo("rm -f /etc/munin/plugins/mongo*") + sudo("rm -f /etc/munin/plugins/pg_*") + sudo("rm -f /etc/munin/plugins/redis_*") + sudo("cp -frs %s/config/munin/mongo* /etc/munin/plugins/" % env.NEWSBLUR_PATH) + sudo("cp -frs %s/config/munin/pg_* /etc/munin/plugins/" % env.NEWSBLUR_PATH) + sudo("cp -frs %s/config/munin/redis_* /etc/munin/plugins/" % env.NEWSBLUR_PATH) + sudo("/etc/init.d/munin-node stop") time.sleep(2) - sudo('/etc/init.d/munin-node start') + sudo("/etc/init.d/munin-node start") def enable_celerybeat(): with virtualenv(): - run('mkdir -p data') - put('config/supervisor_celerybeat.conf', '/etc/supervisor/conf.d/celerybeat.conf', use_sudo=True) - put('config/supervisor_celeryd_work_queue.conf', '/etc/supervisor/conf.d/celeryd_work_queue.conf', use_sudo=True) - put('config/supervisor_celeryd_beat.conf', '/etc/supervisor/conf.d/celeryd_beat.conf', use_sudo=True) - put('config/supervisor_celeryd_beat_feeds.conf', '/etc/supervisor/conf.d/celeryd_beat_feeds.conf', use_sudo=True) - sudo('supervisorctl reread') - sudo('supervisorctl update') + run("mkdir -p data") + put("config/supervisor_celerybeat.conf", "/etc/supervisor/conf.d/celerybeat.conf", use_sudo=True) + put( + "config/supervisor_celeryd_work_queue.conf", + "/etc/supervisor/conf.d/celeryd_work_queue.conf", + use_sudo=True, + ) + put("config/supervisor_celeryd_beat.conf", "/etc/supervisor/conf.d/celeryd_beat.conf", use_sudo=True) + put( + "config/supervisor_celeryd_beat_feeds.conf", + "/etc/supervisor/conf.d/celeryd_beat_feeds.conf", + use_sudo=True, + ) + sudo("supervisorctl reread") + sudo("supervisorctl update") + def setup_db_mdadm(): - sudo('apt-get -y install xfsprogs mdadm') - sudo('yes | mdadm --create /dev/md0 --level=0 -c256 --raid-devices=4 /dev/xvdf /dev/xvdg /dev/xvdh /dev/xvdi') - sudo('mkfs.xfs /dev/md0') - sudo('mkdir -p /srv/db') - sudo('mount -t xfs -o rw,nobarrier,noatime,nodiratime /dev/md0 /srv/db') - sudo('mkdir -p /srv/db/mongodb') - sudo('chown mongodb.mongodb /srv/db/mongodb') + sudo("apt-get -y install xfsprogs mdadm") + sudo( + "yes | mdadm --create /dev/md0 --level=0 -c256 --raid-devices=4 /dev/xvdf /dev/xvdg /dev/xvdh /dev/xvdi" + ) + sudo("mkfs.xfs /dev/md0") + sudo("mkdir -p /srv/db") + sudo("mount -t xfs -o rw,nobarrier,noatime,nodiratime /dev/md0 /srv/db") + sudo("mkdir -p /srv/db/mongodb") + sudo("chown mongodb.mongodb /srv/db/mongodb") sudo("echo 'DEVICE /dev/xvdf /dev/xvdg /dev/xvdh 
/dev/xvdi' | sudo tee -a /etc/mdadm/mdadm.conf") sudo("mdadm --examine --scan | sudo tee -a /etc/mdadm/mdadm.conf") - sudo("echo '/dev/md0 /srv/db xfs rw,nobarrier,noatime,nodiratime,noauto 0 0' | sudo tee -a /etc/fstab") + sudo( + "echo '/dev/md0 /srv/db xfs rw,nobarrier,noatime,nodiratime,noauto 0 0' | sudo tee -a /etc/fstab" + ) sudo("sudo update-initramfs -u -v -k `uname -r`") + def setup_original_page_server(): setup_node_app() - sudo('mkdir -p /srv/originals') - sudo('chown %s.%s -R /srv/originals' % (env.user, env.user)) # We assume that the group is the same name as the user. It's common on linux + sudo("mkdir -p /srv/originals") + sudo( + "chown %s.%s -R /srv/originals" % (env.user, env.user) + ) # We assume that the group is the same name as the user. It's common on linux config_monit_original() - put('config/supervisor_node_original.conf', - '/etc/supervisor/conf.d/node_original.conf', use_sudo=True) - sudo('supervisorctl reread') - sudo('supervisorctl reload') + put("config/supervisor_node_original.conf", "/etc/supervisor/conf.d/node_original.conf", use_sudo=True) + sudo("supervisorctl reread") + sudo("supervisorctl reload") + def setup_elasticsearch(): ES_VERSION = "2.4.4" - sudo('add-apt-repository -y ppa:openjdk-r/ppa') - sudo('apt-get update') - sudo('apt-get install openjdk-7-jre -y') + sudo("add-apt-repository -y ppa:openjdk-r/ppa") + sudo("apt-get update") + sudo("apt-get install openjdk-7-jre -y") with cd(env.VENDOR_PATH): - run('mkdir -p elasticsearch-%s' % ES_VERSION) - with cd(os.path.join(env.VENDOR_PATH, 'elasticsearch-%s' % ES_VERSION)): + run("mkdir -p elasticsearch-%s" % ES_VERSION) + with cd(os.path.join(env.VENDOR_PATH, "elasticsearch-%s" % ES_VERSION)): # run('wget https://artifacts.elastic.co/downloads/elasticsearch/elasticsearch-%s.deb' % ES_VERSION) # For v5+ - run('wget http://download.elasticsearch.org/elasticsearch/elasticsearch/elasticsearch-%s.deb' % ES_VERSION) # For v1-v2 - sudo('dpkg -i elasticsearch-%s.deb' % ES_VERSION) - if not files.exists('/usr/share/elasticsearch/plugins/head'): - sudo('/usr/share/elasticsearch/bin/plugin install mobz/elasticsearch-head') + run( + "wget http://download.elasticsearch.org/elasticsearch/elasticsearch/elasticsearch-%s.deb" + % ES_VERSION + ) # For v1-v2 + sudo("dpkg -i elasticsearch-%s.deb" % ES_VERSION) + if not files.exists("/usr/share/elasticsearch/plugins/head"): + sudo("/usr/share/elasticsearch/bin/plugin install mobz/elasticsearch-head") + def setup_db_search(): - put('config/supervisor_celeryd_search_indexer.conf', '/etc/supervisor/conf.d/celeryd_search_indexer.conf', use_sudo=True) - put('config/supervisor_celeryd_search_indexer_tasker.conf', '/etc/supervisor/conf.d/celeryd_search_indexer_tasker.conf', use_sudo=True) - sudo('supervisorctl reread') - sudo('supervisorctl update') + put( + "config/supervisor_celeryd_search_indexer.conf", + "/etc/supervisor/conf.d/celeryd_search_indexer.conf", + use_sudo=True, + ) + put( + "config/supervisor_celeryd_search_indexer_tasker.conf", + "/etc/supervisor/conf.d/celeryd_search_indexer_tasker.conf", + use_sudo=True, + ) + sudo("supervisorctl reread") + sudo("supervisorctl update") + def setup_imageproxy(install_go=False): # sudo('apt-get update') @@ -1500,86 +1714,105 @@ def setup_imageproxy(install_go=False): if install_go: with cd(env.VENDOR_PATH): with settings(warn_only=True): - run('git clone https://github.com/willnorris/imageproxy.git') - run('wget https://dl.google.com/go/go1.13.3.linux-amd64.tar.gz') - run('tar -xzf go1.13.3.linux-amd64.tar.gz') - run('rm 
go1.13.3.linux-amd64.tar.gz') - sudo('rm /usr/bin/go') - sudo('ln -s /srv/code/go/bin/go /usr/bin/go') - with cd(os.path.join(env.VENDOR_PATH, 'imageproxy')): - run('go get willnorris.com/go/imageproxy/cmd/imageproxy') - put(os.path.join(env.SECRETS_PATH, 'settings/imageproxy.key'), - '/etc/imageproxy.key', use_sudo=True) - put(os.path.join(env.NEWSBLUR_PATH, 'config/supervisor_imageproxy.conf'), '/etc/supervisor/conf.d/supervisor_imageproxy.conf', use_sudo=True) - sudo('supervisorctl reread') - sudo('supervisorctl update') - sudo('ufw allow 443') - sudo('ufw allow 80') - put(os.path.join(env.NEWSBLUR_PATH, 'config/nginx.imageproxy.conf'), "/usr/local/nginx/conf/sites-enabled/imageproxy.conf", use_sudo=True) + run("git clone https://github.com/willnorris/imageproxy.git") + run("wget https://dl.google.com/go/go1.13.3.linux-amd64.tar.gz") + run("tar -xzf go1.13.3.linux-amd64.tar.gz") + run("rm go1.13.3.linux-amd64.tar.gz") + sudo("rm /usr/bin/go") + sudo("ln -s /srv/code/go/bin/go /usr/bin/go") + with cd(os.path.join(env.VENDOR_PATH, "imageproxy")): + run("go get willnorris.com/go/imageproxy/cmd/imageproxy") + put(os.path.join(env.SECRETS_PATH, "settings/imageproxy.key"), "/etc/imageproxy.key", use_sudo=True) + put( + os.path.join(env.NEWSBLUR_PATH, "config/supervisor_imageproxy.conf"), + "/etc/supervisor/conf.d/supervisor_imageproxy.conf", + use_sudo=True, + ) + sudo("supervisorctl reread") + sudo("supervisorctl update") + sudo("ufw allow 443") + sudo("ufw allow 80") + put( + os.path.join(env.NEWSBLUR_PATH, "config/nginx.imageproxy.conf"), + "/usr/local/nginx/conf/sites-enabled/imageproxy.conf", + use_sudo=True, + ) sudo("/etc/init.d/nginx restart") - - - + + @parallel def setup_usage_monitor(): - sudo('ln -fs %s/utils/monitor_disk_usage.py /etc/cron.daily/monitor_disk_usage' % env.NEWSBLUR_PATH) - sudo('/etc/cron.daily/monitor_disk_usage') - + sudo("ln -fs %s/utils/monitor_disk_usage.py /etc/cron.daily/monitor_disk_usage" % env.NEWSBLUR_PATH) + sudo("/etc/cron.daily/monitor_disk_usage") + + @parallel def setup_feeds_fetched_monitor(): - sudo('ln -fs %s/utils/monitor_task_fetches.py /etc/cron.hourly/monitor_task_fetches' % env.NEWSBLUR_PATH) - sudo('/etc/cron.hourly/monitor_task_fetches') - + sudo("ln -fs %s/utils/monitor_task_fetches.py /etc/cron.hourly/monitor_task_fetches" % env.NEWSBLUR_PATH) + sudo("/etc/cron.hourly/monitor_task_fetches") + + @parallel def setup_newsletter_monitor(): - sudo('ln -fs %s/utils/monitor_newsletter_delivery.py /etc/cron.hourly/monitor_newsletter_delivery' % env.NEWSBLUR_PATH) - sudo('/etc/cron.hourly/monitor_newsletter_delivery') - + sudo( + "ln -fs %s/utils/monitor_newsletter_delivery.py /etc/cron.hourly/monitor_newsletter_delivery" + % env.NEWSBLUR_PATH + ) + sudo("/etc/cron.hourly/monitor_newsletter_delivery") + + @parallel def setup_queue_monitor(): - sudo('ln -fs %s/utils/monitor_work_queue.py /etc/cron.hourly/monitor_work_queue' % env.NEWSBLUR_PATH) - sudo('/etc/cron.hourly/monitor_work_queue') - + sudo("ln -fs %s/utils/monitor_work_queue.py /etc/cron.hourly/monitor_work_queue" % env.NEWSBLUR_PATH) + sudo("/etc/cron.hourly/monitor_work_queue") + + @parallel def setup_redis_monitor(): - run('sleep 5') # Wait for redis to startup so the log file is there - sudo('ln -fs %s/utils/monitor_redis_bgsave.py /etc/cron.daily/monitor_redis_bgsave' % env.NEWSBLUR_PATH) + run("sleep 5") # Wait for redis to startup so the log file is there + sudo("ln -fs %s/utils/monitor_redis_bgsave.py /etc/cron.daily/monitor_redis_bgsave" % env.NEWSBLUR_PATH) with 
settings(warn_only=True): - sudo('/etc/cron.daily/monitor_redis_bgsave') - + sudo("/etc/cron.daily/monitor_redis_bgsave") + + # ================ # = Setup - Task = # ================ -def setup_task_firewall(): - sudo('ufw default deny') - sudo('ufw allow ssh') - sudo('ufw allow 80') - sudo('ufw --force enable') -def setup_motd(role='app'): - motd = '/etc/update-motd.d/22-newsblur-motd' - put('config/motd_%s.txt' % role, motd, use_sudo=True) - sudo('chown root.root %s' % motd) - sudo('chmod a+x %s' % motd) +def setup_task_firewall(): + sudo("ufw default deny") + sudo("ufw allow ssh") + sudo("ufw allow 80") + sudo("ufw --force enable") + + +def setup_motd(role="app"): + motd = "/etc/update-motd.d/22-newsblur-motd" + put("config/motd_%s.txt" % role, motd, use_sudo=True) + sudo("chown root.root %s" % motd) + sudo("chmod a+x %s" % motd) + def enable_celery_supervisor(queue=None, update=True): if not queue: - put('config/supervisor_celeryd.conf', '/etc/supervisor/conf.d/celeryd.conf', use_sudo=True) + put("config/supervisor_celeryd.conf", "/etc/supervisor/conf.d/celeryd.conf", use_sudo=True) else: - put('config/supervisor_celeryd_%s.conf' % queue, '/etc/supervisor/conf.d/celeryd.conf', use_sudo=True) + put("config/supervisor_celeryd_%s.conf" % queue, "/etc/supervisor/conf.d/celeryd.conf", use_sudo=True) - sudo('supervisorctl reread') + sudo("supervisorctl reread") if update: - sudo('supervisorctl update') + sudo("supervisorctl update") + @parallel def copy_db_settings(): return copy_task_settings() - + + @parallel def copy_task_settings(): - server_hostname = run('hostname') + server_hostname = run("hostname") # if any([(n in server_hostname) for n in ['task', 'db', 'search', 'node', 'push']]): host = server_hostname # elif env.host: @@ -1588,31 +1821,38 @@ def copy_task_settings(): # host = env.host_string.split('.', 2)[0] with settings(warn_only=True): - run('rm -f %s/local_settings.py' % env.NEWSBLUR_PATH) - put(os.path.join(env.SECRETS_PATH, 'settings/task_settings.py'), - '%s/newsblur/local_settings.py' % env.NEWSBLUR_PATH) - run('echo "\nSERVER_NAME = \\\\"%s\\\\"" >> %s/newsblur/local_settings.py' % (host, env.NEWSBLUR_PATH)) + run("rm -f %s/local_settings.py" % env.NEWSBLUR_PATH) + put( + os.path.join(env.SECRETS_PATH, "settings/task_settings.py"), + "%s/newsblur/local_settings.py" % env.NEWSBLUR_PATH, + ) + run( + 'echo "\nSERVER_NAME = \\\\"%s\\\\"" >> %s/newsblur/local_settings.py' % (host, env.NEWSBLUR_PATH) + ) + @parallel def copy_spam(): - put(os.path.join(env.SECRETS_PATH, 'spam/spam.py'), '%s/apps/social/spam.py' % env.NEWSBLUR_PATH) - + put(os.path.join(env.SECRETS_PATH, "spam/spam.py"), "%s/apps/social/spam.py" % env.NEWSBLUR_PATH) + + # ========================= # = Setup - Digital Ocean = # ========================= DO_SIZES = { - '1': 's-1vcpu-1gb', - '2': 's-1vcpu-2gb', - '4': 's-2vcpu-4gb', - '8': 's-4vcpu-8gb', - '16': 's-6vcpu-16gb', - '32': 's-8vcpu-32gb', - '48': 's-12vcpu-48gb', - '64': 's-16vcpu-64gb', - '32c': 'c-16', + "1": "s-1vcpu-1gb", + "2": "s-1vcpu-2gb", + "4": "s-2vcpu-4gb", + "8": "s-4vcpu-8gb", + "16": "s-6vcpu-16gb", + "32": "s-8vcpu-32gb", + "48": "s-12vcpu-48gb", + "64": "s-16vcpu-64gb", + "32c": "c-16", } + def setup_do(name, size=1, image=None): instance_size = DO_SIZES[str(size)] doapi = digitalocean.Manager(token=django_settings.DO_TOKEN_FABRIC) @@ -1623,25 +1863,27 @@ def setup_do(name, size=1, image=None): image = "ubuntu-20-04-x64" else: images = dict((s.name, s.id) for s in doapi.get_all_images()) - if image == "task": + if image == "task": 
image = images["task-2018-02"] elif image == "app": image = images["app-2018-02"] else: images = dict((s.name, s.id) for s in doapi.get_all_images()) print(images) - + name = do_name(name) env.doname = name print("Creating droplet: %s" % name) - instance = digitalocean.Droplet(token=django_settings.DO_TOKEN_FABRIC, - name=name, - size_slug=instance_size, - image=image, - region='nyc1', - monitoring=True, - private_networking=True, - ssh_keys=ssh_key_ids) + instance = digitalocean.Droplet( + token=django_settings.DO_TOKEN_FABRIC, + name=name, + size_slug=instance_size, + image=image, + region="nyc1", + monitoring=True, + private_networking=True, + ssh_keys=ssh_key_ids, + ) instance.create() time.sleep(2) instance = digitalocean.Droplet.get_object(django_settings.DO_TOKEN_FABRIC, instance.id) @@ -1649,12 +1891,12 @@ def setup_do(name, size=1, image=None): i = 0 while True: - if instance.status == 'active': + if instance.status == "active": print("...booted: %s" % instance.ip_address) time.sleep(5) break - elif instance.status == 'new': - print(".", end=' ') + elif instance.status == "new": + print(".", end=" ") sys.stdout.flush() instance = digitalocean.Droplet.get_object(django_settings.DO_TOKEN_FABRIC, instance.id) i += 1 @@ -1669,6 +1911,7 @@ def setup_do(name, size=1, image=None): add_user_to_do() assign_digitalocean_roledefs() + def do_name(name): if re.search(r"[0-9]", name): print(" ---> Using %s as hostname" % name) @@ -1680,48 +1923,52 @@ def do_name(name): for i in range(1, 100): try_host = "%s%02d" % (name, i) if try_host not in existing_hosts: - print(" ---> %s hosts in %s (%s). %s is unused." % (len(existing_hosts), name, - ', '.join(existing_hosts), try_host)) + print( + " ---> %s hosts in %s (%s). %s is unused." + % (len(existing_hosts), name, ", ".join(existing_hosts), try_host) + ) return try_host - - + + def add_user_to_do(): env.user = "root" repo_user = "sclay" with settings(warn_only=True): - run('useradd -m %s' % (repo_user)) + run("useradd -m %s" % (repo_user)) setup_sudoers("%s" % (repo_user)) - run('mkdir -p ~%s/.ssh && chmod 700 ~%s/.ssh' % (repo_user, repo_user)) - run('rm -fr ~%s/.ssh/id_dsa*' % (repo_user)) + run("mkdir -p ~%s/.ssh && chmod 700 ~%s/.ssh" % (repo_user, repo_user)) + run("rm -fr ~%s/.ssh/id_dsa*" % (repo_user)) run('ssh-keygen -t dsa -f ~%s/.ssh/id_dsa -N ""' % (repo_user)) - run('touch ~%s/.ssh/authorized_keys' % (repo_user)) + run("touch ~%s/.ssh/authorized_keys" % (repo_user)) copy_ssh_keys() - run('chown %s.%s -R ~%s/.ssh' % (repo_user, repo_user, repo_user)) + run("chown %s.%s -R ~%s/.ssh" % (repo_user, repo_user, repo_user)) env.user = repo_user + # =============== # = Setup - EC2 = # =============== + def setup_ec2(): - AMI_NAME = 'ami-834cf1ea' # Ubuntu 64-bit 12.04 LTS + AMI_NAME = "ami-834cf1ea" # Ubuntu 64-bit 12.04 LTS # INSTANCE_TYPE = 'c1.medium' - INSTANCE_TYPE = 'c1.medium' + INSTANCE_TYPE = "c1.medium" conn = EC2Connection(django_settings.AWS_ACCESS_KEY_ID, django_settings.AWS_SECRET_ACCESS_KEY) - reservation = conn.run_instances(AMI_NAME, instance_type=INSTANCE_TYPE, - key_name=env.user, - security_groups=['db-mongo']) + reservation = conn.run_instances( + AMI_NAME, instance_type=INSTANCE_TYPE, key_name=env.user, security_groups=["db-mongo"] + ) instance = reservation.instances[0] print("Booting reservation: %s/%s (size: %s)" % (reservation, instance, INSTANCE_TYPE)) i = 0 while True: - if instance.state == 'pending': - print(".", end=' ') + if instance.state == "pending": + print(".", end=" ") sys.stdout.flush() instance.update() 
i += 1 time.sleep(i) - elif instance.state == 'running': + elif instance.state == "running": print("...booted: %s" % instance.public_dns_name) time.sleep(5) break @@ -1732,213 +1979,246 @@ def setup_ec2(): host = instance.public_dns_name env.host_string = host + # ========== # = Deploy = # ========== + @parallel def pull(master=False): with virtualenv(): - run('git pull') + run("git pull") if master: - run('git checkout master') - run('git pull') + run("git checkout master") + run("git pull") + def pre_deploy(): compress_assets(bundle=True) + @serial def post_deploy(): cleanup_assets() + def role_for_host(): for role, hosts in list(env.roledefs.items()): if env.host in hosts: return role + @parallel def deploy(fast=False, reload=False): role = role_for_host() - if role in ['work', 'search', 'debug']: + if role in ["work", "search", "debug"]: deploy_code(copy_assets=False, fast=fast, reload=True) else: deploy_code(copy_assets=False, fast=fast, reload=reload) + @parallel def deploy_web(fast=False): role = role_for_host() - if role in ['work', 'search']: + if role in ["work", "search"]: deploy_code(copy_assets=True, fast=fast, reload=True) else: deploy_code(copy_assets=True, fast=fast) + @parallel def deploy_rebuild(fast=False): deploy_code(copy_assets=True, fast=fast, rebuild=True) + @parallel def kill_gunicorn(): with virtualenv(): - sudo('pkill -9 -u %s -f gunicorn_django' % env.user) - + sudo("pkill -9 -u %s -f gunicorn_django" % env.user) + + @parallel def deploy_code(copy_assets=False, rebuild=False, fast=False, reload=False): with virtualenv(): - run('git pull') - run('mkdir -p static') + run("git pull") + run("mkdir -p static") if rebuild: - run('rm -fr static/*') + run("rm -fr static/*") if copy_assets: transfer_assets() - + with virtualenv(): with settings(warn_only=True): if reload: - sudo('supervisorctl reload') + sudo("supervisorctl reload") elif fast: kill_gunicorn() else: - sudo('kill -HUP `cat /srv/newsblur/logs/gunicorn.pid`') + sudo("kill -HUP `cat /srv/newsblur/logs/gunicorn.pid`") + @parallel def kill(): - sudo('supervisorctl reload') + sudo("supervisorctl reload") with settings(warn_only=True): - if env.user == 'ubuntu': - sudo('./utils/kill_gunicorn.sh') + if env.user == "ubuntu": + sudo("./utils/kill_gunicorn.sh") else: - run('./utils/kill_gunicorn.sh') + run("./utils/kill_gunicorn.sh") + @parallel def deploy_node(): pull() with virtualenv(): - run('sudo supervisorctl restart node_unread') - run('sudo supervisorctl restart node_unread_ssl') - run('sudo supervisorctl restart node_favicons') - run('sudo supervisorctl restart node_text') + run("sudo supervisorctl restart node_unread") + run("sudo supervisorctl restart node_unread_ssl") + run("sudo supervisorctl restart node_favicons") + run("sudo supervisorctl restart node_text") + def gunicorn_restart(): restart_gunicorn() + def restart_gunicorn(): with virtualenv(), settings(warn_only=True): - run('sudo supervisorctl restart gunicorn') + run("sudo supervisorctl restart gunicorn") + def gunicorn_stop(): with virtualenv(), settings(warn_only=True): - run('sudo supervisorctl stop gunicorn') + run("sudo supervisorctl stop gunicorn") + def staging(): - with cd('~/staging'): - run('git pull') - run('kill -HUP `cat logs/gunicorn.pid`') - run('curl -s http://dev.newsblur.com > /dev/null') - run('curl -s http://dev.newsblur.com/m/ > /dev/null') + with cd("~/staging"): + run("git pull") + run("kill -HUP `cat logs/gunicorn.pid`") + run("curl -s http://dev.newsblur.com > /dev/null") + run("curl -s http://dev.newsblur.com/m/ > 
/dev/null") + def staging_build(): - with cd('~/staging'): - run('git pull') - run('./manage.py migrate') - run('kill -HUP `cat logs/gunicorn.pid`') - run('curl -s http://dev.newsblur.com > /dev/null') - run('curl -s http://dev.newsblur.com/m/ > /dev/null') + with cd("~/staging"): + run("git pull") + run("./manage.py migrate") + run("kill -HUP `cat logs/gunicorn.pid`") + run("curl -s http://dev.newsblur.com > /dev/null") + run("curl -s http://dev.newsblur.com/m/ > /dev/null") + @parallel def celery(): celery_slow() + def celery_slow(): with virtualenv(): - run('git pull') + run("git pull") celery_stop() celery_start() + @parallel def celery_fast(): with virtualenv(): - run('git pull') + run("git pull") celery_reload() + @parallel def celery_stop(): with virtualenv(): - sudo('supervisorctl stop celery') + sudo("supervisorctl stop celery") with settings(warn_only=True): - if env.user == 'ubuntu': - sudo('./utils/kill_celery.sh') + if env.user == "ubuntu": + sudo("./utils/kill_celery.sh") else: - run('./utils/kill_celery.sh') + run("./utils/kill_celery.sh") + @parallel def celery_start(): with virtualenv(): - run('sudo supervisorctl start celery') - run('tail logs/newsblur.log') + run("sudo supervisorctl start celery") + run("tail logs/newsblur.log") + @parallel def celery_reload(): with virtualenv(): - run('sudo supervisorctl reload celery') - run('tail logs/newsblur.log') + run("sudo supervisorctl reload celery") + run("tail logs/newsblur.log") + def kill_celery(): with virtualenv(): with settings(warn_only=True): - if env.user == 'ubuntu': - sudo('./utils/kill_celery.sh') + if env.user == "ubuntu": + sudo("./utils/kill_celery.sh") else: - run('./utils/kill_celery.sh') + run("./utils/kill_celery.sh") + def compress_assets(bundle=False): - local('jammit -c newsblur/assets.yml --base-url https://www.newsblur.com --output static') - local('tar -czf static.tgz static/*') + local("jammit -c newsblur/assets.yml --base-url https://www.newsblur.com --output static") + local("tar -czf static.tgz static/*") tries_left = 5 while True: try: success = False with settings(warn_only=True): - local('PYTHONPATH=/srv/newsblur python utils/backups/s3.py set static.tgz') + local("PYTHONPATH=/srv/newsblur python utils/backups/s3.py set static.tgz") success = True if not success: raise Exception("Ack!") break except Exception as e: - print(" ***> %s. Trying %s more time%s..." % (e, tries_left, '' if tries_left == 1 else 's')) + print(" ***> %s. Trying %s more time%s..." % (e, tries_left, "" if tries_left == 1 else "s")) tries_left -= 1 - if tries_left <= 0: break + if tries_left <= 0: + break def transfer_assets(): # filename = "deploy_%s.tgz" % env.commit # Easy rollback? Eh, can just upload it again. 
# run('PYTHONPATH=/srv/newsblur python s3.py get deploy_%s.tgz' % filename) - run('PYTHONPATH=/srv/newsblur python utils/backups/s3.py get static.tgz') + run("PYTHONPATH=/srv/newsblur python utils/backups/s3.py get static.tgz") # run('mv %s static/static.tgz' % filename) - run('mv static.tgz static/static.tgz') - run('tar -xzf static/static.tgz') - run('rm -f static/static.tgz') + run("mv static.tgz static/static.tgz") + run("tar -xzf static/static.tgz") + run("rm -f static/static.tgz") + def cleanup_assets(): - local('rm -f static.tgz') + local("rm -f static.tgz") + # =========== # = Backups = # =========== + def setup_redis_backups(name=None): # crontab for redis backups, name is either none, story, sessions, pubsub - crontab = ("0 4 * * * /srv/newsblur/venv/newsblur3/bin/python /srv/newsblur/utils/backups/backup_redis%s.py" % - (("_%s"%name) if name else "")) + crontab = ( + "0 4 * * * /srv/newsblur/venv/newsblur3/bin/python /srv/newsblur/utils/backups/backup_redis%s.py" + % (("_%s" % name) if name else "") + ) run('(crontab -l ; echo "%s") | sort - | uniq - | crontab -' % crontab) - run('crontab -l') + run("crontab -l") + def setup_mongo_backups(): # crontab for mongo backups crontab = "0 4 * * * /srv/newsblur/venv/newsblur3/bin/python /srv/newsblur/utils/backups/backup_mongo.py" run('(crontab -l ; echo "%s") | sort - | uniq - | crontab -' % crontab) - run('crontab -l') - + run("crontab -l") + + def setup_postgres_backups(): # crontab for postgres backups crontab = """ @@ -1947,64 +2227,84 @@ def setup_postgres_backups(): 0 * * * * sudo find /var/lib/postgresql/13/archive -type f -mmin +180 -delete""" run('(crontab -l ; echo "%s") | sort - | uniq - | crontab -' % crontab) - run('crontab -l') - + run("crontab -l") + + def backup_redis(name=None): - run('/srv/newsblur/venv/newsblur3/bin/python /srv/newsblur/utils/backups/backup_redis%s.py' % (("_%s"%name) if name else "")) - + run( + "/srv/newsblur/venv/newsblur3/bin/python /srv/newsblur/utils/backups/backup_redis%s.py" + % (("_%s" % name) if name else "") + ) + + def backup_mongo(): - run('/srv/newsblur/venv/newsblur3/bin/python /srv/newsblur/utils/backups/backup_mongo.py') + run("/srv/newsblur/venv/newsblur3/bin/python /srv/newsblur/utils/backups/backup_mongo.py") + def backup_postgresql(): - run('/srv/newsblur/venv/newsblur3/bin/python /srv/newsblur/utils/backups/backup_psql.py') + run("/srv/newsblur/venv/newsblur3/bin/python /srv/newsblur/utils/backups/backup_psql.py") + # =============== # = Calibration = # =============== + def sync_time(): with settings(warn_only=True): sudo("/etc/init.d/ntp stop") sudo("ntpdate pool.ntp.org") sudo("/etc/init.d/ntp start") + def setup_time_calibration(): - sudo('apt-get -y install ntp') - put('config/ntpdate.cron', '%s/' % env.NEWSBLUR_PATH) - sudo('chown root.root %s/ntpdate.cron' % env.NEWSBLUR_PATH) - sudo('chmod 755 %s/ntpdate.cron' % env.NEWSBLUR_PATH) - sudo('mv %s/ntpdate.cron /etc/cron.hourly/ntpdate' % env.NEWSBLUR_PATH) + sudo("apt-get -y install ntp") + put("config/ntpdate.cron", "%s/" % env.NEWSBLUR_PATH) + sudo("chown root.root %s/ntpdate.cron" % env.NEWSBLUR_PATH) + sudo("chmod 755 %s/ntpdate.cron" % env.NEWSBLUR_PATH) + sudo("mv %s/ntpdate.cron /etc/cron.hourly/ntpdate" % env.NEWSBLUR_PATH) with settings(warn_only=True): - sudo('/etc/cron.hourly/ntpdate') + sudo("/etc/cron.hourly/ntpdate") + # ============== # = Tasks - DB = # ============== + def restore_postgres(port=5432, download=False): with virtualenv(): - backup_date = '2020-12-03-02-51' + backup_date = "2020-12-03-02-51" yes 
= prompt("Dropping and creating NewsBlur PGSQL db. Sure?") - if yes != 'y': + if yes != "y": return if download: - run('mkdir -p postgres') - run('PYTHONPATH=%s python utils/backups/s3.py get postgres/backup_postgresql_%s.sql.gz' % (env.NEWSBLUR_PATH, backup_date)) + run("mkdir -p postgres") + run( + "PYTHONPATH=%s python utils/backups/s3.py get postgres/backup_postgresql_%s.sql.gz" + % (env.NEWSBLUR_PATH, backup_date) + ) # sudo('su postgres -c "createuser -p %s -U newsblur"' % (port,)) - with settings(warn_only=True): + with settings(warn_only=True): # May not exist - run('dropdb newsblur -p %s -U newsblur' % (port,), pty=False) - run('sudo -u postgres createuser newsblur -s') + run("dropdb newsblur -p %s -U newsblur" % (port,), pty=False) + run("sudo -u postgres createuser newsblur -s") # May already exist - run('createdb newsblur -p %s -O newsblur -U newsblur' % (port,), pty=False) - run('pg_restore -U newsblur -p %s --role=newsblur --dbname=newsblur /srv/newsblur/postgres/backup_postgresql_%s.sql.gz' % (port, backup_date), pty=False) + run("createdb newsblur -p %s -O newsblur -U newsblur" % (port,), pty=False) + run( + "pg_restore -U newsblur -p %s --role=newsblur --dbname=newsblur /srv/newsblur/postgres/backup_postgresql_%s.sql.gz" + % (port, backup_date), + pty=False, + ) + def restore_mongo(download=False): - backup_date = '2020-11-11-04-00' + backup_date = "2020-11-11-04-00" if download: - run('PYTHONPATH=/srv/newsblur python utils/backups/s3.py get backup_mongo_%s.tgz' % (backup_date)) - run('tar -xf backup_mongo_%s.tgz' % backup_date) - run('mongorestore backup_mongo_%s' % backup_date) + run("PYTHONPATH=/srv/newsblur python utils/backups/s3.py get backup_mongo_%s.tgz" % (backup_date)) + run("tar -xf backup_mongo_%s.tgz" % backup_date) + run("mongorestore backup_mongo_%s" % backup_date) + # ====== # = S3 = @@ -2012,48 +2312,54 @@ def restore_mongo(download=False): if django_settings: try: - ACCESS_KEY = django_settings.S3_ACCESS_KEY - SECRET = django_settings.S3_SECRET + ACCESS_KEY = django_settings.S3_ACCESS_KEY + SECRET = django_settings.S3_SECRET BUCKET_NAME = django_settings.S3_BACKUP_BUCKET # Note that you need to create this bucket first except: print(" ---> You need to fix django's settings. 
Enter python and type `import settings`.") + def save_file_in_s3(filename): - conn = S3Connection(ACCESS_KEY, SECRET) + conn = S3Connection(ACCESS_KEY, SECRET) bucket = conn.get_bucket(BUCKET_NAME) - k = Key(bucket) - k.key = filename + k = Key(bucket) + k.key = filename k.set_contents_from_filename(filename) + def get_file_from_s3(filename): - conn = S3Connection(ACCESS_KEY, SECRET) + conn = S3Connection(ACCESS_KEY, SECRET) bucket = conn.get_bucket(BUCKET_NAME) - k = Key(bucket) - k.key = filename + k = Key(bucket) + k.key = filename k.get_contents_to_filename(filename) + def list_backup_in_s3(): - conn = S3Connection(ACCESS_KEY, SECRET) + conn = S3Connection(ACCESS_KEY, SECRET) bucket = conn.get_bucket(BUCKET_NAME) for i, key in enumerate(bucket.get_all_keys()): print("[%s] %s" % (i, key.name)) + def delete_all_backups(): - #FIXME: validate filename exists - conn = S3Connection(ACCESS_KEY, SECRET) + # FIXME: validate filename exists + conn = S3Connection(ACCESS_KEY, SECRET) bucket = conn.get_bucket(BUCKET_NAME) for i, key in enumerate(bucket.get_all_keys()): print("deleting %s" % (key.name)) key.delete() + def add_revsys_keys(): put("~/Downloads/revsys-keys.pub", "revsys_keys") - run('cat revsys_keys >> ~/.ssh/authorized_keys') - run('rm revsys_keys') + run("cat revsys_keys >> ~/.ssh/authorized_keys") + run("rm revsys_keys") + def upgrade_to_virtualenv(role=None): if not role: @@ -2065,31 +2371,32 @@ def upgrade_to_virtualenv(role=None): elif role == "app": gunicorn_stop() elif role == "node": - run('sudo supervisorctl stop node_unread') - run('sudo supervisorctl stop node_favicons') + run("sudo supervisorctl stop node_unread") + run("sudo supervisorctl stop node_favicons") elif role == "work": - sudo('/etc/init.d/supervisor stop') + sudo("/etc/init.d/supervisor stop") kill_pgbouncer(bounce=False) setup_installs() pip() if role == "task": enable_celery_supervisor(update=False) - sudo('reboot') + sudo("reboot") elif role == "app": setup_gunicorn(supervisor=True, restart=False) - sudo('reboot') + sudo("reboot") elif role == "node": deploy_node() elif role == "search": setup_db_search() elif role == "work": enable_celerybeat() - sudo('reboot') + sudo("reboot") + def benchmark(): - run('curl -s https://packagecloud.io/install/repositories/akopytov/sysbench/script.deb.sh | sudo bash') - sudo('apt-get install -y sysbench') - run('sysbench cpu --cpu-max-prime=20000 run') - run('sysbench fileio --file-total-size=150G prepare') - run('sysbench fileio --file-total-size=150G --file-test-mode=rndrw --time=300 --max-requests=0 run') - run('sysbench fileio --file-total-size=150G cleanup') + run("curl -s https://packagecloud.io/install/repositories/akopytov/sysbench/script.deb.sh | sudo bash") + sudo("apt-get install -y sysbench") + run("sysbench cpu --cpu-max-prime=20000 run") + run("sysbench fileio --file-total-size=150G prepare") + run("sysbench fileio --file-total-size=150G --file-test-mode=rndrw --time=300 --max-requests=0 run") + run("sysbench fileio --file-total-size=150G cleanup") diff --git a/archive/jammit.py b/archive/jammit.py index f1020547e..e3b83ecca 100644 --- a/archive/jammit.py +++ b/archive/jammit.py @@ -8,10 +8,10 @@ DATA_URI_END = "" MHTML_START = "" -class JammitAssets: - ASSET_FILENAME = 'assets.yml' - +class JammitAssets: + ASSET_FILENAME = "assets.yml" + def __init__(self, assets_dir): """ Initializes the Jammit object by reading the assets.yml file and @@ -20,31 +20,31 @@ class JammitAssets: """ self.assets_dir = assets_dir self.assets = self.read_assets() - + def 
read_assets(self): """ Read the assets from the YAML and store it as a lookup dictionary. """ filepath = os.path.join(self.assets_dir, self.ASSET_FILENAME) - with open(filepath, 'r') as yaml_file: + with open(filepath, "r") as yaml_file: return yaml.safe_load(yaml_file) - + def render_tags(self, asset_type, asset_package): """ Returns rendered ' % path - + def javascript_tag_compressed(self, asset_package, asset_type_ext): - filename = 'static/%s.%s' % (asset_package, asset_type_ext) + filename = "static/%s.%s" % (asset_package, asset_type_ext) asset_mtime = int(os.path.getmtime(filename)) - path = '%s?%s' % (filename, asset_mtime) + path = "%s?%s" % (filename, asset_mtime) return self.javascript_tag(path) - + def stylesheet_tag(self, path): return '' % path def stylesheet_tag_compressed(self, asset_package, asset_type_ext): - datauri_filename = 'static/%s-datauri.%s' % (asset_package, asset_type_ext) - original_filename = 'static/%s.%s' % (asset_package, asset_type_ext) + datauri_filename = "static/%s-datauri.%s" % (asset_package, asset_type_ext) + original_filename = "static/%s.%s" % (asset_package, asset_type_ext) asset_mtime = int(os.path.getmtime(datauri_filename)) - datauri_path = '%s?%s' % (datauri_filename, asset_mtime) - original_path = '%s?%s' % (original_filename, asset_mtime) - - return '\n'.join([ - DATA_URI_START, - self.stylesheet_tag(datauri_path), - DATA_URI_END, - MHTML_START, - self.stylesheet_tag(original_path), - MHTML_END, - ]) + datauri_path = "%s?%s" % (datauri_filename, asset_mtime) + original_path = "%s?%s" % (original_filename, asset_mtime) + + return "\n".join( + [ + DATA_URI_START, + self.stylesheet_tag(datauri_path), + DATA_URI_END, + MHTML_START, + self.stylesheet_tag(original_path), + MHTML_END, + ] + ) + class FileFinder: - @classmethod def filefinder(cls, pattern): paths = [] - if '**' in pattern: - folder, wild, pattern = pattern.partition('/**/') + if "**" in pattern: + folder, wild, pattern = pattern.partition("/**/") for f in cls.recursive_find_files(folder, pattern): paths.append(f) else: diff --git a/archive/munin/munin/__init__.py b/archive/munin/munin/__init__.py index 67fa6a55c..aa3462fc8 100755 --- a/archive/munin/munin/__init__.py +++ b/archive/munin/munin/__init__.py @@ -1,4 +1,3 @@ - __version__ = "1.4" import os @@ -6,6 +5,7 @@ import sys import socket from decimal import Decimal + class MuninPlugin(object): title = "" args = None @@ -15,10 +15,10 @@ class MuninPlugin(object): fields = [] def __init__(self): - if 'GRAPH_TITLE' in os.environ: - self.title = os.environ['GRAPH_TITLE'] - if 'GRAPH_CATEGORY' in os.environ: - self.category = os.environ['GRAPH_CATEGORY'] + if "GRAPH_TITLE" in os.environ: + self.title = os.environ["GRAPH_TITLE"] + if "GRAPH_CATEGORY" in os.environ: + self.category = os.environ["GRAPH_CATEGORY"] super(MuninPlugin, self).__init__() def autoconf(self): @@ -26,18 +26,18 @@ class MuninPlugin(object): def config(self): conf = [] - for k in ('title', 'category', 'args', 'vlabel', 'info', 'scale', 'order'): + for k in ("title", "category", "args", "vlabel", "info", "scale", "order"): v = getattr(self, k, None) if v is not None: if isinstance(v, bool): v = v and "yes" or "no" elif isinstance(v, (tuple, list)): v = " ".join(v) - conf.append('graph_%s %s' % (k, v)) + conf.append("graph_%s %s" % (k, v)) for field_name, field_args in self.fields: for arg_name, arg_value in field_args.items(): - conf.append('%s.%s %s' % (field_name, arg_name, arg_value)) + conf.append("%s.%s %s" % (field_name, arg_name, arg_value)) 
print("\n".join(conf)) @@ -45,7 +45,7 @@ class MuninPlugin(object): sys.exit(1) def run(self): - cmd = ((len(sys.argv) > 1) and sys.argv[1] or None) or "execute" + cmd = ((len(sys.argv) > 1) and sys.argv[1] or None) or "execute" if cmd == "execute": values = self.execute() if values: @@ -67,11 +67,12 @@ class MuninPlugin(object): self.suggest() sys.exit(0) + class MuninClient(object): def __init__(self, host, port=4949): self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) self.sock.connect((host, port)) - self.sock.recv(4096) # welcome, TODO: receive all + self.sock.recv(4096) # welcome, TODO: receive all def _command(self, cmd, term): self.sock.send("%s\n" % cmd) @@ -81,15 +82,15 @@ class MuninClient(object): return buf.split(term)[0] def list(self): - return self._command('list', '\n').split(' ') + return self._command("list", "\n").split(" ") def fetch(self, service): data = self._command("fetch %s" % service, ".\n") - if data.startswith('#'): + if data.startswith("#"): raise Exception(data[2:]) values = {} - for line in data.split('\n'): + for line in data.split("\n"): if line: - k, v = line.split(' ', 1) - values[k.split('.')[0]] = Decimal(v) + k, v = line.split(" ", 1) + values[k.split(".")[0]] = Decimal(v) return values diff --git a/archive/munin/munin/cassandra.py b/archive/munin/munin/cassandra.py index f5a75405c..53ea4271f 100755 --- a/archive/munin/munin/cassandra.py +++ b/archive/munin/munin/cassandra.py @@ -8,6 +8,7 @@ from vendor.munin import MuninPlugin space_re = re.compile(r"\s+") + class MuninCassandraPlugin(MuninPlugin): category = "Cassandra" @@ -15,7 +16,7 @@ class MuninCassandraPlugin(MuninPlugin): super(MuninCassandraPlugin, self).__init__(*args, **kwargs) self.nodetool_path = os.environ["NODETOOL_PATH"] self.host = socket.gethostname() - self.keyspaces = [x for x in os.environ.get('CASSANDRA_KEYSPACE', '').split(',') if x] + self.keyspaces = [x for x in os.environ.get("CASSANDRA_KEYSPACE", "").split(",") if x] def execute_nodetool(self, cmd): p = Popen([self.nodetool_path, "-host", self.host, cmd], stdout=PIPE) @@ -23,22 +24,22 @@ class MuninCassandraPlugin(MuninPlugin): return output def parse_cfstats(self, text): - text = text.strip().split('\n') + text = text.strip().split("\n") cfstats = {} cf = None for line in text: line = line.strip() - if not line or line.startswith('-'): + if not line or line.startswith("-"): continue - name, value = line.strip().split(': ', 1) + name, value = line.strip().split(": ", 1) if name == "Keyspace": - ks = {'cf': {}} + ks = {"cf": {}} cf = None cfstats[value] = ks elif name == "Column Family": cf = {} - ks['cf'][value] = cf + ks["cf"][value] = cf elif cf is None: ks[name] = value else: @@ -50,30 +51,30 @@ class MuninCassandraPlugin(MuninPlugin): def cinfo(self): text = self.execute_nodetool("info") - lines = text.strip().split('\n') + lines = text.strip().split("\n") token = lines[0] info = {} for l in lines[1:]: - name, value = l.split(':') + name, value = l.split(":") info[name.strip()] = value.strip() - l_num, l_units = info['Load'].split(' ', 1) + l_num, l_units = info["Load"].split(" ", 1) l_num = float(l_num) if l_units == "KB": scale = 1024 elif l_units == "MB": - scale = 1024*1024 + scale = 1024 * 1024 elif l_units == "GB": - scale = 1024*1024*1024 + scale = 1024 * 1024 * 1024 elif l_units == "TB": - scale = 1024*1024*1024*1024 - info['Load'] = int(l_num * scale) - info['token'] = token + scale = 1024 * 1024 * 1024 * 1024 + info["Load"] = int(l_num * scale) + info["token"] = token return info def tpstats(self): 
out = self.execute_nodetool("tpstats")
         tpstats = {}
-        for line in out.strip().split('\n')[1:]:
+        for line in out.strip().split("\n")[1:]:
             name, active, pending, completed = space_re.split(line)
             tpstats[name] = dict(active=int(active), pending=int(pending), completed=int(completed))
         return tpstats
diff --git a/archive/munin/munin/ddwrt.py b/archive/munin/munin/ddwrt.py
index b053e14ea..2bebf3612 100755
--- a/archive/munin/munin/ddwrt.py
+++ b/archive/munin/munin/ddwrt.py
@@ -1,4 +1,3 @@
-
 # https://192.168.1.10/Info.live.htm
 
 import os
@@ -6,18 +5,16 @@ import re
 import urllib.request
 from vendor.munin import MuninPlugin
 
+
 class DDWrtPlugin(MuninPlugin):
     category = "Wireless"
 
     def __init__(self):
         super(DDWrtPlugin, self).__init__()
-        self.root_url = os.environ.get('DDWRT_URL') or "http://192.168.1.1"
+        self.root_url = os.environ.get("DDWRT_URL") or "http://192.168.1.1"
         self.url = self.root_url + "/Info.live.htm"
 
     def get_info(self):
         res = urllib.request.urlopen(self.url)
         text = res.read()
-        return dict(
-            x[1:-1].split('::')
-            for x in text.split('\n')
-        )
+        return dict(x[1:-1].split("::") for x in text.split("\n"))
diff --git a/archive/munin/munin/gearman.py b/archive/munin/munin/gearman.py
index cf5a1a86e..7165d3855 100755
--- a/archive/munin/munin/gearman.py
+++ b/archive/munin/munin/gearman.py
@@ -5,16 +5,17 @@ import re
 import socket
 from vendor.munin import MuninPlugin
 
-worker_re = re.compile(r'^(?P<fd>\d+) (?P<ip>[\d\.]+) (?P<client_id>[^\s]+) :\s?(?P<abilities>.*)$')
+worker_re = re.compile(r"^(?P<fd>\d+) (?P<ip>[\d\.]+) (?P<client_id>[^\s]+) :\s?(?P<abilities>.*)$")
+
 
 class MuninGearmanPlugin(MuninPlugin):
     category = "Gearman"
 
     def __init__(self):
         super(MuninGearmanPlugin, self).__init__()
-        addr = os.environ.get('GM_SERVER') or "127.0.0.1"
-        port = int(addr.split(':')[-1]) if ':' in addr else 4730
-        host = addr.split(':')[0]
+        addr = os.environ.get("GM_SERVER") or "127.0.0.1"
+        port = int(addr.split(":")[-1]) if ":" in addr else 4730
+        host = addr.split(":")[0]
         self.addr = (host, port)
         self._sock = None
 
@@ -36,12 +37,12 @@ class MuninGearmanPlugin(MuninPlugin):
             buf += sock.recv(8192)
 
         info = []
-        for l in buf.split('\n'):
-            if l.strip() == '.':
+        for l in buf.split("\n"):
+            if l.strip() == ".":
                 break
             m = worker_re.match(l)
             i = m.groupdict()
-            i['abilities'] = [x for x in i['abilities'].split(' ') if x]
+            i["abilities"] = [x for x in i["abilities"].split(" ") if x]
             info.append(i)
         return info
 
@@ -53,14 +54,14 @@ class MuninGearmanPlugin(MuninPlugin):
             buf += sock.recv(8192)
 
         info = {}
-        for l in buf.split('\n'):
+        for l in buf.split("\n"):
             l = l.strip()
-            if l == '.':
+            if l == ".":
                 break
-            counts = l.split('\t')
+            counts = l.split("\t")
             info[counts[0]] = dict(
-                total = int(counts[1]),
-                running = int(counts[2]),
-                workers = int(counts[3]),
+                total=int(counts[1]),
+                running=int(counts[2]),
+                workers=int(counts[3]),
             )
         return info
diff --git a/archive/munin/munin/memcached.py b/archive/munin/munin/memcached.py
index 066334457..72d6590f1 100755
--- a/archive/munin/munin/memcached.py
+++ b/archive/munin/munin/memcached.py
@@ -4,6 +4,7 @@ import os
 import socket
 from vendor.munin import MuninPlugin
 
+
 class MuninMemcachedPlugin(MuninPlugin):
     category = "Memcached"
 
@@ -15,16 +16,16 @@ class MuninMemcachedPlugin(MuninPlugin):
         return True
 
     def get_stats(self):
-        host = os.environ.get('MEMCACHED_HOST') or '127.0.0.1'
-        port = int(os.environ.get('MEMCACHED_PORT') or '11211')
+        host = os.environ.get("MEMCACHED_HOST") or "127.0.0.1"
+        port = int(os.environ.get("MEMCACHED_PORT") or "11211")
         s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
         s.connect((host, 
port)) s.send("stats\n") buf = "" - while 'END\r\n' not in buf: + while "END\r\n" not in buf: buf += s.recv(1024) - stats = (x.split(' ', 2) for x in buf.split('\r\n')) - stats = dict((x[1], x[2]) for x in stats if x[0] == 'STAT') + stats = (x.split(" ", 2) for x in buf.split("\r\n")) + stats = dict((x[1], x[2]) for x in stats if x[0] == "STAT") s.close() return stats diff --git a/archive/munin/munin/mongodb.py b/archive/munin/munin/mongodb.py index 401104052..920805a15 100755 --- a/archive/munin/munin/mongodb.py +++ b/archive/munin/munin/mongodb.py @@ -4,6 +4,7 @@ import os import sys from vendor.munin import MuninPlugin + class MuninMongoDBPlugin(MuninPlugin): dbname_in_args = False category = "MongoDB" @@ -13,13 +14,13 @@ class MuninMongoDBPlugin(MuninPlugin): self.dbname = None if self.dbname_in_args: - self.dbname = sys.argv[0].rsplit('_', 1)[-1] + self.dbname = sys.argv[0].rsplit("_", 1)[-1] if not self.dbname: - self.dbname = os.environ.get('MONGODB_DATABASE') + self.dbname = os.environ.get("MONGODB_DATABASE") - host = os.environ.get('MONGODB_SERVER') or 'localhost' - if ':' in host: - host, port = host.split(':') + host = os.environ.get("MONGODB_SERVER") or "localhost" + if ":" in host: + host, port = host.split(":") port = int(port) else: port = 27017 @@ -27,14 +28,15 @@ class MuninMongoDBPlugin(MuninPlugin): @property def connection(self): - if not hasattr(self, '_connection'): + if not hasattr(self, "_connection"): import pymongo + self._connection = pymongo.MongoClient(self.server[0], self.server[1]) return self._connection @property def db(self): - if not hasattr(self, '_db'): + if not hasattr(self, "_db"): self._db = getattr(self.connection, self.dbname) return self._db diff --git a/archive/munin/munin/mysql.py b/archive/munin/munin/mysql.py index 119de734d..542678cca 100755 --- a/archive/munin/munin/mysql.py +++ b/archive/munin/munin/mysql.py @@ -2,6 +2,7 @@ import os, sys, re from configparser import SafeConfigParser from vendor.munin import MuninPlugin + class MuninMySQLPlugin(MuninPlugin): dbname_in_args = False category = "MySQL" @@ -9,12 +10,15 @@ class MuninMySQLPlugin(MuninPlugin): def __init__(self): super(MuninMySQLPlugin, self).__init__() - self.dbname = ((sys.argv[0].rsplit('_', 1)[-1] if self.dbname_in_args else None) - or os.environ.get('DATABASE') or self.default_table) + self.dbname = ( + (sys.argv[0].rsplit("_", 1)[-1] if self.dbname_in_args else None) + or os.environ.get("DATABASE") + or self.default_table + ) self.conninfo = dict( - user = "root", - host = "localhost", + user="root", + host="localhost", ) cnfpath = "" @@ -34,19 +38,25 @@ class MuninMySQLPlugin(MuninPlugin): for section in ["client", "munin"]: if not cnf.has_section(section): continue - for connkey, opt in [("user", "user"), ("passwd", "password"), ("host", "host"), ("port", "port")]: + for connkey, opt in [ + ("user", "user"), + ("passwd", "password"), + ("host", "host"), + ("port", "port"), + ]: if cnf.has_option(section, opt): self.conninfo[connkey] = cnf.get(section, opt) - for k in ('user', 'passwd', 'host', 'port'): + for k in ("user", "passwd", "host", "port"): # Use lowercase because that's what the existing mysql plugins do v = os.environ.get(k) if v: self.conninfo[k] = v def connection(self): - if not hasattr(self, '_connection'): + if not hasattr(self, "_connection"): import MySQLdb + self._connection = MySQLdb.connect(**self.conninfo) return self._connection diff --git a/archive/munin/munin/nginx.py b/archive/munin/munin/nginx.py index 383e3af12..3e386f8fc 100755 --- 
a/archive/munin/munin/nginx.py
+++ b/archive/munin/munin/nginx.py
@@ -5,6 +5,7 @@ import re
 import urllib.request
 from vendor.munin import MuninPlugin
 
+
 class MuninNginxPlugin(MuninPlugin):
     category = "Nginx"
 
@@ -12,11 +13,12 @@ class MuninNginxPlugin(MuninPlugin):
         r"Active connections:\s+(?P<active>\d+)\s+"
         r"server accepts handled requests\s+"
         r"(?P<accepts>\d+)\s+(?P<handled>\d+)\s+(?P<requests>\d+)\s+"
-        r"Reading: (?P<reading>\d+) Writing: (?P<writing>\d+) Waiting: (?P<waiting>\d+)")
+        r"Reading: (?P<reading>\d+) Writing: (?P<writing>\d+) Waiting: (?P<waiting>\d+)"
+    )
 
     def __init__(self):
         super(MuninNginxPlugin, self).__init__()
-        self.url = os.environ.get('NX_STATUS_URL') or "http://localhost/nginx_status"
+        self.url = os.environ.get("NX_STATUS_URL") or "http://localhost/nginx_status"
 
     def autoconf(self):
         return bool(self.get_status())
diff --git a/archive/munin/munin/pgbouncer.py b/archive/munin/munin/pgbouncer.py
index d8f3dd96f..4e9f14609 100755
--- a/archive/munin/munin/pgbouncer.py
+++ b/archive/munin/munin/pgbouncer.py
@@ -1,6 +1,7 @@
 import sys
 from vendor.munin.postgres import MuninPostgresPlugin
 
+
 class MuninPgBouncerPlugin(MuninPostgresPlugin):
     dbname_in_args = False
     default_table = "pgbouncer"
@@ -8,11 +9,12 @@ class MuninPgBouncerPlugin(MuninPostgresPlugin):
 
     def __init__(self, *args, **kwargs):
         super(MuninPgBouncerPlugin, self).__init__(*args, **kwargs)
-        self.dbwatched = sys.argv[0].rsplit('_', 1)[-1]
+        self.dbwatched = sys.argv[0].rsplit("_", 1)[-1]
 
     def connection(self):
-        if not hasattr(self, '_connection'):
+        if not hasattr(self, "_connection"):
             import psycopg2
+
             self._connection = psycopg2.connect(self.dsn)
             self._connection.set_isolation_level(0)
         return self._connection
@@ -25,9 +27,8 @@ class MuninPgBouncerPlugin(MuninPostgresPlugin):
         totals = dict.fromkeys((field[0] for field in self.fields), 0)
         for row in cursor:
             row_dict = dict(zip(columns, row))
-            if row_dict['database'] in (self.dbwatched, self.dbwatched + '\x00'):
+            if row_dict["database"] in (self.dbwatched, self.dbwatched + "\x00"):
                 for field in self.fields:
                     totals[field[0]] += row_dict[field[0]]
         return dict((field[0], totals[field[0]]) for field in self.fields)
-
diff --git a/archive/munin/munin/postgres.py b/archive/munin/munin/postgres.py
index 541c25bae..42f7022ae 100755
--- a/archive/munin/munin/postgres.py
+++ b/archive/munin/munin/postgres.py
@@ -1,7 +1,7 @@
-
 import os, sys
 from vendor.munin import MuninPlugin
 
+
 class MuninPostgresPlugin(MuninPlugin):
     dbname_in_args = False
     category = "PostgreSQL"
@@ -10,18 +10,22 @@ class MuninPostgresPlugin(MuninPlugin):
 
     def __init__(self):
         super(MuninPostgresPlugin, self).__init__()
-        self.dbname = ((sys.argv[0].rsplit('_', 1)[-1] if self.dbname_in_args else None)
-            or os.environ.get('PGDATABASE') or self.default_table)
+        self.dbname = (
+            (sys.argv[0].rsplit("_", 1)[-1] if self.dbname_in_args else None)
+            or os.environ.get("PGDATABASE")
+            or self.default_table
+        )
         dsn = ["dbname='%s'" % self.dbname]
-        for k in ('user', 'password', 'host', 'port'):
-            v = os.environ.get('DB%s' % k.upper())
+        for k in ("user", "password", "host", "port"):
+            v = os.environ.get("DB%s" % k.upper())
             if v:
                 dsn.append("db%s='%s'" % (k, v))
-        self.dsn = ' '.join(dsn)
+        self.dsn = " ".join(dsn)
 
     def connection(self):
-        if not hasattr(self, '_connection'):
+        if not hasattr(self, "_connection"):
             import psycopg2
+
             self._connection = psycopg2.connect(self.dsn)
         return self._connection
 
@@ -32,13 +36,14 @@ class MuninPostgresPlugin(MuninPlugin):
         return bool(self.connection())
 
     def tables(self):
-        if not hasattr(self, '_tables'):
+        if not hasattr(self, "_tables"):
             c = self.cursor()
c.execute( "SELECT c.relname FROM pg_catalog.pg_class c" " LEFT JOIN pg_catalog.pg_namespace n ON n.oid = c.relnamespace" " WHERE c.relkind IN ('r','')" " AND n.nspname NOT IN ('pg_catalog', 'pg_toast')" - " AND pg_catalog.pg_table_is_visible(c.oid)") + " AND pg_catalog.pg_table_is_visible(c.oid)" + ) self._tables = [r[0] for r in c.fetchall()] return self._tables diff --git a/archive/munin/munin/redis.py b/archive/munin/munin/redis.py index a569adc06..00818075b 100755 --- a/archive/munin/munin/redis.py +++ b/archive/munin/munin/redis.py @@ -4,6 +4,7 @@ import os import socket from vendor.munin import MuninPlugin + class MuninRedisPlugin(MuninPlugin): category = "Redis" @@ -15,9 +16,9 @@ class MuninRedisPlugin(MuninPlugin): return True def get_info(self): - host = os.environ.get('REDIS_HOST') or '127.0.0.1' - port = int(os.environ.get('REDIS_PORT') or '6379') - if host.startswith('/'): + host = os.environ.get("REDIS_HOST") or "127.0.0.1" + port = int(os.environ.get("REDIS_PORT") or "6379") + if host.startswith("/"): s = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) s.connect(host) else: @@ -25,9 +26,9 @@ class MuninRedisPlugin(MuninPlugin): s.connect((host, port)) s.send("*1\r\n$4\r\ninfo\r\n") buf = "" - while '\r\n' not in buf: + while "\r\n" not in buf: buf += s.recv(1024) - l, buf = buf.split('\r\n', 1) + l, buf = buf.split("\r\n", 1) if l[0] != "$": s.close() raise Exception("Protocol error") @@ -35,7 +36,7 @@ class MuninRedisPlugin(MuninPlugin): if remaining > 0: buf += s.recv(remaining) s.close() - return dict(x.split(':', 1) for x in buf.split('\r\n') if ':' in x) + return dict(x.split(":", 1) for x in buf.split("\r\n") if ":" in x) def execute(self): stats = self.get_info() diff --git a/archive/munin/munin/riak.py b/archive/munin/munin/riak.py index 7d30e48ca..9d15b27b0 100755 --- a/archive/munin/munin/riak.py +++ b/archive/munin/munin/riak.py @@ -9,15 +9,16 @@ import sys import urllib.request from vendor.munin import MuninPlugin + class MuninRiakPlugin(MuninPlugin): category = "Riak" def __init__(self): super(MuninRiakPlugin, self).__init__() - host = os.environ.get('RIAK_HOST') or 'localhost' - if ':' in host: - host, port = host.split(':') + host = os.environ.get("RIAK_HOST") or "localhost" + if ":" in host: + host, port = host.split(":") port = int(port) else: port = 8098 diff --git a/config/gunicorn_conf.py b/config/gunicorn_conf.py index f7b023475..866639a64 100644 --- a/config/gunicorn_conf.py +++ b/config/gunicorn_conf.py @@ -3,7 +3,7 @@ import os import psutil -GIGS_OF_MEMORY = psutil.virtual_memory().total/1024/1024/1024. +GIGS_OF_MEMORY = psutil.virtual_memory().total / 1024 / 1024 / 1024.0 NUM_CPUS = psutil.cpu_count() bind = "0.0.0.0:8000" @@ -27,12 +27,12 @@ workers = max(int(math.floor(GIGS_OF_MEMORY * 2)), 3) if workers > 16: workers = 16 -if os.environ.get('DOCKERBUILD', False): +if os.environ.get("DOCKERBUILD", False): workers = 2 -prom_folder = '/srv/newsblur/.prom_cache' +prom_folder = "/srv/newsblur/.prom_cache" os.makedirs(prom_folder, exist_ok=True) -os.environ['PROMETHEUS_MULTIPROC_DIR'] = prom_folder +os.environ["PROMETHEUS_MULTIPROC_DIR"] = prom_folder for filename in os.listdir(prom_folder): file_path = os.path.join(prom_folder, filename) try: @@ -41,7 +41,7 @@ for filename in os.listdir(prom_folder): elif os.path.isdir(file_path): shutil.rmtree(file_path) except Exception as e: - print('Failed to delete %s. Reason: %s' % (file_path, e)) + print("Failed to delete %s. 
Reason: %s" % (file_path, e)) from prometheus_client import multiprocess diff --git a/config/pystartup.py b/config/pystartup.py index 949523ccc..ab64160a2 100644 --- a/config/pystartup.py +++ b/config/pystartup.py @@ -16,23 +16,37 @@ import rlcompleter historyPath = os.path.expanduser("~/.pyhistory") historyTmp = os.path.expanduser("~/.pyhisttmp.py") -endMarkerStr= "# # # histDUMP # # #" +endMarkerStr = "# # # histDUMP # # #" -saveMacro= "import readline; readline.write_history_file('"+historyTmp+"'); \ +saveMacro = ( + "import readline; readline.write_history_file('" + + historyTmp + + "'); \ print '####>>>>>>>>>>'; print ''.join(filter(lambda lineP: \ - not lineP.strip().endswith('"+endMarkerStr+"'), \ - open('"+historyTmp+"').readlines())[:])+'####<<<<<<<<<<'"+endMarkerStr + not lineP.strip().endswith('" + + endMarkerStr + + "'), \ + open('" + + historyTmp + + "').readlines())[:])+'####<<<<<<<<<<'" + + endMarkerStr +) + +readline.parse_and_bind("tab: complete") +readline.parse_and_bind('\C-w: "' + saveMacro + '"') -readline.parse_and_bind('tab: complete') -readline.parse_and_bind('\C-w: "'+saveMacro+'"') def save_history(historyPath=historyPath, endMarkerStr=endMarkerStr): import readline + readline.write_history_file(historyPath) # Now filter out those line containing the saveMacro - lines= filter(lambda lineP, endMarkerStr=endMarkerStr: - not lineP.strip().endswith(endMarkerStr), open(historyPath).readlines()) - open(historyPath, 'w+').write(''.join(lines)) + lines = filter( + lambda lineP, endMarkerStr=endMarkerStr: not lineP.strip().endswith(endMarkerStr), + open(historyPath).readlines(), + ) + open(historyPath, "w+").write("".join(lines)) + if os.path.exists(historyPath): readline.read_history_file(historyPath) @@ -40,4 +54,4 @@ if os.path.exists(historyPath): atexit.register(save_history) del os, atexit, readline, rlcompleter, save_history, historyPath -del historyTmp, endMarkerStr, saveMacro \ No newline at end of file +del historyTmp, endMarkerStr, saveMacro diff --git a/flask_metrics/flask_metrics_haproxy.py b/flask_metrics/flask_metrics_haproxy.py index 7576584ee..86cb4ccd2 100644 --- a/flask_metrics/flask_metrics_haproxy.py +++ b/flask_metrics/flask_metrics_haproxy.py @@ -19,22 +19,23 @@ if settings.DOCKERBUILD: STATUS_MAPPING = { - "UNK": 0, # unknown - "INI": 1, # initializing + "UNK": 0, # unknown + "INI": 1, # initializing "SOCKERR": 2, # socket error - "L4OK": 3, # check passed on layer 4, no upper layers testing enabled - "L4TOUT": 4, # layer 1-4 timeout - "L4CON": 5, # layer 1-4 connection problem, for example "Connection refused" (tcp rst) or "No route to host" (icmp) - "L6OK": 6, # check passed on layer 6 - "L6TOUT": 7, # layer 6 (SSL) timeout - "L6RSP": 8, # layer 6 invalid response - protocol error - "L7OK": 9, # check passed on layer 7 - "L7OKC": 10, # check conditionally passed on layer 7, for example 404 with disable-on-404 + "L4OK": 3, # check passed on layer 4, no upper layers testing enabled + "L4TOUT": 4, # layer 1-4 timeout + "L4CON": 5, # layer 1-4 connection problem, for example "Connection refused" (tcp rst) or "No route to host" (icmp) + "L6OK": 6, # check passed on layer 6 + "L6TOUT": 7, # layer 6 (SSL) timeout + "L6RSP": 8, # layer 6 invalid response - protocol error + "L7OK": 9, # check passed on layer 7 + "L7OKC": 10, # check conditionally passed on layer 7, for example 404 with disable-on-404 "L7TOUT": 11, # layer 7 (HTTP/SMTP) timeout - "L7RSP": 12, # layer 7 invalid response - protocol error - "L7STS": 13, # layer 7 response error, for example 
HTTP 5xx + "L7RSP": 12, # layer 7 invalid response - protocol error + "L7STS": 13, # layer 7 response error, for example HTTP 5xx } + def format_state_data(label, data): formatted_data = {} for k, v in data.items(): @@ -42,37 +43,37 @@ def format_state_data(label, data): formatted_data[k] = f'{label}{{servername="{k}"}} {STATUS_MAPPING[v.strip()]}' return formatted_data -def fetch_states(): - res = requests.get('https://newsblur.com:1936/;csv', auth=HTTPBasicAuth('gimmiestats', 'StatsGiver')) - lines = res.content.decode('utf-8').split('\n') +def fetch_states(): + res = requests.get("https://newsblur.com:1936/;csv", auth=HTTPBasicAuth("gimmiestats", "StatsGiver")) + + lines = res.content.decode("utf-8").split("\n") header_line = lines[0].split(",") - check_status_index = header_line.index('check_status') - servername_index = header_line.index('svname') + check_status_index = header_line.index("check_status") + servername_index = header_line.index("svname") data = {} backends = [line.split(",") for line in lines[1:]] for backend_data in backends: - if len(backend_data) <= check_status_index: continue - if len(backend_data) <= servername_index: continue - if backend_data[servername_index] in ['FRONTEND', 'BACKEND']: continue + if len(backend_data) <= check_status_index: + continue + if len(backend_data) <= servername_index: + continue + if backend_data[servername_index] in ["FRONTEND", "BACKEND"]: + continue backend_status = backend_data[check_status_index].replace("*", "") data[backend_data[servername_index]] = backend_status - + return data @app.route("/state/") def haproxy_state(): backends = fetch_states() - + formatted_data = format_state_data("haproxy_state", backends) - context = { - 'chart_name': 'haproxy_state', - 'chart_type': 'gauge', - 'data': formatted_data - } - html_body = render_template('prometheus_data.html', **context) + context = {"chart_name": "haproxy_state", "chart_type": "gauge", "data": formatted_data} + html_body = render_template("prometheus_data.html", **context) return Response(html_body, content_type="text/plain") diff --git a/flask_metrics/flask_metrics_mongo.py b/flask_metrics/flask_metrics_mongo.py index 4eee7a501..ebbad8ade 100644 --- a/flask_metrics/flask_metrics_mongo.py +++ b/flask_metrics/flask_metrics_mongo.py @@ -17,10 +17,13 @@ app = Flask(__name__) if settings.DOCKERBUILD: connection = pymongo.MongoClient(f"mongodb://{settings.MONGO_DB['host']}") else: - connection = pymongo.MongoClient(f"mongodb://{settings.MONGO_DB['username']}:{settings.MONGO_DB['password']}@{settings.SERVER_NAME}.node.consul/?authSource=admin") + connection = pymongo.MongoClient( + f"mongodb://{settings.MONGO_DB['username']}:{settings.MONGO_DB['password']}@{settings.SERVER_NAME}.node.consul/?authSource=admin" + ) MONGO_HOST = settings.SERVER_NAME + @app.route("/objects/") def objects(): try: @@ -31,44 +34,44 @@ def objects(): return Response(f"Operation failure: {e}", 500) except pymongo.errors.NotMasterError as e: return Response(f"NotMaster error: {e}", 500) - data = dict(objects=stats['objects']) + data = dict(objects=stats["objects"]) formatted_data = {} for k, v in data.items(): formatted_data[k] = f'mongo_objects{{db="{MONGO_HOST}"}} {v}' context = { "data": formatted_data, - "chart_name": 'objects', - "chart_type": 'gauge', + "chart_name": "objects", + "chart_type": "gauge", } - html_body = render_template('prometheus_data.html', **context) + html_body = render_template("prometheus_data.html", **context) return Response(html_body, content_type="text/plain") 
@app.route("/mongo-replset-lag/") def repl_set_lag(): def _get_oplog_length(): - oplog = connection.rs.command('printReplicationInfo') - last_op = oplog.find({}, {'ts': 1}).sort([('$natural', -1)]).limit(1)[0]['ts'].time - first_op = oplog.find({}, {'ts': 1}).sort([('$natural', 1)]).limit(1)[0]['ts'].time + oplog = connection.rs.command("printReplicationInfo") + last_op = oplog.find({}, {"ts": 1}).sort([("$natural", -1)]).limit(1)[0]["ts"].time + first_op = oplog.find({}, {"ts": 1}).sort([("$natural", 1)]).limit(1)[0]["ts"].time oplog_length = last_op - first_op return oplog_length def _get_max_replication_lag(): PRIMARY_STATE = 1 SECONDARY_STATE = 2 - status = connection.admin.command('replSetGetStatus') - members = status['members'] + status = connection.admin.command("replSetGetStatus") + members = status["members"] primary_optime = None oldest_secondary_optime = None for member in members: - member_state = member['state'] - optime = member['optime'] + member_state = member["state"] + optime = member["optime"] if member_state == PRIMARY_STATE: - primary_optime = optime['ts'].time + primary_optime = optime["ts"].time elif member_state == SECONDARY_STATE: - if not oldest_secondary_optime or optime['ts'].time < oldest_secondary_optime: - oldest_secondary_optime = optime['ts'].time + if not oldest_secondary_optime or optime["ts"].time < oldest_secondary_optime: + oldest_secondary_optime = optime["ts"].time if not primary_optime or not oldest_secondary_optime: raise Exception("Replica set is not healthy") @@ -86,7 +89,7 @@ def repl_set_lag(): return Response(f"Operation failure: {e}", 500) except pymongo.errors.NotMasterError as e: return Response(f"NotMaster error: {e}", 500) - + formatted_data = {} for k, v in oplog_length.items(): formatted_data[k] = f'mongo_oplog{{type="length", db="{MONGO_HOST}"}} {v}' @@ -95,10 +98,10 @@ def repl_set_lag(): context = { "data": formatted_data, - "chart_name": 'oplog_metrics', - "chart_type": 'gauge', + "chart_name": "oplog_metrics", + "chart_type": "gauge", } - html_body = render_template('prometheus_data.html', **context) + html_body = render_template("prometheus_data.html", **context) return Response(html_body, content_type="text/plain") @@ -112,52 +115,49 @@ def size(): return Response(f"Operation failure: {e}", 500) except pymongo.errors.NotMasterError as e: return Response(f"NotMaster error: {e}", 500) - data = dict(size=stats['fsUsedSize']) + data = dict(size=stats["fsUsedSize"]) formatted_data = {} for k, v in data.items(): formatted_data[k] = f'mongo_db_size{{db="{MONGO_HOST}"}} {v}' context = { "data": formatted_data, - "chart_name": 'db_size_bytes', - "chart_type": 'gauge', + "chart_name": "db_size_bytes", + "chart_type": "gauge", } - html_body = render_template('prometheus_data.html', **context) + html_body = render_template("prometheus_data.html", **context) return Response(html_body, content_type="text/plain") @app.route("/ops/") def ops(): try: - status = connection.admin.command('serverStatus') + status = connection.admin.command("serverStatus") except pymongo.errors.ServerSelectionTimeoutError as e: return Response(f"Server selection timeout: {e}", 500) except pymongo.errors.OperationFailure as e: return Response(f"Operation failure: {e}", 500) except pymongo.errors.NotMasterError as e: return Response(f"NotMaster error: {e}", 500) - data = dict( - (q, status["opcounters"][q]) - for q in status['opcounters'].keys() - ) - + data = dict((q, status["opcounters"][q]) for q in status["opcounters"].keys()) + formatted_data = {} for k, v in 
data.items(): formatted_data[k] = f'mongo_ops{{type="{k}", db="{MONGO_HOST}"}} {v}' - + context = { "data": formatted_data, - "chart_name": 'ops', - "chart_type": 'gauge', + "chart_name": "ops", + "chart_type": "gauge", } - html_body = render_template('prometheus_data.html', **context) + html_body = render_template("prometheus_data.html", **context) return Response(html_body, content_type="text/plain") @app.route("/page-faults/") def page_faults(): try: - status = connection.admin.command('serverStatus') + status = connection.admin.command("serverStatus") except pymongo.errors.ServerSelectionTimeoutError as e: return Response(f"Server selection timeout: {e}", 500) except pymongo.errors.OperationFailure as e: @@ -165,7 +165,7 @@ def page_faults(): except pymongo.errors.NotMasterError as e: return Response(f"NotMaster error: {e}", 500) try: - value = status['extra_info']['page_faults'] + value = status["extra_info"]["page_faults"] except KeyError: value = "U" data = dict(page_faults=value) @@ -175,37 +175,34 @@ def page_faults(): context = { "data": formatted_data, - "chart_name": 'page_faults', - "chart_type": 'counter', + "chart_name": "page_faults", + "chart_type": "counter", } - html_body = render_template('prometheus_data.html', **context) + html_body = render_template("prometheus_data.html", **context) return Response(html_body, content_type="text/plain") @app.route("/page-queues/") def page_queues(): try: - status = connection.admin.command('serverStatus') + status = connection.admin.command("serverStatus") except pymongo.errors.ServerSelectionTimeoutError as e: return Response(f"Server selection timeout: {e}", 500) except pymongo.errors.OperationFailure as e: return Response(f"Operation failure: {e}", 500) except pymongo.errors.NotMasterError as e: return Response(f"NotMaster error: {e}", 500) - data = dict( - (q, status["globalLock"]["currentQueue"][q]) - for q in ("readers", "writers") - ) + data = dict((q, status["globalLock"]["currentQueue"][q]) for q in ("readers", "writers")) formatted_data = {} for k, v in data.items(): formatted_data[k] = f'mongo_page_queues{{type="{k}", db="{MONGO_HOST}"}} {v}' context = { "data": formatted_data, - "chart_name": 'queues', - "chart_type": 'gauge', + "chart_name": "queues", + "chart_type": "gauge", } - html_body = render_template('prometheus_data.html', **context) + html_body = render_template("prometheus_data.html", **context) return Response(html_body, content_type="text/plain") diff --git a/flask_metrics/flask_metrics_redis.py b/flask_metrics/flask_metrics_redis.py index 44bb4fb14..21322433f 100644 --- a/flask_metrics/flask_metrics_redis.py +++ b/flask_metrics/flask_metrics_redis.py @@ -15,14 +15,14 @@ if settings.FLASK_SENTRY_DSN is not None: app = Flask(__name__) INSTANCES = { - 'db-redis-session': settings.REDIS_SESSIONS, - 'db-redis-story': settings.REDIS_STORY, - 'db-redis-pubsub': settings.REDIS_PUBSUB, - 'db-redis-user': settings.REDIS_USER, + "db-redis-session": settings.REDIS_SESSIONS, + "db-redis-story": settings.REDIS_STORY, + "db-redis-pubsub": settings.REDIS_PUBSUB, + "db-redis-user": settings.REDIS_USER, } -class RedisMetric(object): +class RedisMetric(object): def __init__(self, title, fields): self.title = title self.fields = fields @@ -36,17 +36,17 @@ class RedisMetric(object): if not settings.DOCKERBUILD and instance not in settings.SERVER_NAME: continue self.host = f"{settings.SERVER_NAME}.node.nyc1.consul" - if instance == 'db-redis-session': - self.port = redis_config.get('port', settings.REDIS_SESSION_PORT) - elif 
instance == 'db-redis-story': - self.port = redis_config.get('port', settings.REDIS_STORY_PORT) - elif instance == 'db-redis-pubsub': - self.port = redis_config.get('port', settings.REDIS_PUBSUB_PORT) - elif instance == 'db-redis-user': - self.port = redis_config.get('port', settings.REDIS_USER_PORT) + if instance == "db-redis-session": + self.port = redis_config.get("port", settings.REDIS_SESSION_PORT) + elif instance == "db-redis-story": + self.port = redis_config.get("port", settings.REDIS_STORY_PORT) + elif instance == "db-redis-pubsub": + self.port = redis_config.get("port", settings.REDIS_PUBSUB_PORT) + elif instance == "db-redis-user": + self.port = redis_config.get("port", settings.REDIS_USER_PORT) stats = self.get_info() yield instance, stats - + def execute(self): data = {} for instance, stats in self.redis_servers_stats(): @@ -61,136 +61,154 @@ class RedisMetric(object): return data def format_data(self, data): - label = self.fields[0][1]['label'] + label = self.fields[0][1]["label"] formatted_data = {} for k, v in data.items(): formatted_data[k] = f'{label}{{db="{k}"}} {v[self.fields[0][0]]}' return formatted_data - + def get_db_size_data(self): data = {} for instance, stats in self.redis_servers_stats(): - dbs = [stat for stat in stats.keys() if stat.startswith('db')] + dbs = [stat for stat in stats.keys() if stat.startswith("db")] for db in dbs: - data[f'{instance}-{db}'] = f'redis_size{{db="{db}"}} {stats[db]["keys"]}' + data[f"{instance}-{db}"] = f'redis_size{{db="{db}"}} {stats[db]["keys"]}' return data def get_context(self): - if self.fields[0][0] == 'size': + if self.fields[0][0] == "size": formatted_data = self.get_db_size_data() else: values = self.execute() formatted_data = self.format_data(values) context = { "data": formatted_data, - "chart_name": self.fields[0][1]['label'], - "chart_type": self.fields[0][1]['type'], + "chart_name": self.fields[0][1]["label"], + "chart_type": self.fields[0][1]["type"], } return context - + @property def response_body(self): context = self.get_context() - return render_template('prometheus_data.html', **context) + return render_template("prometheus_data.html", **context) @app.route("/active-connections/") def active_connections(): conf = { - 'title': "Redis active connections", - 'fields': ( - ('connected_clients', dict( - label="redis_active_connections", - type="gauge", - )), + "title": "Redis active connections", + "fields": ( + ( + "connected_clients", + dict( + label="redis_active_connections", + type="gauge", + ), + ), ), } redis_metric = RedisMetric(**conf) return Response(redis_metric.response_body, content_type="text/plain") + @app.route("/commands/") def commands(): conf = { - 'title': "Redis commands", - 'fields': ( - ('total_commands_processed', dict( - label="redis_commands", - type="gauge", - )), + "title": "Redis commands", + "fields": ( + ( + "total_commands_processed", + dict( + label="redis_commands", + type="gauge", + ), + ), ), } redis_metric = RedisMetric(**conf) context = redis_metric.get_context() - html_body = render_template('prometheus_data.html', **context) + html_body = render_template("prometheus_data.html", **context) return Response(html_body, content_type="text/plain") @app.route("/connects/") def connects(): conf = { - 'title': "Redis connections per second", - 'fields': ( - ('total_connections_received', dict( - label="redis_connects", - type="counter", - )), + "title": "Redis connections per second", + "fields": ( + ( + "total_connections_received", + dict( + label="redis_connects", + 
type="counter", + ), + ), ), } redis_metric = RedisMetric(**conf) context = redis_metric.get_context() - html_body = render_template('prometheus_data.html', **context) + html_body = render_template("prometheus_data.html", **context) return Response(html_body, content_type="text/plain") @app.route("/size/") def size(): - conf = { - 'title': "Redis DB size", - 'fields': ( - ('size', dict( - label="redis_size", - type="gauge", - )), - ) + "title": "Redis DB size", + "fields": ( + ( + "size", + dict( + label="redis_size", + type="gauge", + ), + ), + ), } redis_metric = RedisMetric(**conf) context = redis_metric.get_context() - html_body = render_template('prometheus_data.html', **context) + html_body = render_template("prometheus_data.html", **context) return Response(html_body, content_type="text/plain") @app.route("/memory/") def memory(): conf = { - 'title': "Redis Total Memory", - 'fields': ( - ('total_system_memory', dict( - label="redis_memory", - type="gauge", - )), + "title": "Redis Total Memory", + "fields": ( + ( + "total_system_memory", + dict( + label="redis_memory", + type="gauge", + ), + ), ), } redis_metric = RedisMetric(**conf) context = redis_metric.get_context() - html_body = render_template('prometheus_data.html', **context) + html_body = render_template("prometheus_data.html", **context) return Response(html_body, content_type="text/plain") @app.route("/used-memory/") def memory_used(): conf = { - 'title': "Redis Used Memory", - 'fields': ( - ('used_memory', dict( - label="redis_used_memory", - type="gauge", - )), + "title": "Redis Used Memory", + "fields": ( + ( + "used_memory", + dict( + label="redis_used_memory", + type="gauge", + ), + ), ), } redis_metric = RedisMetric(**conf) context = redis_metric.get_context() - html_body = render_template('prometheus_data.html', **context) + html_body = render_template("prometheus_data.html", **context) return Response(html_body, content_type="text/plain") diff --git a/flask_monitor/db_monitor.py b/flask_monitor/db_monitor.py index eb95dd44b..b37396bb8 100644 --- a/flask_monitor/db_monitor.py +++ b/flask_monitor/db_monitor.py @@ -22,17 +22,18 @@ app = Flask(__name__) PRIMARY_STATE = 1 SECONDARY_STATE = 2 + @app.route("/db_check/postgres") def db_check_postgres(): - if request.args.get('consul') == '1': + if request.args.get("consul") == "1": return str(1) connect_params = "dbname='%s' user='%s' password='%s' host='%s' port='%s'" % ( - settings.DATABASES['default']['NAME'], - settings.DATABASES['default']['USER'], - settings.DATABASES['default']['PASSWORD'], - f'{settings.SERVER_NAME}.node.nyc1.consul', - settings.DATABASES['default']['PORT'], + settings.DATABASES["default"]["NAME"], + settings.DATABASES["default"]["USER"], + settings.DATABASES["default"]["PASSWORD"], + f"{settings.SERVER_NAME}.node.nyc1.consul", + settings.DATABASES["default"]["PORT"], ) try: conn = psycopg2.connect(connect_params) @@ -45,28 +46,30 @@ def db_check_postgres(): rows = cur.fetchall() for row in rows: return str(row[0]) - + abort(Response("No rows found", 504)) + @app.route("/db_check/mysql") def db_check_mysql(): - if request.args.get('consul') == '1': + if request.args.get("consul") == "1": return str(1) connect_params = "dbname='%s' user='%s' password='%s' host='%s' port='%s'" % ( - settings.DATABASES['default']['NAME'], - settings.DATABASES['default']['USER'], - settings.DATABASES['default']['PASSWORD'], - settings.DATABASES['default']['HOST'], - settings.DATABASES['default']['PORT'], + settings.DATABASES["default"]["NAME"], + 
settings.DATABASES["default"]["USER"], + settings.DATABASES["default"]["PASSWORD"], + settings.DATABASES["default"]["HOST"], + settings.DATABASES["default"]["PORT"], ) try: - - conn = pymysql.connect(host='mysql', - port=settings.DATABASES['default']['PORT'], - user=settings.DATABASES['default']['USER'], - passwd=settings.DATABASES['default']['PASSWORD'], - db=settings.DATABASES['default']['NAME']) + conn = pymysql.connect( + host="mysql", + port=settings.DATABASES["default"]["PORT"], + user=settings.DATABASES["default"]["USER"], + passwd=settings.DATABASES["default"]["PASSWORD"], + db=settings.DATABASES["default"]["NAME"], + ) except: print(" ---> Mysql can't connect to the database: %s" % connect_params) abort(Response("Can't connect to mysql db", 503)) @@ -76,17 +79,20 @@ def db_check_mysql(): rows = cur.fetchall() for row in rows: return str(row[0]) - + abort(Response("No rows found", 504)) + @app.route("/db_check/mongo") def db_check_mongo(): - if request.args.get('consul') == '1': + if request.args.get("consul") == "1": return str(1) try: # The `mongo` hostname below is a reference to the newsblurnet docker network, where 172.18.0.0/16 is defined - client = pymongo.MongoClient(f"mongodb://{settings.MONGO_DB['username']}:{settings.MONGO_DB['password']}@{settings.SERVER_NAME}.node.nyc1.consul/?authSource=admin") + client = pymongo.MongoClient( + f"mongodb://{settings.MONGO_DB['username']}:{settings.MONGO_DB['password']}@{settings.SERVER_NAME}.node.nyc1.consul/?authSource=admin" + ) db = client.newsblur except: abort(Response("Can't connect to db", 503)) @@ -98,25 +104,25 @@ def db_check_mongo(): except pymongo.errors.ServerSelectionTimeoutError: abort(Response("Server selection timeout", 503)) except pymongo.errors.OperationFailure as e: - if 'Authentication failed' in str(e): + if "Authentication failed" in str(e): abort(Response("Auth failed", 506)) abort(Response("Operation Failure", 507)) - + if not stories: abort(Response("No stories", 510)) - - status = client.admin.command('replSetGetStatus') - members = status['members'] + + status = client.admin.command("replSetGetStatus") + members = status["members"] primary_optime = None oldest_secondary_optime = None for member in members: - member_state = member['state'] - optime = member['optime'] + member_state = member["state"] + optime = member["optime"] if member_state == PRIMARY_STATE: - primary_optime = optime['ts'].time + primary_optime = optime["ts"].time elif member_state == SECONDARY_STATE: - if not oldest_secondary_optime or optime['ts'].time < oldest_secondary_optime: - oldest_secondary_optime = optime['ts'].time + if not oldest_secondary_optime or optime["ts"].time < oldest_secondary_optime: + oldest_secondary_optime = optime["ts"].time if not primary_optime or not oldest_secondary_optime: abort(Response("No optime", 511)) @@ -126,43 +132,47 @@ def db_check_mongo(): return str(stories) + @app.route("/db_check/mongo_analytics") def db_check_mongo_analytics(): - if request.args.get('consul') == '1': + if request.args.get("consul") == "1": return str(1) try: - client = pymongo.MongoClient(f"mongodb://{settings.MONGO_ANALYTICS_DB['username']}:{settings.MONGO_ANALYTICS_DB['password']}@{settings.SERVER_NAME}.node.consul/?authSource=admin") + client = pymongo.MongoClient( + f"mongodb://{settings.MONGO_ANALYTICS_DB['username']}:{settings.MONGO_ANALYTICS_DB['password']}@{settings.SERVER_NAME}.node.consul/?authSource=admin" + ) db = client.nbanalytics except: abort(Response("Can't connect to db", 503)) - + try: fetches = 
db.feed_fetches.estimated_document_count() except (pymongo.errors.NotMasterError, pymongo.errors.ServerSelectionTimeoutError): abort(Response("Not Master / Server selection timeout", 504)) except pymongo.errors.OperationFailure as e: - if 'Authentication failed' in str(e): + if "Authentication failed" in str(e): abort(Response("Auth failed", 505)) abort(Response("Operation failure", 506)) - + if not fetches: abort(Response("No fetches in data", 510)) - + return str(fetches) + @app.route("/db_check/redis_user") def db_check_redis_user(): - if request.args.get('consul') == '1': + if request.args.get("consul") == "1": return str(1) - port = request.args.get('port', settings.REDIS_USER_PORT) + port = request.args.get("port", settings.REDIS_USER_PORT) try: - r = redis.Redis(f'{settings.SERVER_NAME}.node.nyc1.consul', port=port, db=0) + r = redis.Redis(f"{settings.SERVER_NAME}.node.nyc1.consul", port=port, db=0) except: abort(Response("Can't connect to db", 503)) - + try: randkey = r.randomkey() except: @@ -173,18 +183,19 @@ def db_check_redis_user(): else: abort(Response("Can't find a randomkey", 505)) + @app.route("/db_check/redis_story") -def db_check_redis_story(): - if request.args.get('consul') == '1': +def db_check_redis_story(): + if request.args.get("consul") == "1": return str(1) - port = request.args.get('port', settings.REDIS_STORY_PORT) - + port = request.args.get("port", settings.REDIS_STORY_PORT) + try: - r = redis.Redis(f'{settings.SERVER_NAME}.node.nyc1.consul', port=port, db=1) + r = redis.Redis(f"{settings.SERVER_NAME}.node.nyc1.consul", port=port, db=1) except: abort(Response("Can't connect to db", 503)) - + try: randkey = r.randomkey() except: @@ -195,18 +206,19 @@ def db_check_redis_story(): else: abort(Response("Can't find a randomkey", 505)) + @app.route("/db_check/redis_sessions") def db_check_redis_sessions(): - if request.args.get('consul') == '1': + if request.args.get("consul") == "1": return str(1) - port = request.args.get('port', settings.REDIS_SESSION_PORT) + port = request.args.get("port", settings.REDIS_SESSION_PORT) try: - r = redis.Redis(f'{settings.SERVER_NAME}.node.nyc1.consul', port=port, db=5) + r = redis.Redis(f"{settings.SERVER_NAME}.node.nyc1.consul", port=port, db=5) except: abort(Response("Can't connect to db", 503)) - + try: randkey = r.randomkey() except: @@ -217,18 +229,19 @@ def db_check_redis_sessions(): else: abort(Response("Can't find a randomkey", 505)) + @app.route("/db_check/redis_pubsub") def db_check_redis_pubsub(): - if request.args.get('consul') == '1': + if request.args.get("consul") == "1": return str(1) - port = request.args.get('port', settings.REDIS_PUBSUB_PORT) + port = request.args.get("port", settings.REDIS_PUBSUB_PORT) try: - r = redis.Redis(f'{settings.SERVER_NAME}.node.nyc1.consul', port=port, db=1) + r = redis.Redis(f"{settings.SERVER_NAME}.node.nyc1.consul", port=port, db=1) except: abort(Response("Can't connect to db", 503)) - + try: pubsub_numpat = r.pubsub_numpat() except: @@ -239,17 +252,18 @@ def db_check_redis_pubsub(): else: abort(Response("Can't find a pubsub_numpat", 505)) + @app.route("/db_check/elasticsearch") def db_check_elasticsearch(): try: conn = elasticsearch.Elasticsearch("elasticsearch") except: abort(Response("Can't connect to db", 503)) - - if request.args.get('consul') == '1': + + if request.args.get("consul") == "1": return str(1) - - if conn.indices.exists('feeds-index'): + + if conn.indices.exists("feeds-index"): return str("Index exists, but didn't try search") # query = 
pyes.query.TermQuery("title", "daring fireball") # results = conn.search(query=query, size=1, doc_types=['feeds-type'], sort="num_subscribers:desc") @@ -260,6 +274,7 @@ def db_check_elasticsearch(): else: abort(Response("Couldn't find feeds-index", 504)) + if __name__ == "__main__": print(" ---> Starting NewsBlur DB monitor flask server...") app.run(host="0.0.0.0", port=5579) diff --git a/manage.py b/manage.py index 8ff26d71a..0261a9b88 100755 --- a/manage.py +++ b/manage.py @@ -8,4 +8,3 @@ if __name__ == "__main__": from django.core.management import execute_from_command_line execute_from_command_line(sys.argv) - diff --git a/newsblur_web/__init__.py b/newsblur_web/__init__.py index a711f1df8..3990cb0a8 100644 --- a/newsblur_web/__init__.py +++ b/newsblur_web/__init__.py @@ -4,4 +4,4 @@ from __future__ import absolute_import, unicode_literals # Django starts so that shared_task will use this app. from .celeryapp import app as celery_app -__all__ = ['celery_app'] +__all__ = ["celery_app"] diff --git a/newsblur_web/celeryapp.py b/newsblur_web/celeryapp.py index 146be96a6..e49929029 100644 --- a/newsblur_web/celeryapp.py +++ b/newsblur_web/celeryapp.py @@ -2,16 +2,17 @@ from __future__ import absolute_import, unicode_literals import os from celery import Celery from django.apps import apps -# set the default Django settings module for the 'celery' program. -os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'newsblur_web.settings') -app = Celery('newsblur_web') +# set the default Django settings module for the 'celery' program. +os.environ.setdefault("DJANGO_SETTINGS_MODULE", "newsblur_web.settings") + +app = Celery("newsblur_web") # Using a string here means the worker doesn't have to serialize # the configuration object to child processes. # - namespace='CELERY' means all celery-related configuration keys # should have a `CELERY_` prefix. -app.config_from_object('django.conf:settings', namespace="CELERY") +app.config_from_object("django.conf:settings", namespace="CELERY") # Load task modules from all registered Django app configs. app.autodiscover_tasks(lambda: [n.name for n in apps.get_app_configs()]) diff --git a/newsblur_web/docker_local_settings.py b/newsblur_web/docker_local_settings.py index 31e353f54..fca266980 100644 --- a/newsblur_web/docker_local_settings.py +++ b/newsblur_web/docker_local_settings.py @@ -5,15 +5,13 @@ import os # = Server Settings = # =================== -ADMINS = ( - ('Samuel Clay', 'samuel@newsblur.com'), -) +ADMINS = (("Samuel Clay", "samuel@newsblur.com"),) -SERVER_EMAIL = 'server@newsblur.com' -HELLO_EMAIL = 'hello@newsblur.com' -NEWSBLUR_URL = 'https://localhost' -PUSH_DOMAIN = 'localhost' -SESSION_COOKIE_DOMAIN = 'localhost' +SERVER_EMAIL = "server@newsblur.com" +HELLO_EMAIL = "hello@newsblur.com" +NEWSBLUR_URL = "https://localhost" +PUSH_DOMAIN = "localhost" +SESSION_COOKIE_DOMAIN = "localhost" # =================== # = Global Settings = @@ -23,24 +21,24 @@ DOCKERBUILD = True DEBUG = False # DEBUG = True -# DEBUG_ASSETS controls JS/CSS asset packaging. Turning this off requires you to run +# DEBUG_ASSETS controls JS/CSS asset packaging. Turning this off requires you to run # `./manage.py collectstatic` first. Turn this on for development so you can see -# changes in your JS/CSS. +# changes in your JS/CSS. DEBUG_ASSETS = False # Make sure to run `./manage.py collectstatic` first DEBUG_ASSETS = True # DEBUG_QUERIES controls the output of the database query logs. Can be rather verbose -# but is useful to catch slow running queries. 
A summary is also useful in cutting +# but is useful to catch slow running queries. A summary is also useful in cutting # down verbosity. DEBUG_QUERIES = DEBUG DEBUG_QUERIES_SUMMARY_ONLY = True # DEBUG_QUERIES_SUMMARY_ONLY = False -MEDIA_URL = '/media/' -IMAGES_URL = '/imageproxy' +MEDIA_URL = "/media/" +IMAGES_URL = "/imageproxy" # Uncomment below to debug iOS/Android widget # IMAGES_URL = 'https://haproxy/imageproxy' -SECRET_KEY = 'YOUR SECRET KEY' +SECRET_KEY = "YOUR SECRET KEY" AUTO_PREMIUM_NEW_USERS = True AUTO_PREMIUM_ARCHIVE_NEW_USERS = True AUTO_PREMIUM_PRO_NEW_USERS = True @@ -57,27 +55,27 @@ ENABLE_PUSH = False PRO_MINUTES_BETWEEN_FETCHES = 15 CACHES = { - 'default': { - 'BACKEND': 'django_redis.cache.RedisCache', - 'LOCATION': 'redis://db_redis:6579/6', + "default": { + "BACKEND": "django_redis.cache.RedisCache", + "LOCATION": "redis://db_redis:6579/6", }, } -EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend' +EMAIL_BACKEND = "django.core.mail.backends.console.EmailBackend" # Set this to the username that is shown on the homepage to unauthenticated users. -HOMEPAGE_USERNAME = 'popular' +HOMEPAGE_USERNAME = "popular" # Google Reader OAuth API Keys -OAUTH_KEY = 'www.example.com' -OAUTH_SECRET = 'SECRET_KEY_FROM_GOOGLE' +OAUTH_KEY = "www.example.com" +OAUTH_SECRET = "SECRET_KEY_FROM_GOOGLE" -S3_ACCESS_KEY = 'XXX' -S3_SECRET = 'SECRET' -S3_BACKUP_BUCKET = 'newsblur-backups' -S3_PAGES_BUCKET_NAME = 'pages-XXX.newsblur.com' -S3_ICONS_BUCKET_NAME = 'icons-XXX.newsblur.com' -S3_AVATARS_BUCKET_NAME = 'avatars-XXX.newsblur.com' +S3_ACCESS_KEY = "XXX" +S3_SECRET = "SECRET" +S3_BACKUP_BUCKET = "newsblur-backups" +S3_PAGES_BUCKET_NAME = "pages-XXX.newsblur.com" +S3_ICONS_BUCKET_NAME = "icons-XXX.newsblur.com" +S3_AVATARS_BUCKET_NAME = "avatars-XXX.newsblur.com" STRIPE_SECRET = "YOUR-SECRET-API-KEY" STRIPE_PUBLISHABLE = "YOUR-PUBLISHABLE-API-KEY" @@ -86,10 +84,10 @@ STRIPE_PUBLISHABLE = "YOUR-PUBLISHABLE-API-KEY" # = Social APIs = # =============== -FACEBOOK_APP_ID = '111111111111111' -FACEBOOK_SECRET = '99999999999999999999999999999999' -TWITTER_CONSUMER_KEY = 'ooooooooooooooooooooo' -TWITTER_CONSUMER_SECRET = 'XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX' +FACEBOOK_APP_ID = "111111111111111" +FACEBOOK_SECRET = "99999999999999999999999999999999" +TWITTER_CONSUMER_KEY = "ooooooooooooooooooooo" +TWITTER_CONSUMER_SECRET = "XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX" YOUTUBE_API_KEY = "XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX" # ============= @@ -97,51 +95,34 @@ YOUTUBE_API_KEY = "XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX" # ============= DATABASES = { - 'default': { - 'NAME': 'newsblur', - 'ENGINE': 'django_prometheus.db.backends.postgresql', + "default": { + "NAME": "newsblur", + "ENGINE": "django_prometheus.db.backends.postgresql", #'ENGINE': 'django.db.backends.mysql', - 'USER': 'newsblur', - 'PASSWORD': 'newsblur', - 'HOST': 'db_postgres', - 'PORT': 5432 + "USER": "newsblur", + "PASSWORD": "newsblur", + "HOST": "db_postgres", + "PORT": 5432, }, } -MONGO_DB = { - 'name': 'newsblur', - 'host': 'db_mongo:29019' -} +MONGO_DB = {"name": "newsblur", "host": "db_mongo:29019"} MONGO_ANALYTICS_DB = { - 'name': 'nbanalytics', - 'host': 'db_mongo:29019', + "name": "nbanalytics", + "host": "db_mongo:29019", } -MONGODB_SLAVE = { - 'host': 'db_mongo' -} +MONGODB_SLAVE = {"host": "db_mongo"} # Celery RabbitMQ/Redis Broker BROKER_URL = "redis://db_redis:6579/0" CELERY_RESULT_BACKEND = BROKER_URL CELERY_WORKER_CONCURRENCY = 1 -REDIS_USER = { - 'host': 'db_redis', - 'port': 6579 -} -REDIS_PUBSUB = 
{ - 'host': 'db_redis', - 'port': 6579 -} -REDIS_STORY = { - 'host': 'db_redis', - 'port': 6579 -} -REDIS_SESSIONS = { - 'host': 'db_redis', - 'port': 6579 -} +REDIS_USER = {"host": "db_redis", "port": 6579} +REDIS_PUBSUB = {"host": "db_redis", "port": 6579} +REDIS_STORY = {"host": "db_redis", "port": 6579} +REDIS_SESSIONS = {"host": "db_redis", "port": 6579} CELERY_REDIS_DB_NUM = 4 SESSION_REDIS_DB = 5 @@ -153,9 +134,9 @@ ELASTICSEARCH_FEED_HOST = "http://db_elasticsearch:9200" ELASTICSEARCH_STORY_HOST = "http://db_elasticsearch:9200" BACKED_BY_AWS = { - 'pages_on_node': False, - 'pages_on_s3': False, - 'icons_on_s3': False, + "pages_on_node": False, + "pages_on_s3": False, + "icons_on_s3": False, } @@ -167,25 +148,27 @@ BACKED_BY_AWS = { LOG_TO_STREAM = True if len(logging._handlerList) < 1: - LOG_FILE = '~/newsblur/logs/development.log' - logging.basicConfig(level=logging.DEBUG, - format='%(asctime)-12s: %(message)s', - datefmt='%b %d %H:%M:%S', - handler=logging.StreamHandler) + LOG_FILE = "~/newsblur/logs/development.log" + logging.basicConfig( + level=logging.DEBUG, + format="%(asctime)-12s: %(message)s", + datefmt="%b %d %H:%M:%S", + handler=logging.StreamHandler, + ) -MAILGUN_ACCESS_KEY = 'key-xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx' -MAILGUN_SERVER_NAME = 'newsblur.com' +MAILGUN_ACCESS_KEY = "key-xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx" +MAILGUN_SERVER_NAME = "newsblur.com" -DO_TOKEN_LOG = '0000000000000000000000000000000000000000000000000000000000000000' -DO_TOKEN_FABRIC = '0000000000000000000000000000000000000000000000000000000000000000' +DO_TOKEN_LOG = "0000000000000000000000000000000000000000000000000000000000000000" +DO_TOKEN_FABRIC = "0000000000000000000000000000000000000000000000000000000000000000" SERVER_NAME = "nblocalhost" NEWSBLUR_URL = os.getenv("NEWSBLUR_URL", "https://localhost") -if NEWSBLUR_URL == 'https://localhost': +if NEWSBLUR_URL == "https://localhost": SESSION_COOKIE_DOMAIN = "localhost" -SESSION_ENGINE = 'redis_sessions.session' +SESSION_ENGINE = "redis_sessions.session" # CORS_ORIGIN_REGEX_WHITELIST = ('^(https?://)?(\w+\.)?nb.local\.com$', ) diff --git a/newsblur_web/settings.py b/newsblur_web/settings.py index 046b59782..35bae7368 100644 --- a/newsblur_web/settings.py +++ b/newsblur_web/settings.py @@ -7,23 +7,23 @@ import yaml # = Directory Declaractions = # =========================== -SETTINGS_DIR = os.path.dirname(__file__) -NEWSBLUR_DIR = os.path.join(SETTINGS_DIR, "../") -MEDIA_ROOT = os.path.join(NEWSBLUR_DIR, 'media') -STATIC_ROOT = os.path.join(NEWSBLUR_DIR, 'static') -UTILS_ROOT = os.path.join(NEWSBLUR_DIR, 'utils') -VENDOR_ROOT = os.path.join(NEWSBLUR_DIR, 'vendor') -LOG_FILE = os.path.join(NEWSBLUR_DIR, 'logs/newsblur.log') -IMAGE_MASK = os.path.join(NEWSBLUR_DIR, 'media/img/mask.png') +SETTINGS_DIR = os.path.dirname(__file__) +NEWSBLUR_DIR = os.path.join(SETTINGS_DIR, "../") +MEDIA_ROOT = os.path.join(NEWSBLUR_DIR, "media") +STATIC_ROOT = os.path.join(NEWSBLUR_DIR, "static") +UTILS_ROOT = os.path.join(NEWSBLUR_DIR, "utils") +VENDOR_ROOT = os.path.join(NEWSBLUR_DIR, "vendor") +LOG_FILE = os.path.join(NEWSBLUR_DIR, "logs/newsblur.log") +IMAGE_MASK = os.path.join(NEWSBLUR_DIR, "media/img/mask.png") # ============== # = PYTHONPATH = # ============== -if '/utils' not in ' '.join(sys.path): +if "/utils" not in " ".join(sys.path): sys.path.append(UTILS_ROOT) -if '/vendor' not in ' '.join(sys.path): +if "/vendor" not in " ".join(sys.path): sys.path.append(VENDOR_ROOT) import datetime @@ -47,17 +47,15 @@ from utils.mongo_command_monitor import MongoCommandLogger # 
= Server Settings = # =================== -ADMINS = ( - ('Samuel Clay', 'samuel@newsblur.com'), -) +ADMINS = (("Samuel Clay", "samuel@newsblur.com"),) -SERVER_NAME = 'newsblur' -SERVER_EMAIL = 'server@newsblur.com' -HELLO_EMAIL = 'hello@newsblur.com' -NEWSBLUR_URL = 'https://www.newsblur.com' -IMAGES_URL = 'https://imageproxy.newsblur.com' -PUSH_DOMAIN = 'push.newsblur.com' -SECRET_KEY = 'YOUR_SECRET_KEY' +SERVER_NAME = "newsblur" +SERVER_EMAIL = "server@newsblur.com" +HELLO_EMAIL = "hello@newsblur.com" +NEWSBLUR_URL = "https://www.newsblur.com" +IMAGES_URL = "https://imageproxy.newsblur.com" +PUSH_DOMAIN = "push.newsblur.com" +SECRET_KEY = "YOUR_SECRET_KEY" IMAGES_SECRET_KEY = "YOUR_SECRET_IMAGE_KEY" DNSIMPLE_TOKEN = "YOUR_DNSIMPLE_TOKEN" RECAPTCHA_SECRET_KEY = "YOUR_RECAPTCHA_KEY" @@ -71,40 +69,40 @@ FLASK_SENTRY_DSN = None # = Global Settings = # =================== -DEBUG = True -TEST_DEBUG = False +DEBUG = True +TEST_DEBUG = False SEND_BROKEN_LINK_EMAILS = False -DEBUG_QUERIES = False -MANAGERS = ADMINS -PAYPAL_RECEIVER_EMAIL = 'samuel@ofbrooklyn.com' -TIME_ZONE = 'GMT' -LANGUAGE_CODE = 'en-us' -SITE_ID = 1 -USE_I18N = False -LOGIN_REDIRECT_URL = '/' -LOGIN_URL = '/account/login' -MEDIA_URL = '/media/' +DEBUG_QUERIES = False +MANAGERS = ADMINS +PAYPAL_RECEIVER_EMAIL = "samuel@ofbrooklyn.com" +TIME_ZONE = "GMT" +LANGUAGE_CODE = "en-us" +SITE_ID = 1 +USE_I18N = False +LOGIN_REDIRECT_URL = "/" +LOGIN_URL = "/account/login" +MEDIA_URL = "/media/" # URL prefix for admin media -- CSS, JavaScript and images. Make sure to use a # trailing slash. # Examples: "http://foo.com/media/", "/media/". -CIPHER_USERNAMES = False -DEBUG_ASSETS = True -HOMEPAGE_USERNAME = 'popular' -ALLOWED_HOSTS = ['*'] +CIPHER_USERNAMES = False +DEBUG_ASSETS = True +HOMEPAGE_USERNAME = "popular" +ALLOWED_HOSTS = ["*"] AUTO_PREMIUM_NEW_USERS = True AUTO_ENABLE_NEW_USERS = True ENFORCE_SIGNUP_CAPTCHA = False -ENABLE_PUSH = True -PAYPAL_TEST = False -DATA_UPLOAD_MAX_MEMORY_SIZE = 5242880 # 5 MB -FILE_UPLOAD_MAX_MEMORY_SIZE = 5242880 # 5 MB +ENABLE_PUSH = True +PAYPAL_TEST = False +DATA_UPLOAD_MAX_MEMORY_SIZE = 5242880 # 5 MB +FILE_UPLOAD_MAX_MEMORY_SIZE = 5242880 # 5 MB PROMETHEUS_EXPORT_MIGRATIONS = False -MAX_SECONDS_COMPLETE_ARCHIVE_FETCH = 60 * 60 * 1 # 1 hour -MAX_SECONDS_ARCHIVE_FETCH_SINGLE_FEED = 60 * 15 # 15 minutes -MAX_EMAILS_SENT_PER_DAY_PER_USER = 20 # Most are story notifications +MAX_SECONDS_COMPLETE_ARCHIVE_FETCH = 60 * 60 * 1 # 1 hour +MAX_SECONDS_ARCHIVE_FETCH_SINGLE_FEED = 60 * 15 # 15 minutes +MAX_EMAILS_SENT_PER_DAY_PER_USER = 20 # Most are story notifications -# Uncomment below to force all feeds to store this many stories. Default is to cut +# Uncomment below to force all feeds to store this many stories. Default is to cut # off at 25 stories for single subscriber non-premium feeds and 500 for popular feeds. 
# OVERRIDE_STORY_COUNT_MAX = 1000 @@ -114,31 +112,31 @@ MAX_EMAILS_SENT_PER_DAY_PER_USER = 20 # Most are story notifications MIDDLEWARE = ( - 'django_prometheus.middleware.PrometheusBeforeMiddleware', - 'django.middleware.gzip.GZipMiddleware', - 'django.contrib.sessions.middleware.SessionMiddleware', - 'subdomains.middleware.SubdomainMiddleware', - 'django.middleware.common.CommonMiddleware', - 'django.contrib.messages.middleware.MessageMiddleware', - 'django.contrib.auth.middleware.AuthenticationMiddleware', - 'apps.profile.middleware.TimingMiddleware', - 'apps.profile.middleware.LastSeenMiddleware', - 'apps.profile.middleware.UserAgentBanMiddleware', - 'corsheaders.middleware.CorsMiddleware', - 'apps.profile.middleware.SimpsonsMiddleware', - 'apps.profile.middleware.ServerHostnameMiddleware', - 'oauth2_provider.middleware.OAuth2TokenMiddleware', + "django_prometheus.middleware.PrometheusBeforeMiddleware", + "django.middleware.gzip.GZipMiddleware", + "django.contrib.sessions.middleware.SessionMiddleware", + "subdomains.middleware.SubdomainMiddleware", + "django.middleware.common.CommonMiddleware", + "django.contrib.messages.middleware.MessageMiddleware", + "django.contrib.auth.middleware.AuthenticationMiddleware", + "apps.profile.middleware.TimingMiddleware", + "apps.profile.middleware.LastSeenMiddleware", + "apps.profile.middleware.UserAgentBanMiddleware", + "corsheaders.middleware.CorsMiddleware", + "apps.profile.middleware.SimpsonsMiddleware", + "apps.profile.middleware.ServerHostnameMiddleware", + "oauth2_provider.middleware.OAuth2TokenMiddleware", # 'debug_toolbar.middleware.DebugToolbarMiddleware', - 'utils.request_introspection_middleware.DumpRequestMiddleware', - 'apps.profile.middleware.DBProfilerMiddleware', - 'apps.profile.middleware.SQLLogToConsoleMiddleware', - 'utils.redis_raw_log_middleware.RedisDumpMiddleware', - 'django_prometheus.middleware.PrometheusAfterMiddleware', + "utils.request_introspection_middleware.DumpRequestMiddleware", + "apps.profile.middleware.DBProfilerMiddleware", + "apps.profile.middleware.SQLLogToConsoleMiddleware", + "utils.redis_raw_log_middleware.RedisDumpMiddleware", + "django_prometheus.middleware.PrometheusAfterMiddleware", ) AUTHENTICATION_BACKENDS = ( - 'oauth2_provider.backends.OAuth2Backend', - 'django.contrib.auth.backends.ModelBackend', + "oauth2_provider.backends.OAuth2Backend", + "django.contrib.auth.backends.ModelBackend", ) CORS_ORIGIN_ALLOW_ALL = True @@ -146,14 +144,14 @@ CORS_ORIGIN_ALLOW_ALL = True CORS_ALLOW_CREDENTIALS = True OAUTH2_PROVIDER = { - 'SCOPES': { - 'read': 'View new unread stories, saved stories, and shared stories.', - 'write': 'Create new saved stories, shared stories, and subscriptions.', - 'ifttt': 'Pair your NewsBlur account with other services.', + "SCOPES": { + "read": "View new unread stories, saved stories, and shared stories.", + "write": "Create new saved stories, shared stories, and subscriptions.", + "ifttt": "Pair your NewsBlur account with other services.", }, - 'CLIENT_ID_GENERATOR_CLASS': 'oauth2_provider.generators.ClientIdGenerator', - 'ACCESS_TOKEN_EXPIRE_SECONDS': 60*60*24*365*10, # 10 years - 'AUTHORIZATION_CODE_EXPIRE_SECONDS': 60*60, # 1 hour + "CLIENT_ID_GENERATOR_CLASS": "oauth2_provider.generators.ClientIdGenerator", + "ACCESS_TOKEN_EXPIRE_SECONDS": 60 * 60 * 24 * 365 * 10, # 10 years + "AUTHORIZATION_CODE_EXPIRE_SECONDS": 60 * 60, # 1 hour } # =========== @@ -161,104 +159,87 @@ OAUTH2_PROVIDER = { # =========== LOGGING = { - 'version': 1, - 'disable_existing_loggers': False, - 
'formatters': { - 'verbose': { - 'format': '[%(asctime)-12s] %(message)s', - 'datefmt': '%b %d %H:%M:%S' - }, - 'simple': { - 'format': '%(message)s' - }, + "version": 1, + "disable_existing_loggers": False, + "formatters": { + "verbose": {"format": "[%(asctime)-12s] %(message)s", "datefmt": "%b %d %H:%M:%S"}, + "simple": {"format": "%(message)s"}, }, - 'handlers': { - 'null': { - 'level':'DEBUG', - 'class':'logging.NullHandler', + "handlers": { + "null": { + "level": "DEBUG", + "class": "logging.NullHandler", }, - 'console':{ - 'level': 'DEBUG', - 'class': 'logging.StreamHandler', - 'formatter': 'verbose' + "console": {"level": "DEBUG", "class": "logging.StreamHandler", "formatter": "verbose"}, + "vendor.apns": {"level": "DEBUG", "class": "logging.StreamHandler", "formatter": "verbose"}, + "log_file": { + "level": "DEBUG", + "class": "logging.handlers.RotatingFileHandler", + "filename": LOG_FILE, + "maxBytes": 16777216, # 16megabytes + "formatter": "verbose", }, - 'vendor.apns':{ - 'level': 'DEBUG', - 'class': 'logging.StreamHandler', - 'formatter': 'verbose' - }, - 'log_file':{ - 'level': 'DEBUG', - 'class': 'logging.handlers.RotatingFileHandler', - 'filename': LOG_FILE, - 'maxBytes': 16777216, # 16megabytes - 'formatter': 'verbose' - }, - 'mail_admins': { - 'level': 'CRITICAL', - 'class': 'django.utils.log.AdminEmailHandler', + "mail_admins": { + "level": "CRITICAL", + "class": "django.utils.log.AdminEmailHandler", # 'filters': ['require_debug_false'], - 'include_html': True, + "include_html": True, }, }, - 'loggers': { - 'django': { - 'handlers': ['console', 'log_file', 'mail_admins'], - 'level': 'ERROR', - 'propagate': False, + "loggers": { + "django": { + "handlers": ["console", "log_file", "mail_admins"], + "level": "ERROR", + "propagate": False, }, - 'django.db.backends': { - 'handlers': ['console'], - 'level': 'INFO', - 'propagate': False, + "django.db.backends": { + "handlers": ["console"], + "level": "INFO", + "propagate": False, }, - 'django.security.DisallowedHost': { - 'handlers': ['null'], - 'propagate': False, + "django.security.DisallowedHost": { + "handlers": ["null"], + "propagate": False, }, - 'elasticsearch': { - 'handlers': ['console', 'log_file'], - 'level': 'ERROR', + "elasticsearch": { + "handlers": ["console", "log_file"], + "level": "ERROR", # 'level': 'DEBUG', - 'propagate': False, + "propagate": False, }, - 'elasticsearch.trace': { - 'handlers': ['console', 'log_file'], - 'level': 'ERROR', + "elasticsearch.trace": { + "handlers": ["console", "log_file"], + "level": "ERROR", # 'level': 'DEBUG', - 'propagate': False, + "propagate": False, }, - 'zebra': { - 'handlers': ['console', 'log_file'], + "zebra": { + "handlers": ["console", "log_file"], # 'level': 'ERROR', - 'level': 'DEBUG', - 'propagate': False, + "level": "DEBUG", + "propagate": False, }, - 'newsblur': { - 'handlers': ['console', 'log_file'], - 'level': 'DEBUG', - 'propagate': False, + "newsblur": { + "handlers": ["console", "log_file"], + "level": "DEBUG", + "propagate": False, }, - 'readability': { - 'handlers': ['console', 'log_file'], - 'level': 'WARNING', - 'propagate': False, + "readability": { + "handlers": ["console", "log_file"], + "level": "WARNING", + "propagate": False, }, - 'apps': { - 'handlers': ['log_file'], - 'level': 'DEBUG', - 'propagate': True, + "apps": { + "handlers": ["log_file"], + "level": "DEBUG", + "propagate": True, + }, + "subdomains.middleware": { + "level": "ERROR", + "propagate": False, }, - 'subdomains.middleware': { - 'level': 'ERROR', - 'propagate': False, - } - }, - 
'filters': { - 'require_debug_false': { - '()': 'django.utils.log.RequireDebugFalse' - } }, + "filters": {"require_debug_false": {"()": "django.utils.log.RequireDebugFalse"}}, } logging.getLogger("requests").setLevel(logging.WARNING) @@ -268,48 +249,48 @@ logging.getLogger("urllib3").setLevel(logging.WARNING) # = Miscellaneous Settings = # ========================== -DAYS_OF_UNREAD = 30 -DAYS_OF_UNREAD_FREE = 14 -DAYS_OF_UNREAD_ARCHIVE = 9999 +DAYS_OF_UNREAD = 30 +DAYS_OF_UNREAD_FREE = 14 +DAYS_OF_UNREAD_ARCHIVE = 9999 # DoSH can be more, since you can up this value by N, and after N days, -# you can then up the DAYS_OF_UNREAD value with no impact. +# you can then up the DAYS_OF_UNREAD value with no impact. # The max is for archive subscribers. -DAYS_OF_STORY_HASHES = DAYS_OF_UNREAD +DAYS_OF_STORY_HASHES = DAYS_OF_UNREAD DAYS_OF_STORY_HASHES_ARCHIVE = DAYS_OF_UNREAD_ARCHIVE # SUBSCRIBER_EXPIRE sets the number of days after which a user who hasn't logged in # is no longer considered an active subscriber -SUBSCRIBER_EXPIRE = 7 +SUBSCRIBER_EXPIRE = 7 -# PRO_MINUTES_BETWEEN_FETCHES sets the number of minutes to fetch feeds for +# PRO_MINUTES_BETWEEN_FETCHES sets the number of minutes to fetch feeds for # Premium Pro accounts. Defaults to every 5 minutes, but that's for NewsBlur # servers. On your local, you should probably set this to 10-15 minutes PRO_MINUTES_BETWEEN_FETCHES = 5 -ROOT_URLCONF = 'newsblur_web.urls' -INTERNAL_IPS = ('127.0.0.1',) -LOGGING_LOG_SQL = True -APPEND_SLASH = False -SESSION_ENGINE = 'redis_sessions.session' -TEST_RUNNER = "utils.testrunner.TestRunner" -SESSION_COOKIE_NAME = 'newsblur_sessionid' -SESSION_COOKIE_AGE = 60*60*24*365*10 # 10 years -SESSION_COOKIE_DOMAIN = '.newsblur.com' +ROOT_URLCONF = "newsblur_web.urls" +INTERNAL_IPS = ("127.0.0.1",) +LOGGING_LOG_SQL = True +APPEND_SLASH = False +SESSION_ENGINE = "redis_sessions.session" +TEST_RUNNER = "utils.testrunner.TestRunner" +SESSION_COOKIE_NAME = "newsblur_sessionid" +SESSION_COOKIE_AGE = 60 * 60 * 24 * 365 * 10 # 10 years +SESSION_COOKIE_DOMAIN = ".newsblur.com" SESSION_COOKIE_HTTPONLY = False -SESSION_COOKIE_SECURE = True -SENTRY_DSN = 'https://XXXNEWSBLURXXX@app.getsentry.com/99999999' -SESSION_SERIALIZER = 'django.contrib.sessions.serializers.PickleSerializer' -DATA_UPLOAD_MAX_NUMBER_FIELDS = None # Handle long /reader/complete_river calls -EMAIL_BACKEND = 'anymail.backends.mailgun.EmailBackend' +SESSION_COOKIE_SECURE = True +SENTRY_DSN = "https://XXXNEWSBLURXXX@app.getsentry.com/99999999" +SESSION_SERIALIZER = "django.contrib.sessions.serializers.PickleSerializer" +DATA_UPLOAD_MAX_NUMBER_FIELDS = None # Handle long /reader/complete_river calls +EMAIL_BACKEND = "anymail.backends.mailgun.EmailBackend" # ============== # = Subdomains = # ============== SUBDOMAIN_URLCONFS = { - None: 'newsblur_web.urls', - 'www': 'newsblur_web.urls', - 'nb': 'newsblur_web.urls', + None: "newsblur_web.urls", + "www": "newsblur_web.urls", + "nb": "newsblur_web.urls", } REMOVE_WWW_FROM_DOMAIN = True @@ -324,42 +305,42 @@ LOG_TO_STREAM = False # = Django Apps = # =============== -OAUTH2_PROVIDER_APPLICATION_MODEL = 'oauth2_provider.Application' +OAUTH2_PROVIDER_APPLICATION_MODEL = "oauth2_provider.Application" INSTALLED_APPS = ( - 'django.contrib.auth', - 'django.contrib.contenttypes', - 'django.contrib.sessions', - 'django.contrib.sites', - 'django.contrib.admin', - 'django.contrib.messages', - 'django.contrib.staticfiles', - 'django_extensions', - 'django_prometheus', - 'paypal.standard.ipn', - 'apps.rss_feeds', - 'apps.reader', 
- 'apps.analyzer', - 'apps.feed_import', - 'apps.profile', - 'apps.recommendations', - 'apps.statistics', - 'apps.notifications', - 'apps.static', - 'apps.mobile', - 'apps.push', - 'apps.social', - 'apps.oauth', - 'apps.search', - 'apps.categories', - 'utils', # missing models so no migrations - 'vendor', - 'typogrify', - 'vendor.zebra', - 'anymail', - 'oauth2_provider', - 'corsheaders', - 'pipeline', + "django.contrib.auth", + "django.contrib.contenttypes", + "django.contrib.sessions", + "django.contrib.sites", + "django.contrib.admin", + "django.contrib.messages", + "django.contrib.staticfiles", + "django_extensions", + "django_prometheus", + "paypal.standard.ipn", + "apps.rss_feeds", + "apps.reader", + "apps.analyzer", + "apps.feed_import", + "apps.profile", + "apps.recommendations", + "apps.statistics", + "apps.notifications", + "apps.static", + "apps.mobile", + "apps.push", + "apps.social", + "apps.oauth", + "apps.search", + "apps.categories", + "utils", # missing models so no migrations + "vendor", + "typogrify", + "vendor.zebra", + "anymail", + "oauth2_provider", + "corsheaders", + "pipeline", ) # =================== @@ -378,30 +359,12 @@ PAYPAL_API_SECRET = "YOUR-PAYPAL-API-SECRET" # ========== CELERY_TASK_ROUTES = { - "work-queue": { - "queue": "work_queue", - "binding_key": "work_queue" - }, - "new-feeds": { - "queue": "new_feeds", - "binding_key": "new_feeds" - }, - "push-feeds": { - "queue": "push_feeds", - "binding_key": "push_feeds" - }, - "update-feeds": { - "queue": "update_feeds", - "binding_key": "update_feeds" - }, - "beat-tasks": { - "queue": "cron_queue", - "binding_key": "cron_queue" - }, - "search-indexer": { - "queue": "search_indexer", - "binding_key": "search_indexer" - }, + "work-queue": {"queue": "work_queue", "binding_key": "work_queue"}, + "new-feeds": {"queue": "new_feeds", "binding_key": "new_feeds"}, + "push-feeds": {"queue": "push_feeds", "binding_key": "push_feeds"}, + "update-feeds": {"queue": "update_feeds", "binding_key": "update_feeds"}, + "beat-tasks": {"queue": "cron_queue", "binding_key": "cron_queue"}, + "search-indexer": {"queue": "search_indexer", "binding_key": "search_indexer"}, } CELERY_TASK_QUEUES = { "work_queue": { @@ -409,114 +372,100 @@ CELERY_TASK_QUEUES = { "exchange_type": "direct", "binding_key": "work_queue", }, - "new_feeds": { - "exchange": "new_feeds", - "exchange_type": "direct", - "binding_key": "new_feeds" - }, - "push_feeds": { - "exchange": "push_feeds", - "exchange_type": "direct", - "binding_key": "push_feeds" - }, - "update_feeds": { - "exchange": "update_feeds", - "exchange_type": "direct", - "binding_key": "update_feeds" - }, - "cron_queue": { - "exchange": "cron_queue", - "exchange_type": "direct", - "binding_key": "cron_queue" - }, + "new_feeds": {"exchange": "new_feeds", "exchange_type": "direct", "binding_key": "new_feeds"}, + "push_feeds": {"exchange": "push_feeds", "exchange_type": "direct", "binding_key": "push_feeds"}, + "update_feeds": {"exchange": "update_feeds", "exchange_type": "direct", "binding_key": "update_feeds"}, + "cron_queue": {"exchange": "cron_queue", "exchange_type": "direct", "binding_key": "cron_queue"}, "beat_feeds_task": { "exchange": "beat_feeds_task", "exchange_type": "direct", - "binding_key": "beat_feeds_task" + "binding_key": "beat_feeds_task", }, "search_indexer": { "exchange": "search_indexer", "exchange_type": "direct", - "binding_key": "search_indexer" + "binding_key": "search_indexer", }, } CELERY_TASK_DEFAULT_QUEUE = "work_queue" CELERY_WORKER_PREFETCH_MULTIPLIER = 1 -CELERY_IMPORTS 
= ("apps.rss_feeds.tasks", - "apps.social.tasks", - "apps.reader.tasks", - "apps.profile.tasks", - "apps.feed_import.tasks", - "apps.search.tasks", - "apps.statistics.tasks",) -CELERY_TASK_IGNORE_RESULT = True -CELERY_TASK_ACKS_LATE = True # Retry if task fails +CELERY_IMPORTS = ( + "apps.rss_feeds.tasks", + "apps.social.tasks", + "apps.reader.tasks", + "apps.profile.tasks", + "apps.feed_import.tasks", + "apps.search.tasks", + "apps.statistics.tasks", +) +CELERY_TASK_IGNORE_RESULT = True +CELERY_TASK_ACKS_LATE = True # Retry if task fails CELERY_WORKER_MAX_TASKS_PER_CHILD = 10 -CELERY_TASK_TIME_LIMIT = 12 * 30 -CELERY_WORKER_DISABLE_RATE_LIMITS = True +CELERY_TASK_TIME_LIMIT = 12 * 30 +CELERY_WORKER_DISABLE_RATE_LIMITS = True SECONDS_TO_DELAY_CELERY_EMAILS = 60 CELERY_BEAT_SCHEDULE = { - 'task-feeds': { - 'task': 'task-feeds', - 'schedule': datetime.timedelta(minutes=1), - 'options': {'queue': 'beat_feeds_task'}, + "task-feeds": { + "task": "task-feeds", + "schedule": datetime.timedelta(minutes=1), + "options": {"queue": "beat_feeds_task"}, }, - 'task-broken-feeds': { - 'task': 'task-broken-feeds', - 'schedule': datetime.timedelta(hours=6), - 'options': {'queue': 'beat_feeds_task'}, + "task-broken-feeds": { + "task": "task-broken-feeds", + "schedule": datetime.timedelta(hours=6), + "options": {"queue": "beat_feeds_task"}, }, - 'freshen-homepage': { - 'task': 'freshen-homepage', - 'schedule': datetime.timedelta(hours=1), - 'options': {'queue': 'cron_queue'}, + "freshen-homepage": { + "task": "freshen-homepage", + "schedule": datetime.timedelta(hours=1), + "options": {"queue": "cron_queue"}, }, - 'collect-stats': { - 'task': 'collect-stats', - 'schedule': datetime.timedelta(minutes=1), - 'options': {'queue': 'cron_queue'}, + "collect-stats": { + "task": "collect-stats", + "schedule": datetime.timedelta(minutes=1), + "options": {"queue": "cron_queue"}, }, - 'collect-feedback': { - 'task': 'collect-feedback', - 'schedule': datetime.timedelta(minutes=1), - 'options': {'queue': 'cron_queue'}, + "collect-feedback": { + "task": "collect-feedback", + "schedule": datetime.timedelta(minutes=1), + "options": {"queue": "cron_queue"}, }, - 'share-popular-stories': { - 'task': 'share-popular-stories', - 'schedule': datetime.timedelta(minutes=10), - 'options': {'queue': 'cron_queue'}, + "share-popular-stories": { + "task": "share-popular-stories", + "schedule": datetime.timedelta(minutes=10), + "options": {"queue": "cron_queue"}, }, - 'clean-analytics': { - 'task': 'clean-analytics', - 'schedule': datetime.timedelta(hours=12), - 'options': {'queue': 'cron_queue', 'timeout': 720*10}, + "clean-analytics": { + "task": "clean-analytics", + "schedule": datetime.timedelta(hours=12), + "options": {"queue": "cron_queue", "timeout": 720 * 10}, }, - 'reimport-stripe-history': { - 'task': 'reimport-stripe-history', - 'schedule': datetime.timedelta(hours=6), - 'options': {'queue': 'cron_queue'}, + "reimport-stripe-history": { + "task": "reimport-stripe-history", + "schedule": datetime.timedelta(hours=6), + "options": {"queue": "cron_queue"}, }, # 'clean-spam': { # 'task': 'clean-spam', # 'schedule': datetime.timedelta(hours=1), # 'options': {'queue': 'cron_queue'}, # }, - 'clean-social-spam': { - 'task': 'clean-social-spam', - 'schedule': datetime.timedelta(hours=6), - 'options': {'queue': 'cron_queue'}, + "clean-social-spam": { + "task": "clean-social-spam", + "schedule": datetime.timedelta(hours=6), + "options": {"queue": "cron_queue"}, }, - 'premium-expire': { - 'task': 'premium-expire', - 'schedule': 
datetime.timedelta(hours=24), - 'options': {'queue': 'cron_queue'}, + "premium-expire": { + "task": "premium-expire", + "schedule": datetime.timedelta(hours=24), + "options": {"queue": "cron_queue"}, }, - 'activate-next-new-user': { - 'task': 'activate-next-new-user', - 'schedule': datetime.timedelta(minutes=5), - 'options': {'queue': 'cron_queue'}, + "activate-next-new-user": { + "task": "activate-next-new-user", + "schedule": datetime.timedelta(minutes=5), + "options": {"queue": "cron_queue"}, }, } @@ -528,32 +477,33 @@ if DOCKERBUILD: else: MONGO_PORT = 27017 MONGO_DB = { - 'host': f'db_mongo:{MONGO_PORT}', - 'name': 'newsblur', + "host": f"db_mongo:{MONGO_PORT}", + "name": "newsblur", } MONGO_ANALYTICS_DB = { - 'host': f'db_mongo_analytics:{MONGO_PORT}', - 'name': 'nbanalytics', + "host": f"db_mongo_analytics:{MONGO_PORT}", + "name": "nbanalytics", } # ==================== # = Database Routers = # ==================== + class MasterSlaveRouter(object): """A router that sets up a simple master/slave configuration""" def db_for_read(self, model, **hints): "Point all read operations to a random slave" - return 'slave' + return "slave" def db_for_write(self, model, **hints): "Point all write operations to the master" - return 'default' + return "default" def allow_relation(self, obj1, obj2, **hints): "Allow any relation between two objects in the db pool" - db_list = ('slave','default') + db_list = ("slave", "default") if obj1._state.db in db_list and obj2._state.db in db_list: return True return None @@ -567,11 +517,11 @@ class MasterSlaveRouter(object): # = Social APIs = # =============== -FACEBOOK_APP_ID = '111111111111111' -FACEBOOK_SECRET = '99999999999999999999999999999999' -FACEBOOK_NAMESPACE = 'newsblur' -TWITTER_CONSUMER_KEY = 'ooooooooooooooooooooo' -TWITTER_CONSUMER_SECRET = 'XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX' +FACEBOOK_APP_ID = "111111111111111" +FACEBOOK_SECRET = "99999999999999999999999999999999" +FACEBOOK_NAMESPACE = "newsblur" +TWITTER_CONSUMER_KEY = "ooooooooooooooooooooo" +TWITTER_CONSUMER_SECRET = "XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX" YOUTUBE_API_KEY = "XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX" # =============== @@ -580,15 +530,15 @@ YOUTUBE_API_KEY = "XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX" BACKED_BY_AWS = { - 'pages_on_s3': False, - 'icons_on_s3': False, + "pages_on_s3": False, + "icons_on_s3": False, } PROXY_S3_PAGES = True -S3_BACKUP_BUCKET = 'newsblur-backups' -S3_PAGES_BUCKET_NAME = 'pages.newsblur.com' -S3_ICONS_BUCKET_NAME = 'icons.newsblur.com' -S3_AVATARS_BUCKET_NAME = 'avatars.newsblur.com' +S3_BACKUP_BUCKET = "newsblur-backups" +S3_PAGES_BUCKET_NAME = "pages.newsblur.com" +S3_ICONS_BUCKET_NAME = "icons.newsblur.com" +S3_AVATARS_BUCKET_NAME = "avatars.newsblur.com" # ================== # = Configurations = @@ -605,12 +555,14 @@ except ModuleNotFoundError: started_task_or_app = False try: from newsblur_web.task_env import * + print(" ---> Starting NewsBlur task server...") started_task_or_app = True except ModuleNotFoundError: pass try: from newsblur_web.app_env import * + print(" ---> Starting NewsBlur app server...") started_task_or_app = True except ModuleNotFoundError: @@ -619,34 +571,29 @@ if not started_task_or_app: print(" ---> Starting NewsBlur development server...") if DOCKERBUILD: - CELERY_WORKER_CONCURRENCY = 2 + CELERY_WORKER_CONCURRENCY = 2 elif "task-work" in SERVER_NAME or SERVER_NAME.startswith("task-"): - CELERY_WORKER_CONCURRENCY = 4 + CELERY_WORKER_CONCURRENCY = 4 else: - CELERY_WORKER_CONCURRENCY = 24 - -if not DEBUG: - 
INSTALLED_APPS += ( - 'django_ses', + CELERY_WORKER_CONCURRENCY = 24 - ) +if not DEBUG: + INSTALLED_APPS += ("django_ses",) sentry_sdk.init( dsn=SENTRY_DSN, integrations=[DjangoIntegration(), RedisIntegration(), CeleryIntegration()], server_name=SERVER_NAME, - # Set traces_sample_rate to 1.0 to capture 100% # of transactions for performance monitoring. # We recommend adjusting this value in production, traces_sample_rate=0.01, - # If you wish to associate users to errors (assuming you are using # django.contrib.auth) you may enable sending PII data. - send_default_pii=True + send_default_pii=True, ) sentry_sdk.utils.MAX_STRING_LENGTH = 8192 - + COMPRESS = not DEBUG ACCOUNT_ACTIVATION_DAYS = 30 AWS_ACCESS_KEY_ID = S3_ACCESS_KEY @@ -655,10 +602,11 @@ AWS_SECRET_ACCESS_KEY = S3_SECRET os.environ["AWS_ACCESS_KEY_ID"] = AWS_ACCESS_KEY_ID os.environ["AWS_SECRET_ACCESS_KEY"] = AWS_SECRET_ACCESS_KEY + def clear_prometheus_aggregation_stats(): - prom_folder = '/srv/newsblur/.prom_cache' + prom_folder = "/srv/newsblur/.prom_cache" os.makedirs(prom_folder, exist_ok=True) - os.environ['PROMETHEUS_MULTIPROC_DIR'] = prom_folder + os.environ["PROMETHEUS_MULTIPROC_DIR"] = prom_folder for filename in os.listdir(prom_folder): file_path = os.path.join(prom_folder, filename) try: @@ -667,24 +615,27 @@ def clear_prometheus_aggregation_stats(): elif os.path.isdir(file_path): shutil.rmtree(file_path) except Exception as e: - if 'No such file' in str(e): + if "No such file" in str(e): return - print('Failed to delete %s. Reason: %s' % (file_path, e)) + print("Failed to delete %s. Reason: %s" % (file_path, e)) clear_prometheus_aggregation_stats() if DEBUG: template_loaders = [ - 'django.template.loaders.filesystem.Loader', - 'django.template.loaders.app_directories.Loader', + "django.template.loaders.filesystem.Loader", + "django.template.loaders.app_directories.Loader", ] else: template_loaders = [ - ('django.template.loaders.cached.Loader', ( - 'django.template.loaders.filesystem.Loader', - 'django.template.loaders.app_directories.Loader', - )), + ( + "django.template.loaders.cached.Loader", + ( + "django.template.loaders.filesystem.Loader", + "django.template.loaders.app_directories.Loader", + ), + ), ] @@ -692,19 +643,21 @@ BASE_DIR = os.path.dirname(os.path.dirname(__file__)) TEMPLATES = [ { - 'BACKEND': 'django.template.backends.django.DjangoTemplates', - 'DIRS': [os.path.join(NEWSBLUR_DIR, 'templates'), - os.path.join(NEWSBLUR_DIR, 'vendor/zebra/templates')], + "BACKEND": "django.template.backends.django.DjangoTemplates", + "DIRS": [ + os.path.join(NEWSBLUR_DIR, "templates"), + os.path.join(NEWSBLUR_DIR, "vendor/zebra/templates"), + ], # 'APP_DIRS': True, - 'OPTIONS': { - 'context_processors': [ + "OPTIONS": { + "context_processors": [ "django.contrib.auth.context_processors.auth", "django.template.context_processors.debug", "django.template.context_processors.media", - 'django.template.context_processors.request', - 'django.contrib.messages.context_processors.messages', + "django.template.context_processors.request", + "django.contrib.messages.context_processors.messages", ], - 'loaders': template_loaders, + "loaders": template_loaders, }, } ] @@ -726,14 +679,14 @@ MONGO_COMMAND_LOGGER = MongoCommandLogger() monitoring.register(MONGO_COMMAND_LOGGER) MONGO_DB_DEFAULTS = { - 'name': 'newsblur', - 'host': f'db_mongo:{MONGO_PORT}', - 'alias': 'default', - 'unicode_decode_error_handler': 'ignore', - 'connect': False, + "name": "newsblur", + "host": f"db_mongo:{MONGO_PORT}", + "alias": "default", + 
"unicode_decode_error_handler": "ignore", + "connect": False, } MONGO_DB = dict(MONGO_DB_DEFAULTS, **MONGO_DB) -MONGO_DB_NAME = MONGO_DB.pop('name') +MONGO_DB_NAME = MONGO_DB.pop("name") # MONGO_URI = 'mongodb://%s' % (MONGO_DB.pop('host'),) # if MONGO_DB.get('read_preference', pymongo.ReadPreference.PRIMARY) != pymongo.ReadPreference.PRIMARY: @@ -747,18 +700,24 @@ MONGODB = connect(MONGO_DB_NAME, **MONGO_DB) # MONGODB = connect(host="mongodb://localhost:27017/newsblur", connect=False) MONGO_ANALYTICS_DB_DEFAULTS = { - 'name': 'nbanalytics', - 'host': f'db_mongo_analytics:{MONGO_PORT}', - 'alias': 'nbanalytics', + "name": "nbanalytics", + "host": f"db_mongo_analytics:{MONGO_PORT}", + "alias": "nbanalytics", } MONGO_ANALYTICS_DB = dict(MONGO_ANALYTICS_DB_DEFAULTS, **MONGO_ANALYTICS_DB) # MONGO_ANALYTICS_DB_NAME = MONGO_ANALYTICS_DB.pop('name') # MONGOANALYTICSDB = connect(MONGO_ANALYTICS_DB_NAME, **MONGO_ANALYTICS_DB) -if 'username' in MONGO_ANALYTICS_DB: - MONGOANALYTICSDB = connect(db=MONGO_ANALYTICS_DB['name'], host=f"mongodb://{MONGO_ANALYTICS_DB['username']}:{MONGO_ANALYTICS_DB['password']}@{MONGO_ANALYTICS_DB['host']}/?authSource=admin", alias="nbanalytics") +if "username" in MONGO_ANALYTICS_DB: + MONGOANALYTICSDB = connect( + db=MONGO_ANALYTICS_DB["name"], + host=f"mongodb://{MONGO_ANALYTICS_DB['username']}:{MONGO_ANALYTICS_DB['password']}@{MONGO_ANALYTICS_DB['host']}/?authSource=admin", + alias="nbanalytics", + ) else: - MONGOANALYTICSDB = connect(db=MONGO_ANALYTICS_DB['name'], host=f"mongodb://{MONGO_ANALYTICS_DB['host']}/", alias="nbanalytics") + MONGOANALYTICSDB = connect( + db=MONGO_ANALYTICS_DB["name"], host=f"mongodb://{MONGO_ANALYTICS_DB['host']}/", alias="nbanalytics" + ) # ========= @@ -777,149 +736,164 @@ else: REDIS_PUBSUB_PORT = 6383 if REDIS_USER is None: - # REDIS has been renamed to REDIS_USER. + # REDIS has been renamed to REDIS_USER. 
REDIS_USER = REDIS CELERY_REDIS_DB_NUM = 4 SESSION_REDIS_DB = 5 -CELERY_BROKER_URL = "redis://%s:%s/%s" % (REDIS_USER['host'], REDIS_USER_PORT,CELERY_REDIS_DB_NUM) +CELERY_BROKER_URL = "redis://%s:%s/%s" % (REDIS_USER["host"], REDIS_USER_PORT, CELERY_REDIS_DB_NUM) CELERY_RESULT_BACKEND = CELERY_BROKER_URL -BROKER_TRANSPORT_OPTIONS = { - "max_retries": 3, - "interval_start": 0, - "interval_step": 0.2, - "interval_max": 0.5 -} +BROKER_TRANSPORT_OPTIONS = {"max_retries": 3, "interval_start": 0, "interval_step": 0.2, "interval_max": 0.5} SESSION_REDIS = { - 'host': REDIS_SESSIONS['host'], - 'port': REDIS_SESSION_PORT, - 'db': SESSION_REDIS_DB, + "host": REDIS_SESSIONS["host"], + "port": REDIS_SESSION_PORT, + "db": SESSION_REDIS_DB, # 'password': 'password', - 'prefix': '', - 'socket_timeout': 10, - 'retry_on_timeout': True + "prefix": "", + "socket_timeout": 10, + "retry_on_timeout": True, } CACHES = { - 'default': { - 'BACKEND': 'django_redis.cache.RedisCache', - 'LOCATION': 'redis://%s:%s/6' % (REDIS_USER['host'], REDIS_USER_PORT), + "default": { + "BACKEND": "django_redis.cache.RedisCache", + "LOCATION": "redis://%s:%s/6" % (REDIS_USER["host"], REDIS_USER_PORT), }, } -REDIS_POOL = redis.ConnectionPool(host=REDIS_USER['host'], port=REDIS_USER_PORT, db=0, decode_responses=True) -REDIS_ANALYTICS_POOL = redis.ConnectionPool(host=REDIS_USER['host'], port=REDIS_USER_PORT, db=2, decode_responses=True) -REDIS_STATISTICS_POOL = redis.ConnectionPool(host=REDIS_USER['host'], port=REDIS_USER_PORT, db=3, decode_responses=True) -REDIS_FEED_UPDATE_POOL = redis.ConnectionPool(host=REDIS_USER['host'], port=REDIS_USER_PORT, db=4, decode_responses=True) -REDIS_STORY_HASH_TEMP_POOL = redis.ConnectionPool(host=REDIS_USER['host'], port=REDIS_USER_PORT, db=10, decode_responses=True) +REDIS_POOL = redis.ConnectionPool(host=REDIS_USER["host"], port=REDIS_USER_PORT, db=0, decode_responses=True) +REDIS_ANALYTICS_POOL = redis.ConnectionPool( + host=REDIS_USER["host"], port=REDIS_USER_PORT, db=2, decode_responses=True +) +REDIS_STATISTICS_POOL = redis.ConnectionPool( + host=REDIS_USER["host"], port=REDIS_USER_PORT, db=3, decode_responses=True +) +REDIS_FEED_UPDATE_POOL = redis.ConnectionPool( + host=REDIS_USER["host"], port=REDIS_USER_PORT, db=4, decode_responses=True +) +REDIS_STORY_HASH_TEMP_POOL = redis.ConnectionPool( + host=REDIS_USER["host"], port=REDIS_USER_PORT, db=10, decode_responses=True +) # REDIS_CACHE_POOL = redis.ConnectionPool(host=REDIS_USER['host'], port=REDIS_USER_PORT, db=6) # Duped in CACHES -REDIS_STORY_HASH_POOL = redis.ConnectionPool(host=REDIS_STORY['host'], port=REDIS_STORY_PORT, db=1, decode_responses=True) -REDIS_FEED_READ_POOL = redis.ConnectionPool(host=REDIS_SESSIONS['host'], port=REDIS_SESSION_PORT, db=1, decode_responses=True) -REDIS_FEED_SUB_POOL = redis.ConnectionPool(host=REDIS_SESSIONS['host'], port=REDIS_SESSION_PORT, db=2, decode_responses=True) -REDIS_SESSION_POOL = redis.ConnectionPool(host=REDIS_SESSIONS['host'], port=REDIS_SESSION_PORT, db=5, decode_responses=True) -REDIS_PUBSUB_POOL = redis.ConnectionPool(host=REDIS_PUBSUB['host'], port=REDIS_PUBSUB_PORT, db=0, decode_responses=True) +REDIS_STORY_HASH_POOL = redis.ConnectionPool( + host=REDIS_STORY["host"], port=REDIS_STORY_PORT, db=1, decode_responses=True +) +REDIS_FEED_READ_POOL = redis.ConnectionPool( + host=REDIS_SESSIONS["host"], port=REDIS_SESSION_PORT, db=1, decode_responses=True +) +REDIS_FEED_SUB_POOL = redis.ConnectionPool( + host=REDIS_SESSIONS["host"], port=REDIS_SESSION_PORT, db=2, decode_responses=True +) 
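These ConnectionPool objects are shared, one per logical Redis database; a minimal sketch of how such a pool is typically consumed via redis-py (the key and score below are illustrative, not taken from this diff):

    import redis
    from django.conf import settings

    # Reuse the shared pool rather than opening a new connection per call.
    r = redis.Redis(connection_pool=settings.REDIS_FEED_UPDATE_POOL)
    r.zadd("scheduled_updates", {"feed:42": 1700000000})  # hypothetical key/score
    next_up = r.zrange("scheduled_updates", 0, 10, withscores=True)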
+REDIS_SESSION_POOL = redis.ConnectionPool( + host=REDIS_SESSIONS["host"], port=REDIS_SESSION_PORT, db=5, decode_responses=True +) +REDIS_PUBSUB_POOL = redis.ConnectionPool( + host=REDIS_PUBSUB["host"], port=REDIS_PUBSUB_PORT, db=0, decode_responses=True +) # ========== # = Celery = # ========== # celeryapp.autodiscover_tasks(INSTALLED_APPS) -accept_content = ['pickle', 'json', 'msgpack', 'yaml'] +accept_content = ["pickle", "json", "msgpack", "yaml"] # ========== # = Assets = # ========== -STATIC_URL = '/static/' +STATIC_URL = "/static/" # STATICFILES_STORAGE = 'pipeline.storage.PipelineManifestStorage' -STATICFILES_STORAGE = 'utils.pipeline_utils.PipelineStorage' +STATICFILES_STORAGE = "utils.pipeline_utils.PipelineStorage" # STATICFILES_STORAGE = 'utils.pipeline_utils.GzipPipelineStorage' STATICFILES_FINDERS = ( # 'pipeline.finders.FileSystemFinder', # 'django.contrib.staticfiles.finders.FileSystemFinder', # 'django.contrib.staticfiles.finders.AppDirectoriesFinder', # 'pipeline.finders.AppDirectoriesFinder', - 'utils.pipeline_utils.AppDirectoriesFinder', - 'utils.pipeline_utils.FileSystemFinder', + "utils.pipeline_utils.AppDirectoriesFinder", + "utils.pipeline_utils.FileSystemFinder", # 'pipeline.finders.PipelineFinder', ) STATICFILES_DIRS = [ # '/usr/local/lib/python3.9/site-packages/django/contrib/admin/static/', MEDIA_ROOT, ] -with open(os.path.join(SETTINGS_DIR, 'assets.yml')) as stream: +with open(os.path.join(SETTINGS_DIR, "assets.yml")) as stream: assets = yaml.safe_load(stream) PIPELINE = { - 'PIPELINE_ENABLED': not DEBUG_ASSETS, - 'PIPELINE_COLLECTOR_ENABLED': not DEBUG_ASSETS, - 'SHOW_ERRORS_INLINE': DEBUG_ASSETS, - 'CSS_COMPRESSOR': 'pipeline.compressors.yuglify.YuglifyCompressor', - 'JS_COMPRESSOR': 'pipeline.compressors.closure.ClosureCompressor', + "PIPELINE_ENABLED": not DEBUG_ASSETS, + "PIPELINE_COLLECTOR_ENABLED": not DEBUG_ASSETS, + "SHOW_ERRORS_INLINE": DEBUG_ASSETS, + "CSS_COMPRESSOR": "pipeline.compressors.yuglify.YuglifyCompressor", + "JS_COMPRESSOR": "pipeline.compressors.closure.ClosureCompressor", # 'CSS_COMPRESSOR': 'pipeline.compressors.NoopCompressor', # 'JS_COMPRESSOR': 'pipeline.compressors.NoopCompressor', - 'CLOSURE_BINARY': '/usr/bin/java -jar /usr/local/bin/compiler.jar', - 'CLOSURE_ARGUMENTS': '--language_in ECMASCRIPT_2016 --language_out ECMASCRIPT_2016 --warning_level DEFAULT', - 'JAVASCRIPT': { - 'common': { - 'source_filenames': assets['javascripts']['common'], - 'output_filename': 'js/common.js', + "CLOSURE_BINARY": "/usr/bin/java -jar /usr/local/bin/compiler.jar", + "CLOSURE_ARGUMENTS": "--language_in ECMASCRIPT_2016 --language_out ECMASCRIPT_2016 --warning_level DEFAULT", + "JAVASCRIPT": { + "common": { + "source_filenames": assets["javascripts"]["common"], + "output_filename": "js/common.js", }, - 'statistics': { - 'source_filenames': assets['javascripts']['statistics'], - 'output_filename': 'js/statistics.js', + "statistics": { + "source_filenames": assets["javascripts"]["statistics"], + "output_filename": "js/statistics.js", }, - 'payments': { - 'source_filenames': assets['javascripts']['payments'], - 'output_filename': 'js/payments.js', + "payments": { + "source_filenames": assets["javascripts"]["payments"], + "output_filename": "js/payments.js", }, - 'bookmarklet': { - 'source_filenames': assets['javascripts']['bookmarklet'], - 'output_filename': 'js/bookmarklet.js', + "bookmarklet": { + "source_filenames": assets["javascripts"]["bookmarklet"], + "output_filename": "js/bookmarklet.js", }, - 'blurblog': { - 'source_filenames': 
assets['javascripts']['blurblog'], - 'output_filename': 'js/blurblog.js', + "blurblog": { + "source_filenames": assets["javascripts"]["blurblog"], + "output_filename": "js/blurblog.js", }, }, - 'STYLESHEETS': { - 'common': { - 'source_filenames': assets['stylesheets']['common'], - 'output_filename': 'css/common.css', + "STYLESHEETS": { + "common": { + "source_filenames": assets["stylesheets"]["common"], + "output_filename": "css/common.css", # 'variant': 'datauri', }, - 'bookmarklet': { - 'source_filenames': assets['stylesheets']['bookmarklet'], - 'output_filename': 'css/bookmarklet.css', + "bookmarklet": { + "source_filenames": assets["stylesheets"]["bookmarklet"], + "output_filename": "css/bookmarklet.css", # 'variant': 'datauri', }, - 'blurblog': { - 'source_filenames': assets['stylesheets']['blurblog'], - 'output_filename': 'css/blurblog.css', + "blurblog": { + "source_filenames": assets["stylesheets"]["blurblog"], + "output_filename": "css/blurblog.css", # 'variant': 'datauri', }, - } + }, } -paypalrestsdk.configure({ - "mode": "sandbox" if DEBUG else "live", - "client_id": PAYPAL_API_CLIENTID, - "client_secret": PAYPAL_API_SECRET -}) +paypalrestsdk.configure( + { + "mode": "sandbox" if DEBUG else "live", + "client_id": PAYPAL_API_CLIENTID, + "client_secret": PAYPAL_API_SECRET, + } +) # ======= # = AWS = # ======= S3_CONN = None -if BACKED_BY_AWS.get('pages_on_s3') or BACKED_BY_AWS.get('icons_on_s3'): +if BACKED_BY_AWS.get("pages_on_s3") or BACKED_BY_AWS.get("icons_on_s3"): boto_session = boto3.Session( aws_access_key_id=S3_ACCESS_KEY, aws_secret_access_key=S3_SECRET, ) - S3_CONN = boto_session.resource('s3') + S3_CONN = boto_session.resource("s3") django.http.request.host_validation_re = re.compile(r"^([a-z0-9.-_\-]+|\[[a-f0-9]*:[a-f0-9:]+\])(:\d+)?$") @@ -940,6 +914,7 @@ def monkey_patched_get_user(request): and when this monkey patch is removed. 
""" from django.contrib.auth.models import AnonymousUser + user = None try: user_id = auth._get_user_session_key(request) @@ -951,7 +926,11 @@ def monkey_patched_get_user(request): backend = auth.load_backend(backend_path) user = backend.get_user(user_id) session_hash = request.session.get(auth.HASH_SESSION_KEY) - logging.debug(request, " ---> Ignoring session hash: %s vs %s" % (user.get_session_auth_hash() if user else "[no user]", session_hash)) + logging.debug( + request, + " ---> Ignoring session hash: %s vs %s" + % (user.get_session_auth_hash() if user else "[no user]", session_hash), + ) # # Verify the session # if hasattr(user, 'get_session_auth_hash'): # session_hash = request.session.get(HASH_SESSION_KEY) @@ -965,4 +944,5 @@ def monkey_patched_get_user(request): return user or AnonymousUser() + auth.get_user = monkey_patched_get_user diff --git a/newsblur_web/sitecustomize.py b/newsblur_web/sitecustomize.py index 80ae27feb..0fb429ec5 100644 --- a/newsblur_web/sitecustomize.py +++ b/newsblur_web/sitecustomize.py @@ -1,7 +1,8 @@ import sys -sys.setdefaultencoding('utf-8') + +sys.setdefaultencoding("utf-8") import os -os.putenv('LANG', 'en_US.UTF-8') -os.putenv('LC_ALL', 'en_US.UTF-8') +os.putenv("LANG", "en_US.UTF-8") +os.putenv("LC_ALL", "en_US.UTF-8") diff --git a/newsblur_web/test_settings.py b/newsblur_web/test_settings.py index bdc58a449..a4480bda0 100644 --- a/newsblur_web/test_settings.py +++ b/newsblur_web/test_settings.py @@ -1,13 +1,15 @@ import os + DOCKERBUILD = os.getenv("DOCKERBUILD") from newsblur_web.settings import * -DATABASES['default']['ENGINE'] = 'django.db.backends.sqlite3' -DATABASES['default']['OPTIONS'] = {} -DATABASES['default']['NAME'] = 'nb.db' -DATABASES['default']['TEST_NAME'] = os.path.join(BASE_DIR, 'db.sqlite3.test') + +DATABASES["default"]["ENGINE"] = "django.db.backends.sqlite3" +DATABASES["default"]["OPTIONS"] = {} +DATABASES["default"]["NAME"] = "nb.db" +DATABASES["default"]["TEST_NAME"] = os.path.join(BASE_DIR, "db.sqlite3.test") -#DATABASES['default'] = { +# DATABASES['default'] = { # 'NAME': 'newslur', # 'ENGINE': 'django.db.backends.postgresql_psycopg2', # 'USER': 'newsblur', @@ -29,19 +31,19 @@ LOGGING_CONFIG = None if DOCKERBUILD: MONGO_PORT = 29019 MONGO_DB = { - 'name': 'newsblur_test', - 'host': 'db_mongo:29019', + "name": "newsblur_test", + "host": "db_mongo:29019", } else: MONGO_PORT = 27017 MONGO_DB = { - 'name': 'newsblur_test', - 'host': '127.0.0.1:27017', + "name": "newsblur_test", + "host": "127.0.0.1:27017", } SERVER_NAME -MONGO_DATABASE_NAME = 'test_newsblur' +MONGO_DATABASE_NAME = "test_newsblur" SOUTH_TESTS_MIGRATE = False DAYS_OF_UNREAD = 9999 @@ -50,5 +52,5 @@ TEST_DEBUG = True DEBUG = True SITE_ID = 2 SENTRY_DSN = None -HOMEPAGE_USERNAME = 'conesus' -SERVER_NAME = 'test_newsblur' +HOMEPAGE_USERNAME = "conesus" +SERVER_NAME = "test_newsblur" diff --git a/newsblur_web/urls.py b/newsblur_web/urls.py index 3520243b8..dd5950fe6 100644 --- a/newsblur_web/urls.py +++ b/newsblur_web/urls.py @@ -11,73 +11,79 @@ from django.contrib.auth.views import LogoutView admin.autodiscover() urlpatterns = [ - url(r'^$', reader_views.index, name='index'), - url(r'^reader/', include('apps.reader.urls')), - url(r'^add/?', reader_views.index), - url(r'^try/?', reader_views.index), - url(r'^site/(?P\d+)?', reader_views.index), - url(r'^folder/(?P\d+)?', reader_views.index, name='folder'), - url(r'^saved/(?P\d+)?', reader_views.index, name='saved-stories-tag'), - url(r'^saved/?', reader_views.index), - url(r'^read/?', reader_views.index), - 
url(r'^social/\d+/.*?', reader_views.index), - url(r'^user/.*?', reader_views.index), - url(r'^null/.*?', reader_views.index), - url(r'^story/.*?', reader_views.index), - url(r'^feed/?', social_views.shared_stories_rss_feed_noid), - url(r'^rss_feeds/', include('apps.rss_feeds.urls')), - url(r'^analyzer/', include('apps.analyzer.urls')), - url(r'^classifier/', include('apps.analyzer.urls')), - url(r'^folder_rss/', include('apps.profile.urls')), - url(r'^profile/', include('apps.profile.urls')), - url(r'^import/', include('apps.feed_import.urls')), - url(r'^api/', include('apps.api.urls')), - url(r'^recommendations/', include('apps.recommendations.urls')), - url(r'^notifications/?', include('apps.notifications.urls')), - url(r'^statistics/', include('apps.statistics.urls')), - url(r'^social/', include('apps.social.urls')), - url(r'^search/', include('apps.search.urls')), - url(r'^oauth/', include('apps.oauth.urls')), - url(r'^mobile/', include('apps.mobile.urls')), - url(r'^m/', include('apps.mobile.urls')), - url(r'^push/', include('apps.push.urls')), - url(r'^newsletters/', include('apps.newsletters.urls')), - url(r'^categories/', include('apps.categories.urls')), - url(r'^_haproxychk', static_views.haproxy_check), - url(r'^_dbcheck/postgres', static_views.postgres_check), - url(r'^_dbcheck/mongo', static_views.mongo_check), - url(r'^_dbcheck/redis', static_views.redis_check), - url(r'^_dbcheck/elasticsearch', static_views.elasticsearch_check), - url(r'^admin/', admin.site.urls), - url(r'^about/?', static_views.about, name='about'), - url(r'^faq/?', static_views.faq, name='faq'), - url(r'^api/?$', static_views.api, name='api'), - url(r'^press/?', static_views.press, name='press'), - url(r'^feedback/?', static_views.feedback, name='feedback'), - url(r'^privacy/?', static_views.privacy, name='privacy'), - url(r'^tos/?', static_views.tos, name='tos'), - url(r'^manifest.webmanifest', static_views.webmanifest, name='webmanifest'), - url(r'^.well-known/apple-app-site-association', static_views.apple_app_site_assoc, name='apple-app-site-assoc'), - url(r'^.well-known/apple-developer-merchantid-domain-association', static_views.apple_developer_merchantid, name='apple-developer-merchantid'), - url(r'^ios/download/?', static_views.ios_download, name='ios-download'), - url(r'^ios/NewsBlur.plist', static_views.ios_plist, name='ios-download-plist'), - url(r'^ios/NewsBlur.ipa', static_views.ios_ipa, name='ios-download-ipa'), - url(r'^ios/?', static_views.ios, name='ios-static'), - url(r'^iphone/?', static_views.ios), - url(r'^ipad/?', static_views.ios), - url(r'^android/?', static_views.android, name='android-static'), - url(r'^firefox/?', static_views.firefox, name='firefox'), - url(r'zebra/', include('zebra.urls', namespace="zebra")), - url(r'^account/redeem_code/?$', profile_views.redeem_code, name='redeem-code'), - url(r'^account/login/?$', profile_views.login, name='login'), - url(r'^account/signup/?$', profile_views.signup, name='signup'), - url(r'^account/logout/?$', - LogoutView, - {'next_page': '/'}, name='logout'), - url(r'^account/ifttt/v1/', include('apps.oauth.urls')), - url(r'^account/', include('oauth2_provider.urls', namespace='oauth2_provider')), - url(r'^monitor/', include('apps.monitor.urls'), name="monitor"), - url('', include('django_prometheus.urls')), + url(r"^$", reader_views.index, name="index"), + url(r"^reader/", include("apps.reader.urls")), + url(r"^add/?", reader_views.index), + url(r"^try/?", reader_views.index), + url(r"^site/(?P\d+)?", reader_views.index), + 
url(r"^folder/(?P\d+)?", reader_views.index, name="folder"), + url(r"^saved/(?P\d+)?", reader_views.index, name="saved-stories-tag"), + url(r"^saved/?", reader_views.index), + url(r"^read/?", reader_views.index), + url(r"^social/\d+/.*?", reader_views.index), + url(r"^user/.*?", reader_views.index), + url(r"^null/.*?", reader_views.index), + url(r"^story/.*?", reader_views.index), + url(r"^feed/?", social_views.shared_stories_rss_feed_noid), + url(r"^rss_feeds/", include("apps.rss_feeds.urls")), + url(r"^analyzer/", include("apps.analyzer.urls")), + url(r"^classifier/", include("apps.analyzer.urls")), + url(r"^folder_rss/", include("apps.profile.urls")), + url(r"^profile/", include("apps.profile.urls")), + url(r"^import/", include("apps.feed_import.urls")), + url(r"^api/", include("apps.api.urls")), + url(r"^recommendations/", include("apps.recommendations.urls")), + url(r"^notifications/?", include("apps.notifications.urls")), + url(r"^statistics/", include("apps.statistics.urls")), + url(r"^social/", include("apps.social.urls")), + url(r"^search/", include("apps.search.urls")), + url(r"^oauth/", include("apps.oauth.urls")), + url(r"^mobile/", include("apps.mobile.urls")), + url(r"^m/", include("apps.mobile.urls")), + url(r"^push/", include("apps.push.urls")), + url(r"^newsletters/", include("apps.newsletters.urls")), + url(r"^categories/", include("apps.categories.urls")), + url(r"^_haproxychk", static_views.haproxy_check), + url(r"^_dbcheck/postgres", static_views.postgres_check), + url(r"^_dbcheck/mongo", static_views.mongo_check), + url(r"^_dbcheck/redis", static_views.redis_check), + url(r"^_dbcheck/elasticsearch", static_views.elasticsearch_check), + url(r"^admin/", admin.site.urls), + url(r"^about/?", static_views.about, name="about"), + url(r"^faq/?", static_views.faq, name="faq"), + url(r"^api/?$", static_views.api, name="api"), + url(r"^press/?", static_views.press, name="press"), + url(r"^feedback/?", static_views.feedback, name="feedback"), + url(r"^privacy/?", static_views.privacy, name="privacy"), + url(r"^tos/?", static_views.tos, name="tos"), + url(r"^manifest.webmanifest", static_views.webmanifest, name="webmanifest"), + url( + r"^.well-known/apple-app-site-association", + static_views.apple_app_site_assoc, + name="apple-app-site-assoc", + ), + url( + r"^.well-known/apple-developer-merchantid-domain-association", + static_views.apple_developer_merchantid, + name="apple-developer-merchantid", + ), + url(r"^ios/download/?", static_views.ios_download, name="ios-download"), + url(r"^ios/NewsBlur.plist", static_views.ios_plist, name="ios-download-plist"), + url(r"^ios/NewsBlur.ipa", static_views.ios_ipa, name="ios-download-ipa"), + url(r"^ios/?", static_views.ios, name="ios-static"), + url(r"^iphone/?", static_views.ios), + url(r"^ipad/?", static_views.ios), + url(r"^android/?", static_views.android, name="android-static"), + url(r"^firefox/?", static_views.firefox, name="firefox"), + url(r"zebra/", include("zebra.urls", namespace="zebra")), + url(r"^account/redeem_code/?$", profile_views.redeem_code, name="redeem-code"), + url(r"^account/login/?$", profile_views.login, name="login"), + url(r"^account/signup/?$", profile_views.signup, name="signup"), + url(r"^account/logout/?$", LogoutView, {"next_page": "/"}, name="logout"), + url(r"^account/ifttt/v1/", include("apps.oauth.urls")), + url(r"^account/", include("oauth2_provider.urls", namespace="oauth2_provider")), + url(r"^monitor/", include("apps.monitor.urls"), name="monitor"), + url("", include("django_prometheus.urls")), ] 
if settings.DEBUG: diff --git a/newsblur_web/wsgi.py b/newsblur_web/wsgi.py index cfbfbc22f..2f071b032 100644 --- a/newsblur_web/wsgi.py +++ b/newsblur_web/wsgi.py @@ -6,7 +6,9 @@ https://docs.djangoproject.com/en/1.7/howto/deployment/wsgi/ """ import os + os.environ.setdefault("DJANGO_SETTINGS_MODULE", "newsblur_web.settings") from django.core.wsgi import get_wsgi_application -application = get_wsgi_application() \ No newline at end of file + +application = get_wsgi_application() diff --git a/perf/locust.py b/perf/locust.py index 38587d8ef..15d748547 100644 --- a/perf/locust.py +++ b/perf/locust.py @@ -3,6 +3,7 @@ from locust import HttpUser, task, between import os import requests + class NB_PerfTest(HttpUser): wait_time = between(1, 2.5) diff --git a/utils/PyRSS2Gen.py b/utils/PyRSS2Gen.py index 8a4ff827e..19bb11cd5 100644 --- a/utils/PyRSS2Gen.py +++ b/utils/PyRSS2Gen.py @@ -8,16 +8,18 @@ _generator_name = __name__ + "-" + ".".join(map(str, __version__)) import datetime + # Could make this the base class; will need to add 'publish' class WriteXmlMixin: - def write_xml(self, outfile, encoding = "iso-8859-1"): + def write_xml(self, outfile, encoding="iso-8859-1"): from xml.sax import saxutils + handler = saxutils.XMLGenerator(outfile, encoding) handler.startDocument() self.publish(handler) handler.endDocument() - def to_xml(self, encoding = "iso-8859-1"): + def to_xml(self, encoding="iso-8859-1"): try: import io as StringIO except ImportError: @@ -27,7 +29,7 @@ class WriteXmlMixin: return f.getvalue() -def _element(handler, name, obj, d = {}): +def _element(handler, name, obj, d={}): if isinstance(obj, str) or obj is None: # special-case handling to make the API easier # to use for the common case. @@ -39,6 +41,7 @@ def _element(handler, name, obj, d = {}): # It better know how to emit the correct XML. obj.publish(handler) + def _opt_element(handler, name, obj): if obj is None: return @@ -58,13 +61,16 @@ def _format_date(dt): # rfc822 and email.Utils modules assume a timestamp. The # following is based on the rfc822 module. return "%s, %02d %s %04d %02d:%02d:%02d GMT" % ( - ["Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun"][dt.weekday()], - dt.day, - ["Jan", "Feb", "Mar", "Apr", "May", "Jun", - "Jul", "Aug", "Sep", "Oct", "Nov", "Dec"][dt.month-1], - dt.year, dt.hour, dt.minute, dt.second) + ["Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun"][dt.weekday()], + dt.day, + ["Jan", "Feb", "Mar", "Apr", "May", "Jun", "Jul", "Aug", "Sep", "Oct", "Nov", "Dec"][dt.month - 1], + dt.year, + dt.hour, + dt.minute, + dt.second, + ) + - ## # A couple simple wrapper objects for the fields which # take a simple value other than a string. @@ -72,19 +78,23 @@ class IntElement: """implements the 'publish' API for integers Takes the tag name and the integer value to publish. - + (Could be used for anything which uses str() to be published to text for XML.) """ + element_attrs = {} + def __init__(self, name, val): self.name = name self.val = val + def publish(self, handler): handler.startElement(self.name, self.element_attrs) handler.characters(str(self.val)) handler.endElement(self.name) + class DateElement: """implements the 'publish' API for a datetime.datetime @@ -92,53 +102,70 @@ class DateElement: Converts the datetime to RFC 2822 timestamp (4-digit year). 
""" + def __init__(self, name, dt): self.name = name self.dt = dt + def publish(self, handler): _element(handler, self.name, _format_date(self.dt)) + + #### + class Category: """Publish a category element""" - def __init__(self, category, domain = None): + + def __init__(self, category, domain=None): self.category = category self.domain = domain + def publish(self, handler): d = {} if self.domain is not None: d["domain"] = self.domain _element(handler, "category", self.category, d) + class Cloud: """Publish a cloud""" - def __init__(self, domain, port, path, - registerProcedure, protocol): + + def __init__(self, domain, port, path, registerProcedure, protocol): self.domain = domain self.port = port self.path = path self.registerProcedure = registerProcedure self.protocol = protocol + def publish(self, handler): - _element(handler, "cloud", None, { - "domain": self.domain, - "port": str(self.port), - "path": self.path, - "registerProcedure": self.registerProcedure, - "protocol": self.protocol}) + _element( + handler, + "cloud", + None, + { + "domain": self.domain, + "port": str(self.port), + "path": self.path, + "registerProcedure": self.registerProcedure, + "protocol": self.protocol, + }, + ) + class Image: """Publish a channel Image""" + element_attrs = {} - def __init__(self, url, title, link, - width = None, height = None, description = None): + + def __init__(self, url, title, link, width=None, height=None, description=None): self.url = url self.title = title self.link = link self.width = width self.height = height self.description = description - + def publish(self, handler): handler.startElement("image", self.element_attrs) @@ -150,7 +177,7 @@ class Image: if isinstance(width, int): width = IntElement("width", width) _opt_element(handler, "width", width) - + height = self.height if isinstance(height, int): height = IntElement("height", height) @@ -160,15 +187,18 @@ class Image: handler.endElement("image") + class Guid: """Publish a guid Defaults to being a permalink, which is the assumption if it's omitted. Hence strings are always permalinks. """ - def __init__(self, guid, isPermaLink = 1): + + def __init__(self, guid, isPermaLink=1): self.guid = guid self.isPermaLink = isPermaLink + def publish(self, handler): d = {} if self.isPermaLink: @@ -177,12 +207,15 @@ class Guid: d["isPermaLink"] = "false" _element(handler, "guid", self.guid, d) + class TextInput: """Publish a textInput Apparently this is rarely used. """ + element_attrs = {} + def __init__(self, title, description, name, link): self.title = title self.description = description @@ -196,37 +229,51 @@ class TextInput: _element(handler, "name", self.name) _element(handler, "link", self.link) handler.endElement("textInput") - + class Enclosure: """Publish an enclosure""" + def __init__(self, url, length, type): self.url = url self.length = length self.type = type + def publish(self, handler): - _element(handler, "enclosure", None, - {"url": self.url, - "length": str(self.length), - "type": self.type, - }) + _element( + handler, + "enclosure", + None, + { + "url": self.url, + "length": str(self.length), + "type": self.type, + }, + ) + class Source: """Publish the item's original source, used by aggregators""" + def __init__(self, name, url): self.name = name self.url = url + def publish(self, handler): _element(handler, "source", self.name, {"url": self.url}) + class SkipHours: """Publish the skipHours This takes a list of hours, as integers. 
""" + element_attrs = {} + def __init__(self, hours): self.hours = hours + def publish(self, handler): if self.hours: handler.startElement("skipHours", self.element_attrs) @@ -234,14 +281,18 @@ class SkipHours: _element(handler, "hour", str(hour)) handler.endElement("skipHours") + class SkipDays: """Publish the skipDays This takes a list of days as strings. """ + element_attrs = {} + def __init__(self, days): self.days = days + def publish(self, handler): if self.days: handler.startElement("skipDays", self.element_attrs) @@ -249,41 +300,40 @@ class SkipDays: _element(handler, "day", day) handler.endElement("skipDays") + class RSS2(WriteXmlMixin): """The main RSS class. Stores the channel attributes, with the "category" elements under ".categories" and the RSS items under ".items". """ - + rss_attrs = {"version": "2.0"} element_attrs = {} - def __init__(self, - title, - link, - description, - language = None, - copyright = None, - managingEditor = None, - webMaster = None, - pubDate = None, # a datetime, *in* *GMT* - lastBuildDate = None, # a datetime - - categories = None, # list of strings or Category - generator = _generator_name, - docs = "http://blogs.law.harvard.edu/tech/rss", - cloud = None, # a Cloud - ttl = None, # integer number of minutes - - image = None, # an Image - rating = None, # a string; I don't know how it's used - textInput = None, # a TextInput - skipHours = None, # a SkipHours with a list of integers - skipDays = None, # a SkipDays with a list of strings - - items = None, # list of RSSItems - ): + def __init__( + self, + title, + link, + description, + language=None, + copyright=None, + managingEditor=None, + webMaster=None, + pubDate=None, # a datetime, *in* *GMT* + lastBuildDate=None, # a datetime + categories=None, # list of strings or Category + generator=_generator_name, + docs="http://blogs.law.harvard.edu/tech/rss", + cloud=None, # a Cloud + ttl=None, # integer number of minutes + image=None, # an Image + rating=None, # a string; I don't know how it's used + textInput=None, # a TextInput + skipHours=None, # a SkipHours with a list of integers + skipDays=None, # a SkipDays with a list of strings + items=None, # list of RSSItems + ): self.title = title self.link = link self.description = description @@ -294,7 +344,7 @@ class RSS2(WriteXmlMixin): self.webMaster = webMaster self.pubDate = pubDate self.lastBuildDate = lastBuildDate - + if categories is None: categories = [] self.categories = categories @@ -320,7 +370,7 @@ class RSS2(WriteXmlMixin): _element(handler, "description", self.description) self.publish_extensions(handler) - + _opt_element(handler, "language", self.language) _opt_element(handler, "copyright", self.copyright) _opt_element(handler, "managingEditor", self.managingEditor) @@ -374,27 +424,27 @@ class RSS2(WriteXmlMixin): # output after the three required fields. 
pass - - + class RSSItem(WriteXmlMixin): """Publish an RSS Item""" + element_attrs = {} - def __init__(self, - title = None, # string - link = None, # url as string - description = None, # string - author = None, # email address as string - categories = None, # list of string or Category - comments = None, # url as string - enclosure = None, # an Enclosure - guid = None, # a unique string - pubDate = None, # a datetime - source = None, # a Source - ): - + + def __init__( + self, + title=None, # string + link=None, # url as string + description=None, # string + author=None, # email address as string + categories=None, # list of string or Category + comments=None, # url as string + enclosure=None, # an Enclosure + guid=None, # a unique string + pubDate=None, # a datetime + source=None, # a Source + ): if title is None and description is None: - raise TypeError( - "must define at least one of 'title' or 'description'") + raise TypeError("must define at least one of 'title' or 'description'") self.title = title self.link = link self.description = description @@ -421,7 +471,7 @@ class RSSItem(WriteXmlMixin): if isinstance(category, str): category = Category(category) category.publish(handler) - + _opt_element(handler, "comments", self.comments) if self.enclosure is not None: self.enclosure.publish(handler) @@ -434,7 +484,7 @@ class RSSItem(WriteXmlMixin): if self.source is not None: self.source.publish(handler) - + handler.endElement("item") def publish_extensions(self, handler): diff --git a/utils/S3.py b/utils/S3.py index 5e219d06c..ca76850eb 100644 --- a/utils/S3.py +++ b/utils/S3.py @@ -20,33 +20,34 @@ import urllib.request, urllib.parse, urllib.error import urllib.parse import xml.sax -DEFAULT_HOST = 's3.amazonaws.com' -PORTS_BY_SECURITY = { True: 443, False: 80 } -METADATA_PREFIX = 'x-amz-meta-' -AMAZON_HEADER_PREFIX = 'x-amz-' +DEFAULT_HOST = "s3.amazonaws.com" +PORTS_BY_SECURITY = {True: 443, False: 80} +METADATA_PREFIX = "x-amz-meta-" +AMAZON_HEADER_PREFIX = "x-amz-" + # generates the aws canonical string for the given parameters def canonical_string(method, bucket="", key="", query_args={}, headers={}, expires=None): interesting_headers = {} for header_key in headers: lk = header_key.lower() - if lk in ['content-md5', 'content-type', 'date'] or lk.startswith(AMAZON_HEADER_PREFIX): + if lk in ["content-md5", "content-type", "date"] or lk.startswith(AMAZON_HEADER_PREFIX): interesting_headers[lk] = headers[header_key].strip() # these keys get empty strings if they don't exist - if 'content-type' not in interesting_headers: - interesting_headers['content-type'] = '' - if 'content-md5' not in interesting_headers: - interesting_headers['content-md5'] = '' + if "content-type" not in interesting_headers: + interesting_headers["content-type"] = "" + if "content-md5" not in interesting_headers: + interesting_headers["content-md5"] = "" # just in case someone used this. it's not necessary in this lib. 
- if 'x-amz-date' in interesting_headers: - interesting_headers['date'] = '' + if "x-amz-date" in interesting_headers: + interesting_headers["date"] = "" # if you're using expires for query string auth, then it trumps date # (and x-amz-date) if expires: - interesting_headers['date'] = str(expires) + interesting_headers["date"] = str(expires) sorted_header_keys = list(interesting_headers.keys()) sorted_header_keys.sort() @@ -78,6 +79,7 @@ def canonical_string(method, bucket="", key="", query_args={}, headers={}, expir return buf + # computes the base64'ed hmac-sha hash of the canonical string and the secret # access key, optionally urlencoding the result def encode(aws_secret_access_key, str, urlencode=False): @@ -87,6 +89,7 @@ def encode(aws_secret_access_key, str, urlencode=False): else: return b64_hmac + def merge_meta(headers, metadata): final_headers = headers.copy() for k in list(metadata.keys()): @@ -94,6 +97,7 @@ def merge_meta(headers, metadata): return final_headers + # builds the query arg string def query_args_hash_to_string(query_args): query_string = "" @@ -104,7 +108,7 @@ def query_args_hash_to_string(query_args): piece += "=%s" % urllib.parse.quote_plus(str(v)) pairs.append(piece) - return '&'.join(pairs) + return "&".join(pairs) class CallingFormat: @@ -113,9 +117,9 @@ class CallingFormat: VANITY = 3 def build_url_base(protocol, server, port, bucket, calling_format): - url_base = '%s://' % protocol + url_base = "%s://" % protocol - if bucket == '': + if bucket == "": url_base += server elif calling_format == CallingFormat.SUBDOMAIN: url_base += "%s.%s" % (bucket, server) @@ -126,7 +130,7 @@ class CallingFormat: url_base += ":%s" % port - if (bucket != '') and (calling_format == CallingFormat.PATH): + if (bucket != "") and (calling_format == CallingFormat.PATH): url_base += "/%s" % bucket return url_base @@ -134,17 +138,21 @@ class CallingFormat: build_url_base = staticmethod(build_url_base) - class Location: DEFAULT = None - EU = 'EU' - + EU = "EU" class AWSAuthConnection: - def __init__(self, aws_access_key_id, aws_secret_access_key, is_secure=True, - server=DEFAULT_HOST, port=None, calling_format=CallingFormat.SUBDOMAIN): - + def __init__( + self, + aws_access_key_id, + aws_secret_access_key, + is_secure=True, + server=DEFAULT_HOST, + port=None, + calling_format=CallingFormat.SUBDOMAIN, + ): if not port: port = PORTS_BY_SECURITY[is_secure] @@ -156,86 +164,69 @@ class AWSAuthConnection: self.calling_format = calling_format def create_bucket(self, bucket, headers={}): - return Response(self._make_request('PUT', bucket, '', {}, headers)) + return Response(self._make_request("PUT", bucket, "", {}, headers)) def create_located_bucket(self, bucket, location=Location.DEFAULT, headers={}): if location == Location.DEFAULT: body = "" else: - body = "" + \ - location + \ - "" - return Response(self._make_request('PUT', bucket, '', {}, headers, body)) + body = ( + "" + + location + + "" + ) + return Response(self._make_request("PUT", bucket, "", {}, headers, body)) def check_bucket_exists(self, bucket): - return self._make_request('HEAD', bucket, '', {}, {}) + return self._make_request("HEAD", bucket, "", {}, {}) def list_bucket(self, bucket, options={}, headers={}): - return ListBucketResponse(self._make_request('GET', bucket, '', options, headers)) + return ListBucketResponse(self._make_request("GET", bucket, "", options, headers)) def delete_bucket(self, bucket, headers={}): - return Response(self._make_request('DELETE', bucket, '', {}, headers)) + return 
Response(self._make_request("DELETE", bucket, "", {}, headers)) def put(self, bucket, key, object, headers={}): if not isinstance(object, S3Object): object = S3Object(object) - return Response( - self._make_request( - 'PUT', - bucket, - key, - {}, - headers, - object.data, - object.metadata)) + return Response(self._make_request("PUT", bucket, key, {}, headers, object.data, object.metadata)) def get(self, bucket, key, headers={}): - return GetResponse( - self._make_request('GET', bucket, key, {}, headers)) + return GetResponse(self._make_request("GET", bucket, key, {}, headers)) def delete(self, bucket, key, headers={}): - return Response( - self._make_request('DELETE', bucket, key, {}, headers)) + return Response(self._make_request("DELETE", bucket, key, {}, headers)) def get_bucket_logging(self, bucket, headers={}): - return GetResponse(self._make_request('GET', bucket, '', { 'logging': None }, headers)) + return GetResponse(self._make_request("GET", bucket, "", {"logging": None}, headers)) def put_bucket_logging(self, bucket, logging_xml_doc, headers={}): - return Response(self._make_request('PUT', bucket, '', { 'logging': None }, headers, logging_xml_doc)) + return Response(self._make_request("PUT", bucket, "", {"logging": None}, headers, logging_xml_doc)) def get_bucket_acl(self, bucket, headers={}): - return self.get_acl(bucket, '', headers) + return self.get_acl(bucket, "", headers) def get_acl(self, bucket, key, headers={}): - return GetResponse( - self._make_request('GET', bucket, key, { 'acl': None }, headers)) + return GetResponse(self._make_request("GET", bucket, key, {"acl": None}, headers)) def put_bucket_acl(self, bucket, acl_xml_document, headers={}): - return self.put_acl(bucket, '', acl_xml_document, headers) + return self.put_acl(bucket, "", acl_xml_document, headers) def put_acl(self, bucket, key, acl_xml_document, headers={}): - return Response( - self._make_request( - 'PUT', - bucket, - key, - { 'acl': None }, - headers, - acl_xml_document)) + return Response(self._make_request("PUT", bucket, key, {"acl": None}, headers, acl_xml_document)) def list_all_my_buckets(self, headers={}): - return ListAllMyBucketsResponse(self._make_request('GET', '', '', {}, headers)) + return ListAllMyBucketsResponse(self._make_request("GET", "", "", {}, headers)) def get_bucket_location(self, bucket): - return LocationResponse(self._make_request('GET', bucket, '', {'location' : None})) + return LocationResponse(self._make_request("GET", bucket, "", {"location": None})) # end public methods - def _make_request(self, method, bucket='', key='', query_args={}, headers={}, data='', metadata={}): - - server = '' - if bucket == '': + def _make_request(self, method, bucket="", key="", query_args={}, headers={}, data="", metadata={}): + server = "" + if bucket == "": server = self.server elif self.calling_format == CallingFormat.SUBDOMAIN: server = "%s.%s" % (bucket, self.server) @@ -244,18 +235,17 @@ class AWSAuthConnection: else: server = self.server - path = '' + path = "" - if (bucket != '') and (self.calling_format == CallingFormat.PATH): + if (bucket != "") and (self.calling_format == CallingFormat.PATH): path += "/%s" % bucket # add the slash after the bucket regardless # the key will be appended if it is non-empty path += "/%s" % urllib.parse.quote_plus(key) - # build the path_argument string - # add the ? in all cases since + # add the ? in all cases since # signature and credentials follow path args if len(query_args): path += "?" 
+ query_args_hash_to_string(query_args) @@ -263,12 +253,12 @@ class AWSAuthConnection: is_secure = self.is_secure host = "%s:%d" % (server, self.port) while True: - if (is_secure): + if is_secure: connection = http.client.HTTPSConnection(host) else: connection = http.client.HTTPConnection(host) - final_headers = merge_meta(headers, metadata); + final_headers = merge_meta(headers, metadata) # add auth header self._add_aws_auth_header(final_headers, method, bucket, key, query_args) @@ -277,44 +267,55 @@ class AWSAuthConnection: if resp.status < 300 or resp.status >= 400: return resp # handle redirect - location = resp.getheader('location') + location = resp.getheader("location") if not location: return resp # (close connection) resp.read() - scheme, host, path, params, query, fragment \ - = urllib.parse.urlparse(location) - if scheme == "http": is_secure = True - elif scheme == "https": is_secure = False - else: raise invalidURL("Not http/https: " + location) - if query: path += "?" + query + scheme, host, path, params, query, fragment = urllib.parse.urlparse(location) + if scheme == "http": + is_secure = True + elif scheme == "https": + is_secure = False + else: + raise invalidURL("Not http/https: " + location) + if query: + path += "?" + query # retry with redirect def _add_aws_auth_header(self, headers, method, bucket, key, query_args): - if 'Date' not in headers: - headers['Date'] = time.strftime("%a, %d %b %Y %X GMT", time.gmtime()) + if "Date" not in headers: + headers["Date"] = time.strftime("%a, %d %b %Y %X GMT", time.gmtime()) c_string = canonical_string(method, bucket, key, query_args, headers) - headers['Authorization'] = \ - "AWS %s:%s" % (self.aws_access_key_id, encode(self.aws_secret_access_key, c_string)) + headers["Authorization"] = "AWS %s:%s" % ( + self.aws_access_key_id, + encode(self.aws_secret_access_key, c_string), + ) class QueryStringAuthGenerator: # by default, expire in 1 minute DEFAULT_EXPIRES_IN = 60 - def __init__(self, aws_access_key_id, aws_secret_access_key, is_secure=True, - server=DEFAULT_HOST, port=None, calling_format=CallingFormat.SUBDOMAIN): - + def __init__( + self, + aws_access_key_id, + aws_secret_access_key, + is_secure=True, + server=DEFAULT_HOST, + port=None, + calling_format=CallingFormat.SUBDOMAIN, + ): if not port: port = PORTS_BY_SECURITY[is_secure] self.aws_access_key_id = aws_access_key_id self.aws_secret_access_key = aws_secret_access_key - if (is_secure): - self.protocol = 'https' + if is_secure: + self.protocol = "https" else: - self.protocol = 'http' + self.protocol = "http" self.is_secure = is_secure self.server = server @@ -335,58 +336,53 @@ class QueryStringAuthGenerator: self.__expires_in = None def create_bucket(self, bucket, headers={}): - return self.generate_url('PUT', bucket, '', {}, headers) + return self.generate_url("PUT", bucket, "", {}, headers) def list_bucket(self, bucket, options={}, headers={}): - return self.generate_url('GET', bucket, '', options, headers) + return self.generate_url("GET", bucket, "", options, headers) def delete_bucket(self, bucket, headers={}): - return self.generate_url('DELETE', bucket, '', {}, headers) + return self.generate_url("DELETE", bucket, "", {}, headers) def put(self, bucket, key, object, headers={}): if not isinstance(object, S3Object): object = S3Object(object) - return self.generate_url( - 'PUT', - bucket, - key, - {}, - merge_meta(headers, object.metadata)) + return self.generate_url("PUT", bucket, key, {}, merge_meta(headers, object.metadata)) def get(self, bucket, key, headers={}): 
- return self.generate_url('GET', bucket, key, {}, headers) + return self.generate_url("GET", bucket, key, {}, headers) def delete(self, bucket, key, headers={}): - return self.generate_url('DELETE', bucket, key, {}, headers) + return self.generate_url("DELETE", bucket, key, {}, headers) def get_bucket_logging(self, bucket, headers={}): - return self.generate_url('GET', bucket, '', { 'logging': None }, headers) + return self.generate_url("GET", bucket, "", {"logging": None}, headers) def put_bucket_logging(self, bucket, logging_xml_doc, headers={}): - return self.generate_url('PUT', bucket, '', { 'logging': None }, headers) + return self.generate_url("PUT", bucket, "", {"logging": None}, headers) def get_bucket_acl(self, bucket, headers={}): - return self.get_acl(bucket, '', headers) + return self.get_acl(bucket, "", headers) - def get_acl(self, bucket, key='', headers={}): - return self.generate_url('GET', bucket, key, { 'acl': None }, headers) + def get_acl(self, bucket, key="", headers={}): + return self.generate_url("GET", bucket, key, {"acl": None}, headers) def put_bucket_acl(self, bucket, acl_xml_document, headers={}): - return self.put_acl(bucket, '', acl_xml_document, headers) + return self.put_acl(bucket, "", acl_xml_document, headers) # don't really care what the doc is here. def put_acl(self, bucket, key, acl_xml_document, headers={}): - return self.generate_url('PUT', bucket, key, { 'acl': None }, headers) + return self.generate_url("PUT", bucket, key, {"acl": None}, headers) def list_all_my_buckets(self, headers={}): - return self.generate_url('GET', '', '', {}, headers) + return self.generate_url("GET", "", "", {}, headers) - def make_bare_url(self, bucket, key=''): + def make_bare_url(self, bucket, key=""): full_url = self.generate_url(self, bucket, key) - return full_url[:full_url.index('?')] + return full_url[: full_url.index("?")] - def generate_url(self, method, bucket='', key='', query_args={}, headers={}): + def generate_url(self, method, bucket="", key="", query_args={}, headers={}): expires = 0 if self.__expires_in != None: expires = int(time.time() + self.__expires_in) @@ -402,9 +398,9 @@ class QueryStringAuthGenerator: url += "/%s" % urllib.parse.quote_plus(key) - query_args['Signature'] = encoded_canonical - query_args['Expires'] = expires - query_args['AWSAccessKeyId'] = self.aws_access_key_id + query_args["Signature"] = encoded_canonical + query_args["Expires"] = expires + query_args["AWSAccessKeyId"] = self.aws_access_key_id url += "?%s" % query_args_hash_to_string(query_args) @@ -416,13 +412,15 @@ class S3Object: self.data = data self.metadata = metadata + class Owner: - def __init__(self, id='', display_name=''): + def __init__(self, id="", display_name=""): self.id = id self.display_name = display_name + class ListEntry: - def __init__(self, key='', last_modified=None, etag='', size=0, storage_class='', owner=None): + def __init__(self, key="", last_modified=None, etag="", size=0, storage_class="", owner=None): self.key = key self.last_modified = last_modified self.etag = etag @@ -430,15 +428,18 @@ class ListEntry: self.storage_class = storage_class self.owner = owner + class CommonPrefixEntry: - def __init(self, prefix=''): + def __init(self, prefix=""): self.prefix = prefix + class Bucket: - def __init__(self, name='', creation_date=''): + def __init__(self, name="", creation_date=""): self.name = name self.creation_date = creation_date + class Response: def __init__(self, http_response): self.http_response = http_response @@ -451,7 +452,6 @@ class 
Response: self.message = "%03d %s" % (http_response.status, http_response.reason) - class ListBucketResponse(Response): def __init__(self, http_response): Response.__init__(self, http_response) @@ -470,20 +470,22 @@ class ListBucketResponse(Response): else: self.entries = [] + class ListAllMyBucketsResponse(Response): def __init__(self, http_response): Response.__init__(self, http_response) - if http_response.status < 300: + if http_response.status < 300: handler = ListAllMyBucketsHandler() xml.sax.parseString(self.body, handler) self.entries = handler.entries else: self.entries = [] + class GetResponse(Response): def __init__(self, http_response): Response.__init__(self, http_response) - response_headers = http_response.msg # older pythons don't have getheaders + response_headers = http_response.msg # older pythons don't have getheaders metadata = self.get_aws_metadata(response_headers) self.object = S3Object(self.body, metadata) @@ -491,82 +493,83 @@ class GetResponse(Response): metadata = {} for hkey in list(headers.keys()): if hkey.lower().startswith(METADATA_PREFIX): - metadata[hkey[len(METADATA_PREFIX):]] = headers[hkey] + metadata[hkey[len(METADATA_PREFIX) :]] = headers[hkey] del headers[hkey] return metadata + class LocationResponse(Response): def __init__(self, http_response): Response.__init__(self, http_response) - if http_response.status < 300: + if http_response.status < 300: handler = LocationHandler() xml.sax.parseString(self.body, handler) self.location = handler.location + class ListBucketHandler(xml.sax.ContentHandler): def __init__(self): self.entries = [] self.curr_entry = None - self.curr_text = '' + self.curr_text = "" self.common_prefixes = [] self.curr_common_prefix = None - self.name = '' - self.marker = '' - self.prefix = '' + self.name = "" + self.marker = "" + self.prefix = "" self.is_truncated = False - self.delimiter = '' + self.delimiter = "" self.max_keys = 0 - self.next_marker = '' + self.next_marker = "" self.is_echoed_prefix_set = False def startElement(self, name, attrs): - if name == 'Contents': + if name == "Contents": self.curr_entry = ListEntry() - elif name == 'Owner': + elif name == "Owner": self.curr_entry.owner = Owner() - elif name == 'CommonPrefixes': + elif name == "CommonPrefixes": self.curr_common_prefix = CommonPrefixEntry() - def endElement(self, name): - if name == 'Contents': + if name == "Contents": self.entries.append(self.curr_entry) - elif name == 'CommonPrefixes': + elif name == "CommonPrefixes": self.common_prefixes.append(self.curr_common_prefix) - elif name == 'Key': + elif name == "Key": self.curr_entry.key = self.curr_text - elif name == 'LastModified': + elif name == "LastModified": self.curr_entry.last_modified = self.curr_text - elif name == 'ETag': + elif name == "ETag": self.curr_entry.etag = self.curr_text - elif name == 'Size': + elif name == "Size": self.curr_entry.size = int(self.curr_text) - elif name == 'ID': + elif name == "ID": self.curr_entry.owner.id = self.curr_text - elif name == 'DisplayName': + elif name == "DisplayName": self.curr_entry.owner.display_name = self.curr_text - elif name == 'StorageClass': + elif name == "StorageClass": self.curr_entry.storage_class = self.curr_text - elif name == 'Name': + elif name == "Name": self.name = self.curr_text - elif name == 'Prefix' and self.is_echoed_prefix_set: + elif name == "Prefix" and self.is_echoed_prefix_set: self.curr_common_prefix.prefix = self.curr_text - elif name == 'Prefix': + elif name == "Prefix": self.prefix = self.curr_text self.is_echoed_prefix_set 
= True - elif name == 'Marker': + elif name == "Marker": self.marker = self.curr_text - elif name == 'IsTruncated': - self.is_truncated = self.curr_text == 'true' - elif name == 'Delimiter': + elif name == "IsTruncated": + self.is_truncated = self.curr_text == "true" + elif name == "Delimiter": self.delimiter = self.curr_text - elif name == 'MaxKeys': + elif name == "MaxKeys": self.max_keys = int(self.curr_text) - elif name == 'NextMarker': + elif name == "NextMarker": self.next_marker = self.curr_text - self.curr_text = '' + self.curr_text = "" def characters(self, content): self.curr_text += content @@ -576,18 +579,18 @@ class ListAllMyBucketsHandler(xml.sax.ContentHandler): def __init__(self): self.entries = [] self.curr_entry = None - self.curr_text = '' + self.curr_text = "" def startElement(self, name, attrs): - if name == 'Bucket': + if name == "Bucket": self.curr_entry = Bucket() def endElement(self, name): - if name == 'Name': + if name == "Name": self.curr_entry.name = self.curr_text - elif name == 'CreationDate': + elif name == "CreationDate": self.curr_entry.creation_date = self.curr_text - elif name == 'Bucket': + elif name == "Bucket": self.entries.append(self.curr_entry) def characters(self, content): @@ -597,21 +600,24 @@ class ListAllMyBucketsHandler(xml.sax.ContentHandler): class LocationHandler(xml.sax.ContentHandler): def __init__(self): self.location = None - self.state = 'init' + self.state = "init" def startElement(self, name, attrs): - if self.state == 'init': - if name == 'LocationConstraint': - self.state = 'tag_location' - self.location = '' - else: self.state = 'bad' - else: self.state = 'bad' + if self.state == "init": + if name == "LocationConstraint": + self.state = "tag_location" + self.location = "" + else: + self.state = "bad" + else: + self.state = "bad" def endElement(self, name): - if self.state == 'tag_location' and name == 'LocationConstraint': - self.state = 'done' - else: self.state = 'bad' + if self.state == "tag_location" and name == "LocationConstraint": + self.state = "done" + else: + self.state = "bad" def characters(self, content): - if self.state == 'tag_location': + if self.state == "tag_location": self.location += content diff --git a/utils/archive/Image Color Algorithm.py b/utils/archive/Image Color Algorithm.py index f02c6fdbd..d6fc230f4 100644 --- a/utils/archive/Image Color Algorithm.py +++ b/utils/archive/Image Color Algorithm.py @@ -3,7 +3,7 @@ import scipy import scipy.cluster from pprint import pprint -image = Image.open('logo.png') +image = Image.open("logo.png") NUM_CLUSTERS = 5 # Convert image into array of values for each point. @@ -20,11 +20,20 @@ codes, _ = scipy.cluster.vq.kmeans(ar, NUM_CLUSTERS) # Pare centroids, removing blacks and whites and shades of really dark and really light. original_codes = codes for low, hi in [(60, 200), (35, 230), (10, 250)]: - codes = scipy.array([code for code in codes - if not ((code[0] < low and code[1] < low and code[2] < low) or - (code[0] > hi and code[1] > hi and code[2] > hi))]) - if not len(codes): codes = original_codes - else: break + codes = scipy.array( + [ + code + for code in codes + if not ( + (code[0] < low and code[1] < low and code[2] < low) + or (code[0] > hi and code[1] > hi and code[2] > hi) + ) + ] + ) + if not len(codes): + codes = original_codes + else: + break # Assign codes (vector quantization). Each vector is compared to the centroids # and assigned the nearest one. 
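# A minimal, Python 3-friendly sketch of the vector-quantization step described
# in the comment above (cluster pixels with k-means, assign each pixel to its
# nearest centroid, report the most frequent centroid as a hex color). It uses
# the same scipy.cluster.vq.kmeans/vq calls as the archived script; "logo.png"
# is a placeholder path, and "%02x" formatting replaces the Python 2-only
# str.encode("hex") used above.
import numpy as np
from PIL import Image
from scipy.cluster.vq import kmeans, vq

image = Image.open("logo.png").convert("RGB").resize((150, 150))
pixels = np.asarray(image, dtype=float).reshape(-1, 3)

centroids, _ = kmeans(pixels, 5)      # 5 candidate colors
codes, _ = vq(pixels, centroids)      # nearest-centroid assignment per pixel
counts = np.bincount(codes)
peak = centroids[np.argmax(counts)]   # most frequent color
print("#%02x%02x%02x" % tuple(int(c) for c in peak))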
@@ -34,12 +43,12 @@ vecs, _ = scipy.cluster.vq.vq(ar, codes) counts, bins = scipy.histogram(vecs, len(codes)) # Show colors for each code in its hex value. -colors = [''.join(chr(c) for c in code).encode('hex') for code in codes] +colors = ["".join(chr(c) for c in code).encode("hex") for code in codes] total = scipy.sum(counts) -color_dist = dict(list(zip(colors, [count/float(total) for count in counts]))) +color_dist = dict(list(zip(colors, [count / float(total) for count in counts]))) pprint(color_dist) # Find the most frequent color, based on the counts. index_max = scipy.argmax(counts) peak = codes[index_max] -color = ''.join(chr(c) for c in peak).encode('hex') +color = "".join(chr(c) for c in peak).encode("hex") diff --git a/utils/archive/bootstrap_intel.py b/utils/archive/bootstrap_intel.py index fd2d7fe3f..357fe5401 100644 --- a/utils/archive/bootstrap_intel.py +++ b/utils/archive/bootstrap_intel.py @@ -6,8 +6,7 @@ from apps.analyzer.models import MClassifierAuthor from apps.analyzer.models import MClassifierTag from apps.analyzer.models import MClassifierTitle -for classifier_cls in [MClassifierFeed, MClassifierAuthor, - MClassifierTag, MClassifierTitle]: +for classifier_cls in [MClassifierFeed, MClassifierAuthor, MClassifierTag, MClassifierTitle]: print(" ================================================================= ") print((" Now on %s " % classifier_cls.__name__)) print(" ================================================================= ") @@ -28,4 +27,3 @@ for classifier_cls in [MClassifierFeed, MClassifierAuthor, except ValidationError as e: print((" ***> ValidationError error on: %s" % e)) print((" ***> Original classifier: %s" % classifier.__dict__)) - diff --git a/utils/archive/bootstrap_mongo.py b/utils/archive/bootstrap_mongo.py index d7fd747aa..5df95f55d 100644 --- a/utils/archive/bootstrap_mongo.py +++ b/utils/archive/bootstrap_mongo.py @@ -10,7 +10,8 @@ from mongoengine.queryset import OperationError from utils import json_functions as json MONGO_DB = settings.MONGO_DB -db = mongoengine.connect(MONGO_DB['NAME'], host=MONGO_DB['HOST'], port=MONGO_DB['PORT']) +db = mongoengine.connect(MONGO_DB["NAME"], host=MONGO_DB["HOST"], port=MONGO_DB["PORT"]) + def bootstrap_stories(): print("Mongo DB stories: %s" % MStory.objects().count()) @@ -20,24 +21,23 @@ def bootstrap_stories(): print("Stories: %s" % Story.objects.all().count()) pprint(db.stories.index_information()) - feeds = Feed.objects.all().order_by('-average_stories_per_month') + feeds = Feed.objects.all().order_by("-average_stories_per_month") feed_count = feeds.count() i = 0 for feed in feeds: i += 1 - print("%s/%s: %s (%s stories)" % (i, feed_count, - feed, Story.objects.filter(story_feed=feed).count())) + print("%s/%s: %s (%s stories)" % (i, feed_count, feed, Story.objects.filter(story_feed=feed).count())) sys.stdout.flush() - + stories = list(Story.objects.filter(story_feed=feed).values()) for story in stories: # story['story_tags'] = [tag.name for tag in Tag.objects.filter(story=story['id'])] try: - story['story_tags'] = json.decode(story['story_tags']) + story["story_tags"] = json.decode(story["story_tags"]) except: continue - del story['id'] - del story['story_author_id'] + del story["id"] + del story["story_author_id"] try: MStory(**story).save() except: @@ -45,6 +45,7 @@ def bootstrap_stories(): print("\nMongo DB stories: %s" % MStory.objects().count()) + def bootstrap_userstories(): print("Mongo DB userstories: %s" % MUserStory.objects().count()) # db.userstories.drop() @@ -56,58 +57,64 @@ def 
bootstrap_userstories(): userstories = list(UserStory.objects.all().values()) for userstory in userstories: try: - story = Story.objects.get(pk=userstory['story_id']) + story = Story.objects.get(pk=userstory["story_id"]) except Story.DoesNotExist: continue try: - userstory['story'] = MStory.objects(story_feed_id=story.story_feed.pk, story_guid=story.story_guid)[0] + userstory["story"] = MStory.objects( + story_feed_id=story.story_feed.pk, story_guid=story.story_guid + )[0] except: - print('!') + print("!") continue - print('.') - del userstory['id'] - del userstory['opinion'] - del userstory['story_id'] + print(".") + del userstory["id"] + del userstory["opinion"] + del userstory["story_id"] try: MUserStory(**userstory).save() except: - print('\n\n!\n\n') + print("\n\n!\n\n") continue print("\nMongo DB userstories: %s" % MUserStory.objects().count()) + def bootstrap_classifiers(): - for sql_classifier, mongo_classifier in ((ClassifierTitle, MClassifierTitle), - (ClassifierAuthor, MClassifierAuthor), - (ClassifierFeed, MClassifierFeed), - (ClassifierTag, MClassifierTag)): - collection = mongo_classifier.meta['collection'] + for sql_classifier, mongo_classifier in ( + (ClassifierTitle, MClassifierTitle), + (ClassifierAuthor, MClassifierAuthor), + (ClassifierFeed, MClassifierFeed), + (ClassifierTag, MClassifierTag), + ): + collection = mongo_classifier.meta["collection"] print("Mongo DB classifiers: %s - %s" % (collection, mongo_classifier.objects().count())) # db[collection].drop() print("Dropped! Mongo DB classifiers: %s - %s" % (collection, mongo_classifier.objects().count())) print("%s: %s" % (sql_classifier._meta.object_name, sql_classifier.objects.all().count())) pprint(db[collection].index_information()) - + for userclassifier in list(sql_classifier.objects.all().values()): - del userclassifier['id'] - if sql_classifier._meta.object_name == 'ClassifierAuthor': - author = StoryAuthor.objects.get(pk=userclassifier['author_id']) - userclassifier['author'] = author.author_name - del userclassifier['author_id'] - if sql_classifier._meta.object_name == 'ClassifierTag': - tag = Tag.objects.get(pk=userclassifier['tag_id']) - userclassifier['tag'] = tag.name - del userclassifier['tag_id'] - print('.') + del userclassifier["id"] + if sql_classifier._meta.object_name == "ClassifierAuthor": + author = StoryAuthor.objects.get(pk=userclassifier["author_id"]) + userclassifier["author"] = author.author_name + del userclassifier["author_id"] + if sql_classifier._meta.object_name == "ClassifierTag": + tag = Tag.objects.get(pk=userclassifier["tag_id"]) + userclassifier["tag"] = tag.name + del userclassifier["tag_id"] + print(".") try: mongo_classifier(**userclassifier).save() except: - print('\n\n!\n\n') + print("\n\n!\n\n") continue - + print("\nMongo DB classifiers: %s - %s" % (collection, mongo_classifier.objects().count())) - + + def bootstrap_feedpages(): print("Mongo DB feed_pages: %s" % MFeedPage.objects().count()) # db.feed_pages.drop() @@ -116,28 +123,35 @@ def bootstrap_feedpages(): print("FeedPages: %s" % FeedPage.objects.count()) pprint(db.feed_pages.index_information()) - feeds = Feed.objects.all().order_by('-average_stories_per_month') + feeds = Feed.objects.all().order_by("-average_stories_per_month") feed_count = feeds.count() i = 0 for feed in feeds: i += 1 - print("%s/%s: %s" % (i, feed_count, feed,)) + print( + "%s/%s: %s" + % ( + i, + feed_count, + feed, + ) + ) sys.stdout.flush() - + if not MFeedPage.objects(feed_id=feed.pk): feed_page = 
list(FeedPage.objects.filter(feed=feed).values()) if feed_page: - del feed_page[0]['id'] - feed_page[0]['feed_id'] = feed.pk + del feed_page[0]["id"] + feed_page[0]["feed_id"] = feed.pk try: MFeedPage(**feed_page[0]).save() except: - print('\n\n!\n\n') + print("\n\n!\n\n") continue - print("\nMongo DB feed_pages: %s" % MFeedPage.objects().count()) + def bootstrap_feedicons(): print("Mongo DB feed_icons: %s" % MFeedIcon.objects().count()) db.feed_icons.drop() @@ -146,47 +160,62 @@ def bootstrap_feedicons(): print("FeedIcons: %s" % FeedIcon.objects.count()) pprint(db.feed_icons.index_information()) - feeds = Feed.objects.all().order_by('-average_stories_per_month') + feeds = Feed.objects.all().order_by("-average_stories_per_month") feed_count = feeds.count() i = 0 for feed in feeds: i += 1 - print("%s/%s: %s" % (i, feed_count, feed,)) + print( + "%s/%s: %s" + % ( + i, + feed_count, + feed, + ) + ) sys.stdout.flush() - + if not MFeedIcon.objects(feed_id=feed.pk): feed_icon = list(FeedIcon.objects.filter(feed=feed).values()) if feed_icon: try: MFeedIcon(**feed_icon[0]).save() except: - print('\n\n!\n\n') + print("\n\n!\n\n") continue - print("\nMongo DB feed_icons: %s" % MFeedIcon.objects().count()) + def compress_stories(): count = MStory.objects().count() print("Mongo DB stories: %s" % count) p = 0.0 i = 0 - feeds = Feed.objects.all().order_by('-average_stories_per_month') + feeds = Feed.objects.all().order_by("-average_stories_per_month") feed_count = feeds.count() f = 0 for feed in feeds: f += 1 - print("%s/%s: %s" % (f, feed_count, feed,)) + print( + "%s/%s: %s" + % ( + f, + feed_count, + feed, + ) + ) sys.stdout.flush() - + for story in MStory.objects(story_feed_id=feed.pk): i += 1.0 if round(i / count * 100) != p: p = round(i / count * 100) - print('%s%%' % p) + print("%s%%" % p) story.save() - + + def reindex_stories(): db = pymongo.Connection().newsblur count = MStory.objects().count() @@ -194,18 +223,25 @@ def reindex_stories(): p = 0.0 i = 0 - feeds = Feed.objects.all().order_by('-average_stories_per_month') + feeds = Feed.objects.all().order_by("-average_stories_per_month") feed_count = feeds.count() f = 0 for feed in feeds: f += 1 - print("%s/%s: %s" % (f, feed_count, feed,)) + print( + "%s/%s: %s" + % ( + f, + feed_count, + feed, + ) + ) sys.stdout.flush() for story in MStory.objects(story_feed_id=feed.pk): i += 1.0 if round(i / count * 100) != p: p = round(i / count * 100) - print('%s%%' % p) + print("%s%%" % p) if isinstance(story.id, str): story.story_guid = story.id story.id = pymongo.objectid.ObjectId() @@ -214,14 +250,15 @@ def reindex_stories(): except OperationError as e: print(" ***> OperationError: %s" % e) except e: - print(' ***> Unknown Error: %s' % e) + print(" ***> Unknown Error: %s" % e) db.stories.remove({"_id": story.story_guid}) - -if __name__ == '__main__': + + +if __name__ == "__main__": # bootstrap_stories() # bootstrap_userstories() # bootstrap_classifiers() # bootstrap_feedpages() # compress_stories() # reindex_stories() - bootstrap_feedicons() \ No newline at end of file + bootstrap_feedicons() diff --git a/utils/archive/bootstrap_redis_sessions.py b/utils/archive/bootstrap_redis_sessions.py index fc13bb575..1359415a6 100644 --- a/utils/archive/bootstrap_redis_sessions.py +++ b/utils/archive/bootstrap_redis_sessions.py @@ -8,7 +8,7 @@ print((" ---> %s sessions in Django" % sessions_count)) batch_size = 1000 r = redis.Redis(connection_pool=settings.REDIS_SESSION_POOL) -for batch in range(int(math.ceil(sessions_count / batch_size))+1): +for batch in 
range(int(math.ceil(sessions_count / batch_size)) + 1): start = batch * batch_size end = (batch + 1) * batch_size print((" ---> Loading sessions #%s - #%s" % (start, end))) @@ -16,4 +16,4 @@ for batch in range(int(math.ceil(sessions_count / batch_size))+1): for session in Session.objects.all()[start:end]: _ = pipe.set(session.session_key, session.session_data) _ = pipe.expireat(session.session_key, session.expire_date.strftime("%s")) - _ = pipe.execute() \ No newline at end of file + _ = pipe.execute() diff --git a/utils/archive/bootstrap_story_hash.py b/utils/archive/bootstrap_story_hash.py index efcb31e5f..7d856670d 100644 --- a/utils/archive/bootstrap_story_hash.py +++ b/utils/archive/bootstrap_story_hash.py @@ -6,24 +6,24 @@ from apps.rss_feeds.models import MStory, Feed db = settings.MONGODB batch = 0 start = 0 -for f in range(start, Feed.objects.latest('pk').pk): - if f < batch*100000: continue +for f in range(start, Feed.objects.latest("pk").pk): + if f < batch * 100000: + continue start = time.time() try: cp1 = time.time() - start # if feed.active_premium_subscribers < 1: continue - stories = MStory.objects.filter(story_feed_id=f, story_hash__exists=False)\ - .only('id', 'story_feed_id', 'story_guid')\ - .read_preference(pymongo.ReadPreference.SECONDARY) + stories = ( + MStory.objects.filter(story_feed_id=f, story_hash__exists=False) + .only("id", "story_feed_id", "story_guid") + .read_preference(pymongo.ReadPreference.SECONDARY) + ) cp2 = time.time() - start count = 0 for story in stories: count += 1 - db.newsblur.stories.update({"_id": story.id}, {"$set": { - "story_hash": story.feed_guid_hash - }}) + db.newsblur.stories.update({"_id": story.id}, {"$set": {"story_hash": story.feed_guid_hash}}) cp3 = time.time() - start print(("%s: %3s stories (%s/%s/%s)" % (f, count, round(cp1, 2), round(cp2, 2), round(cp3, 2)))) except Exception as e: print((" ***> (%s) %s" % (f, e))) - diff --git a/utils/archive/check_status.py b/utils/archive/check_status.py index cbad9f317..b5e20d707 100644 --- a/utils/archive/check_status.py +++ b/utils/archive/check_status.py @@ -1,5 +1,6 @@ import time import requests + url = "http://www.newsblur.com" @@ -8,6 +9,10 @@ while True: req = requests.get(url) content = req.content end = time.time() - print((" ---> [%s] Retrieved %s bytes - %s %s" % (str(end - start)[:4], len(content), req.status_code, req.reason))) + print( + ( + " ---> [%s] Retrieved %s bytes - %s %s" + % (str(end - start)[:4], len(content), req.status_code, req.reason) + ) + ) time.sleep(5) - diff --git a/utils/archive/green.py b/utils/archive/green.py index 46e09359d..7f746cd85 100644 --- a/utils/archive/green.py +++ b/utils/archive/green.py @@ -1,4 +1,5 @@ from gevent import monkey + monkey.patch_socket() from newsblur.utils import feedparser @@ -6,13 +7,15 @@ import gevent from gevent import queue import urllib.request, urllib.error, urllib.parse + def fetch_title(url): print(("Running %s" % url)) data = urllib.request.urlopen(url).read() print(("Parsing %s" % url)) d = feedparser.parse(data) - print(("Parsed %s" % d.feed.get('title', ''))) - return d.feed.get('title', '') + print(("Parsed %s" % d.feed.get("title", ""))) + return d.feed.get("title", "") + def worker(): while True: @@ -22,15 +25,18 @@ def worker(): finally: q.task_done() -if __name__ == '__main__': + +if __name__ == "__main__": q = queue.JoinableQueue() for i in range(5): - gevent.spawn(worker) + gevent.spawn(worker) - for url in 
"http://www.43folders.com/rss.xml/nhttp://feeds.feedburner.com/43folders/nhttp://www.43folders.com/rss.xml/nhttp://feeds.feedburner.com/43folders/nhttp://feeds.feedburner.com/AMinuteWithBrendan/nhttp://feeds.feedburner.com/AMinuteWithBrendan/nhttp://www.asianart.org/feeds/Lectures,Classes,Symposia.xml/nhttp://www.asianart.org/feeds/Performances.xml/nhttp://feeds.feedburner.com/ajaxian/nhttp://ajaxian.com/index.xml/nhttp://al3x.net/atom.xml/nhttp://feeds.feedburner.com/AmericanDrink/nhttp://feeds.feedburner.com/eod_full/nhttp://feeds.feedburner.com/typepad/notes/nhttp://feeds.dashes.com/AnilDash/nhttp://rss.sciam.com/assignment-impossible/feed/nhttp://blogs.scientificamerican.com/assignment-impossible//nhttp://feeds.feedburner.com/Beautiful-Pixels/nhttp://feeds.feedburner.com/Beautiful-Pixels/nhttp://www.betabeat.com/feed/".split('/n'): - print(("Spawning: %s" % url)) - q.put(url) + for ( + url + ) in "http://www.43folders.com/rss.xml/nhttp://feeds.feedburner.com/43folders/nhttp://www.43folders.com/rss.xml/nhttp://feeds.feedburner.com/43folders/nhttp://feeds.feedburner.com/AMinuteWithBrendan/nhttp://feeds.feedburner.com/AMinuteWithBrendan/nhttp://www.asianart.org/feeds/Lectures,Classes,Symposia.xml/nhttp://www.asianart.org/feeds/Performances.xml/nhttp://feeds.feedburner.com/ajaxian/nhttp://ajaxian.com/index.xml/nhttp://al3x.net/atom.xml/nhttp://feeds.feedburner.com/AmericanDrink/nhttp://feeds.feedburner.com/eod_full/nhttp://feeds.feedburner.com/typepad/notes/nhttp://feeds.dashes.com/AnilDash/nhttp://rss.sciam.com/assignment-impossible/feed/nhttp://blogs.scientificamerican.com/assignment-impossible//nhttp://feeds.feedburner.com/Beautiful-Pixels/nhttp://feeds.feedburner.com/Beautiful-Pixels/nhttp://www.betabeat.com/feed/".split( + "/n" + ): + print(("Spawning: %s" % url)) + q.put(url) q.join() # block until all tasks are done - - diff --git a/utils/archive/knight.py b/utils/archive/knight.py index d8b437123..f35db12b6 100644 --- a/utils/archive/knight.py +++ b/utils/archive/knight.py @@ -1,15 +1,15 @@ # Screen scrapes the Knight News Challenge entries (all 64 pages of them) # and counts the number of votes/hearts for each entry. Then displays them # in rank order. -# +# # This script runs in about 20 seconds. import requests from BeautifulSoup import BeautifulSoup # Winners found on http://newschallenge.tumblr.com/post/20962258701/knight-news-challenge-on-networks-moving-to-the-next: -# -# $('.posts .MsoNormal > span').find('a[href^="http://newschallenge.tumblr.com/post"]').map(function() { +# +# $('.posts .MsoNormal > span').find('a[href^="http://newschallenge.tumblr.com/post"]').map(function() { # return $(this).attr('href'); # }); @@ -70,7 +70,9 @@ winners = [ "http://newschallenge.tumblr.com/post/19493920734/get-to-the-source", "http://newschallenge.tumblr.com/post/19480128205/farm-to-table-school-lunch", "http://newschallenge.tumblr.com/post/19477700441/partisans-org", - "http://newschallenge.tumblr.com/post/19345505702/protecting-journalists-and-engaging-communities"] + "http://newschallenge.tumblr.com/post/19345505702/protecting-journalists-and-engaging-communities", +] + def find_entries(): page = 1 @@ -79,73 +81,85 @@ def find_entries(): while True: print(" ---> Found %s entries so far. Now on page: %s" % (len(entries), page)) - + knight_url = "http://newschallenge.tumblr.com/page/%s" % (page) html = requests.get(knight_url).content soup = BeautifulSoup(html) postboxes = soup.findAll("div", "postbox") - + # Done if only sticky entry is left. 
if len(postboxes) <= 1: break page += 1 - + # 15 entries per page, plus a sticky throwaway entry for entry in postboxes: - if 'stickyPost' in entry.get('class'): continue - + if "stickyPost" in entry.get("class"): + continue + total_entry_count += 1 likes = entry.find("", "home-likes") if likes and likes.text: likes = int(likes.text) else: likes = 0 - + comments = entry.find("", "home-comments") if comments and comments.text: comments = int(comments.text) else: comments = 0 - + title = entry.find("h2") if title: title = title.text - - url = entry.find('a', "home-view") + + url = entry.find("a", "home-view") if url: - url = url.get('href') - + url = url.get("href") + # Only record active entries if comments or likes: - entries.append({ - 'likes': likes, - 'comments': comments, - 'title': title, - 'url': url, - }) + entries.append( + { + "likes": likes, + "comments": comments, + "title": title, + "url": url, + } + ) # time.sleep(random.randint(0, 2)) - - entries.sort(key=lambda e: e['comments'] + e['likes']) + + entries.sort(key=lambda e: e["comments"] + e["likes"]) entries.reverse() active_entry_count = len(entries) - + found_entries = [] winner_count = 0 for i, entry in enumerate(entries): - is_winner = entry['url'] in winners - if is_winner: winner_count += 1 - print(" * %s#%s: %s likes - [%s](%s)%s" % ( - "**" if is_winner else "", - i + 1, - entry['likes'], entry['title'], - entry['url'], - "**" if is_winner else "")) + is_winner = entry["url"] in winners + if is_winner: + winner_count += 1 + print( + " * %s#%s: %s likes - [%s](%s)%s" + % ( + "**" if is_winner else "", + i + 1, + entry["likes"], + entry["title"], + entry["url"], + "**" if is_winner else "", + ) + ) found_entries.append(entry) - - print(" ***> Found %s active entries among %s total applications with %s/%s winners." % ( - active_entry_count, total_entry_count, winner_count, len(winners))) + + print( + " ***> Found %s active entries among %s total applications with %s/%s winners." 
+ % (active_entry_count, total_entry_count, winner_count, len(winners)) + ) return found_entries -if __name__ == '__main__': - find_entries() \ No newline at end of file + +if __name__ == "__main__": + find_entries() diff --git a/utils/archive/memcached_status.py b/utils/archive/memcached_status.py index e5be7b37a..62f426371 100644 --- a/utils/archive/memcached_status.py +++ b/utils/archive/memcached_status.py @@ -2,46 +2,46 @@ import memcache import re import sys from settings import CACHE_BACKEND -#gfranxman + +# gfranxman verbose = False -if not CACHE_BACKEND.startswith( 'memcached://' ): +if not CACHE_BACKEND.startswith("memcached://"): print("you are not configured to use memcched as your django cache backend") else: - m = re.search( r'//(.+:\d+)', CACHE_BACKEND ) - cache_host = m.group(1) + m = re.search(r"//(.+:\d+)", CACHE_BACKEND) + cache_host = m.group(1) - h = memcache._Host( cache_host ) + h = memcache._Host(cache_host) h.connect() - h.send_cmd( 'stats' ) + h.send_cmd("stats") stats = {} - pat = re.compile( r'STAT (\w+) (\w+)' ) + pat = re.compile(r"STAT (\w+) (\w+)") - l = '' ; - while l.find( 'END' ) < 0 : + l = "" + while l.find("END") < 0: l = h.readline() if verbose: print(l) - m = pat.match( l ) - if m : - stats[ m.group(1) ] = m.group(2) - + m = pat.match(l) + if m: + stats[m.group(1)] = m.group(2) h.close_socket() if verbose: print(stats) - items = int( stats[ 'curr_items' ] ) - bytes = int( stats[ 'bytes' ] ) - limit_maxbytes = int( stats[ 'limit_maxbytes' ] ) or bytes - current_conns = int( stats[ 'curr_connections' ] ) + items = int(stats["curr_items"]) + bytes = int(stats["bytes"]) + limit_maxbytes = int(stats["limit_maxbytes"]) or bytes + current_conns = int(stats["curr_connections"]) - print("MemCache status for %s" % ( CACHE_BACKEND )) - print("%d items using %d of %d" % ( items, bytes, limit_maxbytes )) - print("%5.2f%% full" % ( 100.0 * bytes / limit_maxbytes )) - print("%d connections being handled" % ( current_conns )) - print() \ No newline at end of file + print("MemCache status for %s" % (CACHE_BACKEND)) + print("%d items using %d of %d" % (items, bytes, limit_maxbytes)) + print("%5.2f%% full" % (100.0 * bytes / limit_maxbytes)) + print("%d connections being handled" % (current_conns)) + print() diff --git a/utils/db_functions.py b/utils/db_functions.py index 9fdabf8ac..7a2a7cff7 100644 --- a/utils/db_functions.py +++ b/utils/db_functions.py @@ -3,23 +3,24 @@ import pymongo PRIMARY_STATE = 1 SECONDARY_STATE = 2 + def mongo_max_replication_lag(connection): try: - status = connection.admin.command('replSetGetStatus') + status = connection.admin.command("replSetGetStatus") except pymongo.errors.OperationFailure: return 0 - - members = status['members'] + + members = status["members"] primary_optime = None oldest_secondary_optime = None for member in members: - member_state = member['state'] - optime = member['optime'] + member_state = member["state"] + optime = member["optime"] if member_state == PRIMARY_STATE: - primary_optime = optime['ts'].time + primary_optime = optime["ts"].time elif member_state == SECONDARY_STATE: - if not oldest_secondary_optime or optime['ts'].time < oldest_secondary_optime: - oldest_secondary_optime = optime['ts'].time + if not oldest_secondary_optime or optime["ts"].time < oldest_secondary_optime: + oldest_secondary_optime = optime["ts"].time if not primary_optime or not oldest_secondary_optime: return 0 diff --git a/utils/exception_middleware.py b/utils/exception_middleware.py index de282de65..5eec95079 100644 --- 
a/utils/exception_middleware.py +++ b/utils/exception_middleware.py @@ -3,23 +3,23 @@ import sys import inspect from pprint import pprint + class ConsoleExceptionMiddleware: def process_exception(self, request, exception): exc_info = sys.exc_info() print("######################## Exception #############################") - print(('\n'.join(traceback.format_exception(*(exc_info or sys.exc_info()))))) + print(("\n".join(traceback.format_exception(*(exc_info or sys.exc_info()))))) print("----------------------------------------------------------------") # pprint(inspect.trace()[-1][0].f_locals) print("################################################################") - - #pprint(request) - #print "################################################################" + + # pprint(request) + # print "################################################################" def __init__(self, get_response=None): self.get_response = get_response def __call__(self, request): - response = self.get_response(request) return response diff --git a/utils/facebook_fetcher.py b/utils/facebook_fetcher.py index 356169e85..798bd683e 100644 --- a/utils/facebook_fetcher.py +++ b/utils/facebook_fetcher.py @@ -9,216 +9,227 @@ from apps.reader.models import UserSubscription from utils import log as logging from vendor.facebook import GraphAPIError + class FacebookFetcher: - def __init__(self, feed, options=None): self.feed = feed self.options = options or {} - + def fetch(self): page_name = self.extract_page_name() - if not page_name: + if not page_name: return facebook_user = self.facebook_user() if not facebook_user: return - + # If 'video', use video API to get embed: # f.get_object('tastyvegetarian', fields='posts') # f.get_object('1992797300790726', fields='embed_html') - feed = self.fetch_page_feed(facebook_user, page_name, 'name,about,posts,videos,photos') - + feed = self.fetch_page_feed(facebook_user, page_name, "name,about,posts,videos,photos") + data = {} - data['title'] = feed.get('name', "%s on Facebook" % page_name) - data['link'] = feed.get('link', "https://facebook.com/%s" % page_name) - data['description'] = feed.get('about', "%s on Facebook" % page_name) - data['lastBuildDate'] = datetime.datetime.utcnow() - data['generator'] = 'NewsBlur Facebook API Decrapifier - %s' % settings.NEWSBLUR_URL - data['docs'] = None - data['feed_url'] = self.feed.feed_address + data["title"] = feed.get("name", "%s on Facebook" % page_name) + data["link"] = feed.get("link", "https://facebook.com/%s" % page_name) + data["description"] = feed.get("about", "%s on Facebook" % page_name) + data["lastBuildDate"] = datetime.datetime.utcnow() + data["generator"] = "NewsBlur Facebook API Decrapifier - %s" % settings.NEWSBLUR_URL + data["docs"] = None + data["feed_url"] = self.feed.feed_address rss = feedgenerator.Atom1Feed(**data) merged_data = [] - - posts = feed.get('posts', {}).get('data', None) + + posts = feed.get("posts", {}).get("data", None) if posts: for post in posts: story_data = self.page_posts_story(facebook_user, post) if not story_data: continue merged_data.append(story_data) - - videos = feed.get('videos', {}).get('data', None) + + videos = feed.get("videos", {}).get("data", None) if videos: for video in videos: story_data = self.page_video_story(facebook_user, video) if not story_data: continue for seen_data in merged_data: - if story_data['link'] == seen_data['link']: + if story_data["link"] == seen_data["link"]: # Video wins over posts (and attachments) - seen_data['description'] = story_data['description'] - 
seen_data['title'] = story_data['title'] + seen_data["description"] = story_data["description"] + seen_data["title"] = story_data["title"] break - + for story_data in merged_data: rss.add_item(**story_data) - - return rss.writeString('utf-8') - + + return rss.writeString("utf-8") + def extract_page_name(self): page = None try: - page_groups = re.search('facebook.com/(\w+)/?', self.feed.feed_address) + page_groups = re.search("facebook.com/(\w+)/?", self.feed.feed_address) if not page_groups: return page = page_groups.group(1) except IndexError: return - + return page - + def facebook_user(self): facebook_api = None social_services = None - - if self.options.get('requesting_user_id', None): - social_services = MSocialServices.get_user(self.options.get('requesting_user_id')) + + if self.options.get("requesting_user_id", None): + social_services = MSocialServices.get_user(self.options.get("requesting_user_id")) facebook_api = social_services.facebook_api() if not facebook_api: - logging.debug(' ***> [%-30s] ~FRFacebook fetch failed: %s: No facebook API for %s' % - (self.feed.log_title[:30], self.feed.feed_address, self.options)) + logging.debug( + " ***> [%-30s] ~FRFacebook fetch failed: %s: No facebook API for %s" + % (self.feed.log_title[:30], self.feed.feed_address, self.options) + ) return else: usersubs = UserSubscription.objects.filter(feed=self.feed) if not usersubs: - logging.debug(' ***> [%-30s] ~FRFacebook fetch failed: %s: No subscriptions' % - (self.feed.log_title[:30], self.feed.feed_address)) + logging.debug( + " ***> [%-30s] ~FRFacebook fetch failed: %s: No subscriptions" + % (self.feed.log_title[:30], self.feed.feed_address) + ) return for sub in usersubs: social_services = MSocialServices.get_user(sub.user_id) - if not social_services.facebook_uid: + if not social_services.facebook_uid: continue facebook_api = social_services.facebook_api() - if not facebook_api: + if not facebook_api: continue else: break - + if not facebook_api: - logging.debug(' ***> [%-30s] ~FRFacebook fetch failed: %s: No facebook API for %s' % - (self.feed.log_title[:30], self.feed.feed_address, usersubs[0].user.username)) + logging.debug( + " ***> [%-30s] ~FRFacebook fetch failed: %s: No facebook API for %s" + % (self.feed.log_title[:30], self.feed.feed_address, usersubs[0].user.username) + ) return - + return facebook_api - + def fetch_page_feed(self, facebook_user, page, fields): try: stories = facebook_user.get_object(page, fields=fields) except GraphAPIError as e: message = str(e).lower() - if 'session has expired' in message: - logging.debug(' ***> [%-30s] ~FRFacebook page failed/expired, disconnecting facebook: %s: %s' % - (self.feed.log_title[:30], self.feed.feed_address, e)) + if "session has expired" in message: + logging.debug( + " ***> [%-30s] ~FRFacebook page failed/expired, disconnecting facebook: %s: %s" + % (self.feed.log_title[:30], self.feed.feed_address, e) + ) self.feed.save_feed_history(560, "Facebook Error: Expired token") return {} - + if not stories: return {} return stories - + def page_posts_story(self, facebook_user, page_story): categories = set() - if 'message' not in page_story: + if "message" not in page_story: # Probably a story shared on the page's timeline, not a published story return - message = linebreaks(page_story['message']) - created_date = page_story['created_time'] + message = linebreaks(page_story["message"]) + created_date = page_story["created_time"] if isinstance(created_date, str): created_date = dateutil.parser.parse(created_date) - fields = 
facebook_user.get_object(page_story['id'], fields='permalink_url,link,attachments') - permalink = fields.get('link', fields['permalink_url']) + fields = facebook_user.get_object(page_story["id"], fields="permalink_url,link,attachments") + permalink = fields.get("link", fields["permalink_url"]) attachments_html = "" - if fields.get('attachments', None) and fields['attachments']['data']: - for attachment in fields['attachments']['data']: - if 'media' in attachment: - attachments_html += "" % attachment['media']['image']['src'] - if attachment.get('subattachments', None): - for subattachment in attachment['subattachments']['data']: - attachments_html += "" % subattachment['media']['image']['src'] - + if fields.get("attachments", None) and fields["attachments"]["data"]: + for attachment in fields["attachments"]["data"]: + if "media" in attachment: + attachments_html += '' % attachment["media"]["image"]["src"] + if attachment.get("subattachments", None): + for subattachment in attachment["subattachments"]["data"]: + attachments_html += '' % subattachment["media"]["image"]["src"] + content = """
%s
%s
""" % ( message, - attachments_html + attachments_html, ) - + story = { - 'title': message, - 'link': permalink, - 'description': content, - 'categories': list(categories), - 'unique_id': "fb_post:%s" % page_story['id'], - 'pubdate': created_date, + "title": message, + "link": permalink, + "description": content, + "categories": list(categories), + "unique_id": "fb_post:%s" % page_story["id"], + "pubdate": created_date, } - + return story - + def page_video_story(self, facebook_user, page_story): categories = set() - if 'description' not in page_story: + if "description" not in page_story: return - message = linebreaks(page_story['description']) - created_date = page_story['updated_time'] + message = linebreaks(page_story["description"]) + created_date = page_story["updated_time"] if isinstance(created_date, str): created_date = dateutil.parser.parse(created_date) - permalink = facebook_user.get_object(page_story['id'], fields='permalink_url')['permalink_url'] - embed_html = facebook_user.get_object(page_story['id'], fields='embed_html') - - if permalink.startswith('/'): + permalink = facebook_user.get_object(page_story["id"], fields="permalink_url")["permalink_url"] + embed_html = facebook_user.get_object(page_story["id"], fields="embed_html") + + if permalink.startswith("/"): permalink = "https://www.facebook.com%s" % permalink - + content = """
%s
%s
""" % ( message, - embed_html.get('embed_html', '') + embed_html.get("embed_html", ""), ) - + story = { - 'title': page_story.get('story', message), - 'link': permalink, - 'description': content, - 'categories': list(categories), - 'unique_id': "fb_post:%s" % page_story['id'], - 'pubdate': created_date, + "title": page_story.get("story", message), + "link": permalink, + "description": content, + "categories": list(categories), + "unique_id": "fb_post:%s" % page_story["id"], + "pubdate": created_date, } - + return story - + def favicon_url(self): page_name = self.extract_page_name() facebook_user = self.facebook_user() if not facebook_user: - logging.debug(' ***> [%-30s] ~FRFacebook icon failed, disconnecting facebook: %s' % - (self.feed.log_title[:30], self.feed.feed_address)) - return - - try: - picture_data = facebook_user.get_object(page_name, fields='picture') - except GraphAPIError as e: - message = str(e).lower() - if 'session has expired' in message: - logging.debug(' ***> [%-30s] ~FRFacebook icon failed/expired, disconnecting facebook: %s: %s' % - (self.feed.log_title[:30], self.feed.feed_address, e)) + logging.debug( + " ***> [%-30s] ~FRFacebook icon failed, disconnecting facebook: %s" + % (self.feed.log_title[:30], self.feed.feed_address) + ) return - if 'picture' in picture_data: - return picture_data['picture']['data']['url'] - \ No newline at end of file + try: + picture_data = facebook_user.get_object(page_name, fields="picture") + except GraphAPIError as e: + message = str(e).lower() + if "session has expired" in message: + logging.debug( + " ***> [%-30s] ~FRFacebook icon failed/expired, disconnecting facebook: %s: %s" + % (self.feed.log_title[:30], self.feed.feed_address, e) + ) + return + + if "picture" in picture_data: + return picture_data["picture"]["data"]["url"] diff --git a/utils/feed_fetcher.py b/utils/feed_fetcher.py index c6b804418..2955ac203 100644 --- a/utils/feed_fetcher.py +++ b/utils/feed_fetcher.py @@ -37,8 +37,8 @@ from apps.rss_feeds.models import Feed, MStory from apps.rss_feeds.page_importer import PageImporter from apps.statistics.models import MAnalyticsFetcher, MStatistics -feedparser.sanitizer._HTMLSanitizer.acceptable_elements.update(['iframe']) -feedparser.sanitizer._HTMLSanitizer.acceptable_elements.update(['text']) +feedparser.sanitizer._HTMLSanitizer.acceptable_elements.update(["iframe"]) +feedparser.sanitizer._HTMLSanitizer.acceptable_elements.update(["text"]) from bs4 import BeautifulSoup from celery.exceptions import SoftTimeLimitExceeded @@ -81,15 +81,15 @@ class FetchFeed: """ start = time.time() identity = self.get_identity() - if self.options.get('archive_page', None): - log_msg = '%2s ---> [%-30s] ~FYFetching feed (~FB%d~FY) ~BG~FMarchive page~ST~FY: ~SB%s' % ( + if self.options.get("archive_page", None): + log_msg = "%2s ---> [%-30s] ~FYFetching feed (~FB%d~FY) ~BG~FMarchive page~ST~FY: ~SB%s" % ( identity, self.feed.log_title[:30], self.feed.id, - self.options['archive_page'], + self.options["archive_page"], ) else: - log_msg = '%2s ---> [%-30s] ~FYFetching feed (~FB%d~FY), last update: %s' % ( + log_msg = "%2s ---> [%-30s] ~FYFetching feed (~FB%d~FY), last update: %s" % ( identity, self.feed.log_title[:30], self.feed.id, @@ -101,85 +101,87 @@ class FetchFeed: modified = self.feed.last_modified.utctimetuple()[:7] if self.feed.last_modified else None address = self.feed.feed_address - if self.options.get('force') or self.options.get('archive_page', None) or random.random() <= 0.01: - self.options['force'] = True + if 
self.options.get("force") or self.options.get("archive_page", None) or random.random() <= 0.01: + self.options["force"] = True modified = None etag = None - if self.options.get('archive_page', None) == "rfc5005" and self.options.get('archive_page_link', None): - address = self.options['archive_page_link'] - elif self.options.get('archive_page', None): - address = qurl(address, add={self.options['archive_page_key']: self.options['archive_page']}) - elif address.startswith('http'): + if self.options.get("archive_page", None) == "rfc5005" and self.options.get( + "archive_page_link", None + ): + address = self.options["archive_page_link"] + elif self.options.get("archive_page", None): + address = qurl(address, add={self.options["archive_page_key"]: self.options["archive_page"]}) + elif address.startswith("http"): address = qurl(address, add={"_": random.randint(0, 10000)}) - logging.debug(' ---> [%-30s] ~FBForcing fetch: %s' % (self.feed.log_title[:30], address)) + logging.debug(" ---> [%-30s] ~FBForcing fetch: %s" % (self.feed.log_title[:30], address)) elif not self.feed.fetched_once or not self.feed.known_good: modified = None etag = None - if self.options.get('feed_xml'): + if self.options.get("feed_xml"): logging.debug( - ' ---> [%-30s] ~FM~BKFeed has been fat pinged. Ignoring fat: %s' - % (self.feed.log_title[:30], len(self.options.get('feed_xml'))) + " ---> [%-30s] ~FM~BKFeed has been fat pinged. Ignoring fat: %s" + % (self.feed.log_title[:30], len(self.options.get("feed_xml"))) ) - if self.options.get('fpf'): - self.fpf = self.options.get('fpf') + if self.options.get("fpf"): + self.fpf = self.options.get("fpf") logging.debug( - ' ---> [%-30s] ~FM~BKFeed fetched in real-time with fat ping.' % (self.feed.log_title[:30]) + " ---> [%-30s] ~FM~BKFeed fetched in real-time with fat ping." % (self.feed.log_title[:30]) ) return FEED_OK, self.fpf - if 'youtube.com' in address: + if "youtube.com" in address: youtube_feed = self.fetch_youtube() if not youtube_feed: logging.debug( - ' ***> [%-30s] ~FRYouTube fetch failed: %s.' % (self.feed.log_title[:30], address) + " ***> [%-30s] ~FRYouTube fetch failed: %s." % (self.feed.log_title[:30], address) ) return FEED_ERRHTTP, None self.fpf = feedparser.parse(youtube_feed, sanitize_html=False) - elif re.match(r'(https?)?://twitter.com/\w+/?', qurl(address, remove=['_'])): + elif re.match(r"(https?)?://twitter.com/\w+/?", qurl(address, remove=["_"])): twitter_feed = self.fetch_twitter(address) if not twitter_feed: logging.debug( - ' ***> [%-30s] ~FRTwitter fetch failed: %s' % (self.feed.log_title[:30], address) + " ***> [%-30s] ~FRTwitter fetch failed: %s" % (self.feed.log_title[:30], address) ) return FEED_ERRHTTP, None self.fpf = feedparser.parse(twitter_feed) - elif re.match(r'(.*?)facebook.com/\w+/?$', qurl(address, remove=['_'])): + elif re.match(r"(.*?)facebook.com/\w+/?$", qurl(address, remove=["_"])): facebook_feed = self.fetch_facebook() if not facebook_feed: logging.debug( - ' ***> [%-30s] ~FRFacebook fetch failed: %s' % (self.feed.log_title[:30], address) + " ***> [%-30s] ~FRFacebook fetch failed: %s" % (self.feed.log_title[:30], address) ) return FEED_ERRHTTP, None self.fpf = feedparser.parse(facebook_feed) - if not self.fpf and 'json' in address: + if not self.fpf and "json" in address: try: headers = self.feed.fetch_headers() if etag: - headers['If-None-Match'] = etag + headers["If-None-Match"] = etag if modified: # format into an RFC 1123-compliant timestamp. 
We can't use # time.strftime() since the %a and %b directives can be affected # by the current locale, but RFC 2616 states that dates must be # in English. - short_weekdays = ['Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun'] + short_weekdays = ["Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun"] months = [ - 'Jan', - 'Feb', - 'Mar', - 'Apr', - 'May', - 'Jun', - 'Jul', - 'Aug', - 'Sep', - 'Oct', - 'Nov', - 'Dec', + "Jan", + "Feb", + "Mar", + "Apr", + "May", + "Jun", + "Jul", + "Aug", + "Sep", + "Oct", + "Nov", + "Dec", ] - modified_header = '%s, %02d %s %04d %02d:%02d:%02d GMT' % ( + modified_header = "%s, %02d %s %04d %02d:%02d:%02d GMT" % ( short_weekdays[modified[6]], modified[2], months[modified[1] - 1], @@ -188,9 +190,9 @@ class FetchFeed: modified[4], modified[5], ) - headers['If-Modified-Since'] = modified_header + headers["If-Modified-Since"] = modified_header if etag or modified: - headers['A-IM'] = 'feed' + headers["A-IM"] = "feed" try: raw_feed = requests.get(address, headers=headers, timeout=15) except (requests.adapters.ConnectionError, TimeoutError): @@ -202,7 +204,10 @@ class FetchFeed: % (self.feed.log_title[:30], raw_feed.status_code, raw_feed.headers) ) else: - logging.debug(" ***> [%-30s] ~FRJson feed fetch timed out, trying fake headers: %s" % (self.feed.log_title[:30], address)) + logging.debug( + " ***> [%-30s] ~FRJson feed fetch timed out, trying fake headers: %s" + % (self.feed.log_title[:30], address) + ) raw_feed = requests.get( self.feed.feed_address, headers=self.feed.fetch_headers(fake=True), @@ -210,24 +215,24 @@ class FetchFeed: ) json_feed_content_type = any( - json_feed in raw_feed.headers.get('Content-Type', "") - for json_feed in ['application/feed+json', 'application/json'] + json_feed in raw_feed.headers.get("Content-Type", "") + for json_feed in ["application/feed+json", "application/json"] ) if raw_feed.content and json_feed_content_type: # JSON Feed json_feed = self.fetch_json_feed(address, raw_feed) if not json_feed: logging.debug( - ' ***> [%-30s] ~FRJSON fetch failed: %s' % (self.feed.log_title[:30], address) + " ***> [%-30s] ~FRJSON fetch failed: %s" % (self.feed.log_title[:30], address) ) return FEED_ERRHTTP, None self.fpf = feedparser.parse(json_feed) elif raw_feed.content and raw_feed.status_code < 400: response_headers = raw_feed.headers - response_headers['Content-Location'] = raw_feed.url + response_headers["Content-Location"] = raw_feed.url self.raw_feed = smart_str(raw_feed.content) self.fpf = feedparser.parse(self.raw_feed, response_headers=response_headers) - if self.options['verbose']: + if self.options["verbose"]: logging.debug( " ---> [%-30s] ~FBFeed fetch status %s: %s length / %s" % ( @@ -244,7 +249,7 @@ class FetchFeed: ) # raise e - if not self.fpf or self.options.get('force_fp', False): + if not self.fpf or self.options.get("force_fp", False): try: self.fpf = feedparser.parse(address, agent=self.feed.user_agent, etag=etag, modified=modified) except ( @@ -260,12 +265,14 @@ class FetchFeed: ConnectionResetError, TimeoutError, ) as e: - logging.debug(' ***> [%-30s] ~FRFeed fetch error: %s' % (self.feed.log_title[:30], e)) + logging.debug(" ***> [%-30s] ~FRFeed fetch error: %s" % (self.feed.log_title[:30], e)) pass if not self.fpf: try: - logging.debug(' ***> [%-30s] ~FRTurning off headers: %s' % (self.feed.log_title[:30], address)) + logging.debug( + " ***> [%-30s] ~FRTurning off headers: %s" % (self.feed.log_title[:30], address) + ) self.fpf = feedparser.parse(address, agent=self.feed.user_agent) except ( TypeError, @@ -279,11 +286,11 
@@ class FetchFeed: http.client.IncompleteRead, ConnectionResetError, ) as e: - logging.debug(' ***> [%-30s] ~FRFetch failed: %s.' % (self.feed.log_title[:30], e)) + logging.debug(" ***> [%-30s] ~FRFetch failed: %s." % (self.feed.log_title[:30], e)) return FEED_ERRHTTP, None logging.debug( - ' ---> [%-30s] ~FYFeed fetch in ~FM%.4ss' % (self.feed.log_title[:30], time.time() - start) + " ---> [%-30s] ~FYFeed fetch in ~FM%.4ss" % (self.feed.log_title[:30], time.time() - start) ) return FEED_OK, self.fpf @@ -333,21 +340,21 @@ class ProcessFeed: start = time.time() self.refresh_feed() - if not self.options.get('archive_page', None): + if not self.options.get("archive_page", None): feed_status, ret_values = self.verify_feed_integrity() if feed_status and ret_values: return feed_status, ret_values - + self.fpf.entries = self.fpf.entries[:100] - if not self.options.get('archive_page', None): + if not self.options.get("archive_page", None): self.compare_feed_attribute_changes() # Determine if stories aren't valid and replace broken guids guids_seen = set() permalinks_seen = set() for entry in self.fpf.entries: - guids_seen.add(entry.get('guid')) + guids_seen.add(entry.get("guid")) permalinks_seen.add(Feed.get_permalink(entry)) guid_difference = len(guids_seen) != len(self.fpf.entries) single_guid = len(guids_seen) == 1 @@ -363,45 +370,45 @@ class ProcessFeed: stories = [] for entry in self.fpf.entries: story = pre_process_story(entry, self.fpf.encoding) - if not story['title'] and not story['story_content']: + if not story["title"] and not story["story_content"]: continue - if self.options.get('archive_page', None) and story.get('published') > day_ago: + if self.options.get("archive_page", None) and story.get("published") > day_ago: # Archive only: Arbitrary but necessary to prevent feeds from creating an unlimited number of stories # because they don't have a guid so it gets auto-generated based on the date, and if the story # is missing a date, then the latest date gets used. So reject anything newer than 24 hours old # when filling out the archive. 
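# A minimal sketch of the archive-fill guard described in the comment above
# (illustrative only, not NewsBlur's exact implementation): when backfilling
# archive pages, entries published within the last 24 hours are skipped, since
# entries with no date fall back to "now" and would otherwise be re-created on
# every pass.
import datetime

def stories_old_enough_for_archive(stories, now=None):
    now = now or datetime.datetime.utcnow()
    day_ago = now - datetime.timedelta(days=1)
    return [s for s in stories if s.get("published") and s["published"] <= day_ago]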
# logging.debug(f" ---> [%-30s] ~FBTossing story because it's too new for the archive: ~SB{story}") continue - if story.get('published') < start_date: - start_date = story.get('published') + if story.get("published") < start_date: + start_date = story.get("published") if replace_guids: if replace_permalinks: - new_story_guid = str(story.get('published')) - if self.options['verbose']: + new_story_guid = str(story.get("published")) + if self.options["verbose"]: logging.debug( - ' ---> [%-30s] ~FBReplacing guid (%s) with timestamp: %s' - % (self.feed.log_title[:30], story.get('guid'), new_story_guid) + " ---> [%-30s] ~FBReplacing guid (%s) with timestamp: %s" + % (self.feed.log_title[:30], story.get("guid"), new_story_guid) ) - story['guid'] = new_story_guid + story["guid"] = new_story_guid else: new_story_guid = Feed.get_permalink(story) - if self.options['verbose']: + if self.options["verbose"]: logging.debug( - ' ---> [%-30s] ~FBReplacing guid (%s) with permalink: %s' - % (self.feed.log_title[:30], story.get('guid'), new_story_guid) + " ---> [%-30s] ~FBReplacing guid (%s) with permalink: %s" + % (self.feed.log_title[:30], story.get("guid"), new_story_guid) ) - story['guid'] = new_story_guid - story['story_hash'] = MStory.feed_guid_hash_unsaved(self.feed.pk, story.get('guid')) + story["guid"] = new_story_guid + story["story_hash"] = MStory.feed_guid_hash_unsaved(self.feed.pk, story.get("guid")) stories.append(story) - story_hashes.append(story.get('story_hash')) + story_hashes.append(story.get("story_hash")) original_story_hash_count = len(story_hashes) story_hashes_in_unread_cutoff = self.feed.story_hashes_in_unread_cutoff[:original_story_hash_count] story_hashes.extend(story_hashes_in_unread_cutoff) story_hashes = list(set(story_hashes)) - if self.options['verbose'] or settings.DEBUG: + if self.options["verbose"] or settings.DEBUG: logging.debug( - ' ---> [%-30s] ~FBFound ~SB%s~SN guids, adding ~SB%s~SN/%s guids from db' + " ---> [%-30s] ~FBFound ~SB%s~SN guids, adding ~SB%s~SN/%s guids from db" % ( self.feed.log_title[:30], original_story_hash_count, @@ -427,53 +434,53 @@ class ProcessFeed: ret_values = self.feed.add_update_stories( stories, existing_stories, - verbose=self.options['verbose'], - updates_off=self.options['updates_off'], + verbose=self.options["verbose"], + updates_off=self.options["updates_off"], ) # PubSubHubbub - if not self.options.get('archive_page', None): + if not self.options.get("archive_page", None): self.check_feed_for_push() # Push notifications - if ret_values['new'] > 0 and MUserFeedNotification.feed_has_users(self.feed.pk) > 0: - QueueNotifications.delay(self.feed.pk, ret_values['new']) + if ret_values["new"] > 0 and MUserFeedNotification.feed_has_users(self.feed.pk) > 0: + QueueNotifications.delay(self.feed.pk, ret_values["new"]) # All Done logging.debug( - ' ---> [%-30s] ~FYParsed Feed: %snew=%s~SN~FY %sup=%s~SN same=%s%s~SN %serr=%s~SN~FY total=~SB%s' + " ---> [%-30s] ~FYParsed Feed: %snew=%s~SN~FY %sup=%s~SN same=%s%s~SN %serr=%s~SN~FY total=~SB%s" % ( self.feed.log_title[:30], - '~FG~SB' if ret_values['new'] else '', - ret_values['new'], - '~FY~SB' if ret_values['updated'] else '', - ret_values['updated'], - '~SB' if ret_values['same'] else '', - ret_values['same'], - '~FR~SB' if ret_values['error'] else '', - ret_values['error'], + "~FG~SB" if ret_values["new"] else "", + ret_values["new"], + "~FY~SB" if ret_values["updated"] else "", + ret_values["updated"], + "~SB" if ret_values["same"] else "", + ret_values["same"], + "~FR~SB" if ret_values["error"] 
else "", + ret_values["error"], len(self.fpf.entries), ) ) - self.feed.update_all_statistics(has_new_stories=bool(ret_values['new']), force=self.options['force']) + self.feed.update_all_statistics(has_new_stories=bool(ret_values["new"]), force=self.options["force"]) fetch_date = datetime.datetime.now() - if ret_values['new']: - if not getattr(settings, 'TEST_DEBUG', False): + if ret_values["new"]: + if not getattr(settings, "TEST_DEBUG", False): self.feed.trim_feed() self.feed.expire_redis() - if MStatistics.get('raw_feed', None) == self.feed.pk: + if MStatistics.get("raw_feed", None) == self.feed.pk: self.feed.save_raw_feed(self.raw_feed, fetch_date) self.feed.save_feed_history(200, "OK", date=fetch_date) - if self.options['verbose']: + if self.options["verbose"]: logging.debug( - ' ---> [%-30s] ~FBTIME: feed parse in ~FM%.4ss' + " ---> [%-30s] ~FBTIME: feed parse in ~FM%.4ss" % (self.feed.log_title[:30], time.time() - start) ) - if self.options.get('archive_page', None): + if self.options.get("archive_page", None): self.archive_seen_story_hashes.update(story_hashes) - + return FEED_OK, ret_values def verify_feed_integrity(self): @@ -487,12 +494,12 @@ class ProcessFeed: if not self.feed: return FEED_ERREXC, ret_values - - if hasattr(self.fpf, 'status'): - if self.options['verbose']: + + if hasattr(self.fpf, "status"): + if self.options["verbose"]: if self.fpf.bozo and self.fpf.status != 304: logging.debug( - ' ---> [%-30s] ~FRBOZO exception: %s ~SB(%s entries)' + " ---> [%-30s] ~FRBOZO exception: %s ~SB(%s entries)" % (self.feed.log_title[:30], self.fpf.bozo_exception, len(self.fpf.entries)) ) @@ -504,16 +511,16 @@ class ProcessFeed: # 302 and 307: Temporary redirect: ignore # 301 and 308: Permanent redirect: save it (after 10 tries) if self.fpf.status == 301 or self.fpf.status == 308: - if self.fpf.href.endswith('feedburner.com/atom.xml'): + if self.fpf.href.endswith("feedburner.com/atom.xml"): return FEED_ERRHTTP, ret_values - redirects, non_redirects = self.feed.count_redirects_in_history('feed') + redirects, non_redirects = self.feed.count_redirects_in_history("feed") self.feed.save_feed_history( self.fpf.status, "HTTP Redirect (%d to go)" % (10 - len(redirects)) ) if len(redirects) >= 10 or len(non_redirects) == 0: address = self.fpf.href - if self.options['force'] and address: - address = qurl(address, remove=['_']) + if self.options["force"] and address: + address = qurl(address, remove=["_"]) self.feed.feed_address = address if not self.feed.known_good: self.feed.fetched_once = True @@ -559,7 +566,7 @@ class ProcessFeed: if not self.feed.known_good: fixed_feed, feed = self.feed.check_feed_link_for_feed_address() if not fixed_feed: - self.feed.save_feed_history(552, 'Non-xml feed', self.fpf.bozo_exception) + self.feed.save_feed_history(552, "Non-xml feed", self.fpf.bozo_exception) else: self.feed = feed self.feed = self.feed.save() @@ -573,7 +580,7 @@ class ProcessFeed: if not self.feed.known_good: fixed_feed, feed = self.feed.check_feed_link_for_feed_address() if not fixed_feed: - self.feed.save_feed_history(553, 'Not an RSS feed', self.fpf.bozo_exception) + self.feed.save_feed_history(553, "Not an RSS feed", self.fpf.bozo_exception) else: self.feed = feed self.feed = self.feed.save() @@ -588,69 +595,69 @@ class ProcessFeed: if not self.feed: logging.debug(f"Missing feed: {self.feed}") return - + original_etag = self.feed.etag - self.feed.etag = self.fpf.get('etag') + self.feed.etag = self.fpf.get("etag") if self.feed.etag: self.feed.etag = self.feed.etag[:255] # some times this 
is None (it never should) *sigh* if self.feed.etag is None: - self.feed.etag = '' + self.feed.etag = "" if self.feed.etag != original_etag: - self.feed.save(update_fields=['etag']) + self.feed.save(update_fields=["etag"]) original_last_modified = self.feed.last_modified - if hasattr(self.fpf, 'modified') and self.fpf.modified: + if hasattr(self.fpf, "modified") and self.fpf.modified: try: self.feed.last_modified = datetime.datetime.strptime( - self.fpf.modified, '%a, %d %b %Y %H:%M:%S %Z' + self.fpf.modified, "%a, %d %b %Y %H:%M:%S %Z" ) except Exception as e: self.feed.last_modified = None logging.debug("Broken mtime %s: %s" % (self.feed.last_modified, e)) pass if self.feed.last_modified != original_last_modified: - self.feed.save(update_fields=['last_modified']) + self.feed.save(update_fields=["last_modified"]) original_title = self.feed.feed_title - if self.fpf.feed.get('title'): - self.feed.feed_title = strip_tags(self.fpf.feed.get('title')) + if self.fpf.feed.get("title"): + self.feed.feed_title = strip_tags(self.fpf.feed.get("title")) if self.feed.feed_title != original_title: - self.feed.save(update_fields=['feed_title']) + self.feed.save(update_fields=["feed_title"]) - tagline = self.fpf.feed.get('tagline', self.feed.data.feed_tagline) + tagline = self.fpf.feed.get("tagline", self.feed.data.feed_tagline) if tagline: original_tagline = self.feed.data.feed_tagline self.feed.data.feed_tagline = smart_str(tagline) if self.feed.data.feed_tagline != original_tagline: - self.feed.data.save(update_fields=['feed_tagline']) + self.feed.data.save(update_fields=["feed_tagline"]) if not self.feed.feed_link_locked: - new_feed_link = self.fpf.feed.get('link') or self.fpf.feed.get('id') or self.feed.feed_link - if self.options['force'] and new_feed_link: - new_feed_link = qurl(new_feed_link, remove=['_']) + new_feed_link = self.fpf.feed.get("link") or self.fpf.feed.get("id") or self.feed.feed_link + if self.options["force"] and new_feed_link: + new_feed_link = qurl(new_feed_link, remove=["_"]) if new_feed_link != self.feed.feed_link: logging.debug( " ---> [%-30s] ~SB~FRFeed's page is different: %s to %s" % (self.feed.log_title[:30], self.feed.feed_link, new_feed_link) ) - redirects, non_redirects = self.feed.count_redirects_in_history('page') + redirects, non_redirects = self.feed.count_redirects_in_history("page") self.feed.save_page_history(301, "HTTP Redirect (%s to go)" % (10 - len(redirects))) if len(redirects) >= 10 or len(non_redirects) == 0: self.feed.feed_link = new_feed_link - self.feed.save(update_fields=['feed_link']) + self.feed.save(update_fields=["feed_link"]) def check_feed_for_push(self): - if not (hasattr(self.fpf, 'feed') and hasattr(self.fpf.feed, 'links') and self.fpf.feed.links): + if not (hasattr(self.fpf, "feed") and hasattr(self.fpf.feed, "links") and self.fpf.feed.links): return - + hub_url = None self_url = self.feed.feed_address for link in self.fpf.feed.links: - if link['rel'] == 'hub' and not hub_url: - hub_url = link['href'] - elif link['rel'] == 'self': - self_url = link['href'] + if link["rel"] == "hub" and not hub_url: + hub_url = link["href"] + elif link["rel"] == "self": + self_url = link["href"] push_expired = False if self.feed.is_push: try: @@ -662,10 +669,10 @@ class ProcessFeed: and self_url and not settings.DEBUG and self.feed.active_subscribers > 0 - and (push_expired or not self.feed.is_push or self.options.get('force')) + and (push_expired or not self.feed.is_push or self.options.get("force")) ): logging.debug( - ' ---> [%-30s] ~BB~FW%sSubscribing to 
PuSH hub: %s' + " ---> [%-30s] ~BB~FW%sSubscribing to PuSH hub: %s" % (self.feed.log_title[:30], "~SKRe-~SN" if push_expired else "", hub_url) ) try: @@ -673,13 +680,11 @@ class ProcessFeed: PushSubscription.objects.subscribe(self_url, feed=self.feed, hub=hub_url) except TimeoutError: logging.debug( - ' ---> [%-30s] ~BB~FW~FRTimed out~FW subscribing to PuSH hub: %s' + " ---> [%-30s] ~BB~FW~FRTimed out~FW subscribing to PuSH hub: %s" % (self.feed.log_title[:30], hub_url) ) elif self.feed.is_push and (self.feed.active_subscribers <= 0 or not hub_url): - logging.debug( - ' ---> [%-30s] ~BB~FWTurning off PuSH, no hub found' % (self.feed.log_title[:30]) - ) + logging.debug(" ---> [%-30s] ~BB~FWTurning off PuSH, no hub found" % (self.feed.log_title[:30])) self.feed.is_push = False self.feed = self.feed.save() @@ -695,11 +700,11 @@ class FeedFetcherWorker: FEED_ERREXC: 0, } self.feed_trans = { - FEED_OK: 'ok', - FEED_SAME: 'unchanged', - FEED_ERRPARSE: 'cant_parse', - FEED_ERRHTTP: 'http_error', - FEED_ERREXC: 'exception', + FEED_OK: "ok", + FEED_SAME: "unchanged", + FEED_ERRPARSE: "cant_parse", + FEED_ERRHTTP: "http_error", + FEED_ERREXC: "exception", } self.feed_keys = sorted(self.feed_trans.keys()) self.time_start = datetime.datetime.utcnow() @@ -713,15 +718,15 @@ class FeedFetcherWorker: connection._connection_settings = {} connection._dbs = {} settings.MONGODB = connect(settings.MONGO_DB_NAME, **settings.MONGO_DB) - if 'username' in settings.MONGO_ANALYTICS_DB: + if "username" in settings.MONGO_ANALYTICS_DB: settings.MONGOANALYTICSDB = connect( - db=settings.MONGO_ANALYTICS_DB['name'], + db=settings.MONGO_ANALYTICS_DB["name"], host=f"mongodb://{settings.MONGO_ANALYTICS_DB['username']}:{settings.MONGO_ANALYTICS_DB['password']}@{settings.MONGO_ANALYTICS_DB['host']}/?authSource=admin", alias="nbanalytics", ) else: settings.MONGOANALYTICSDB = connect( - db=settings.MONGO_ANALYTICS_DB['name'], + db=settings.MONGO_ANALYTICS_DB["name"], host=f"mongodb://{settings.MONGO_ANALYTICS_DB['host']}/", alias="nbanalytics", ) @@ -738,15 +743,15 @@ class FeedFetcherWorker: identity = current_process._identity[0] # If fetching archive pages, come back once the archive scaffolding is built - if self.options.get('archive_page', None): + if self.options.get("archive_page", None): for feed_id in feed_queue: feed = self.refresh_feed(feed_id) try: self.fetch_and_process_archive_pages(feed_id) except SoftTimeLimitExceeded: logging.debug( - ' ---> [%-30s] ~FRTime limit reached while fetching ~FGarchive pages~FR. Made it to ~SB%s' - % (feed.log_title[:30], self.options['archive_page']) + " ---> [%-30s] ~FRTime limit reached while fetching ~FGarchive pages~FR. 
Made it to ~SB%s" + % (feed.log_title[:30], self.options["archive_page"]) ) pass if len(feed_queue) == 1: @@ -771,21 +776,21 @@ class FeedFetcherWorker: set_user({"id": feed_id, "username": feed.feed_title}) skip = False - if self.options.get('fake'): + if self.options.get("fake"): skip = True weight = "-" quick = "-" rand = "-" elif ( - self.options.get('quick') - and not self.options['force'] + self.options.get("quick") + and not self.options["force"] and feed.known_good and feed.fetched_once and not feed.is_push ): weight = feed.stories_last_month * feed.num_subscribers random_weight = random.randint(1, max(weight, 1)) - quick = float(self.options.get('quick', 0)) + quick = float(self.options.get("quick", 0)) rand = random.random() if random_weight < 1000 and rand < quick: skip = True @@ -796,7 +801,7 @@ class FeedFetcherWorker: rand = "-" if skip: logging.debug( - ' ---> [%-30s] ~BGFaking fetch, skipping (%s/month, %s subs, %s < %s)...' + " ---> [%-30s] ~BGFaking fetch, skipping (%s/month, %s subs, %s < %s)..." % (feed.log_title[:30], weight, feed.num_subscribers, rand, quick) ) continue @@ -807,74 +812,74 @@ class FeedFetcherWorker: feed_fetch_duration = time.time() - start_duration raw_feed = ffeed.raw_feed - if fetched_feed and (ret_feed == FEED_OK or self.options['force']): + if fetched_feed and (ret_feed == FEED_OK or self.options["force"]): pfeed = ProcessFeed(feed_id, fetched_feed, self.options, raw_feed=raw_feed) ret_feed, ret_entries = pfeed.process() feed = pfeed.feed feed_process_duration = time.time() - start_duration - if (ret_entries and ret_entries['new']) or self.options['force']: + if (ret_entries and ret_entries["new"]) or self.options["force"]: start = time.time() if not feed.known_good or not feed.fetched_once: feed.known_good = True feed.fetched_once = True feed = feed.save() - if self.options['force'] or random.random() <= 0.02: + if self.options["force"] or random.random() <= 0.02: logging.debug( - ' ---> [%-30s] ~FBPerforming feed cleanup...' % (feed.log_title[:30],) + " ---> [%-30s] ~FBPerforming feed cleanup..." % (feed.log_title[:30],) ) start_cleanup = time.time() feed.count_fs_size_bytes() logging.debug( - ' ---> [%-30s] ~FBDone with feed cleanup. Took ~SB%.4s~SN sec.' + " ---> [%-30s] ~FBDone with feed cleanup. Took ~SB%.4s~SN sec." % (feed.log_title[:30], time.time() - start_cleanup) ) try: self.count_unreads_for_subscribers(feed) except TimeoutError: logging.debug( - ' ---> [%-30s] Unread count took too long...' % (feed.log_title[:30],) + " ---> [%-30s] Unread count took too long..." % (feed.log_title[:30],) ) - if self.options['verbose']: + if self.options["verbose"]: logging.debug( - ' ---> [%-30s] ~FBTIME: unread count in ~FM%.4ss' + " ---> [%-30s] ~FBTIME: unread count in ~FM%.4ss" % (feed.log_title[:30], time.time() - start) ) except (urllib.error.HTTPError, urllib.error.URLError) as e: logging.debug( - ' ---> [%-30s] ~FRFeed throws HTTP error: ~SB%s' % (str(feed_id)[:30], e.reason) + " ---> [%-30s] ~FRFeed throws HTTP error: ~SB%s" % (str(feed_id)[:30], e.reason) ) feed_code = 404 feed.save_feed_history(feed_code, str(e.reason), e) fetched_feed = None except Feed.DoesNotExist: - logging.debug(' ---> [%-30s] ~FRFeed is now gone...' % (str(feed_id)[:30])) + logging.debug(" ---> [%-30s] ~FRFeed is now gone..." % (str(feed_id)[:30])) continue except SoftTimeLimitExceeded as e: logging.debug(" ---> [%-30s] ~BR~FWTime limit hit!~SB~FR Moving on to next feed..." 
% feed) ret_feed = FEED_ERREXC fetched_feed = None feed_code = 559 - feed.save_feed_history(feed_code, 'Timeout', e) + feed.save_feed_history(feed_code, "Timeout", e) except TimeoutError as e: - logging.debug(' ---> [%-30s] ~FRFeed fetch timed out...' % (feed.log_title[:30])) + logging.debug(" ---> [%-30s] ~FRFeed fetch timed out..." % (feed.log_title[:30])) feed_code = 505 - feed.save_feed_history(feed_code, 'Timeout', e) + feed.save_feed_history(feed_code, "Timeout", e) fetched_feed = None except Exception as e: - logging.debug('[%d] ! -------------------------' % (feed_id,)) + logging.debug("[%d] ! -------------------------" % (feed_id,)) tb = traceback.format_exc() logging.error(tb) - logging.debug('[%d] ! -------------------------' % (feed_id,)) + logging.debug("[%d] ! -------------------------" % (feed_id,)) ret_feed = FEED_ERREXC - feed = Feed.get_by_id(getattr(feed, 'pk', feed_id)) + feed = Feed.get_by_id(getattr(feed, "pk", feed_id)) if not feed: continue feed.save_feed_history(500, "Error", tb) feed_code = 500 fetched_feed = None # mail_feed_error_to_admin(feed, e, local_vars=locals()) - if not settings.DEBUG and hasattr(settings, 'SENTRY_DSN') and settings.SENTRY_DSN: + if not settings.DEBUG and hasattr(settings, "SENTRY_DSN") and settings.SENTRY_DSN: capture_exception(e) flush() @@ -897,7 +902,7 @@ class FeedFetcherWorker: continue if ( - (self.options['force']) + (self.options["force"]) or (random.random() > 0.9) or ( fetched_feed @@ -906,8 +911,7 @@ class FeedFetcherWorker: and (ret_feed == FEED_OK or (ret_feed == FEED_SAME and feed.stories_last_month > 10)) ) ): - - logging.debug(' ---> [%-30s] ~FYFetching page: %s' % (feed.log_title[:30], feed.feed_link)) + logging.debug(" ---> [%-30s] ~FYFetching page: %s" % (feed.log_title[:30], feed.feed_link)) page_importer = PageImporter(feed) try: page_data = page_importer.fetch_page() @@ -917,27 +921,27 @@ class FeedFetcherWorker: " ---> [%-30s] ~BR~FWTime limit hit!~SB~FR Moving on to next feed..." % feed ) page_data = None - feed.save_feed_history(557, 'Timeout', e) + feed.save_feed_history(557, "Timeout", e) except TimeoutError: - logging.debug(' ---> [%-30s] ~FRPage fetch timed out...' % (feed.log_title[:30])) + logging.debug(" ---> [%-30s] ~FRPage fetch timed out..." % (feed.log_title[:30])) page_data = None - feed.save_page_history(555, 'Timeout', '') + feed.save_page_history(555, "Timeout", "") except Exception as e: - logging.debug('[%d] ! -------------------------' % (feed_id,)) + logging.debug("[%d] ! -------------------------" % (feed_id,)) tb = traceback.format_exc() logging.error(tb) - logging.debug('[%d] ! -------------------------' % (feed_id,)) + logging.debug("[%d] ! 
-------------------------" % (feed_id,)) feed.save_page_history(550, "Page Error", tb) fetched_feed = None page_data = None # mail_feed_error_to_admin(feed, e, local_vars=locals()) - if not settings.DEBUG and hasattr(settings, 'SENTRY_DSN') and settings.SENTRY_DSN: + if not settings.DEBUG and hasattr(settings, "SENTRY_DSN") and settings.SENTRY_DSN: capture_exception(e) flush() feed = self.refresh_feed(feed.pk) - logging.debug(' ---> [%-30s] ~FYFetching icon: %s' % (feed.log_title[:30], feed.feed_link)) - force = self.options['force'] + logging.debug(" ---> [%-30s] ~FYFetching icon: %s" % (feed.log_title[:30], feed.feed_link)) + force = self.options["force"] if random.random() > 0.99: force = True icon_importer = IconImporter(feed, page_data=page_data, force=force) @@ -948,28 +952,28 @@ class FeedFetcherWorker: logging.debug( " ---> [%-30s] ~BR~FWTime limit hit!~SB~FR Moving on to next feed..." % feed ) - feed.save_feed_history(558, 'Timeout', e) + feed.save_feed_history(558, "Timeout", e) except TimeoutError: - logging.debug(' ---> [%-30s] ~FRIcon fetch timed out...' % (feed.log_title[:30])) - feed.save_page_history(556, 'Timeout', '') + logging.debug(" ---> [%-30s] ~FRIcon fetch timed out..." % (feed.log_title[:30])) + feed.save_page_history(556, "Timeout", "") except Exception as e: - logging.debug('[%d] ! -------------------------' % (feed_id,)) + logging.debug("[%d] ! -------------------------" % (feed_id,)) tb = traceback.format_exc() logging.error(tb) - logging.debug('[%d] ! -------------------------' % (feed_id,)) + logging.debug("[%d] ! -------------------------" % (feed_id,)) # feed.save_feed_history(560, "Icon Error", tb) # mail_feed_error_to_admin(feed, e, local_vars=locals()) - if not settings.DEBUG and hasattr(settings, 'SENTRY_DSN') and settings.SENTRY_DSN: + if not settings.DEBUG and hasattr(settings, "SENTRY_DSN") and settings.SENTRY_DSN: capture_exception(e) flush() else: logging.debug( - ' ---> [%-30s] ~FBSkipping page fetch: (%s on %s stories) %s' + " ---> [%-30s] ~FBSkipping page fetch: (%s on %s stories) %s" % ( feed.log_title[:30], self.feed_trans[ret_feed], feed.stories_last_month, - '' if feed.has_page else ' [HAS NO PAGE]', + "" if feed.has_page else " [HAS NO PAGE]", ) ) @@ -979,7 +983,7 @@ class FeedFetcherWorker: feed.last_load_time = round(delta) feed.fetched_once = True try: - feed = feed.save(update_fields=['last_load_time', 'fetched_once']) + feed = feed.save(update_fields=["last_load_time", "fetched_once"]) except IntegrityError: logging.debug( " ***> [%-30s] ~FRIntegrityError on feed: %s" @@ -989,10 +993,10 @@ class FeedFetcherWorker: ) ) - if ret_entries and ret_entries['new']: - self.publish_to_subscribers(feed, ret_entries['new']) + if ret_entries and ret_entries["new"]: + self.publish_to_subscribers(feed, ret_entries["new"]) - done_msg = '%2s ---> [%-30s] ~FYProcessed in ~FM~SB%.4ss~FY~SN (~FB%s~FY) [%s]' % ( + done_msg = "%2s ---> [%-30s] ~FYProcessed in ~FM~SB%.4ss~FY~SN (~FB%s~FY) [%s]" % ( identity, feed.log_title[:30], delta, @@ -1021,31 +1025,38 @@ class FeedFetcherWorker: def fetch_and_process_archive_pages(self, feed_id): feed = Feed.get_by_id(feed_id) first_seen_feed = None - original_starting_page = self.options['archive_page'] - + original_starting_page = self.options["archive_page"] + for archive_page_key in ["page", "paged", "rfc5005"]: seen_story_hashes = set() failed_pages = 0 - self.options['archive_page_key'] = archive_page_key + self.options["archive_page_key"] = archive_page_key if archive_page_key == "rfc5005": - 
self.options['archive_page'] = "rfc5005" + self.options["archive_page"] = "rfc5005" link_prev_archive = None if first_seen_feed: - for link in getattr(first_seen_feed.feed, 'links', []): - if link['rel'] == 'prev-archive' or link['rel'] == 'next': - link_prev_archive = link['href'] - logging.debug(' ---> [%-30s] ~FGFeed has ~SBRFC5005~SN links, filling out archive: %s' % (feed.log_title[:30], link_prev_archive)) + for link in getattr(first_seen_feed.feed, "links", []): + if link["rel"] == "prev-archive" or link["rel"] == "next": + link_prev_archive = link["href"] + logging.debug( + " ---> [%-30s] ~FGFeed has ~SBRFC5005~SN links, filling out archive: %s" + % (feed.log_title[:30], link_prev_archive) + ) break else: - logging.debug(' ---> [%-30s] ~FBFeed has no RFC5005 links...' % (feed.log_title[:30])) + logging.debug( + " ---> [%-30s] ~FBFeed has no RFC5005 links..." % (feed.log_title[:30]) + ) else: - self.options['archive_page_link'] = link_prev_archive + self.options["archive_page_link"] = link_prev_archive ffeed = FetchFeed(feed_id, self.options) try: ret_feed, fetched_feed = ffeed.fetch() except TimeoutError: - logging.debug(' ---> [%-30s] ~FRArchive feed fetch timed out...' % (feed.log_title[:30])) + logging.debug( + " ---> [%-30s] ~FRArchive feed fetch timed out..." % (feed.log_title[:30]) + ) # Timeout means don't bother to keep checking... continue @@ -1055,9 +1066,9 @@ class FeedFetcherWorker: pfeed = ProcessFeed(feed_id, fetched_feed, self.options, raw_feed=raw_feed) if not pfeed.fpf or not pfeed.fpf.entries: continue - for link in getattr(pfeed.fpf.feed, 'links', []): - if link['rel'] == 'prev-archive' or link['rel'] == 'next': - link_prev_archive = link['href'] + for link in getattr(pfeed.fpf.feed, "links", []): + if link["rel"] == "prev-archive" or link["rel"] == "next": + link_prev_archive = link["href"] if not link_prev_archive: continue @@ -1065,16 +1076,21 @@ class FeedFetcherWorker: while True: if not link_prev_archive: break - if link_prev_archive == self.options.get('archive_page_link', None): - logging.debug(' ---> [%-30s] ~FRNo change in archive page link: %s' % (feed.log_title[:30], link_prev_archive)) - break - self.options['archive_page_link'] = link_prev_archive + if link_prev_archive == self.options.get("archive_page_link", None): + logging.debug( + " ---> [%-30s] ~FRNo change in archive page link: %s" + % (feed.log_title[:30], link_prev_archive) + ) + break + self.options["archive_page_link"] = link_prev_archive link_prev_archive = None ffeed = FetchFeed(feed_id, self.options) try: ret_feed, fetched_feed = ffeed.fetch() except TimeoutError as e: - logging.debug(' ---> [%-30s] ~FRArchive feed fetch timed out...' % (feed.log_title[:30])) + logging.debug( + " ---> [%-30s] ~FRArchive feed fetch timed out..." % (feed.log_title[:30]) + ) # Timeout means don't bother to keep checking... 
break @@ -1083,15 +1099,22 @@ class FeedFetcherWorker: if fetched_feed and ret_feed == FEED_OK: pfeed = ProcessFeed(feed_id, fetched_feed, self.options, raw_feed=raw_feed) if not pfeed.fpf or not pfeed.fpf.entries: - logging.debug(' ---> [%-30s] ~FRFeed parse failed, no entries' % (feed.log_title[:30])) + logging.debug( + " ---> [%-30s] ~FRFeed parse failed, no entries" % (feed.log_title[:30]) + ) continue - for link in getattr(pfeed.fpf.feed, 'links', []): - if link['rel'] == 'prev-archive' or link['rel'] == 'next': - link_prev_archive = link['href'] - logging.debug(' ---> [%-30s] ~FGFeed still has ~SBRFC5005~SN links, continuing filling out archive: %s' % (feed.log_title[:30], link_prev_archive)) + for link in getattr(pfeed.fpf.feed, "links", []): + if link["rel"] == "prev-archive" or link["rel"] == "next": + link_prev_archive = link["href"] + logging.debug( + " ---> [%-30s] ~FGFeed still has ~SBRFC5005~SN links, continuing filling out archive: %s" + % (feed.log_title[:30], link_prev_archive) + ) break else: - logging.debug(' ---> [%-30s] ~FBFeed has no more RFC5005 links...' % (feed.log_title[:30])) + logging.debug( + " ---> [%-30s] ~FBFeed has no more RFC5005 links..." % (feed.log_title[:30]) + ) break before_story_hashes = len(seen_story_hashes) @@ -1100,23 +1123,30 @@ class FeedFetcherWorker: after_story_hashes = len(seen_story_hashes) if before_story_hashes == after_story_hashes: - logging.debug(' ---> [%-30s] ~FRNo change in story hashes, but has archive link: %s' % (feed.log_title[:30], link_prev_archive)) - + logging.debug( + " ---> [%-30s] ~FRNo change in story hashes, but has archive link: %s" + % (feed.log_title[:30], link_prev_archive) + ) + failed_color = "~FR" if not link_prev_archive else "" - logging.debug(f" ---> [{feed.log_title[:30]:<30}] ~FGStory hashes found, archive RFC5005 ~SB{link_prev_archive}~SN: ~SB~FG{failed_color}{len(seen_story_hashes):,} stories~SN~FB") + logging.debug( + f" ---> [{feed.log_title[:30]:<30}] ~FGStory hashes found, archive RFC5005 ~SB{link_prev_archive}~SN: ~SB~FG{failed_color}{len(seen_story_hashes):,} stories~SN~FB" + ) else: for page in range(3 if settings.DEBUG and False else 150): if page < original_starting_page: continue - if failed_pages >= 1: + if failed_pages >= 1: break - self.options['archive_page'] = page+1 + self.options["archive_page"] = page + 1 ffeed = FetchFeed(feed_id, self.options) try: ret_feed, fetched_feed = ffeed.fetch() except TimeoutError as e: - logging.debug(' ---> [%-30s] ~FRArchive feed fetch timed out...' % (feed.log_title[:30])) + logging.debug( + " ---> [%-30s] ~FRArchive feed fetch timed out..." % (feed.log_title[:30]) + ) # Timeout means don't bother to keep checking... 
break @@ -1140,12 +1170,14 @@ class FeedFetcherWorker: else: failed_pages += 1 failed_color = "~FR" if failed_pages > 0 else "" - logging.debug(f" ---> [{feed.log_title[:30]:<30}] ~FGStory hashes found, archive page ~SB{page+1}~SN: ~SB~FG{len(seen_story_hashes):,} stories~SN~FB, {failed_color}{failed_pages} failures") + logging.debug( + f" ---> [{feed.log_title[:30]:<30}] ~FGStory hashes found, archive page ~SB{page+1}~SN: ~SB~FG{len(seen_story_hashes):,} stories~SN~FB, {failed_color}{failed_pages} failures" + ) def publish_to_subscribers(self, feed, new_count): try: r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL) - listeners_count = r.publish(str(feed.pk), 'story:new_count:%s' % new_count) + listeners_count = r.publish(str(feed.pk), "story:new_count:%s" % new_count) if listeners_count: logging.debug( " ---> [%-30s] ~FMPublished to %s subscribers" % (feed.log_title[:30], listeners_count) @@ -1158,7 +1190,7 @@ class FeedFetcherWorker: user_subs = UserSubscription.objects.filter( feed=feed, active=True, user__profile__last_seen_on__gte=subscriber_expire - ).order_by('-last_read_date') + ).order_by("-last_read_date") if not user_subs.count(): return @@ -1168,16 +1200,16 @@ class FeedFetcherWorker: sub.needs_unread_recalc = True sub.save() - if self.options['compute_scores']: + if self.options["compute_scores"]: r = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL) stories = MStory.objects(story_feed_id=feed.pk, story_date__gte=feed.unread_cutoff) stories = Feed.format_stories(stories, feed.pk) story_hashes = r.zrangebyscore( - 'zF:%s' % feed.pk, - int(feed.unread_cutoff.strftime('%s')), + "zF:%s" % feed.pk, + int(feed.unread_cutoff.strftime("%s")), int(time.time() + 60 * 60 * 24), ) - missing_story_hashes = set(story_hashes) - set([s['story_hash'] for s in stories]) + missing_story_hashes = set(story_hashes) - set([s["story_hash"] for s in stories]) if missing_story_hashes: missing_stories = MStory.objects( story_feed_id=feed.pk, story_hash__in=missing_story_hashes @@ -1185,7 +1217,7 @@ class FeedFetcherWorker: missing_stories = Feed.format_stories(missing_stories, feed.pk) stories = missing_stories + stories logging.debug( - ' ---> [%-30s] ~FYFound ~SB~FC%s(of %s)/%s~FY~SN un-secondaried stories while computing scores' + " ---> [%-30s] ~FYFound ~SB~FC%s(of %s)/%s~FY~SN un-secondaried stories while computing scores" % ( feed.log_title[:30], len(missing_stories), @@ -1195,7 +1227,7 @@ class FeedFetcherWorker: ) cache.set("S:v3:%s" % feed.pk, stories, 60) logging.debug( - ' ---> [%-30s] ~FYComputing scores: ~SB%s stories~SN with ~SB%s subscribers ~SN(%s/%s/%s)' + " ---> [%-30s] ~FYComputing scores: ~SB%s stories~SN with ~SB%s subscribers ~SN(%s/%s/%s)" % ( feed.log_title[:30], len(stories), @@ -1206,16 +1238,16 @@ class FeedFetcherWorker: ) ) self.calculate_feed_scores_with_stories(user_subs, stories) - elif self.options.get('mongodb_replication_lag'): + elif self.options.get("mongodb_replication_lag"): logging.debug( - ' ---> [%-30s] ~BR~FYSkipping computing scores: ~SB%s seconds~SN of mongodb lag' - % (feed.log_title[:30], self.options.get('mongodb_replication_lag')) + " ---> [%-30s] ~BR~FYSkipping computing scores: ~SB%s seconds~SN of mongodb lag" + % (feed.log_title[:30], self.options.get("mongodb_replication_lag")) ) @timelimit(10) def calculate_feed_scores_with_stories(self, user_subs, stories): for sub in user_subs: - silent = False if getattr(self.options, 'verbose', 0) >= 2 else True + silent = False if getattr(self.options, "verbose", 0) >= 2 else True 
sub.calculate_feed_scores(silent=silent, stories=stories) @@ -1231,7 +1263,7 @@ class Dispatcher: self.feeds_count = feeds_count def run_jobs(self): - if self.options['single_threaded'] or self.num_threads == 1: + if self.options["single_threaded"] or self.num_threads == 1: return dispatch_workers(self.feeds_queue[0], self.options) else: for i in range(self.num_threads): diff --git a/utils/feed_functions.py b/utils/feed_functions.py index 104fff010..fbd93035e 100644 --- a/utils/feed_functions.py +++ b/utils/feed_functions.py @@ -12,9 +12,13 @@ from django.utils.encoding import smart_str from utils import log as logging -class TimeoutError(Exception): pass +class TimeoutError(Exception): + pass + + def timelimit(timeout): """borrowed from web.py""" + def _1(function): def _2(*args, **kw): class Dispatch(threading.Thread): @@ -23,7 +27,7 @@ def timelimit(timeout): self.result = None self.error = None self.exc_info = None - + self.setDaemon(True) self.start() @@ -33,28 +37,31 @@ def timelimit(timeout): except BaseException as e: self.error = e self.exc_info = sys.exc_info() + c = Dispatch() dispatch = c c.join(timeout) if c.is_alive(): - raise TimeoutError('took too long') + raise TimeoutError("took too long") if c.error: - tb = ''.join(traceback.format_exception(c.exc_info[0], c.exc_info[1], c.exc_info[2])) + tb = "".join(traceback.format_exception(c.exc_info[0], c.exc_info[1], c.exc_info[2])) logging.debug(f" ***> Traceback timeout error: {tb}") # mail_admins('Error in timeout: %s' % c.exc_info[0], tb) raise c.error return c.result + return _2 + return _1 - + def utf8encode(tstr): - """ Encodes a unicode string in utf-8 - """ + """Encodes a unicode string in utf-8""" msg = "utf8encode is deprecated. Use django.utils.encoding.smart_str instead." warnings.warn(msg, DeprecationWarning) return smart_str(tstr) + # From: http://www.poromenos.org/node/87 def levenshtein_distance(first, second): """Find the Levenshtein distance between two strings.""" @@ -70,24 +77,25 @@ def levenshtein_distance(first, second): second_length = len(second) + 1 distance_matrix = [[0] * second_length for x in range(first_length)] for i in range(first_length): - distance_matrix[i][0] = i + distance_matrix[i][0] = i for j in range(second_length): - distance_matrix[0][j]=j + distance_matrix[0][j] = j for i in range(1, first_length): for j in range(1, second_length): - deletion = distance_matrix[i-1][j] + 1 - insertion = distance_matrix[i][j-1] + 1 - substitution = distance_matrix[i-1][j-1] - if first[i-1] != second[j-1]: + deletion = distance_matrix[i - 1][j] + 1 + insertion = distance_matrix[i][j - 1] + 1 + substitution = distance_matrix[i - 1][j - 1] + if first[i - 1] != second[j - 1]: substitution += 1 distance_matrix[i][j] = min(insertion, deletion, substitution) - return distance_matrix[first_length-1][second_length-1] - + return distance_matrix[first_length - 1][second_length - 1] + + def _do_timesince(d, chunks, now=None): """ Started as a copy of django.util.timesince.timesince, but modified to only output one time unit, and use months as the maximum unit of measure. - + Takes two datetime objects and returns the time between d and now as a nicely formatted string, e.g. "10 minutes". If d occurs after now, then "0 minutes" is returned. 
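A minimal usage sketch of the timelimit decorator reformatted in the hunk above, assuming the module path utils/feed_functions.py shown in this diff; slow_fetch is a hypothetical function used only for illustration and is not part of this change:

# Illustrative only -- not part of this diff. The wrapped call runs in a daemon
# thread; if it has not finished after `timeout` seconds, TimeoutError is raised.
import time

from utils.feed_functions import TimeoutError, timelimit


@timelimit(2)
def slow_fetch():
    time.sleep(5)  # stands in for a hung network call
    return "done"


try:
    slow_fetch()
except TimeoutError:
    print("fetch took too long, moving on")

Because the Dispatch worker is a daemon thread that cannot be interrupted, the decorated call may keep running in the background after the timeout fires; callers in this patch, such as calculate_feed_scores_with_stories above, simply catch TimeoutError and move on to the next feed.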
@@ -110,83 +118,86 @@ def _do_timesince(d, chunks, now=None): count = since // seconds if count != 0: break - s = '%(number)d %(type)s' % {'number': count, 'type': name(count)} + s = "%(number)d %(type)s" % {"number": count, "type": name(count)} else: - s = 'just a second' + s = "just a second" return s + def relative_timesince(value): if not value: - return '' + return "" chunks = ( - (60 * 60 * 24, lambda n: ungettext('day', 'days', n)), - (60 * 60, lambda n: ungettext('hour', 'hours', n)), - (60, lambda n: ungettext('minute', 'minutes', n)), - (1, lambda n: ungettext('second', 'seconds', n)), - (0, lambda n: 'just now'), + (60 * 60 * 24, lambda n: ungettext("day", "days", n)), + (60 * 60, lambda n: ungettext("hour", "hours", n)), + (60, lambda n: ungettext("minute", "minutes", n)), + (1, lambda n: ungettext("second", "seconds", n)), + (0, lambda n: "just now"), ) return _do_timesince(value, chunks) - + + def relative_timeuntil(value): if not value: - return '' + return "" chunks = ( - (60 * 60, lambda n: ungettext('hour', 'hours', n)), - (60, lambda n: ungettext('minute', 'minutes', n)) + (60 * 60, lambda n: ungettext("hour", "hours", n)), + (60, lambda n: ungettext("minute", "minutes", n)), ) - + now = datetime.datetime.utcnow() - + return _do_timesince(now, chunks, value) + def seconds_timesince(value): if not value: return 0 now = datetime.datetime.utcnow() delta = now - value - + return delta.days * 24 * 60 * 60 + delta.seconds - + + def format_relative_date(date, future=False): if not date or date < datetime.datetime(2010, 1, 1): return "Soon" - + now = datetime.datetime.utcnow() diff = abs(now - date) if diff < datetime.timedelta(minutes=60): minutes = diff.seconds / 60 - return "%s minute%s %s" % (minutes, - '' if minutes == 1 else 's', - '' if future else 'ago') + return "%s minute%s %s" % (minutes, "" if minutes == 1 else "s", "" if future else "ago") elif datetime.timedelta(minutes=60) <= diff < datetime.timedelta(minutes=90): - return "1 hour %s" % ('' if future else 'ago') + return "1 hour %s" % ("" if future else "ago") elif diff < datetime.timedelta(hours=24): dec = (diff.seconds / 60 + 15) % 60 if dec >= 30: - return "%s.5 hours %s" % ((((diff.seconds / 60) + 15) / 60), - '' if future else 'ago') + return "%s.5 hours %s" % ((((diff.seconds / 60) + 15) / 60), "" if future else "ago") else: - return "%s hours %s" % ((((diff.seconds / 60) + 15) / 60), - '' if future else 'ago') + return "%s hours %s" % ((((diff.seconds / 60) + 15) / 60), "" if future else "ago") else: - days = ((diff.seconds / 60) / 60 / 24) - return "%s day%s %s" % (days, '' if days == 1 else 's', '' if future else 'ago') - -def add_object_to_folder(obj, in_folder, folders, parent='', added=False): - if parent.startswith('river:'): - parent = parent.replace('river:', '') - if in_folder.startswith('river:'): - in_folder = in_folder.replace('river:', '') + days = (diff.seconds / 60) / 60 / 24 + return "%s day%s %s" % (days, "" if days == 1 else "s", "" if future else "ago") + + +def add_object_to_folder(obj, in_folder, folders, parent="", added=False): + if parent.startswith("river:"): + parent = parent.replace("river:", "") + if in_folder.startswith("river:"): + in_folder = in_folder.replace("river:", "") obj_identifier = obj if isinstance(obj, dict): obj_identifier = list(obj.keys())[0] - if ((not in_folder or in_folder == " ") and - not parent and - not isinstance(obj, dict) and - obj_identifier not in folders): + if ( + (not in_folder or in_folder == " ") + and not parent + and not isinstance(obj, dict) + 
and obj_identifier not in folders + ): folders.append(obj) return folders @@ -198,7 +209,7 @@ def add_object_to_folder(obj, in_folder, folders, parent='', added=False): if obj_identifier not in child_folder_names: folders.append(obj) return folders - + for k, v in enumerate(folders): if isinstance(v, dict): for f_k, f_v in list(v.items()): @@ -206,39 +217,42 @@ def add_object_to_folder(obj, in_folder, folders, parent='', added=False): f_v.append(obj) added = True folders[k][f_k] = add_object_to_folder(obj, in_folder, f_v, f_k, added) - - return folders + + return folders + def mail_feed_error_to_admin(feed, e, local_vars=None, subject=None): # Mail the admins with the error if not subject: subject = "Feed update error" exc_info = sys.exc_info() - subject = '%s: %s' % (subject, repr(e)) - message = 'Traceback:\n%s\n\Feed:\n%s\nLocals:\n%s' % ( - '\n'.join(traceback.format_exception(*exc_info)), + subject = "%s: %s" % (subject, repr(e)) + message = "Traceback:\n%s\n\Feed:\n%s\nLocals:\n%s" % ( + "\n".join(traceback.format_exception(*exc_info)), pprint.pformat(feed.__dict__), - pprint.pformat(local_vars) - ) + pprint.pformat(local_vars), + ) logging.debug(f" ***> Feed error, {subject}: {message}") - + + ## {{{ http://code.activestate.com/recipes/576611/ (r11) from operator import itemgetter from heapq import nlargest from itertools import repeat + class Counter(dict): - '''Dict subclass for counting hashable objects. Sometimes called a bag + """Dict subclass for counting hashable objects. Sometimes called a bag or multiset. Elements are stored as dictionary keys and their counts are stored as dictionary values. >>> Counter('zyzygy') Counter({'y': 3, 'z': 2, 'g': 1}) - ''' + """ def __init__(self, iterable=None, **kwds): - '''Create a new, empty Counter object. And if given, count elements + """Create a new, empty Counter object. And if given, count elements from an input iterable. Or, initialize the count from another mapping of elements to their counts. @@ -247,26 +261,26 @@ class Counter(dict): >>> c = Counter({'a': 4, 'b': 2}) # a new counter from a mapping >>> c = Counter(a=4, b=2) # a new counter from keyword args - ''' + """ self.update(iterable, **kwds) def __missing__(self, key): return 0 def most_common(self, n=None): - '''List the n most common elements and their counts from the most + """List the n most common elements and their counts from the most common to the least. If n is None, then list all element counts. >>> Counter('abracadabra').most_common(3) [('a', 5), ('r', 2), ('b', 2)] - ''' + """ if n is None: return sorted(iter(list(self.items())), key=itemgetter(1), reverse=True) return nlargest(n, iter(list(self.items())), key=itemgetter(1)) def elements(self): - '''Iterator over elements repeating each as many times as its count. + """Iterator over elements repeating each as many times as its count. >>> c = Counter('ABCABC') >>> sorted(c.elements()) @@ -275,7 +289,7 @@ class Counter(dict): If an element's count has been set to zero or is a negative number, elements() will ignore it. - ''' + """ for elem, count in list(self.items()): for _ in repeat(None, count): yield elem @@ -284,11 +298,10 @@ class Counter(dict): @classmethod def fromkeys(cls, iterable, v=None): - raise NotImplementedError( - 'Counter.fromkeys() is undefined. Use Counter(iterable) instead.') + raise NotImplementedError("Counter.fromkeys() is undefined. Use Counter(iterable) instead.") def update(self, iterable=None, **kwds): - '''Like dict.update() but add counts instead of replacing them. 
+ """Like dict.update() but add counts instead of replacing them. Source can be an iterable, a dictionary, or another Counter instance. @@ -299,15 +312,15 @@ class Counter(dict): >>> c['h'] # four 'h' in which, witch, and watch 4 - ''' + """ if iterable is not None: - if hasattr(iterable, 'iteritems'): + if hasattr(iterable, "iteritems"): if self: self_get = self.get for elem, count in list(iterable.items()): self[elem] = self_get(elem, 0) + count else: - dict.update(self, iterable) # fast path when counter is empty + dict.update(self, iterable) # fast path when counter is empty else: self_get = self.get for elem in iterable: @@ -316,19 +329,19 @@ class Counter(dict): self.update(kwds) def copy(self): - 'Like dict.copy() but returns a Counter instance instead of a dict.' + "Like dict.copy() but returns a Counter instance instead of a dict." return Counter(self) def __delitem__(self, elem): - 'Like dict.__delitem__() but does not raise KeyError for missing values.' + "Like dict.__delitem__() but does not raise KeyError for missing values." if elem in self: dict.__delitem__(self, elem) def __repr__(self): if not self: - return '%s()' % self.__class__.__name__ - items = ', '.join(map('%r: %r'.__mod__, self.most_common())) - return '%s({%s})' % (self.__class__.__name__, items) + return "%s()" % self.__class__.__name__ + items = ", ".join(map("%r: %r".__mod__, self.most_common())) + return "%s({%s})" % (self.__class__.__name__, items) # Multiset-style mathematical operations discussed in: # Knuth TAOCP Volume II section 4.6.3 exercise 19 @@ -340,13 +353,13 @@ class Counter(dict): # c += Counter() def __add__(self, other): - '''Add counts from two counters. + """Add counts from two counters. >>> Counter('abbb') + Counter('bcc') Counter({'b': 4, 'c': 2, 'a': 1}) - ''' + """ if not isinstance(other, Counter): return NotImplemented result = Counter() @@ -357,12 +370,12 @@ class Counter(dict): return result def __sub__(self, other): - ''' Subtract count, but keep only results with positive counts. + """Subtract count, but keep only results with positive counts. >>> Counter('abbbc') - Counter('bccd') Counter({'b': 2, 'a': 1}) - ''' + """ if not isinstance(other, Counter): return NotImplemented result = Counter() @@ -373,12 +386,12 @@ class Counter(dict): return result def __or__(self, other): - '''Union is the maximum of value in either of the input counters. + """Union is the maximum of value in either of the input counters. >>> Counter('abbb') | Counter('bcc') Counter({'b': 3, 'c': 2, 'a': 1}) - ''' + """ if not isinstance(other, Counter): return NotImplemented _max = max @@ -390,12 +403,12 @@ class Counter(dict): return result def __and__(self, other): - ''' Intersection is the minimum of corresponding counts. + """Intersection is the minimum of corresponding counts. 
>>> Counter('abbb') & Counter('bcc') Counter({'b': 1}) - ''' + """ if not isinstance(other, Counter): return NotImplemented _min = min @@ -409,11 +422,13 @@ class Counter(dict): return result -if __name__ == '__main__': +if __name__ == "__main__": import doctest + print((doctest.testmod())) ## end of http://code.activestate.com/recipes/576611/ }}} + def chunks(l, n): for i in range(0, len(l), n): - yield l[i:i+n] + yield l[i : i + n] diff --git a/utils/feedfinder_forman.py b/utils/feedfinder_forman.py index d543b2ce6..8bccbe1bd 100755 --- a/utils/feedfinder_forman.py +++ b/utils/feedfinder_forman.py @@ -2,7 +2,6 @@ # -*- coding: utf-8 -*- - __version__ = "0.0.3" try: @@ -30,7 +29,6 @@ def coerce_url(url): class FeedFinder(object): - def __init__(self, user_agent=None): if user_agent is None: user_agent = "NewsBlur Feed Finder" @@ -38,7 +36,9 @@ class FeedFinder(object): def get_feed(self, url, skip_user_agent=False): try: - r = requests.get(url, headers={"User-Agent": self.user_agent if not skip_user_agent else None}, timeout=15) + r = requests.get( + url, headers={"User-Agent": self.user_agent if not skip_user_agent else None}, timeout=15 + ) except Exception as e: logging.warn("Error while getting '{0}'".format(url)) logging.warn("{0}".format(e)) @@ -51,7 +51,7 @@ class FeedFinder(object): data = text.lower() if data and data[:100].count("').replace(''', "'").replace('"', '"').replace('&', '&') + v = ( + v.replace("<", "<") + .replace(">", ">") + .replace("'", "'") + .replace(""", '"') + .replace("&", "&") + ) return v + attrs = [(k.lower(), cleanattr(v)) for k, v in attrs if cleanattr(v)] - attrs = [(k, k in ('rel','type') and v.lower() or v) for k, v in attrs if cleanattr(v)] + attrs = [(k, k in ("rel", "type") and v.lower() or v) for k, v in attrs if cleanattr(v)] return attrs - + def do_base(self, attrs): attrsD = dict(self.normalize_attrs(attrs)) - if 'href' not in attrsD: return - self.baseuri = attrsD['href'] - - def error(self, *a, **kw): pass # we're not picky - + if "href" not in attrsD: + return + self.baseuri = attrsD["href"] + + def error(self, *a, **kw): + pass # we're not picky + + class LinkParser(BaseParser): - FEED_TYPES = ('application/rss+xml', - 'text/xml', - 'application/atom+xml', - 'application/x.atom+xml', - 'application/x-atom+xml') + FEED_TYPES = ( + "application/rss+xml", + "text/xml", + "application/atom+xml", + "application/x.atom+xml", + "application/x-atom+xml", + ) + def do_link(self, attrs): attrsD = dict(self.normalize_attrs(attrs)) - if 'rel' not in attrsD: return - rels = attrsD['rel'].split() - if 'alternate' not in rels: return - if attrsD.get('type') not in self.FEED_TYPES: return - if 'href' not in attrsD: return - self.links.append(urllib.parse.urljoin(self.baseuri, attrsD['href'])) + if "rel" not in attrsD: + return + rels = attrsD["rel"].split() + if "alternate" not in rels: + return + if attrsD.get("type") not in self.FEED_TYPES: + return + if "href" not in attrsD: + return + self.links.append(urllib.parse.urljoin(self.baseuri, attrsD["href"])) + class ALinkParser(BaseParser): def start_a(self, attrs): attrsD = dict(self.normalize_attrs(attrs)) - if 'href' not in attrsD: return - self.links.append(urllib.parse.urljoin(self.baseuri, attrsD['href'])) + if "href" not in attrsD: + return + self.links.append(urllib.parse.urljoin(self.baseuri, attrsD["href"])) + def makeFullURI(uri): - if not uri: return + if not uri: + return uri = uri.strip() - if uri.startswith('feed://'): - uri = 'http://' + uri.split('feed://', 1).pop() - for x in ['http', 
'https']: - if uri.startswith('%s://' % x): + if uri.startswith("feed://"): + uri = "http://" + uri.split("feed://", 1).pop() + for x in ["http", "https"]: + if uri.startswith("%s://" % x): return uri - return 'http://%s' % uri + return "http://%s" % uri + def getLinks(data, baseuri): p = LinkParser(baseuri) p.feed(data) return p.links + def getLinksLXML(data, baseuri): parser = etree.HTMLParser(recover=True) tree = etree.parse(StringIO(data), parser) links = [] - for link in tree.findall('.//link'): - if link.attrib.get('type') in LinkParser.FEED_TYPES: - href = link.attrib['href'] - if href: links.append(href) + for link in tree.findall(".//link"): + if link.attrib.get("type") in LinkParser.FEED_TYPES: + href = link.attrib["href"] + if href: + links.append(href) return links + def getALinks(data, baseuri): p = ALinkParser(baseuri) p.feed(data) return p.links + def getLocalLinks(links, baseuri): found_links = [] - if not baseuri: return found_links + if not baseuri: + return found_links baseuri = baseuri.lower() for l in links: try: @@ -198,28 +234,38 @@ def getLocalLinks(links, baseuri): pass return found_links + def isFeedLink(link): - return link[-4:].lower() in ('.rss', '.rdf', '.xml', '.atom') + return link[-4:].lower() in (".rss", ".rdf", ".xml", ".atom") + def isXMLRelatedLink(link): link = link.lower() - return link.count('rss') + link.count('rdf') + link.count('xml') + link.count('atom') + return link.count("rss") + link.count("rdf") + link.count("xml") + link.count("atom") + + +r_brokenRedirect = re.compile("]*>(.*?)", re.S) + -r_brokenRedirect = re.compile(']*>(.*?)', re.S) def tryBrokenRedirect(data): - if ' b) - (a < b) + return (a > b) - (a < b) + def sortFeeds(feed1Info, feed2Info): - return cmp_(feed2Info['headlines_rank'], feed1Info['headlines_rank']) + return cmp_(feed2Info["headlines_rank"], feed1Info["headlines_rank"]) + def getFeedsFromSyndic8(uri): feeds = [] try: - server = xmlrpc.client.Server('http://www.syndic8.com/xmlrpc.php') + server = xmlrpc.client.Server("http://www.syndic8.com/xmlrpc.php") feedids = server.syndic8.FindFeeds(uri) - infolist = server.syndic8.GetFeedInfo(feedids, ['headlines_rank','status','dataurl']) + infolist = server.syndic8.GetFeedInfo(feedids, ["headlines_rank", "status", "dataurl"]) infolist.sort(sortFeeds) - feeds = [f['dataurl'] for f in infolist if f['status']=='Syndicated'] - _debuglog('found %s feeds through Syndic8' % len(feeds)) + feeds = [f["dataurl"] for f in infolist if f["status"] == "Syndicated"] + _debuglog("found %s feeds through Syndic8" % len(feeds)) except: pass return feeds - + + def feeds(uri, all=False, querySyndic8=False, _recurs=None): - if _recurs is None: _recurs = [uri] + if _recurs is None: + _recurs = [uri] fulluri = makeFullURI(uri) try: data = _gatekeeper.get(fulluri, check=False) @@ -261,27 +312,27 @@ def feeds(uri, all=False, querySyndic8=False, _recurs=None): _recurs.append(newuri) return feeds(newuri, all=all, querySyndic8=querySyndic8, _recurs=_recurs) # nope, it's a page, try LINK tags first - _debuglog('looking for LINK tags') + _debuglog("looking for LINK tags") try: outfeeds = getLinks(data, fulluri) except: outfeeds = [] if not outfeeds: - _debuglog('using lxml to look for LINK tags') + _debuglog("using lxml to look for LINK tags") try: outfeeds = getLinksLXML(data, fulluri) except: outfeeds = [] - _debuglog('found %s feeds through LINK tags' % len(outfeeds)) + _debuglog("found %s feeds through LINK tags" % len(outfeeds)) outfeeds = list(filter(isFeed, outfeeds)) if all or not outfeeds: # no LINK 
tags, look for regular links that point to feeds - _debuglog('no LINK tags, looking at A tags') + _debuglog("no LINK tags, looking at A tags") try: links = getALinks(data, fulluri) except: links = [] - _debuglog('no LINK tags, looking at local links') + _debuglog("no LINK tags, looking at local links") locallinks = getLocalLinks(links, fulluri) # look for obvious feed links on the same server outfeeds.extend(list(filter(isFeed, list(filter(isFeedLink, locallinks))))) @@ -295,82 +346,89 @@ def feeds(uri, all=False, querySyndic8=False, _recurs=None): # look harder for feed links on another server outfeeds.extend(list(filter(isFeed, list(filter(isXMLRelatedLink, links))))) if all or not outfeeds: - _debuglog('no A tags, guessing') - suffixes = [ # filenames used by popular software: - 'feed/', # obvious - 'atom.xml', # blogger, TypePad - 'index.atom', # MT, apparently - 'index.rdf', # MT - 'rss.xml', # Dave Winer/Manila - 'index.xml', # MT - 'index.rss' # Slash + _debuglog("no A tags, guessing") + suffixes = [ # filenames used by popular software: + "feed/", # obvious + "atom.xml", # blogger, TypePad + "index.atom", # MT, apparently + "index.rdf", # MT + "rss.xml", # Dave Winer/Manila + "index.xml", # MT + "index.rss", # Slash ] outfeeds.extend(list(filter(isFeed, [urllib.parse.urljoin(fulluri, x) for x in suffixes]))) if (all or not outfeeds) and querySyndic8: # still no luck, search Syndic8 for feeds (requires xmlrpclib) - _debuglog('still no luck, searching Syndic8') + _debuglog("still no luck, searching Syndic8") outfeeds.extend(getFeedsFromSyndic8(uri)) - if hasattr(__builtins__, 'set') or 'set' in __builtins__: + if hasattr(__builtins__, "set") or "set" in __builtins__: outfeeds = list(set(outfeeds)) return outfeeds -getFeeds = feeds # backwards-compatibility + +getFeeds = feeds # backwards-compatibility + def feed(uri): - #todo: give preference to certain feed formats + # todo: give preference to certain feed formats feedlist = feeds(uri) if feedlist: - feeds_no_comments = [f for f in feedlist if 'comments' not in f.lower()] + feeds_no_comments = [f for f in feedlist if "comments" not in f.lower()] if feeds_no_comments: return feeds_no_comments[0] return feedlist[0] else: return None + ##### test harness ###### + def test(): - uri = 'http://diveintomark.org/tests/client/autodiscovery/html4-001.html' + uri = "http://diveintomark.org/tests/client/autodiscovery/html4-001.html" failed = [] count = 0 while 1: data = _gatekeeper.get(uri) - if data.find('Atom autodiscovery test') == -1: break - sys.stdout.write('.') + if data.find("Atom autodiscovery test") == -1: + break + sys.stdout.write(".") sys.stdout.flush() count += 1 links = getLinks(data, uri) if not links: - print(('\n*** FAILED ***', uri, 'could not find link')) + print(("\n*** FAILED ***", uri, "could not find link")) failed.append(uri) elif len(links) > 1: - print(('\n*** FAILED ***', uri, 'found too many links')) + print(("\n*** FAILED ***", uri, "found too many links")) failed.append(uri) else: atomdata = urllib.request.urlopen(links[0]).read() if atomdata.find(' 2 else "1" droplet_index = int(second_arg) if str(second_arg).isnumeric() else 1 droplet_name = sys.argv[1] # Use correct Digital Ocean team based on "old" - commands = ['ansible-inventory', '--list'] + commands = ["ansible-inventory", "--list"] env = None if second_arg == "old": env = dict(os.environ, ANSIBLE_CONFIG="ansible.old.cfg") @@ -26,7 +27,7 @@ if __name__ == '__main__': print(" ***> Could not load ansible-inventory!") hosts = json.loads(hosts) - for host, 
ip_host in hosts['_meta']['hostvars'].items(): + for host, ip_host in hosts["_meta"]["hostvars"].items(): if host.startswith(droplet_name): - print(ip_host['ansible_host']) + print(ip_host["ansible_host"]) break diff --git a/utils/image_functions.py b/utils/image_functions.py index 0b1f5a4f2..fb5448c73 100644 --- a/utils/image_functions.py +++ b/utils/image_functions.py @@ -7,61 +7,59 @@ from PIL import ImageOps as PILOps from PIL.ExifTags import TAGS from io import BytesIO -PROFILE_PICTURE_SIZES = { - 'fullsize': (256, 256), - 'thumbnail': (64, 64) -} +PROFILE_PICTURE_SIZES = {"fullsize": (256, 256), "thumbnail": (64, 64)} + class ImageOps: - """Module that holds all image operations. Since there's no state, + """Module that holds all image operations. Since there's no state, everything is a classmethod.""" - + @classmethod def resize_image(cls, image_body, size, fit_to_size=False): """Takes a raw image (in image_body) and resizes it to fit given - dimensions. Returns a file-like object in the form of a StringIO. - This must happen in this function because PIL is transforming the + dimensions. Returns a file-like object in the form of a StringIO. + This must happen in this function because PIL is transforming the original as it works.""" - + image_file = BytesIO(image_body) try: image = Image.open(image_file) except IOError: # Invalid image file return False - + # Get the image format early, as we lose it after perform a `thumbnail` or `fit`. format = image.format - + # Check for rotation image = cls.adjust_image_orientation(image) - + if not fit_to_size: image.thumbnail(PROFILE_PICTURE_SIZES[size], Image.ANTIALIAS) else: - image = PILOps.fit(image, PROFILE_PICTURE_SIZES[size], - method=Image.ANTIALIAS, - centering=(0.5, 0.5)) - + image = PILOps.fit( + image, PROFILE_PICTURE_SIZES[size], method=Image.ANTIALIAS, centering=(0.5, 0.5) + ) + output = BytesIO() - if format.lower() == 'jpg': - format = 'jpeg' + if format.lower() == "jpg": + format = "jpeg" image.save(output, format=format, quality=95) - + return output - + @classmethod def adjust_image_orientation(cls, image): """Since the iPhone will store an image on its side but with EXIF data stating that it should be rotated, we need to find that EXIF data and correctly rotate the image before storage.""" - - if hasattr(image, '_getexif'): + + if hasattr(image, "_getexif"): exif = image._getexif() if exif: for tag, value in list(exif.items()): decoded = TAGS.get(tag, tag) - if decoded == 'Orientation': + if decoded == "Orientation": if value == 6: image = image.rotate(-90) if value == 8: @@ -70,14 +68,15 @@ class ImageOps: image = image.rotate(180) break return image - + @classmethod def image_size(cls, url, headers=None): - if not headers: headers = {} + if not headers: + headers = {} req = urllib.request.Request(url, data=None, headers=headers) file = urllib.request.urlopen(req) size = file.headers.get("content-length") - if size: + if size: size = int(size) p = ImageFile.Parser() while True: diff --git a/utils/jennyholzer.py b/utils/jennyholzer.py index f03f8160d..ce845a6cf 100644 --- a/utils/jennyholzer.py +++ b/utils/jennyholzer.py @@ -3,7 +3,7 @@ # it is not because they are wrong, just that they may be considered # controversial. I'd rather err on the side of safety, which is contrary # to the trusim: "playing it safe can cause a lot of damage in the long run". -# +# # We'll see where this goes. This is an experiment. 
- Sam, July 6th, 2012 @@ -261,4 +261,4 @@ TRUISMS = [ "you should study as much as possible", # "your actions are pointless if no one notices", # "your oldest fears are the worst ones", -] \ No newline at end of file +] diff --git a/utils/json_fetcher.py b/utils/json_fetcher.py index 08a1befdb..d94371c32 100644 --- a/utils/json_fetcher.py +++ b/utils/json_fetcher.py @@ -5,57 +5,56 @@ from django.utils import feedgenerator from utils import log as logging from utils.json_functions import decode + class JSONFetcher: - def __init__(self, feed, options=None): self.feed = feed self.options = options or {} - + def fetch(self, address, raw_feed): if not address: address = self.feed.feed_address - + json_feed = decode(raw_feed.content) if not json_feed: - logging.debug(' ***> [%-30s] ~FRJSON fetch failed: %s' % - (self.feed.log_title[:30], address)) + logging.debug(" ***> [%-30s] ~FRJSON fetch failed: %s" % (self.feed.log_title[:30], address)) return data = {} - data['title'] = json_feed.get('title', '[Untitled]') - data['link'] = json_feed.get('home_page_url', "") - data['description'] = json_feed.get('title', "") - data['lastBuildDate'] = datetime.datetime.utcnow() - data['generator'] = 'NewsBlur JSON Feed - %s' % settings.NEWSBLUR_URL - data['docs'] = None - data['feed_url'] = json_feed.get('feed_url') - + data["title"] = json_feed.get("title", "[Untitled]") + data["link"] = json_feed.get("home_page_url", "") + data["description"] = json_feed.get("title", "") + data["lastBuildDate"] = datetime.datetime.utcnow() + data["generator"] = "NewsBlur JSON Feed - %s" % settings.NEWSBLUR_URL + data["docs"] = None + data["feed_url"] = json_feed.get("feed_url") + rss = feedgenerator.Atom1Feed(**data) - for item in json_feed.get('items', []): + for item in json_feed.get("items", []): story_data = self.json_feed_story(item) rss.add_item(**story_data) - - return rss.writeString('utf-8') - + + return rss.writeString("utf-8") + def json_feed_story(self, item): date_published = datetime.datetime.now() - pubdate = item.get('date_published', None) + pubdate = item.get("date_published", None) if pubdate: date_published = dateutil.parser.parse(pubdate) - authors = item.get('authors', item.get('author', {})) + authors = item.get("authors", item.get("author", {})) if isinstance(authors, list): - author_name = ', '.join([author.get('name', "") for author in authors]) + author_name = ", ".join([author.get("name", "") for author in authors]) else: - author_name = authors.get('name', "") + author_name = authors.get("name", "") story = { - 'title': item.get('title', ""), - 'link': item.get('external_url', item.get('url', "")), - 'description': item.get('content_html', item.get('content_text', "")), - 'author_name': author_name, - 'categories': item.get('tags', []), - 'unique_id': str(item.get('id', item.get('url', ""))), - 'pubdate': date_published, + "title": item.get("title", ""), + "link": item.get("external_url", item.get("url", "")), + "description": item.get("content_html", item.get("content_text", "")), + "author_name": author_name, + "categories": item.get("tags", []), + "unique_id": str(item.get("id", item.get("url", ""))), + "pubdate": date_published, } - + return story diff --git a/utils/json_functions.py b/utils/json_functions.py index 1b04ed0ab..0cf5fa4e3 100644 --- a/utils/json_functions.py +++ b/utils/json_functions.py @@ -1,4 +1,4 @@ -#-*- coding: utf-8 -*- +# -*- coding: utf-8 -*- from django.db import models from django.utils.functional import Promise from django.utils.encoding import force_text, 
smart_str @@ -8,6 +8,7 @@ from django.core import serializers from django.conf import settings from django.http import HttpResponse, HttpResponseForbidden, Http404 from django.db.models.query import QuerySet + # from django.utils.deprecation import CallableBool from mongoengine.queryset.queryset import QuerySet as MongoQuerySet from bson.objectid import ObjectId @@ -42,7 +43,7 @@ def json_encode(data, *args, **kwargs): # Opps, we used to check if it is of type list, but that fails # i.e. in the case of django.newforms.utils.ErrorList, which extends # the type "list". Oh man, that was a dumb mistake! - if hasattr(data, 'canonical'): + if hasattr(data, "canonical"): ret = _any(data.canonical()) elif isinstance(data, list): ret = _list(data) @@ -66,7 +67,7 @@ def json_encode(data, *args, **kwargs): ret = _model(data) # here we need to encode the string as unicode (otherwise we get utf-16 in the json-response) elif isinstance(data, bytes): - ret = data.decode('utf-8', 'ignore') + ret = data.decode("utf-8", "ignore") elif isinstance(data, str): ret = smart_str(data) elif isinstance(data, Exception): @@ -76,7 +77,7 @@ def json_encode(data, *args, **kwargs): ret = force_text(data) elif isinstance(data, datetime.datetime) or isinstance(data, datetime.date): ret = str(data) - elif hasattr(data, 'to_json'): + elif hasattr(data, "to_json"): ret = data.to_json() else: ret = data @@ -106,7 +107,7 @@ def json_encode(data, *args, **kwargs): ret[str(k)] = _any(v) return ret - if hasattr(data, 'to_json'): + if hasattr(data, "to_json"): data = data.to_json() ret = _any(data) return json.dumps(ret) @@ -132,12 +133,12 @@ def json_response(request, response=None): try: if isinstance(response, dict): response = dict(response) - if 'result' not in response: - response['result'] = 'ok' + if "result" not in response: + response["result"] = "ok" authenticated = request.user.is_authenticated - response['authenticated'] = authenticated + response["authenticated"] = authenticated if authenticated: - response['user_id'] = request.user.pk + response["user_id"] = request.user.pk except KeyboardInterrupt: # Allow keyboard interrupts through for debugging. 
raise @@ -146,28 +147,28 @@ def json_response(request, response=None): except Exception as e: # Mail the admins with the error exc_info = sys.exc_info() - subject = 'JSON view error: %s' % request.path + subject = "JSON view error: %s" % request.path try: request_repr = repr(request) except: - request_repr = 'Request repr() unavailable' + request_repr = "Request repr() unavailable" import traceback - message = 'Traceback:\n%s\n\nRequest:\n%s' % ( - '\n'.join(traceback.format_exception(*exc_info)), - request_repr, - ) - response = {'result': 'error', - 'text': str(e)} + message = "Traceback:\n%s\n\nRequest:\n%s" % ( + "\n".join(traceback.format_exception(*exc_info)), + request_repr, + ) + + response = {"result": "error", "text": str(e)} code = 500 if not settings.DEBUG: logging.debug(f" ***> JSON exception {subject}: {message}") - logging.debug('\n'.join(traceback.format_exception(*exc_info))) + logging.debug("\n".join(traceback.format_exception(*exc_info))) else: - print('\n'.join(traceback.format_exception(*exc_info))) + print("\n".join(traceback.format_exception(*exc_info))) json = json_encode(response) - return HttpResponse(json, content_type='application/json; charset=utf-8', status=code) + return HttpResponse(json, content_type="application/json; charset=utf-8", status=code) def main(): @@ -182,5 +183,5 @@ def main(): print(test, json_test) -if __name__ == '__main__': +if __name__ == "__main__": main() diff --git a/utils/log.py b/utils/log.py index 762b1d1ac..de81fb008 100644 --- a/utils/log.py +++ b/utils/log.py @@ -16,7 +16,7 @@ class NullHandler(logging.Handler): # exists in python 3.1 def getlogger(): - logger = logging.getLogger('newsblur') + logger = logging.getLogger("newsblur") return logger @@ -25,7 +25,7 @@ def user(u, msg, request=None, warn_color=True): if not u: return debug(msg) - platform = '------' + platform = "------" time_elapsed = "" if isinstance(u, WSGIRequest) or request: if not request: @@ -33,24 +33,24 @@ def user(u, msg, request=None, warn_color=True): u = request.user platform = extract_user_agent(request) - if hasattr(request, 'start_time'): + if hasattr(request, "start_time"): seconds = time.time() - request.start_time - color = '~FB' + color = "~FB" if warn_color: if seconds >= 5: - color = '~FR' + color = "~FR" elif seconds > 1: - color = '~SB~FK' + color = "~SB~FK" time_elapsed = "[%s%.4ss~SB] " % ( color, seconds, ) is_premium = u.is_authenticated and u.profile.is_premium - premium = '*' if is_premium else '' + premium = "*" if is_premium else "" if is_premium and u.profile.is_archive: premium = "^" username = cipher(str(u)) if settings.CIPHER_USERNAMES else str(u) - info(' ---> [~FB~SN%-6s~SB] %s[%s%s] %s' % (platform, time_elapsed, username, premium, msg)) + info(" ---> [~FB~SN%-6s~SB] %s[%s%s] %s" % (platform, time_elapsed, username, premium, msg)) def cipher(msg): @@ -82,91 +82,97 @@ def error(msg): def colorize(msg): params = { - r'\-\-\->' : '~FB~SB--->~FW', - r'\*\*\*>' : '~FB~SB~BB--->~BT~FW', - r'\[' : '~SB~FB[~SN~FM', - r'AnonymousUser' : '~FBAnonymousUser', - r'\*\]' : r'~SN~FR*~FB~SB]', - r'\^\]' : r'~SN~FR^~FB~SB]', - r'\]' : '~FB~SB]~FW~SN', + r"\-\-\->": "~FB~SB--->~FW", + r"\*\*\*>": "~FB~SB~BB--->~BT~FW", + r"\[": "~SB~FB[~SN~FM", + r"AnonymousUser": "~FBAnonymousUser", + r"\*\]": r"~SN~FR*~FB~SB]", + r"\^\]": r"~SN~FR^~FB~SB]", + r"\]": "~FB~SB]~FW~SN", } colors = { - '~SB' : Style.BRIGHT, - '~SN' : Style.NORMAL, - '~SK' : Style.BLINK, - '~SU' : Style.UNDERLINE, - '~ST' : Style.RESET_ALL, - '~FK': Fore.BLACK, - '~FR': Fore.RED, - 
'~FG': Fore.GREEN, - '~FY': Fore.YELLOW, - '~FB': Fore.BLUE, - '~FM': Fore.MAGENTA, - '~FC': Fore.CYAN, - '~FW': Fore.WHITE, - '~FT': Fore.RESET, - '~BK': Back.BLACK, - '~BR': Back.RED, - '~BG': Back.GREEN, - '~BY': Back.YELLOW, - '~BB': Back.BLUE, - '~BM': Back.MAGENTA, - '~BC': Back.CYAN, - '~BW': Back.WHITE, - '~BT': Back.RESET, + "~SB": Style.BRIGHT, + "~SN": Style.NORMAL, + "~SK": Style.BLINK, + "~SU": Style.UNDERLINE, + "~ST": Style.RESET_ALL, + "~FK": Fore.BLACK, + "~FR": Fore.RED, + "~FG": Fore.GREEN, + "~FY": Fore.YELLOW, + "~FB": Fore.BLUE, + "~FM": Fore.MAGENTA, + "~FC": Fore.CYAN, + "~FW": Fore.WHITE, + "~FT": Fore.RESET, + "~BK": Back.BLACK, + "~BR": Back.RED, + "~BG": Back.GREEN, + "~BY": Back.YELLOW, + "~BB": Back.BLUE, + "~BM": Back.MAGENTA, + "~BC": Back.CYAN, + "~BW": Back.WHITE, + "~BT": Back.RESET, } for k, v in list(params.items()): msg = re.sub(k, v, msg) - msg = msg + '~ST~FW~BT' + msg = msg + "~ST~FW~BT" # msg = re.sub(r'(~[A-Z]{2})', r'%(\1)s', msg) for k, v in list(colors.items()): msg = msg.replace(k, v) return msg - -''' + + +""" This module generates ANSI character codes to printing colors to terminals. See: http://en.wikipedia.org/wiki/ANSI_escape_code -''' +""" + +COLOR_ESC = "\033[" -COLOR_ESC = '\033[' class AnsiCodes(object): def __init__(self, codes): for name in dir(codes): - if not name.startswith('_'): + if not name.startswith("_"): value = getattr(codes, name) - setattr(self, name, COLOR_ESC + str(value) + 'm') + setattr(self, name, COLOR_ESC + str(value) + "m") + class AnsiFore: - BLACK = 30 - RED = 31 - GREEN = 32 - YELLOW = 33 - BLUE = 34 + BLACK = 30 + RED = 31 + GREEN = 32 + YELLOW = 33 + BLUE = 34 MAGENTA = 35 - CYAN = 36 - WHITE = 37 - RESET = 39 + CYAN = 36 + WHITE = 37 + RESET = 39 + class AnsiBack: - BLACK = 40 - RED = 41 - GREEN = 42 - YELLOW = 43 - BLUE = 44 + BLACK = 40 + RED = 41 + GREEN = 42 + YELLOW = 43 + BLUE = 44 MAGENTA = 45 - CYAN = 46 - WHITE = 47 - RESET = 49 + CYAN = 46 + WHITE = 47 + RESET = 49 + class AnsiStyle: - BRIGHT = 1 - DIM = 2 + BRIGHT = 1 + DIM = 2 UNDERLINE = 4 - BLINK = 5 - NORMAL = 22 + BLINK = 5 + NORMAL = 22 RESET_ALL = 0 + Fore = AnsiCodes(AnsiFore) Back = AnsiCodes(AnsiBack) Style = AnsiCodes(AnsiStyle) diff --git a/utils/management_functions.py b/utils/management_functions.py index 914f04e63..e49c231f4 100644 --- a/utils/management_functions.py +++ b/utils/management_functions.py @@ -1,17 +1,18 @@ import os import errno + def daemonize(): """ Detach from the terminal and continue as a daemon. """ # swiped from twisted/scripts/twistd.py # See http://www.erlenstar.demon.co.uk/unix/faq_toc.html#TOC16 - if os.fork(): # launch child and... - os._exit(0) # kill off parent + if os.fork(): # launch child and... + os._exit(0) # kill off parent os.setsid() - if os.fork(): # launch child and... - os._exit(0) # kill off parent again. + if os.fork(): # launch child and... + os._exit(0) # kill off parent again. 
os.umask(0o77) null = os.open("/dev/null", os.O_RDWR) for i in range(3): @@ -20,4 +21,4 @@ def daemonize(): except OSError as e: if e.errno != errno.EBADF: raise - os.close(null) \ No newline at end of file + os.close(null) diff --git a/utils/mongo_command_monitor.py b/utils/mongo_command_monitor.py index 84b677a77..108fcfe99 100644 --- a/utils/mongo_command_monitor.py +++ b/utils/mongo_command_monitor.py @@ -3,8 +3,8 @@ import logging from django.conf import settings from django.db import connection -class MongoCommandLogger(monitoring.CommandListener): +class MongoCommandLogger(monitoring.CommandListener): def __init__(self): self.seen_request_ids = dict() @@ -24,13 +24,13 @@ class MongoCommandLogger(monitoring.CommandListener): op = event.command_name collection = command_dict[op] - command_filter = command_dict.get('filter', None) - command_documents = command_dict.get('documents', None) - command_indexes = command_dict.get('indexes', None) - command_insert = command_dict.get('updates', None) - command_update = command_dict.get('updates', None) - command_sort = command_dict.get('sort', None) - command_get_more = command_dict.get('getMore', None) + command_filter = command_dict.get("filter", None) + command_documents = command_dict.get("documents", None) + command_indexes = command_dict.get("indexes", None) + command_insert = command_dict.get("updates", None) + command_update = command_dict.get("updates", None) + command_sort = command_dict.get("sort", None) + command_get_more = command_dict.get("getMore", None) if command_sort: command_sort = dict(command_sort) @@ -55,19 +55,17 @@ class MongoCommandLogger(monitoring.CommandListener): if op == "insert" or op == "update": op = f"~SB{op}" - - message = { - "op": op, - "query": query, - "collection": collection - } - if not getattr(connection, 'queriesx', False): + message = {"op": op, "query": query, "collection": collection} + + if not getattr(connection, "queriesx", False): connection.queriesx = [] - connection.queriesx.append({ - 'mongo': message, - 'time': '%.6f' % (int(event.duration_micros) / 1000000), - }) + connection.queriesx.append( + { + "mongo": message, + "time": "%.6f" % (int(event.duration_micros) / 1000000), + } + ) # logging.info("Command {0.command_name} with request id " # "{0.request_id} on server {0.connection_id} " @@ -75,18 +73,21 @@ class MongoCommandLogger(monitoring.CommandListener): # "microseconds".format(event)) def failed(self, event): - logging.info("Command {0.command_name} with request id " - "{0.request_id} on server {0.connection_id} " - "failed in {0.duration_micros} " - "microseconds".format(event)) + logging.info( + "Command {0.command_name} with request id " + "{0.request_id} on server {0.connection_id} " + "failed in {0.duration_micros} " + "microseconds".format(event) + ) def activated(self, request): - return (settings.DEBUG_QUERIES or - (hasattr(request, 'activated_segments') and - 'db_profiler' in request.activated_segments)) - + return settings.DEBUG_QUERIES or ( + hasattr(request, "activated_segments") and "db_profiler" in request.activated_segments + ) + def process_celery(self, profiler): - if not self.activated(profiler): return + if not self.activated(profiler): + return connection.queriesx = [] diff --git a/utils/mongo_raw_log_middleware.py b/utils/mongo_raw_log_middleware.py index 780e5b39b..5d23e668d 100644 --- a/utils/mongo_raw_log_middleware.py +++ b/utils/mongo_raw_log_middleware.py @@ -10,41 +10,47 @@ import bson import pymongo from bson.errors import InvalidBSON -class 
MongoDumpMiddleware(object): +class MongoDumpMiddleware(object): def __init__(self, get_response=None): self.get_response = get_response def activated(self, request): - return (settings.DEBUG_QUERIES or - (hasattr(request, 'activated_segments') and - 'db_profiler' in request.activated_segments)) - + return settings.DEBUG_QUERIES or ( + hasattr(request, "activated_segments") and "db_profiler" in request.activated_segments + ) + def process_view(self, request, callback, callback_args, callback_kwargs): - if not self.activated(request): return + if not self.activated(request): + return self._used_msg_ids = [] - if not getattr(MongoClient, '_logging', False): + if not getattr(MongoClient, "_logging", False): # save old methods - setattr(MongoClient, '_logging', True) - if hasattr(MongoClient, '_send_message_with_response'): + setattr(MongoClient, "_logging", True) + if hasattr(MongoClient, "_send_message_with_response"): connection.queriesx = [] - MongoClient._send_message_with_response = \ - self._instrument(MongoClient._send_message_with_response) - MongoReplicaSetClient._send_message_with_response = \ - self._instrument(MongoReplicaSetClient._send_message_with_response) + MongoClient._send_message_with_response = self._instrument( + MongoClient._send_message_with_response + ) + MongoReplicaSetClient._send_message_with_response = self._instrument( + MongoReplicaSetClient._send_message_with_response + ) return None def process_celery(self, profiler): - if not self.activated(profiler): return + if not self.activated(profiler): + return self._used_msg_ids = [] - if not getattr(MongoClient, '_logging', False): + if not getattr(MongoClient, "_logging", False): # save old methods - setattr(MongoClient, '_logging', True) - if hasattr(MongoClient, '_send_message_with_response'): - MongoClient._send_message_with_response = \ - self._instrument(MongoClient._send_message_with_response) - MongoReplicaSetClient._send_message_with_response = \ - self._instrument(MongoReplicaSetClient._send_message_with_response) + setattr(MongoClient, "_logging", True) + if hasattr(MongoClient, "_send_message_with_response"): + MongoClient._send_message_with_response = self._instrument( + MongoClient._send_message_with_response + ) + MongoReplicaSetClient._send_message_with_response = self._instrument( + MongoReplicaSetClient._send_message_with_response + ) return None def process_response(self, request, response): @@ -56,20 +62,23 @@ class MongoDumpMiddleware(object): query = args[1].get_message(False, sock_info, False) message = _mongodb_decode_wire_protocol(query[1]) # message = _mongodb_decode_wire_protocol(args[1][1]) - if not message or message['msg_id'] in self._used_msg_ids: + if not message or message["msg_id"] in self._used_msg_ids: return original_method(*args, **kwargs) - self._used_msg_ids.append(message['msg_id']) + self._used_msg_ids.append(message["msg_id"]) start = time() result = original_method(*args, **kwargs) stop = time() duration = stop - start - if not getattr(connection, 'queriesx', False): + if not getattr(connection, "queriesx", False): connection.queriesx = [] - connection.queriesx.append({ - 'mongo': message, - 'time': '%.6f' % duration, - }) + connection.queriesx.append( + { + "mongo": message, + "time": "%.6f" % duration, + } + ) return result + return instrumented_method def __call__(self, request): @@ -78,34 +87,40 @@ class MongoDumpMiddleware(object): return response + def _mongodb_decode_wire_protocol(message): - """ http://www.mongodb.org/display/DOCS/Mongo+Wire+Protocol """ + 
"""http://www.mongodb.org/display/DOCS/Mongo+Wire+Protocol""" MONGO_OPS = { - 1000: 'msg', - 2001: 'update', - 2002: 'insert', - 2003: 'reserved', - 2004: 'query', - 2005: 'get_more', - 2006: 'delete', - 2007: 'kill_cursors', + 1000: "msg", + 2001: "update", + 2002: "insert", + 2003: "reserved", + 2004: "query", + 2005: "get_more", + 2006: "delete", + 2007: "kill_cursors", } - _, msg_id, _, opcode, _ = struct.unpack(' 90: requests.post( - "https://api.mailgun.net/v2/%s/messages" % settings.MAILGUN_SERVER_NAME, - auth=("api", settings.MAILGUN_ACCESS_KEY), - data={"from": "NewsBlur Disk Monitor: %s " % (hostname, hostname), - "to": [admin_email], - "subject": "%s hit %s%% disk usage!" % (hostname, percent), - "text": "Usage on %s: %s" % (hostname, disk_usage_output)}) + "https://api.mailgun.net/v2/%s/messages" % settings.MAILGUN_SERVER_NAME, + auth=("api", settings.MAILGUN_ACCESS_KEY), + data={ + "from": "NewsBlur Disk Monitor: %s " % (hostname, hostname), + "to": [admin_email], + "subject": "%s hit %s%% disk usage!" % (hostname, percent), + "text": "Usage on %s: %s" % (hostname, disk_usage_output), + }, + ) print(" ---> Disk usage is NOT fine: %s / %s%% used" % (hostname, percent)) else: print(" ---> Disk usage is fine: %s / %s%% used" % (hostname, percent)) - -if __name__ == '__main__': + + +if __name__ == "__main__": main() diff --git a/utils/monitor_newsletter_delivery.py b/utils/monitor_newsletter_delivery.py index 35f30a72f..c1ef3a57c 100755 --- a/utils/monitor_newsletter_delivery.py +++ b/utils/monitor_newsletter_delivery.py @@ -1,35 +1,44 @@ #!/usr/local/bin/python3 import sys -sys.path.append('/srv/newsblur') + +sys.path.append("/srv/newsblur") import requests from newsblur_web import settings import socket + def main(): hostname = socket.gethostname() admin_email = settings.ADMINS[0][1] - r = requests.get("https://api.mailgun.net/v3/newsletters.newsblur.com/stats/total", - auth=("api", settings.MAILGUN_ACCESS_KEY), - params={"event": ["accepted", "delivered", "failed"], - "duration": "2h"}) - stats = r.json()['stats'][0] - delivered = stats['delivered']['total'] - accepted = stats['delivered']['total'] - bounced = stats['failed']['permanent']['total'] + stats['failed']['temporary']['total'] + r = requests.get( + "https://api.mailgun.net/v3/newsletters.newsblur.com/stats/total", + auth=("api", settings.MAILGUN_ACCESS_KEY), + params={"event": ["accepted", "delivered", "failed"], "duration": "2h"}, + ) + stats = r.json()["stats"][0] + delivered = stats["delivered"]["total"] + accepted = stats["delivered"]["total"] + bounced = stats["failed"]["permanent"]["total"] + stats["failed"]["temporary"]["total"] if bounced / float(delivered) > 0.5: requests.post( - "https://api.mailgun.net/v2/%s/messages" % settings.MAILGUN_SERVER_NAME, - auth=("api", settings.MAILGUN_ACCESS_KEY), - data={"from": "NewsBlur Newsletter Monitor: %s " % (hostname, hostname), - "to": [admin_email], - "subject": "%s newsletters bounced (2h): %s/%s accepted/delivered -> %s bounced" % (hostname, accepted, delivered, bounced), - "text": "Newsletters are not being delivered! %s delivered, %s bounced" % (delivered, bounced)}) + "https://api.mailgun.net/v2/%s/messages" % settings.MAILGUN_SERVER_NAME, + auth=("api", settings.MAILGUN_ACCESS_KEY), + data={ + "from": "NewsBlur Newsletter Monitor: %s " % (hostname, hostname), + "to": [admin_email], + "subject": "%s newsletters bounced (2h): %s/%s accepted/delivered -> %s bounced" + % (hostname, accepted, delivered, bounced), + "text": "Newsletters are not being delivered! 
%s delivered, %s bounced" + % (delivered, bounced), + }, + ) print(" ---> %s newsletters bounced: %s > %s > %s" % (hostname, accepted, delivered, bounced)) else: print(" ---> %s newsletters OK: %s > %s > %s" % (hostname, accepted, delivered, bounced)) - -if __name__ == '__main__': + + +if __name__ == "__main__": main() diff --git a/utils/monitor_redis_bgsave.py b/utils/monitor_redis_bgsave.py index c70893f3a..c23769ba1 100755 --- a/utils/monitor_redis_bgsave.py +++ b/utils/monitor_redis_bgsave.py @@ -1,7 +1,8 @@ #!/usr/local/bin/python3 import sys -sys.path.append('/srv/newsblur') + +sys.path.append("/srv/newsblur") import os import datetime @@ -9,25 +10,30 @@ import requests from newsblur_web import settings import socket + def main(): redis_log_path = sys.argv[1] - t = os.popen('stat -c%Y /srv/newsblur/docker/volumes/redis/') - timestamp = t.read().split('\n')[0] + t = os.popen("stat -c%Y /srv/newsblur/docker/volumes/redis/") + timestamp = t.read().split("\n")[0] modified = datetime.datetime.fromtimestamp(int(timestamp)) hostname = socket.gethostname() modified_minutes = datetime.datetime.now() - modified log_tail = os.popen(f"tail -n 100 {redis_log_path}").read() if True: - #if modified < ten_min_ago: + # if modified < ten_min_ago: requests.post( - "https://api.mailgun.net/v2/%s/messages" % settings.MAILGUN_SERVER_NAME, - auth=("api", settings.MAILGUN_ACCESS_KEY), - data={"from": "NewsBlur Redis Monitor: %s " % (hostname, hostname), - "to": [settings.ADMINS[0][1]], - "subject": "%s hasn't bgsave'd redis in %s!" % (hostname, modified_minutes), - "text": "Last modified %s: %s ago\n\n----\n\n%s" % (hostname, modified_minutes, log_tail)}) + "https://api.mailgun.net/v2/%s/messages" % settings.MAILGUN_SERVER_NAME, + auth=("api", settings.MAILGUN_ACCESS_KEY), + data={ + "from": "NewsBlur Redis Monitor: %s " % (hostname, hostname), + "to": [settings.ADMINS[0][1]], + "subject": "%s hasn't bgsave'd redis in %s!" 
% (hostname, modified_minutes), + "text": "Last modified %s: %s ago\n\n----\n\n%s" % (hostname, modified_minutes, log_tail), + }, + ) else: print(" ---> Redis bgsave fine: %s / %s ago" % (hostname, modified_minutes)) - -if __name__ == '__main__': + + +if __name__ == "__main__": main() diff --git a/utils/monitor_task_fetches.py b/utils/monitor_task_fetches.py index 7e0447fd8..56e4d967e 100755 --- a/utils/monitor_task_fetches.py +++ b/utils/monitor_task_fetches.py @@ -1,7 +1,8 @@ #!/usr/local/bin/python3 import sys -sys.path.append('/srv/newsblur') + +sys.path.append("/srv/newsblur") import requests from newsblur_web import settings @@ -9,6 +10,7 @@ import socket import redis import pymongo + def main(): hostname = socket.gethostname() admin_email = settings.ADMINS[0][1] @@ -20,33 +22,41 @@ def main(): r = redis.Redis(connection_pool=settings.REDIS_ANALYTICS_POOL) try: - client = pymongo.MongoClient(f"mongodb://{settings.MONGO_DB['username']}:{settings.MONGO_DB['password']}@{settings.MONGO_DB['host']}/?authSource=admin") - feeds_fetched = client.newsblur.statistics.find_one({"key": "feeds_fetched"})['value'] + client = pymongo.MongoClient( + f"mongodb://{settings.MONGO_DB['username']}:{settings.MONGO_DB['password']}@{settings.MONGO_DB['host']}/?authSource=admin" + ) + feeds_fetched = client.newsblur.statistics.find_one({"key": "feeds_fetched"})["value"] redis_task_fetches = int(r.get(monitor_key) or 0) except Exception as e: failed = e - + if feeds_fetched < 5000000 and not failed: if redis_task_fetches > 0 and feeds_fetched < (redis_task_fetches - FETCHES_DROP_AMOUNT): failed = True - # Ignore 0's below, as they simply imply low number, not falling + # Ignore 0's below, as they simply imply low number, not falling # elif redis_task_fetches <= 0: # failed = True if failed: requests.post( - "https://api.mailgun.net/v2/%s/messages" % settings.MAILGUN_SERVER_NAME, - auth=("api", settings.MAILGUN_ACCESS_KEY), - data={"from": "NewsBlur Task Monitor: %s " % (hostname, hostname), - "to": [admin_email], - "subject": "%s feeds fetched falling: %s (from %s)" % (hostname, feeds_fetched, redis_task_fetches), - "text": "Feed fetches are falling: %s (from %s) %s" % (feeds_fetched, redis_task_fetches, failed)}) + "https://api.mailgun.net/v2/%s/messages" % settings.MAILGUN_SERVER_NAME, + auth=("api", settings.MAILGUN_ACCESS_KEY), + data={ + "from": "NewsBlur Task Monitor: %s " % (hostname, hostname), + "to": [admin_email], + "subject": "%s feeds fetched falling: %s (from %s)" + % (hostname, feeds_fetched, redis_task_fetches), + "text": "Feed fetches are falling: %s (from %s) %s" + % (feeds_fetched, redis_task_fetches, failed), + }, + ) r.set(monitor_key, feeds_fetched) - r.expire(monitor_key, 60*60*12) # 3 hours + r.expire(monitor_key, 60 * 60 * 12) # 3 hours print(" ---> Feeds fetched falling! 
%s %s" % (feeds_fetched, failed)) else: print(" ---> Feeds fetched OK: %s" % (feeds_fetched)) - -if __name__ == '__main__': + + +if __name__ == "__main__": main() diff --git a/utils/monitor_work_queue.py b/utils/monitor_work_queue.py index 1c4ba2238..df95cb1f2 100755 --- a/utils/monitor_work_queue.py +++ b/utils/monitor_work_queue.py @@ -1,7 +1,8 @@ #!/usr/local/bin/python3 import sys -sys.path.append('/srv/newsblur') + +sys.path.append("/srv/newsblur") import requests from newsblur_web import settings @@ -9,6 +10,7 @@ import socket import redis import pymongo + def main(): hostname = socket.gethostname() admin_email = settings.ADMINS[0][1] @@ -25,25 +27,30 @@ def main(): redis_work_queue = int(r_monitor.get(monitor_key) or 0) except Exception as e: failed = e - + if work_queue_size > 300 and work_queue_size > (redis_work_queue + QUEUE_DROP_AMOUNT): failed = True if failed: requests.post( - "https://api.mailgun.net/v2/%s/messages" % settings.MAILGUN_SERVER_NAME, - auth=("api", settings.MAILGUN_ACCESS_KEY), - data={"from": "NewsBlur Queue Monitor: %s " % (hostname, hostname), - "to": [admin_email], - "subject": "%s work queue rising: %s (from %s)" % (hostname, work_queue_size, redis_work_queue), - "text": "Work queue is rising: %s (from %s) %s" % (work_queue_size, redis_work_queue, failed)}) + "https://api.mailgun.net/v2/%s/messages" % settings.MAILGUN_SERVER_NAME, + auth=("api", settings.MAILGUN_ACCESS_KEY), + data={ + "from": "NewsBlur Queue Monitor: %s " % (hostname, hostname), + "to": [admin_email], + "subject": "%s work queue rising: %s (from %s)" + % (hostname, work_queue_size, redis_work_queue), + "text": "Work queue is rising: %s (from %s) %s" % (work_queue_size, redis_work_queue, failed), + }, + ) r_monitor.set(monitor_key, work_queue_size) - r_monitor.expire(monitor_key, 60*60*3) # 3 hours + r_monitor.expire(monitor_key, 60 * 60 * 3) # 3 hours print(" ---> Work queue rising! 
%s %s" % (work_queue_size, failed)) else: print(" ---> Work queue OK: %s" % (work_queue_size)) - -if __name__ == '__main__': + + +if __name__ == "__main__": main() diff --git a/utils/munin/base.py b/utils/munin/base.py index 203e24b31..1f663c705 100644 --- a/utils/munin/base.py +++ b/utils/munin/base.py @@ -1,22 +1,21 @@ import sys -class MuninGraph(object): +class MuninGraph(object): def run(self): cmd_name = None if len(sys.argv) > 1: cmd_name = sys.argv[1] - if cmd_name == 'config': + if cmd_name == "config": self.print_config() - else: + else: metrics = self.calculate_metrics() self.print_metrics(metrics) - + def print_config(self): - for key,value in self.graph_config.items(): - print('%s %s' % (key, value)) + for key, value in self.graph_config.items(): + print("%s %s" % (key, value)) def print_metrics(self, metrics): for key, value in metrics.items(): - print('%s.value %s' % (key, value)) - \ No newline at end of file + print("%s.value %s" % (key, value)) diff --git a/utils/munin/newsblur_app_servers.py b/utils/munin/newsblur_app_servers.py index 96be3dd79..d4b1766ea 100755 --- a/utils/munin/newsblur_app_servers.py +++ b/utils/munin/newsblur_app_servers.py @@ -2,70 +2,80 @@ from utils.munin.base import MuninGraph import datetime import os + os.environ["DJANGO_SETTINGS_MODULE"] = "newsblur_web.settings" from django.conf import settings class NBMuninGraph(MuninGraph): - @property def graph_config(self): graph = { - 'graph_category' : 'NewsBlur', - 'graph_title' : 'NewsBlur App Server Page Loads', - 'graph_vlabel' : '# of page loads / server', - 'graph_args' : '-l 0', - 'total.label' : 'total', - 'total.draw' : 'LINE1', + "graph_category": "NewsBlur", + "graph_title": "NewsBlur App Server Page Loads", + "graph_vlabel": "# of page loads / server", + "graph_args": "-l 0", + "total.label": "total", + "total.draw": "LINE1", } stats = self.stats - graph.update(dict((("%s.label" % s['_id'].replace('-', ''), s['_id']) for s in stats))) - graph.update(dict((("%s.draw" % s['_id'].replace('-', ''), "AREASTACK") for s in stats))) - graph['graph_order'] = ' '.join(sorted(s['_id'].replace('-', '') for s in stats)) + graph.update(dict((("%s.label" % s["_id"].replace("-", ""), s["_id"]) for s in stats))) + graph.update(dict((("%s.draw" % s["_id"].replace("-", ""), "AREASTACK") for s in stats))) + graph["graph_order"] = " ".join(sorted(s["_id"].replace("-", "") for s in stats)) return graph def calculate_metrics(self): - servers = dict((("%s" % s['_id'].replace('-', ''), s['feeds']) for s in self.stats)) - servers['total'] = self.total[0]['feeds'] + servers = dict((("%s" % s["_id"].replace("-", ""), s["feeds"]) for s in self.stats)) + servers["total"] = self.total[0]["feeds"] return servers - + @property def stats(self): - stats = settings.MONGOANALYTICSDB.nbanalytics.page_loads.aggregate([{ - "$match": { - "date": { - "$gte": datetime.datetime.now() - datetime.timedelta(minutes=5), + stats = settings.MONGOANALYTICSDB.nbanalytics.page_loads.aggregate( + [ + { + "$match": { + "date": { + "$gte": datetime.datetime.now() - datetime.timedelta(minutes=5), + }, + }, }, - }, - }, { - "$group": { - "_id" : "$server", - "feeds" : {"$sum": 1}, - }, - }]) - + { + "$group": { + "_id": "$server", + "feeds": {"$sum": 1}, + }, + }, + ] + ) + return list(stats) - + @property def total(self): import datetime from django.conf import settings - - stats = settings.MONGOANALYTICSDB.nbanalytics.page_loads.aggregate([{ - "$match": { - "date": { - "$gt": datetime.datetime.now() - datetime.timedelta(minutes=5), - }, - }, - }, 
{ - "$group": { - "_id" : 1, - "feeds" : {"$sum": 1}, - }, - }]) - - return list(stats) - -if __name__ == '__main__': + stats = settings.MONGOANALYTICSDB.nbanalytics.page_loads.aggregate( + [ + { + "$match": { + "date": { + "$gt": datetime.datetime.now() - datetime.timedelta(minutes=5), + }, + }, + }, + { + "$group": { + "_id": 1, + "feeds": {"$sum": 1}, + }, + }, + ] + ) + + return list(stats) + + +if __name__ == "__main__": NBMuninGraph().run() diff --git a/utils/munin/newsblur_app_times.py b/utils/munin/newsblur_app_times.py index 228c9963a..868014e5c 100755 --- a/utils/munin/newsblur_app_times.py +++ b/utils/munin/newsblur_app_times.py @@ -1,51 +1,57 @@ #!/srv/newsblur/venv/newsblur3/bin/python from utils.munin.base import MuninGraph -class NBMuninGraph(MuninGraph): +class NBMuninGraph(MuninGraph): @property def graph_config(self): graph = { - 'graph_category' : 'NewsBlur', - 'graph_title' : 'NewsBlur App Server Times', - 'graph_vlabel' : 'Page load time / server', - 'graph_args' : '-l 0', + "graph_category": "NewsBlur", + "graph_title": "NewsBlur App Server Times", + "graph_vlabel": "Page load time / server", + "graph_args": "-l 0", } stats = self.stats - graph['graph_order'] = ' '.join(sorted(s['_id'] for s in stats)) - graph.update(dict((("%s.label" % s['_id'], s['_id']) for s in stats))) - graph.update(dict((("%s.draw" % s['_id'], 'LINE1') for s in stats))) + graph["graph_order"] = " ".join(sorted(s["_id"] for s in stats)) + graph.update(dict((("%s.label" % s["_id"], s["_id"]) for s in stats))) + graph.update(dict((("%s.draw" % s["_id"], "LINE1") for s in stats))) return graph def calculate_metrics(self): - servers = dict((("%s" % s['_id'], s['page_load']) for s in self.stats)) + servers = dict((("%s" % s["_id"], s["page_load"]) for s in self.stats)) return servers - + @property def stats(self): import datetime import os + os.environ["DJANGO_SETTINGS_MODULE"] = "newsblur_web.settings" from django.conf import settings - - stats = settings.MONGOANALYTICSDB.nbanalytics.page_loads.aggregate([{ - "$match": { - "date": { - "$gt": datetime.datetime.now() - datetime.timedelta(minutes=5), - }, - }, - }, { - "$group": { - "_id" : "$server", - "page_load" : {"$avg": "$page_load"}, - }, - }]) - - return list(stats) - -if __name__ == '__main__': + stats = settings.MONGOANALYTICSDB.nbanalytics.page_loads.aggregate( + [ + { + "$match": { + "date": { + "$gt": datetime.datetime.now() - datetime.timedelta(minutes=5), + }, + }, + }, + { + "$group": { + "_id": "$server", + "page_load": {"$avg": "$page_load"}, + }, + }, + ] + ) + + return list(stats) + + +if __name__ == "__main__": NBMuninGraph().run() diff --git a/utils/munin/newsblur_classifiers.py b/utils/munin/newsblur_classifiers.py index e06515eb7..1efbd6e9a 100755 --- a/utils/munin/newsblur_classifiers.py +++ b/utils/munin/newsblur_classifiers.py @@ -2,34 +2,37 @@ from utils.munin.base import MuninGraph import os + os.environ["DJANGO_SETTINGS_MODULE"] = "newsblur_web.settings" import django + django.setup() -class NBMuninGraph(MuninGraph): +class NBMuninGraph(MuninGraph): @property def graph_config(self): return { - 'graph_category' : 'NewsBlur', - 'graph_title' : 'NewsBlur Classifiers', - 'graph_vlabel' : '# of classifiers', - 'graph_args' : '-l 0', - 'feeds.label': 'feeds', - 'authors.label': 'authors', - 'tags.label': 'tags', - 'titles.label': 'titles', + "graph_category": "NewsBlur", + "graph_title": "NewsBlur Classifiers", + "graph_vlabel": "# of classifiers", + "graph_args": "-l 0", + "feeds.label": "feeds", + "authors.label": "authors", + 
"tags.label": "tags", + "titles.label": "titles", } def calculate_metrics(self): from apps.analyzer.models import MClassifierFeed, MClassifierAuthor, MClassifierTag, MClassifierTitle return { - 'feeds': MClassifierFeed.objects.count(), - 'authors': MClassifierAuthor.objects.count(), - 'tags': MClassifierTag.objects.count(), - 'titles': MClassifierTitle.objects.count(), + "feeds": MClassifierFeed.objects.count(), + "authors": MClassifierAuthor.objects.count(), + "tags": MClassifierTag.objects.count(), + "titles": MClassifierTitle.objects.count(), } -if __name__ == '__main__': + +if __name__ == "__main__": NBMuninGraph().run() diff --git a/utils/munin/newsblur_dbtimes.py b/utils/munin/newsblur_dbtimes.py index 46a7668d2..e50be6597 100755 --- a/utils/munin/newsblur_dbtimes.py +++ b/utils/munin/newsblur_dbtimes.py @@ -1,44 +1,47 @@ #!/srv/newsblur/venv/newsblur3/bin/python from utils.munin.base import MuninGraph import os + os.environ["DJANGO_SETTINGS_MODULE"] = "newsblur_web.settings" import django + django.setup() -class NBMuninGraph(MuninGraph): +class NBMuninGraph(MuninGraph): @property def graph_config(self): return { - 'graph_category' : 'NewsBlur', - 'graph_title' : 'NewsBlur DB Times', - 'graph_vlabel' : 'Database times (seconds)', - 'graph_args' : '-l 0', - 'sql_avg.label' : 'SQL avg times (5m)', - 'sql_avg.draw' : 'LINE1', - 'mongo_avg.label' : 'Mongo avg times (5m)', - 'mongo_avg.draw' : 'LINE1', - 'redis_avg.label' :'Redis avg times (5m)', - 'redis_avg.draw' : 'LINE1', - 'task_sql_avg.label' : 'Task SQL avg times (5m)', - 'task_sql_avg.draw' : 'LINE1', - 'task_mongo_avg.label' : 'Task Mongo avg times (5m)', - 'task_mongo_avg.draw' : 'LINE1', - 'task_redis_avg.label' :'Task Redis avg times (5m)', - 'task_redis_avg.draw' : 'LINE1', + "graph_category": "NewsBlur", + "graph_title": "NewsBlur DB Times", + "graph_vlabel": "Database times (seconds)", + "graph_args": "-l 0", + "sql_avg.label": "SQL avg times (5m)", + "sql_avg.draw": "LINE1", + "mongo_avg.label": "Mongo avg times (5m)", + "mongo_avg.draw": "LINE1", + "redis_avg.label": "Redis avg times (5m)", + "redis_avg.draw": "LINE1", + "task_sql_avg.label": "Task SQL avg times (5m)", + "task_sql_avg.draw": "LINE1", + "task_mongo_avg.label": "Task Mongo avg times (5m)", + "task_mongo_avg.draw": "LINE1", + "task_redis_avg.label": "Task Redis avg times (5m)", + "task_redis_avg.draw": "LINE1", } def calculate_metrics(self): from apps.statistics.models import MStatistics - + return { - 'sql_avg': MStatistics.get('latest_sql_avg'), - 'mongo_avg': MStatistics.get('latest_mongo_avg'), - 'redis_avg': MStatistics.get('latest_redis_avg'), - 'task_sql_avg': MStatistics.get('latest_task_sql_avg'), - 'task_mongo_avg': MStatistics.get('latest_task_mongo_avg'), - 'task_redis_avg': MStatistics.get('latest_task_redis_avg'), + "sql_avg": MStatistics.get("latest_sql_avg"), + "mongo_avg": MStatistics.get("latest_mongo_avg"), + "redis_avg": MStatistics.get("latest_redis_avg"), + "task_sql_avg": MStatistics.get("latest_task_sql_avg"), + "task_mongo_avg": MStatistics.get("latest_task_mongo_avg"), + "task_redis_avg": MStatistics.get("latest_task_redis_avg"), } -if __name__ == '__main__': + +if __name__ == "__main__": NBMuninGraph().run() diff --git a/utils/munin/newsblur_errors.py b/utils/munin/newsblur_errors.py index 0e1f0d83d..ec3c967e3 100755 --- a/utils/munin/newsblur_errors.py +++ b/utils/munin/newsblur_errors.py @@ -2,32 +2,36 @@ from utils.munin.base import MuninGraph import os + os.environ["DJANGO_SETTINGS_MODULE"] = "newsblur_web.settings" import 
django + django.setup() -class NBMuninGraph(MuninGraph): +class NBMuninGraph(MuninGraph): @property def graph_config(self): return { - 'graph_category' : 'NewsBlur', - 'graph_title' : 'NewsBlur Fetching History', - 'graph_vlabel' : 'errors', - 'graph_args' : '-l 0', + "graph_category": "NewsBlur", + "graph_title": "NewsBlur Fetching History", + "graph_vlabel": "errors", + "graph_args": "-l 0", # 'feed_errors.label': 'Feed Errors', - 'feed_success.label': 'Feed Success', + "feed_success.label": "Feed Success", # 'page_errors.label': 'Page Errors', # 'page_success.label': 'Page Success', } def calculate_metrics(self): from apps.statistics.models import MStatistics + statistics = MStatistics.all() - + return { - 'feed_success': statistics['feeds_fetched'], + "feed_success": statistics["feeds_fetched"], } -if __name__ == '__main__': + +if __name__ == "__main__": NBMuninGraph().run() diff --git a/utils/munin/newsblur_feed_counts.py b/utils/munin/newsblur_feed_counts.py index 1ed4ff518..97d929802 100755 --- a/utils/munin/newsblur_feed_counts.py +++ b/utils/munin/newsblur_feed_counts.py @@ -2,25 +2,27 @@ from utils.munin.base import MuninGraph import redis import os + os.environ["DJANGO_SETTINGS_MODULE"] = "newsblur_web.settings" import django + django.setup() -class NBMuninGraph(MuninGraph): +class NBMuninGraph(MuninGraph): @property def graph_config(self): return { - 'graph_category' : 'NewsBlur', - 'graph_title' : 'NewsBlur Feed Counts', - 'graph_vlabel' : 'Feeds Feed Counts', - 'graph_args' : '-l 0', - 'scheduled_feeds.label': 'scheduled_feeds', - 'exception_feeds.label': 'exception_feeds', - 'exception_pages.label': 'exception_pages', - 'duplicate_feeds.label': 'duplicate_feeds', - 'active_feeds.label': 'active_feeds', - 'push_feeds.label': 'push_feeds', + "graph_category": "NewsBlur", + "graph_title": "NewsBlur Feed Counts", + "graph_vlabel": "Feeds Feed Counts", + "graph_args": "-l 0", + "scheduled_feeds.label": "scheduled_feeds", + "exception_feeds.label": "exception_feeds", + "exception_pages.label": "exception_pages", + "duplicate_feeds.label": "duplicate_feeds", + "active_feeds.label": "active_feeds", + "push_feeds.label": "push_feeds", } def calculate_metrics(self): @@ -28,42 +30,43 @@ class NBMuninGraph(MuninGraph): from apps.push.models import PushSubscription from django.conf import settings from apps.statistics.models import MStatistics - - exception_feeds = MStatistics.get('munin:exception_feeds') + + exception_feeds = MStatistics.get("munin:exception_feeds") if not exception_feeds: exception_feeds = Feed.objects.filter(has_feed_exception=True).count() - MStatistics.set('munin:exception_feeds', exception_feeds, 60*60*12) + MStatistics.set("munin:exception_feeds", exception_feeds, 60 * 60 * 12) - exception_pages = MStatistics.get('munin:exception_pages') + exception_pages = MStatistics.get("munin:exception_pages") if not exception_pages: exception_pages = Feed.objects.filter(has_page_exception=True).count() - MStatistics.set('munin:exception_pages', exception_pages, 60*60*12) + MStatistics.set("munin:exception_pages", exception_pages, 60 * 60 * 12) - duplicate_feeds = MStatistics.get('munin:duplicate_feeds') + duplicate_feeds = MStatistics.get("munin:duplicate_feeds") if not duplicate_feeds: duplicate_feeds = DuplicateFeed.objects.count() - MStatistics.set('munin:duplicate_feeds', duplicate_feeds, 60*60*12) + MStatistics.set("munin:duplicate_feeds", duplicate_feeds, 60 * 60 * 12) - active_feeds = MStatistics.get('munin:active_feeds') + active_feeds = 
MStatistics.get("munin:active_feeds") if not active_feeds: active_feeds = Feed.objects.filter(active_subscribers__gt=0).count() - MStatistics.set('munin:active_feeds', active_feeds, 60*60*12) + MStatistics.set("munin:active_feeds", active_feeds, 60 * 60 * 12) - push_feeds = MStatistics.get('munin:push_feeds') + push_feeds = MStatistics.get("munin:push_feeds") if not push_feeds: push_feeds = PushSubscription.objects.filter(verified=True).count() - MStatistics.set('munin:push_feeds', push_feeds, 60*60*12) + MStatistics.set("munin:push_feeds", push_feeds, 60 * 60 * 12) r = redis.Redis(connection_pool=settings.REDIS_FEED_UPDATE_POOL) - + return { - 'scheduled_feeds': r.zcard('scheduled_updates'), - 'exception_feeds': exception_feeds, - 'exception_pages': exception_pages, - 'duplicate_feeds': duplicate_feeds, - 'active_feeds': active_feeds, - 'push_feeds': push_feeds, + "scheduled_feeds": r.zcard("scheduled_updates"), + "exception_feeds": exception_feeds, + "exception_pages": exception_pages, + "duplicate_feeds": duplicate_feeds, + "active_feeds": active_feeds, + "push_feeds": push_feeds, } -if __name__ == '__main__': + +if __name__ == "__main__": NBMuninGraph().run() diff --git a/utils/munin/newsblur_feeds.py b/utils/munin/newsblur_feeds.py index 61857dbc8..c02918e6f 100755 --- a/utils/munin/newsblur_feeds.py +++ b/utils/munin/newsblur_feeds.py @@ -1,23 +1,25 @@ #!/srv/newsblur/venv/newsblur3/bin/python from utils.munin.base import MuninGraph import os + os.environ["DJANGO_SETTINGS_MODULE"] = "newsblur_web.settings" import django + django.setup() -class NBMuninGraph(MuninGraph): +class NBMuninGraph(MuninGraph): @property def graph_config(self): return { - 'graph_category' : 'NewsBlur', - 'graph_title' : 'NewsBlur Feeds & Subscriptions', - 'graph_vlabel' : 'Feeds & Subscribers', - 'graph_args' : '-l 0', - 'feeds.label': 'feeds', - 'subscriptions.label': 'subscriptions', - 'profiles.label': 'profiles', - 'social_subscriptions.label': 'social_subscriptions', + "graph_category": "NewsBlur", + "graph_title": "NewsBlur Feeds & Subscriptions", + "graph_vlabel": "Feeds & Subscribers", + "graph_args": "-l 0", + "feeds.label": "feeds", + "subscriptions.label": "subscriptions", + "profiles.label": "profiles", + "social_subscriptions.label": "social_subscriptions", } def calculate_metrics(self): @@ -26,22 +28,23 @@ class NBMuninGraph(MuninGraph): from apps.social.models import MSocialProfile, MSocialSubscription from apps.statistics.models import MStatistics - feeds_count = MStatistics.get('munin:feeds_count') + feeds_count = MStatistics.get("munin:feeds_count") if not feeds_count: feeds_count = Feed.objects.all().count() - MStatistics.set('munin:feeds_count', feeds_count, 60*60*12) + MStatistics.set("munin:feeds_count", feeds_count, 60 * 60 * 12) - subscriptions_count = MStatistics.get('munin:subscriptions_count') + subscriptions_count = MStatistics.get("munin:subscriptions_count") if not subscriptions_count: subscriptions_count = UserSubscription.objects.all().count() - MStatistics.set('munin:subscriptions_count', subscriptions_count, 60*60*12) + MStatistics.set("munin:subscriptions_count", subscriptions_count, 60 * 60 * 12) return { - 'feeds': feeds_count, - 'subscriptions': subscriptions_count, - 'profiles': MSocialProfile.objects.count(), - 'social_subscriptions': MSocialSubscription.objects.count(), + "feeds": feeds_count, + "subscriptions": subscriptions_count, + "profiles": MSocialProfile.objects.count(), + "social_subscriptions": MSocialSubscription.objects.count(), } -if __name__ == '__main__': 
+ +if __name__ == "__main__": NBMuninGraph().run() diff --git a/utils/munin/newsblur_loadtimes.py b/utils/munin/newsblur_loadtimes.py index 05a76eb32..5d89be785 100755 --- a/utils/munin/newsblur_loadtimes.py +++ b/utils/munin/newsblur_loadtimes.py @@ -1,30 +1,33 @@ #!/srv/newsblur/venv/newsblur3/bin/python from utils.munin.base import MuninGraph import os + os.environ["DJANGO_SETTINGS_MODULE"] = "newsblur_web.settings" import django + django.setup() -class NBMuninGraph(MuninGraph): +class NBMuninGraph(MuninGraph): @property def graph_config(self): return { - 'graph_category' : 'NewsBlur', - 'graph_title' : 'NewsBlur Loadtimes', - 'graph_vlabel' : 'Loadtimes (seconds)', - 'graph_args' : '-l 0', - 'feed_loadtimes_avg_hour.label': 'Feed Loadtimes Avg (Hour)', - 'feeds_loaded_hour.label': 'Feeds Loaded (Hour)', + "graph_category": "NewsBlur", + "graph_title": "NewsBlur Loadtimes", + "graph_vlabel": "Loadtimes (seconds)", + "graph_args": "-l 0", + "feed_loadtimes_avg_hour.label": "Feed Loadtimes Avg (Hour)", + "feeds_loaded_hour.label": "Feeds Loaded (Hour)", } def calculate_metrics(self): from apps.statistics.models import MStatistics - + return { - 'feed_loadtimes_avg_hour': MStatistics.get('latest_avg_time_taken'), - 'feeds_loaded_hour': MStatistics.get('latest_sites_loaded'), + "feed_loadtimes_avg_hour": MStatistics.get("latest_avg_time_taken"), + "feeds_loaded_hour": MStatistics.get("latest_sites_loaded"), } -if __name__ == '__main__': + +if __name__ == "__main__": NBMuninGraph().run() diff --git a/utils/munin/newsblur_stories.py b/utils/munin/newsblur_stories.py index 9b71664a0..d94c35d2c 100755 --- a/utils/munin/newsblur_stories.py +++ b/utils/munin/newsblur_stories.py @@ -1,31 +1,33 @@ #!/srv/newsblur/venv/newsblur3/bin/python from utils.munin.base import MuninGraph import os + os.environ["DJANGO_SETTINGS_MODULE"] = "newsblur_web.settings" import django + django.setup() class NBMuninGraph(MuninGraph): - @property def graph_config(self): return { - 'graph_category' : 'NewsBlur', - 'graph_title' : 'NewsBlur Stories', - 'graph_vlabel' : 'Stories', - 'graph_args' : '-l 0', - 'stories.label': 'Stories', - 'starred_stories.label': 'Starred stories', + "graph_category": "NewsBlur", + "graph_title": "NewsBlur Stories", + "graph_vlabel": "Stories", + "graph_args": "-l 0", + "stories.label": "Stories", + "starred_stories.label": "Starred stories", } def calculate_metrics(self): from apps.rss_feeds.models import MStory, MStarredStory return { - 'stories': MStory.objects.count(), - 'starred_stories': MStarredStory.objects.count(), + "stories": MStory.objects.count(), + "starred_stories": MStarredStory.objects.count(), } -if __name__ == '__main__': + +if __name__ == "__main__": NBMuninGraph().run() diff --git a/utils/munin/newsblur_tasks_codes.py b/utils/munin/newsblur_tasks_codes.py index 94106899d..e613704aa 100755 --- a/utils/munin/newsblur_tasks_codes.py +++ b/utils/munin/newsblur_tasks_codes.py @@ -1,49 +1,55 @@ #!/srv/newsblur/venv/newsblur3/bin/python from utils.munin.base import MuninGraph import os + os.environ["DJANGO_SETTINGS_MODULE"] = "newsblur_web.settings" -class NBMuninGraph(MuninGraph): +class NBMuninGraph(MuninGraph): @property def graph_config(self): graph = { - 'graph_category' : 'NewsBlur', - 'graph_title' : 'NewsBlur Task Codes', - 'graph_vlabel' : 'Status codes on feed fetch', - 'graph_args' : '-l 0', + "graph_category": "NewsBlur", + "graph_title": "NewsBlur Task Codes", + "graph_vlabel": "Status codes on feed fetch", + "graph_args": "-l 0", } stats = self.stats - 
graph.update(dict((("_%s.label" % s['_id'], s['_id']) for s in stats))) - graph['graph_order'] = ' '.join(sorted(("_%s" % s['_id']) for s in stats)) + graph.update(dict((("_%s.label" % s["_id"], s["_id"]) for s in stats))) + graph["graph_order"] = " ".join(sorted(("_%s" % s["_id"]) for s in stats)) return graph def calculate_metrics(self): - servers = dict((("_%s" % s['_id'], s['feeds']) for s in self.stats)) - + servers = dict((("_%s" % s["_id"], s["feeds"]) for s in self.stats)) + return servers - + @property def stats(self): import datetime from django.conf import settings - - stats = settings.MONGOANALYTICSDB.nbanalytics.feed_fetches.aggregate([{ - "$match": { - "date": { - "$gt": datetime.datetime.now() - datetime.timedelta(minutes=5), - }, - }, - }, { - "$group": { - "_id" : "$feed_code", - "feeds" : {"$sum": 1}, - }, - }]) - - return list(stats) - -if __name__ == '__main__': + stats = settings.MONGOANALYTICSDB.nbanalytics.feed_fetches.aggregate( + [ + { + "$match": { + "date": { + "$gt": datetime.datetime.now() - datetime.timedelta(minutes=5), + }, + }, + }, + { + "$group": { + "_id": "$feed_code", + "feeds": {"$sum": 1}, + }, + }, + ] + ) + + return list(stats) + + +if __name__ == "__main__": NBMuninGraph().run() diff --git a/utils/munin/newsblur_tasks_pipeline.py b/utils/munin/newsblur_tasks_pipeline.py index 430918f73..f857452d7 100755 --- a/utils/munin/newsblur_tasks_pipeline.py +++ b/utils/munin/newsblur_tasks_pipeline.py @@ -1,54 +1,61 @@ #!/srv/newsblur/venv/newsblur3/bin/python from utils.munin.base import MuninGraph import os + os.environ["DJANGO_SETTINGS_MODULE"] = "newsblur_web.settings" import django + django.setup() -class NBMuninGraph(MuninGraph): +class NBMuninGraph(MuninGraph): @property def graph_config(self): graph = { - 'graph_category' : 'NewsBlur', - 'graph_title' : 'NewsBlur Task Pipeline', - 'graph_vlabel' : 'Feed fetch pipeline times', - 'graph_args' : '-l 0', - 'feed_fetch.label': 'feed_fetch', - 'feed_process.label': 'feed_process', - 'page.label': 'page', - 'icon.label': 'icon', - 'total.label': 'total', + "graph_category": "NewsBlur", + "graph_title": "NewsBlur Task Pipeline", + "graph_vlabel": "Feed fetch pipeline times", + "graph_args": "-l 0", + "feed_fetch.label": "feed_fetch", + "feed_process.label": "feed_process", + "page.label": "page", + "icon.label": "icon", + "total.label": "total", } return graph def calculate_metrics(self): return self.stats - + @property def stats(self): import datetime from django.conf import settings - - stats = settings.MONGOANALYTICSDB.nbanalytics.feed_fetches.aggregate([{ - "$match": { - "date": { - "$gt": datetime.datetime.now() - datetime.timedelta(minutes=5), - }, - }, - }, { - "$group": { - "_id": 1, - "feed_fetch": {"$avg": "$feed_fetch"}, - "feed_process": {"$avg": "$feed_process"}, - "page": {"$avg": "$page"}, - "icon": {"$avg": "$icon"}, - "total": {"$avg": "$total"}, - }, - }]) - - return list(stats)[0] - -if __name__ == '__main__': + stats = settings.MONGOANALYTICSDB.nbanalytics.feed_fetches.aggregate( + [ + { + "$match": { + "date": { + "$gt": datetime.datetime.now() - datetime.timedelta(minutes=5), + }, + }, + }, + { + "$group": { + "_id": 1, + "feed_fetch": {"$avg": "$feed_fetch"}, + "feed_process": {"$avg": "$feed_process"}, + "page": {"$avg": "$page"}, + "icon": {"$avg": "$icon"}, + "total": {"$avg": "$total"}, + }, + }, + ] + ) + + return list(stats)[0] + + +if __name__ == "__main__": NBMuninGraph().run() diff --git a/utils/munin/newsblur_tasks_servers.py b/utils/munin/newsblur_tasks_servers.py index 
3b1286a02..e76b920dc 100755 --- a/utils/munin/newsblur_tasks_servers.py +++ b/utils/munin/newsblur_tasks_servers.py @@ -2,70 +2,80 @@ from utils.munin.base import MuninGraph import datetime import os + os.environ["DJANGO_SETTINGS_MODULE"] = "newsblur_web.settings" from django.conf import settings class NBMuninGraph(MuninGraph): - @property def graph_config(self): graph = { - 'graph_category' : 'NewsBlur', - 'graph_title' : 'NewsBlur Task Server Fetches', - 'graph_vlabel' : '# of fetches / server', - 'graph_args' : '-l 0', - 'total.label' : 'total', - 'total.draw' : 'LINE1', + "graph_category": "NewsBlur", + "graph_title": "NewsBlur Task Server Fetches", + "graph_vlabel": "# of fetches / server", + "graph_args": "-l 0", + "total.label": "total", + "total.draw": "LINE1", } stats = self.stats - graph.update(dict((("%s.label" % s['_id'].replace('-', ''), s['_id']) for s in stats))) - graph.update(dict((("%s.draw" % s['_id'].replace('-', ''), "AREASTACK") for s in stats))) - graph['graph_order'] = ' '.join(sorted(s['_id'].replace('-', '') for s in stats)) + graph.update(dict((("%s.label" % s["_id"].replace("-", ""), s["_id"]) for s in stats))) + graph.update(dict((("%s.draw" % s["_id"].replace("-", ""), "AREASTACK") for s in stats))) + graph["graph_order"] = " ".join(sorted(s["_id"].replace("-", "") for s in stats)) return graph def calculate_metrics(self): - servers = dict((("%s" % s['_id'].replace('-', ''), s['feeds']) for s in self.stats)) - servers['total'] = self.total[0]['feeds'] + servers = dict((("%s" % s["_id"].replace("-", ""), s["feeds"]) for s in self.stats)) + servers["total"] = self.total[0]["feeds"] return servers - + @property def stats(self): - stats = settings.MONGOANALYTICSDB.nbanalytics.feed_fetches.aggregate([{ - "$match": { - "date": { - "$gte": datetime.datetime.now() - datetime.timedelta(minutes=5), + stats = settings.MONGOANALYTICSDB.nbanalytics.feed_fetches.aggregate( + [ + { + "$match": { + "date": { + "$gte": datetime.datetime.now() - datetime.timedelta(minutes=5), + }, + }, }, - }, - }, { - "$group": { - "_id" : "$server", - "feeds" : {"$sum": 1}, - }, - }]) - + { + "$group": { + "_id": "$server", + "feeds": {"$sum": 1}, + }, + }, + ] + ) + return list(stats) - + @property def total(self): import datetime from django.conf import settings - - stats = settings.MONGOANALYTICSDB.nbanalytics.feed_fetches.aggregate([{ - "$match": { - "date": { - "$gt": datetime.datetime.now() - datetime.timedelta(minutes=5), - }, - }, - }, { - "$group": { - "_id" : 1, - "feeds" : {"$sum": 1}, - }, - }]) - - return list(stats) - -if __name__ == '__main__': + stats = settings.MONGOANALYTICSDB.nbanalytics.feed_fetches.aggregate( + [ + { + "$match": { + "date": { + "$gt": datetime.datetime.now() - datetime.timedelta(minutes=5), + }, + }, + }, + { + "$group": { + "_id": 1, + "feeds": {"$sum": 1}, + }, + }, + ] + ) + + return list(stats) + + +if __name__ == "__main__": NBMuninGraph().run() diff --git a/utils/munin/newsblur_tasks_times.py b/utils/munin/newsblur_tasks_times.py index 4a650a467..e83d29ff8 100755 --- a/utils/munin/newsblur_tasks_times.py +++ b/utils/munin/newsblur_tasks_times.py @@ -1,53 +1,60 @@ #!/srv/newsblur/venv/newsblur3/bin/python from utils.munin.base import MuninGraph import os + os.environ["DJANGO_SETTINGS_MODULE"] = "newsblur_web.settings" import django + django.setup() -class NBMuninGraph(MuninGraph): +class NBMuninGraph(MuninGraph): @property def graph_config(self): graph = { - 'graph_category' : 'NewsBlur', - 'graph_title' : 'NewsBlur Task Server Times', - 
'graph_vlabel' : 'Feed fetch time / server', - 'graph_args' : '-l 0', + "graph_category": "NewsBlur", + "graph_title": "NewsBlur Task Server Times", + "graph_vlabel": "Feed fetch time / server", + "graph_args": "-l 0", } stats = self.stats - graph.update(dict((("%s.label" % s['_id'].replace('-', ''), s['_id']) for s in stats))) - graph.update(dict((("%s.draw" % s['_id'].replace('-', ''), 'LINE1') for s in stats))) - graph['graph_order'] = ' '.join(sorted(s['_id'].replace('-', '') for s in stats)) + graph.update(dict((("%s.label" % s["_id"].replace("-", ""), s["_id"]) for s in stats))) + graph.update(dict((("%s.draw" % s["_id"].replace("-", ""), "LINE1") for s in stats))) + graph["graph_order"] = " ".join(sorted(s["_id"].replace("-", "") for s in stats)) return graph def calculate_metrics(self): - servers = dict((("%s" % s['_id'].replace('-', ''), s['total']) for s in self.stats)) + servers = dict((("%s" % s["_id"].replace("-", ""), s["total"]) for s in self.stats)) return servers - + @property def stats(self): import datetime from django.conf import settings - - stats = settings.MONGOANALYTICSDB.nbanalytics.feed_fetches.aggregate([{ - "$match": { - "date": { - "$gt": datetime.datetime.now() - datetime.timedelta(minutes=5), - }, - }, - }, { - "$group": { - "_id" : "$server", - "total" : {"$avg": "$total"}, - }, - }]) - - return list(stats) - -if __name__ == '__main__': + stats = settings.MONGOANALYTICSDB.nbanalytics.feed_fetches.aggregate( + [ + { + "$match": { + "date": { + "$gt": datetime.datetime.now() - datetime.timedelta(minutes=5), + }, + }, + }, + { + "$group": { + "_id": "$server", + "total": {"$avg": "$total"}, + }, + }, + ] + ) + + return list(stats) + + +if __name__ == "__main__": NBMuninGraph().run() diff --git a/utils/munin/newsblur_updates.py b/utils/munin/newsblur_updates.py index 31e0b86e9..4be495c4f 100755 --- a/utils/munin/newsblur_updates.py +++ b/utils/munin/newsblur_updates.py @@ -2,47 +2,49 @@ import redis from utils.munin.base import MuninGraph import os + os.environ["DJANGO_SETTINGS_MODULE"] = "newsblur_web.settings" import django + django.setup() -class NBMuninGraph(MuninGraph): +class NBMuninGraph(MuninGraph): @property def graph_config(self): return { - 'graph_category' : 'NewsBlur', - 'graph_title' : 'NewsBlur Updates', - 'graph_vlabel' : '# of updates', - 'graph_args' : '-l 0', - 'update_queue.label': 'Queued Feeds', - 'feeds_fetched.label': 'Fetched feeds last hour', - 'tasked_feeds.label': 'Tasked Feeds', - 'error_feeds.label': 'Error Feeds', - 'celery_update_feeds.label': 'Celery - Update Feeds', - 'celery_new_feeds.label': 'Celery - New Feeds', - 'celery_push_feeds.label': 'Celery - Push Feeds', - 'celery_work_queue.label': 'Celery - Work Queue', - 'celery_search_queue.label': 'Celery - Search Queue', + "graph_category": "NewsBlur", + "graph_title": "NewsBlur Updates", + "graph_vlabel": "# of updates", + "graph_args": "-l 0", + "update_queue.label": "Queued Feeds", + "feeds_fetched.label": "Fetched feeds last hour", + "tasked_feeds.label": "Tasked Feeds", + "error_feeds.label": "Error Feeds", + "celery_update_feeds.label": "Celery - Update Feeds", + "celery_new_feeds.label": "Celery - New Feeds", + "celery_push_feeds.label": "Celery - Push Feeds", + "celery_work_queue.label": "Celery - Work Queue", + "celery_search_queue.label": "Celery - Search Queue", } - def calculate_metrics(self): from django.conf import settings - + r = redis.Redis(connection_pool=settings.REDIS_FEED_UPDATE_POOL) return { - 'update_queue': r.scard("queued_feeds"), - 'feeds_fetched': 
r.zcard("fetched_feeds_last_hour"), - 'tasked_feeds': r.zcard("tasked_feeds"), - 'error_feeds': r.zcard("error_feeds"), - 'celery_update_feeds': r.llen("update_feeds"), - 'celery_new_feeds': r.llen("new_feeds"), - 'celery_push_feeds': r.llen("push_feeds"), - 'celery_work_queue': r.llen("work_queue"), - 'celery_search_queue': r.llen("search_indexer"), + "update_queue": r.scard("queued_feeds"), + "feeds_fetched": r.zcard("fetched_feeds_last_hour"), + "tasked_feeds": r.zcard("tasked_feeds"), + "error_feeds": r.zcard("error_feeds"), + "celery_update_feeds": r.llen("update_feeds"), + "celery_new_feeds": r.llen("new_feeds"), + "celery_push_feeds": r.llen("push_feeds"), + "celery_work_queue": r.llen("work_queue"), + "celery_search_queue": r.llen("search_indexer"), } -if __name__ == '__main__': + +if __name__ == "__main__": NBMuninGraph().run() diff --git a/utils/munin/newsblur_users.py b/utils/munin/newsblur_users.py index a2083ea95..5725dcfcc 100755 --- a/utils/munin/newsblur_users.py +++ b/utils/munin/newsblur_users.py @@ -1,24 +1,26 @@ #!/srv/newsblur/venv/newsblur3/bin/python from utils.munin.base import MuninGraph import os + os.environ["DJANGO_SETTINGS_MODULE"] = "newsblur_web.settings" import django + django.setup() -class NBMuninGraph(MuninGraph): +class NBMuninGraph(MuninGraph): @property def graph_config(self): return { - 'graph_category' : 'NewsBlur', - 'graph_title' : 'NewsBlur Users', - 'graph_vlabel' : 'users', - 'graph_args' : '-l 0', - 'all.label': 'all', - 'monthly.label': 'monthly', - 'daily.label': 'daily', - 'premium.label': 'premium', - 'queued.label': 'queued', + "graph_category": "NewsBlur", + "graph_title": "NewsBlur Users", + "graph_vlabel": "users", + "graph_args": "-l 0", + "all.label": "all", + "monthly.label": "monthly", + "daily.label": "daily", + "premium.label": "premium", + "queued.label": "queued", } def calculate_metrics(self): @@ -27,15 +29,16 @@ class NBMuninGraph(MuninGraph): from apps.profile.models import Profile, RNewUserQueue last_month = datetime.datetime.utcnow() - datetime.timedelta(days=30) - last_day = datetime.datetime.utcnow() - datetime.timedelta(minutes=60*24) + last_day = datetime.datetime.utcnow() - datetime.timedelta(minutes=60 * 24) return { - 'all': User.objects.count(), - 'monthly': Profile.objects.filter(last_seen_on__gte=last_month).count(), - 'daily': Profile.objects.filter(last_seen_on__gte=last_day).count(), - 'premium': Profile.objects.filter(is_premium=True).count(), - 'queued': RNewUserQueue.user_count(), + "all": User.objects.count(), + "monthly": Profile.objects.filter(last_seen_on__gte=last_month).count(), + "daily": Profile.objects.filter(last_seen_on__gte=last_day).count(), + "premium": Profile.objects.filter(is_premium=True).count(), + "queued": RNewUserQueue.user_count(), } -if __name__ == '__main__': + +if __name__ == "__main__": NBMuninGraph().run() diff --git a/utils/pipeline_utils.py b/utils/pipeline_utils.py index 51cbf85ff..a73f5c4bf 100644 --- a/utils/pipeline_utils.py +++ b/utils/pipeline_utils.py @@ -5,22 +5,25 @@ from pipeline.finders import AppDirectoriesFinder as PipelineAppDirectoriesFinde from pipeline.storage import GZIPMixin from pipeline.storage import PipelineManifestStorage + class PipelineStorage(PipelineManifestStorage): def url(self, *args, **kwargs): if settings.DEBUG_ASSETS: # print(f"Pre-Pipeline storage: {args} {kwargs}") - kwargs['name'] = re.sub(r'\.[a-f0-9]{12}\.(css|js)$', r'.\1', args[0]) + kwargs["name"] = re.sub(r"\.[a-f0-9]{12}\.(css|js)$", r".\1", args[0]) args = args[1:] url = 
super().url(*args, **kwargs) if settings.DEBUG_ASSETS: url = url.replace(settings.STATIC_URL, settings.MEDIA_URL) - url = re.sub(r'\.[a-f0-9]{12}\.(css|js)$', r'.\1', url) + url = re.sub(r"\.[a-f0-9]{12}\.(css|js)$", r".\1", url) # print(f"Pipeline storage: {args} {kwargs} {url}") return url + class GzipPipelineStorage(GZIPMixin, PipelineManifestStorage): pass + class AppDirectoriesFinder(PipelineAppDirectoriesFinder): """ Like AppDirectoriesFinder, but doesn't return any additional ignored patterns @@ -28,36 +31,38 @@ class AppDirectoriesFinder(PipelineAppDirectoriesFinder): This allows us to concentrate/compress our components without dragging the raw versions in too. """ + ignore_patterns = [ # '*.js', # '*.css', - '*.less', - '*.scss', - '*.styl', - '*.sh', - '*.html', - '*.ttf', - '*.md', - '*.markdown', - '*.php', - '*.txt', + "*.less", + "*.scss", + "*.styl", + "*.sh", + "*.html", + "*.ttf", + "*.md", + "*.markdown", + "*.php", + "*.txt", # '*.gif', # due to django_extensions/css/jquery.autocomplete.css: django_extensions/img/indicator.gif - '*.png', - '*.jpg', + "*.png", + "*.jpg", # '*.svg', # due to admin/css/base.css: admin/img/sorting-icons.svg - '*.ico', - '*.icns', - '*.psd', - '*.ai', - '*.sketch', - '*.emf', - '*.eps', - '*.pdf', - '*.xml', - '*LICENSE*', - '*README*', + "*.ico", + "*.icns", + "*.psd", + "*.ai", + "*.sketch", + "*.emf", + "*.eps", + "*.pdf", + "*.xml", + "*LICENSE*", + "*README*", ] - + + class FileSystemFinder(PipelineFileSystemFinder): """ Like FileSystemFinder, but doesn't return any additional ignored patterns @@ -65,48 +70,48 @@ class FileSystemFinder(PipelineFileSystemFinder): This allows us to concentrate/compress our components without dragging the raw versions in too. """ + ignore_patterns = [ # '*.js', # '*.css', # '*.less', # '*.scss', # '*.styl', - '*.sh', - '*.html', - '*.ttf', - '*.md', - '*.markdown', - '*.php', - '*.txt', - '*.gif', - '*.png', - '*.jpg', - '*media/**/*.svg', - '*.ico', - '*.icns', - '*.psd', - '*.ai', - '*.sketch', - '*.emf', - '*.eps', - '*.pdf', - '*.xml', - '*embed*', - 'blog*', + "*.sh", + "*.html", + "*.ttf", + "*.md", + "*.markdown", + "*.php", + "*.txt", + "*.gif", + "*.png", + "*.jpg", + "*media/**/*.svg", + "*.ico", + "*.icns", + "*.psd", + "*.ai", + "*.sketch", + "*.emf", + "*.eps", + "*.pdf", + "*.xml", + "*embed*", + "blog*", # # '*bookmarklet*', # # '*circular*', # # '*embed*', - '*css/mobile*', - '*extensions*', - 'fonts/*/*.css', - '*flash*', + "*css/mobile*", + "*extensions*", + "fonts/*/*.css", + "*flash*", # '*jquery-ui*', # 'mobile*', - '*safari*', + "*safari*", # # '*social*', # # '*vendor*', # 'Makefile*', # 'Gemfile*', - 'node_modules', + "node_modules", ] - \ No newline at end of file diff --git a/utils/ratelimit.py b/utils/ratelimit.py index 04e0aeaf9..9e8293324 100644 --- a/utils/ratelimit.py +++ b/utils/ratelimit.py @@ -8,39 +8,40 @@ import hashlib class ratelimit(object): "Instances of this class can be used as decorators" # This class is designed to be sub-classed - minutes = 1 # The time period - requests = 4 # Number of allowed requests in that time period - - prefix = 'rl-' # Prefix for memcache key - + minutes = 1 # The time period + requests = 4 # Number of allowed requests in that time period + + prefix = "rl-" # Prefix for memcache key + def __init__(self, **options): for key, value in options.items(): setattr(self, key, value) - + def __call__(self, fn): def wrapper(request, *args, **kwargs): return self.view_wrapper(request, fn, *args, **kwargs) + functools.update_wrapper(wrapper, fn) 
return wrapper - + def view_wrapper(self, request, fn, *args, **kwargs): if not self.should_ratelimit(request): return fn(request, *args, **kwargs) - + counts = list(self.get_counters(request).values()) - + # Increment rate limiting counter self.cache_incr(self.current_key(request)) - + # Have they failed? if sum(counts) >= self.requests: return self.disallowed(request) - + return fn(request, *args, **kwargs) - + def cache_get_many(self, keys): return cache.get_many(keys) - + def cache_incr(self, key): # memcache is only backend that can increment atomically try: @@ -49,59 +50,53 @@ class ratelimit(object): cache.incr(key) except (AttributeError, ValueError): cache.set(key, cache.get(key, 0) + 1, self.expire_after()) - + def should_ratelimit(self, request): return True - + def get_counters(self, request): return self.cache_get_many(self.keys_to_check(request)) - + def keys_to_check(self, request): extra = self.key_extra(request) now = datetime.now() return [ - '%s%s-%s' % ( - self.prefix, - extra, - (now - timedelta(minutes = minute)).strftime('%Y%m%d%H%M') - ) for minute in range(self.minutes + 1) + "%s%s-%s" % (self.prefix, extra, (now - timedelta(minutes=minute)).strftime("%Y%m%d%H%M")) + for minute in range(self.minutes + 1) ] - + def current_key(self, request): - return '%s%s-%s' % ( - self.prefix, - self.key_extra(request), - datetime.now().strftime('%Y%m%d%H%M') - ) - + return "%s%s-%s" % (self.prefix, self.key_extra(request), datetime.now().strftime("%Y%m%d%H%M")) + def key_extra(self, request): - key = getattr(request.session, 'session_key', '') + key = getattr(request.session, "session_key", "") if not key: - key = request.META.get('HTTP_X_FORWARDED_FOR', '').split(',')[0] + key = request.META.get("HTTP_X_FORWARDED_FOR", "").split(",")[0] if not key: - key = request.COOKIES.get('newsblur_sessionid', '') + key = request.COOKIES.get("newsblur_sessionid", "") if not key: - key = request.META.get('HTTP_USER_AGENT', '') + key = request.META.get("HTTP_USER_AGENT", "") return key - + def disallowed(self, request): - return HttpResponse('Rate limit exceeded', status=429) - + return HttpResponse("Rate limit exceeded", status=429) + def expire_after(self): "Used for setting the memcached cache expiry" return (self.minutes + 1) * 60 + class ratelimit_post(ratelimit): "Rate limit POSTs - can be used to protect a login form" - key_field = None # If provided, this POST var will affect the rate limit - + key_field = None # If provided, this POST var will affect the rate limit + def should_ratelimit(self, request): - return request.method == 'POST' - + return request.method == "POST" + def key_extra(self, request): # IP address and key_field (if it is set) extra = super(ratelimit_post, self).key_extra(request) if self.key_field: - value = hashlib.sha1((request.POST.get(self.key_field, '')).encode('utf-8')).hexdigest() - extra += '-' + value + value = hashlib.sha1((request.POST.get(self.key_field, "")).encode("utf-8")).hexdigest() + extra += "-" + value return extra diff --git a/utils/redis_raw_log_middleware.py b/utils/redis_raw_log_middleware.py index c1049c7ae..6e11bc13e 100644 --- a/utils/redis_raw_log_middleware.py +++ b/utils/redis_raw_log_middleware.py @@ -6,36 +6,34 @@ from redis.client import Redis, Pipeline from time import time from pprint import pprint -class RedisDumpMiddleware(object): +class RedisDumpMiddleware(object): def __init__(self, get_response=None): self.get_response = get_response def activated(self, request): - return (settings.DEBUG_QUERIES or - (hasattr(request, 
'activated_segments') and - 'db_profiler' in request.activated_segments)) + return settings.DEBUG_QUERIES or ( + hasattr(request, "activated_segments") and "db_profiler" in request.activated_segments + ) def process_view(self, request, callback, callback_args, callback_kwargs): - if not self.activated(request): return - if not getattr(Connection, '_logging', False): + if not self.activated(request): + return + if not getattr(Connection, "_logging", False): # save old methods - setattr(Connection, '_logging', True) + setattr(Connection, "_logging", True) connection.queriesx = [] - Redis.execute_command = \ - self._instrument(Redis.execute_command) - Pipeline._execute_transaction = \ - self._instrument_pipeline(Pipeline._execute_transaction) + Redis.execute_command = self._instrument(Redis.execute_command) + Pipeline._execute_transaction = self._instrument_pipeline(Pipeline._execute_transaction) def process_celery(self, profiler): - if not self.activated(profiler): return - if not getattr(Connection, '_logging', False): + if not self.activated(profiler): + return + if not getattr(Connection, "_logging", False): # save old methods - setattr(Connection, '_logging', True) - Redis.execute_command = \ - self._instrument(Redis.execute_command) - Pipeline._execute_transaction = \ - self._instrument_pipeline(Pipeline._execute_transaction) + setattr(Connection, "_logging", True) + Redis.execute_command = self._instrument(Redis.execute_command) + Pipeline._execute_transaction = self._instrument_pipeline(Pipeline._execute_transaction) def process_response(self, request, response): # if settings.DEBUG and hasattr(self, 'orig_pack_command'): @@ -54,13 +52,16 @@ class RedisDumpMiddleware(object): result = original_method(*args, **kwargs) stop = time() duration = stop - start - if not getattr(connection, 'queriesx', False): + if not getattr(connection, "queriesx", False): connection.queriesx = [] - connection.queriesx.append({ - message['redis_server_name']: message, - 'time': '%.6f' % duration, - }) + connection.queriesx.append( + { + message["redis_server_name"]: message, + "time": "%.6f" % duration, + } + ) return result + return instrumented_method def _instrument_pipeline(self, original_method): @@ -72,38 +73,41 @@ class RedisDumpMiddleware(object): result = original_method(*args, **kwargs) stop = time() duration = stop - start - if not getattr(connection, 'queriesx', False): + if not getattr(connection, "queriesx", False): connection.queriesx = [] - connection.queriesx.append({ - message['redis_server_name']: message, - 'time': '%.6f' % duration, - }) + connection.queriesx.append( + { + message["redis_server_name"]: message, + "time": "%.6f" % duration, + } + ) return result + return instrumented_method - + def process_message(self, *args, **kwargs): query = [] redis_server_name = None for a, arg in enumerate(args): if isinstance(arg, Redis): redis_connection = arg - redis_server_name = redis_connection.connection_pool.connection_kwargs['host'] - if 'db-redis-user' in redis_server_name: - redis_server_name = 'redis_user' - elif 'db-redis-session' in redis_server_name: - redis_server_name = 'redis_session' - elif 'db-redis-story' in redis_server_name: - redis_server_name = 'redis_story' - elif 'db-redis-pubsub' in redis_server_name: - redis_server_name = 'redis_pubsub' - elif 'db_redis' in redis_server_name: - redis_server_name = 'redis_user' + redis_server_name = redis_connection.connection_pool.connection_kwargs["host"] + if "db-redis-user" in redis_server_name: + redis_server_name = "redis_user" + 
elif "db-redis-session" in redis_server_name: + redis_server_name = "redis_session" + elif "db-redis-story" in redis_server_name: + redis_server_name = "redis_story" + elif "db-redis-pubsub" in redis_server_name: + redis_server_name = "redis_pubsub" + elif "db_redis" in redis_server_name: + redis_server_name = "redis_user" continue if len(str(arg)) > 100: arg = "[%s bytes]" % len(str(arg)) - query.append(str(arg).replace('\n', '')) - return { 'query': f"{redis_server_name}: {' '.join(query)}", 'redis_server_name': redis_server_name } - + query.append(str(arg).replace("\n", "")) + return {"query": f"{redis_server_name}: {' '.join(query)}", "redis_server_name": redis_server_name} + def process_pipeline(self, *args, **kwargs): queries = [] redis_server_name = None @@ -112,17 +116,17 @@ class RedisDumpMiddleware(object): continue if isinstance(arg, Pipeline): redis_connection = arg - redis_server_name = redis_connection.connection_pool.connection_kwargs['host'] - if 'db-redis-user' in redis_server_name: - redis_server_name = 'redis_user' - elif 'db-redis-session' in redis_server_name: - redis_server_name = 'redis_session' - elif 'db-redis-story' in redis_server_name: - redis_server_name = 'redis_story' - elif 'db-redis-pubsub' in redis_server_name: - redis_server_name = 'redis_pubsub' - elif 'db_redis' in redis_server_name: - redis_server_name = 'redis_user' + redis_server_name = redis_connection.connection_pool.connection_kwargs["host"] + if "db-redis-user" in redis_server_name: + redis_server_name = "redis_user" + elif "db-redis-session" in redis_server_name: + redis_server_name = "redis_session" + elif "db-redis-story" in redis_server_name: + redis_server_name = "redis_story" + elif "db-redis-pubsub" in redis_server_name: + redis_server_name = "redis_pubsub" + elif "db_redis" in redis_server_name: + redis_server_name = "redis_user" continue if not isinstance(arg, list): continue @@ -132,16 +136,16 @@ class RedisDumpMiddleware(object): if len(str(arg)) > 10000: arg = "[%s bytes]" % len(str(arg)) # query.append(str(arg).replace('\n', '')) - queries_str = '\n\t\t\t\t\t\t~FC'.join(queries) - return { 'query': f"{redis_server_name}: {queries_str}", 'redis_server_name': redis_server_name } + queries_str = "\n\t\t\t\t\t\t~FC".join(queries) + return {"query": f"{redis_server_name}: {queries_str}", "redis_server_name": redis_server_name} def __call__(self, request): response = None - if hasattr(self, 'process_request'): + if hasattr(self, "process_request"): response = self.process_request(request) if not response: response = self.get_response(request) - if hasattr(self, 'process_response'): + if hasattr(self, "process_response"): response = self.process_response(request, response) return response diff --git a/utils/request_introspection_middleware.py b/utils/request_introspection_middleware.py index 7ec0bdf03..21e8a594e 100644 --- a/utils/request_introspection_middleware.py +++ b/utils/request_introspection_middleware.py @@ -12,6 +12,7 @@ IGNORE_PATHS = [ RECORD_SLOW_REQUESTS_ABOVE_SECONDS = 10 + class DumpRequestMiddleware: def process_request(self, request): if settings.DEBUG and request.path not in IGNORE_PATHS: @@ -20,33 +21,49 @@ class DumpRequestMiddleware: if request_items: request_items_str = f"{dict(request_items)}" if len(request_items_str) > 500: - request_items_str = request_items_str[:100] + "...[" + str(len(request_items_str)-200) + " bytes]..." 
+ request_items_str[-100:] - logging.debug(" ---> ~FC%s ~SN~FK~BC%s~BT~ST ~FC%s~BK~FC" % (request.method, request.path, request_items_str)) + request_items_str = ( + request_items_str[:100] + + "...[" + + str(len(request_items_str) - 200) + + " bytes]..." + + request_items_str[-100:] + ) + logging.debug( + " ---> ~FC%s ~SN~FK~BC%s~BT~ST ~FC%s~BK~FC" + % (request.method, request.path, request_items_str) + ) else: logging.debug(" ---> ~FC%s ~SN~FK~BC%s~BT~ST" % (request.method, request.path)) def process_response(self, request, response): - if hasattr(request, 'sql_times_elapsed'): - redis_log = "~FCuser:%s%.6f~SNs ~FCstory:%s%.6f~SNs ~FCsession:%s%.6f~SNs ~FCpubsub:%s%.6f~SNs" % ( - self.color_db(request.sql_times_elapsed['redis_user'], '~FC'), - request.sql_times_elapsed['redis_user'], - self.color_db(request.sql_times_elapsed['redis_story'], '~FC'), - request.sql_times_elapsed['redis_story'], - self.color_db(request.sql_times_elapsed['redis_session'], '~FC'), - request.sql_times_elapsed['redis_session'], - self.color_db(request.sql_times_elapsed['redis_pubsub'], '~FC'), - request.sql_times_elapsed['redis_pubsub'], + if hasattr(request, "sql_times_elapsed"): + redis_log = ( + "~FCuser:%s%.6f~SNs ~FCstory:%s%.6f~SNs ~FCsession:%s%.6f~SNs ~FCpubsub:%s%.6f~SNs" + % ( + self.color_db(request.sql_times_elapsed["redis_user"], "~FC"), + request.sql_times_elapsed["redis_user"], + self.color_db(request.sql_times_elapsed["redis_story"], "~FC"), + request.sql_times_elapsed["redis_story"], + self.color_db(request.sql_times_elapsed["redis_session"], "~FC"), + request.sql_times_elapsed["redis_session"], + self.color_db(request.sql_times_elapsed["redis_pubsub"], "~FC"), + request.sql_times_elapsed["redis_pubsub"], + ) + ) + logging.user( + request, + "~SN~FCDB times ~SB~FK%s~SN~FC: ~FYsql: %s%.4f~SNs ~SN~FMmongo: %s%.5f~SNs ~SN~FCredis: %s" + % ( + request.path, + self.color_db(request.sql_times_elapsed["sql"], "~FY"), + request.sql_times_elapsed["sql"], + self.color_db(request.sql_times_elapsed["mongo"], "~FM"), + request.sql_times_elapsed["mongo"], + redis_log, + ), ) - logging.user(request, "~SN~FCDB times ~SB~FK%s~SN~FC: ~FYsql: %s%.4f~SNs ~SN~FMmongo: %s%.5f~SNs ~SN~FCredis: %s" % ( - request.path, - self.color_db(request.sql_times_elapsed['sql'], '~FY'), - request.sql_times_elapsed['sql'], - self.color_db(request.sql_times_elapsed['mongo'], '~FM'), - request.sql_times_elapsed['mongo'], - redis_log - )) - if hasattr(request, 'start_time'): + if hasattr(request, "start_time"): seconds = time.time() - request.start_time if seconds > RECORD_SLOW_REQUESTS_ABOVE_SECONDS: r = redis.Redis(connection_pool=settings.REDIS_STATISTICS_POOL) @@ -56,9 +73,9 @@ class DumpRequestMiddleware: user_id = request.user.pk if request.user.is_authenticated else "0" data_string = None if request.method == "GET": - data_string = ' '.join([f"{key}={value}" for key, value in request.GET.items()]) + data_string = " ".join([f"{key}={value}" for key, value in request.GET.items()]) elif request.method == "GET": - data_string = ' '.join([f"{key}={value}" for key, value in request.POST.items()]) + data_string = " ".join([f"{key}={value}" for key, value in request.POST.items()]) data = { "user_id": user_id, "time": round(seconds, 2), @@ -66,18 +83,18 @@ class DumpRequestMiddleware: "method": request.method, "data": data_string, } - pipe.lpush(name, base64.b64encode(pickle.dumps(data)).decode('utf-8')) - pipe.expire(name, 60*60*12) # 12 hours + pipe.lpush(name, base64.b64encode(pickle.dumps(data)).decode("utf-8")) + pipe.expire(name, 
60 * 60 * 12) # 12 hours pipe.execute() - + return response - + def color_db(self, seconds, default): color = default - if seconds >= .25: - color = '~SB~FR' - elif seconds > .1: - color = '~FW' + if seconds >= 0.25: + color = "~SB~FR" + elif seconds > 0.1: + color = "~FW" # elif seconds == 0: # color = '~FK~SB' return color @@ -87,11 +104,11 @@ class DumpRequestMiddleware: def __call__(self, request): response = None - if hasattr(self, 'process_request'): + if hasattr(self, "process_request"): response = self.process_request(request) if not response: response = self.get_response(request) - if hasattr(self, 'process_response'): + if hasattr(self, "process_response"): response = self.process_response(request, response) return response diff --git a/utils/rtail.py b/utils/rtail.py index c2f5854b1..07906ce2d 100755 --- a/utils/rtail.py +++ b/utils/rtail.py @@ -24,7 +24,9 @@ def main(): # this is a remote location hostname, path = arg.split(":", 1) if options.identity: - s = subprocess.Popen(["ssh", "-i", options.identity, hostname, "tail -f " + path], stdout=subprocess.PIPE) + s = subprocess.Popen( + ["ssh", "-i", options.identity, hostname, "tail -f " + path], stdout=subprocess.PIPE + ) else: s = subprocess.Popen(["ssh", hostname, "tail -f " + path], stdout=subprocess.PIPE) s.name = arg @@ -36,8 +38,7 @@ def main(): try: while True: - r, _, _ = select.select( - [stream.stdout.fileno() for stream in streams], [], []) + r, _, _ = select.select([stream.stdout.fileno() for stream in streams], [], []) for fileno in r: for stream in streams: if stream.stdout.fileno() != fileno: @@ -46,12 +47,13 @@ def main(): if not data: streams.remove(stream) break - host = re.match(r'^(.*?)\.', stream.name) + host = re.match(r"^(.*?)\.", stream.name) combination_message = "[%-6s] %s" % (host.group(1)[:6], data) sys.stdout.write(combination_message) break except KeyboardInterrupt: print(" --- End of Logging ---") + if __name__ == "__main__": main() diff --git a/utils/s3_utils.py b/utils/s3_utils.py index d045a3721..f71ec07a7 100644 --- a/utils/s3_utils.py +++ b/utils/s3_utils.py @@ -4,19 +4,18 @@ import time import mimetypes from utils.image_functions import ImageOps -if '/srv/newsblur' not in ' '.join(sys.path): +if "/srv/newsblur" not in " ".join(sys.path): sys.path.append("/srv/newsblur") -os.environ['DJANGO_SETTINGS_MODULE'] = 'newsblur_web.settings' +os.environ["DJANGO_SETTINGS_MODULE"] = "newsblur_web.settings" from django.conf import settings -ACCESS_KEY = settings.S3_ACCESS_KEY -SECRET = settings.S3_SECRET +ACCESS_KEY = settings.S3_ACCESS_KEY +SECRET = settings.S3_SECRET BUCKET_NAME = settings.S3_BACKUP_BUCKET # Note that you need to create this bucket first class S3Store: - def __init__(self, bucket_name=settings.S3_AVATARS_BUCKET_NAME): # if settings.DEBUG: # import ssl @@ -31,51 +30,47 @@ class S3Store: # ssl._create_default_https_context = _create_unverified_https_context self.bucket_name = bucket_name self.s3 = settings.S3_CONN - + def create_bucket(self, bucket_name): return self.s3.create_bucket(Bucket=bucket_name) - + def save_profile_picture(self, user_id, filename, image_body): content_type, extension = self._extract_content_type(filename) if not content_type or not extension: return - - image_name = 'profile_%s.%s' % (int(time.time()), extension) - - image = ImageOps.resize_image(image_body, 'fullsize', fit_to_size=False) + + image_name = "profile_%s.%s" % (int(time.time()), extension) + + image = ImageOps.resize_image(image_body, "fullsize", fit_to_size=False) if image: - key = 
'avatars/%s/large_%s' % (user_id, image_name)
+            key = "avatars/%s/large_%s" % (user_id, image_name)
             self._save_object(key, image, content_type=content_type)
 
-        image = ImageOps.resize_image(image_body, 'thumbnail', fit_to_size=True)
+        image = ImageOps.resize_image(image_body, "thumbnail", fit_to_size=True)
         if image:
-            key = 'avatars/%s/thumbnail_%s' % (user_id, image_name)
+            key = "avatars/%s/thumbnail_%s" % (user_id, image_name)
             self._save_object(key, image, content_type=content_type)
-        
+
         return image and image_name
 
     def _extract_content_type(self, filename):
         content_type = mimetypes.guess_type(filename)[0]
         extension = None
-        
-        if content_type == 'image/jpeg':
-            extension = 'jpg'
-        elif content_type == 'image/png':
-            extension = 'png'
-        elif content_type == 'image/gif':
-            extension = 'gif'
-        
+
+        if content_type == "image/jpeg":
+            extension = "jpg"
+        elif content_type == "image/png":
+            extension = "png"
+        elif content_type == "image/gif":
+            extension = "gif"
+
         return content_type, extension
-    
+
     def _save_object(self, key, file_object, content_type=None):
         file_object.seek(0)
         s3_object = self.s3.Object(bucket_name=self.bucket_name, key=key)
         if content_type:
-            s3_object.put(Body=file_object,
-                          ContentType=content_type,
-                          ACL='public-read'
-                          )
+            s3_object.put(Body=file_object, ContentType=content_type, ACL="public-read")
         else:
             s3_object.put(Body=file_object)
-
diff --git a/utils/scrubber/__init__.py b/utils/scrubber/__init__.py
index 7d38d82a1..f39114286 100755
--- a/utils/scrubber/__init__.py
+++ b/utils/scrubber/__init__.py
@@ -9,13 +9,14 @@ See LICENSE for license details.
 __author__ = "Samuel Stauffer "
 __version__ = "1.6.1"
 __license__ = "BSD"
-__all__ = ['Scrubber', 'SelectiveScriptScrubber', 'ScrubberWarning', 'UnapprovedJavascript', 'urlize']
+__all__ = ["Scrubber", "SelectiveScriptScrubber", "ScrubberWarning", "UnapprovedJavascript", "urlize"]
 
 import re, string
 from urllib.parse import urljoin
 from itertools import chain
 from bs4 import BeautifulSoup, Comment
 
+
 def urlize(text, trim_url_limit=None, nofollow=False, autoescape=False):
     """Converts any URLs in text into clickable links.
 
@@ -30,42 +31,59 @@ def urlize(text, trim_url_limit=None, nofollow=False, autoescape=False):
     *Modified from Django*
     """
     from urllib.parse import quote as urlquote
-    
-    LEADING_PUNCTUATION = ['(', '<', '&lt;']
-    TRAILING_PUNCTUATION = ['.', ',', ')', '>', '\n', '&gt;']
-    
-    word_split_re = re.compile(r'([\s\xa0]+|&nbsp;)') # a0 == NBSP
-    punctuation_re = re.compile('^(?P<lead>(?:%s)*)(?P<middle>.*?)(?P<trail>(?:%s)*)$' % \
-        ('|'.join([re.escape(x) for x in LEADING_PUNCTUATION]),
-        '|'.join([re.escape(x) for x in TRAILING_PUNCTUATION])))
-    simple_email_re = re.compile(r'^\S+@[a-zA-Z0-9._-]+\.[a-zA-Z0-9._-]+$')
+
+    LEADING_PUNCTUATION = ["(", "<", "&lt;"]
+    TRAILING_PUNCTUATION = [".", ",", ")", ">", "\n", "&gt;"]
+
+    word_split_re = re.compile(r"([\s\xa0]+|&nbsp;)")  # a0 == NBSP
+    punctuation_re = re.compile(
+        "^(?P<lead>(?:%s)*)(?P<middle>.*?)(?P<trail>(?:%s)*)$"
+        % (
+            "|".join([re.escape(x) for x in LEADING_PUNCTUATION]),
+            "|".join([re.escape(x) for x in TRAILING_PUNCTUATION]),
+        )
+    )
+    simple_email_re = re.compile(r"^\S+@[a-zA-Z0-9._-]+\.[a-zA-Z0-9._-]+$")
     # del x # Temporary variable
 
     def escape(html):
-        return html.replace('&', '&amp;').replace('<', '&lt;').replace('>', '&gt;').replace('"', '&quot;').replace("'", '&#39;')
+        return (
+            html.replace("&", "&amp;")
+            .replace("<", "&lt;")
+            .replace(">", "&gt;")
+            .replace('"', "&quot;")
+            .replace("'", "&#39;")
+        )
 
-    trim_url = lambda x, limit=trim_url_limit: limit is not None and (len(x) > limit and ('%s...' 
% x[:max(0, limit - 3)])) or x + trim_url = ( + lambda x, limit=trim_url_limit: limit is not None + and (len(x) > limit and ("%s..." % x[: max(0, limit - 3)])) + or x + ) words = word_split_re.split(text) - nofollow_attr = nofollow and ' rel="nofollow"' or '' + nofollow_attr = nofollow and ' rel="nofollow"' or "" for i, word in enumerate(words): match = None - if '.' in word or '@' in word or ':' in word: - match = punctuation_re.match(word.replace('\u2019', "'")) + if "." in word or "@" in word or ":" in word: + match = punctuation_re.match(word.replace("\u2019", "'")) if match: lead, middle, trail = match.groups() - middle = middle.encode('utf-8') - middle = middle.decode('utf-8') # Bytes to str + middle = middle.encode("utf-8") + middle = middle.decode("utf-8") # Bytes to str # Make URL we want to point to. url = None - if middle.startswith('http://') or middle.startswith('https://'): - url = urlquote(middle, safe='%/&=:;#?+*') - elif middle.startswith('www.') or ('@' not in middle and \ - middle and middle[0] in string.ascii_letters + string.digits and \ - (middle.endswith('.org') or middle.endswith('.net') or middle.endswith('.com'))): - url = urlquote('http://%s' % middle, safe='%/&=:;#?+*') - elif '@' in middle and not ':' in middle and simple_email_re.match(middle): - url = 'mailto:%s' % middle - nofollow_attr = '' + if middle.startswith("http://") or middle.startswith("https://"): + url = urlquote(middle, safe="%/&=:;#?+*") + elif middle.startswith("www.") or ( + "@" not in middle + and middle + and middle[0] in string.ascii_letters + string.digits + and (middle.endswith(".org") or middle.endswith(".net") or middle.endswith(".com")) + ): + url = urlquote("http://%s" % middle, safe="%/&=:;#?+*") + elif "@" in middle and not ":" in middle and simple_email_re.match(middle): + url = "mailto:%s" % middle + nofollow_attr = "" # Make link. 
if url: trimmed = trim_url(middle) @@ -73,40 +91,117 @@ def urlize(text, trim_url_limit=None, nofollow=False, autoescape=False): lead, trail = escape(lead), escape(trail) url, trimmed = escape(url), escape(trimmed) middle = '%s' % (url, nofollow_attr, trimmed) - words[i] = '%s%s%s' % (lead, middle, trail) + words[i] = "%s%s%s" % (lead, middle, trail) elif autoescape: words[i] = escape(word) elif autoescape: words[i] = escape(word) return "".join(words) - + + class ScrubberWarning(object): pass + class Scrubber(object): - allowed_tags = set(( - 'a', 'abbr', 'acronym', 'b', 'bdo', 'big', 'blockquote', 'br', - 'center', 'cite', 'code', - 'dd', 'del', 'dfn', 'div', 'dl', 'dt', 'em', 'embed', 'font', - 'h1', 'h2', 'h3', 'h4', 'h5', 'h6', 'hr', 'i', 'img', 'ins', - 'kbd', 'li', 'object', 'ol', 'param', 'pre', 'p', 'q', - 's', 'samp', 'small', 'span', 'strike', 'strong', 'sub', 'sup', - 'table', 'tbody', 'td', 'th', 'thead', 'tr', 'tt', 'ul', 'u', - 'var', 'wbr', - )) - disallowed_tags_save_content = set(( - 'blink', 'body', 'html', - )) - allowed_attributes = set(( - 'align', 'alt', 'border', 'cite', 'class', 'dir', - 'height', 'href', 'src', 'style', 'title', 'type', 'width', - 'face', 'size', # font tags - 'flashvars', # Not sure about flashvars - if any harm can come from it - 'classid', # FF needs the classid on object tags for flash - 'name', 'value', 'quality', 'data', 'scale', # for flash embed param tags, could limit to just param if this is harmful - 'salign', 'align', 'wmode', - )) # Bad attributes: 'allowscriptaccess', 'xmlns', 'target' - normalized_tag_replacements = {'b': 'strong', 'i': 'em'} + allowed_tags = set( + ( + "a", + "abbr", + "acronym", + "b", + "bdo", + "big", + "blockquote", + "br", + "center", + "cite", + "code", + "dd", + "del", + "dfn", + "div", + "dl", + "dt", + "em", + "embed", + "font", + "h1", + "h2", + "h3", + "h4", + "h5", + "h6", + "hr", + "i", + "img", + "ins", + "kbd", + "li", + "object", + "ol", + "param", + "pre", + "p", + "q", + "s", + "samp", + "small", + "span", + "strike", + "strong", + "sub", + "sup", + "table", + "tbody", + "td", + "th", + "thead", + "tr", + "tt", + "ul", + "u", + "var", + "wbr", + ) + ) + disallowed_tags_save_content = set( + ( + "blink", + "body", + "html", + ) + ) + allowed_attributes = set( + ( + "align", + "alt", + "border", + "cite", + "class", + "dir", + "height", + "href", + "src", + "style", + "title", + "type", + "width", + "face", + "size", # font tags + "flashvars", # Not sure about flashvars - if any harm can come from it + "classid", # FF needs the classid on object tags for flash + "name", + "value", + "quality", + "data", + "scale", # for flash embed param tags, could limit to just param if this is harmful + "salign", + "align", + "wmode", + ) + ) # Bad attributes: 'allowscriptaccess', 'xmlns', 'target' + normalized_tag_replacements = {"b": "strong", "i": "em"} def __init__(self, base_url=None, autolink=True, nofollow=True, remove_comments=True): self.base_url = base_url @@ -122,11 +217,12 @@ class Scrubber(object): # Find all _scrub_tab_ methods self.tag_scrubbers = {} for k in chain(*[cls.__dict__ for cls in self.__class__.__mro__]): - if k.startswith('_scrub_tag_'): + if k.startswith("_scrub_tag_"): self.tag_scrubbers[k[11:]] = [getattr(self, k)] def autolink_soup(self, soup): """Autolink urls in text nodes that aren't already linked (inside anchor tags).""" + def _autolink(node): if isinstance(node, str): text = node @@ -139,6 +235,7 @@ class Scrubber(object): for child in node.contents: _autolink(child) + 
_autolink(soup) def strip_disallowed(self, soup): @@ -159,7 +256,7 @@ class Scrubber(object): # Remove disallowed attributes attrs = {} - if hasattr(node, 'attrs') and isinstance(node.attrs, dict): + if hasattr(node, "attrs") and isinstance(node.attrs, dict): for k, v in list(node.attrs.items()): if not v: continue @@ -170,7 +267,7 @@ class Scrubber(object): # TODO: This probably needs to be more robust if isinstance(v, str): v2 = v.lower() - if any(x in v2 for x in ('javascript:', 'vbscript:', 'expression(')): + if any(x in v2 for x in ("javascript:", "vbscript:", "expression(")): continue attrs[k] = v @@ -190,46 +287,48 @@ class Scrubber(object): for keep_contentes, node in nodes: if keep_contentes and node.contents: idx = node.parent.contents.index(node) - for n in reversed(list(node.contents)): # Copy the contents list to avoid modifying while traversing + for n in reversed( + list(node.contents) + ): # Copy the contents list to avoid modifying while traversing node.parent.insert(idx, n) node.extract() def _clean_path(self, node, attrname): url = node.get(attrname) - if url and '://' not in url and not url.startswith('mailto:'): + if url and "://" not in url and not url.startswith("mailto:"): print(url) - if url[0] not in ('/', '.') and not self.base_url: + if url[0] not in ("/", ".") and not self.base_url: node[attrname] = "http://" + url - elif not url.startswith('http') and self.base_url: + elif not url.startswith("http") and self.base_url: print(self.base_url) node[attrname] = urljoin(self.base_url, url) def _scrub_tag_a(self, a): if self.nofollow: - a['rel'] = ["nofollow"] + a["rel"] = ["nofollow"] - if not a.get('class', None): - a['class'] = ["external"] + if not a.get("class", None): + a["class"] = ["external"] - self._clean_path(a, 'href') + self._clean_path(a, "href") def _scrub_tag_img(self, img): try: - if img['src'].lower().startswith('chrome://'): + if img["src"].lower().startswith("chrome://"): return True except KeyError: return True # Make sure images always have an 'alt' attribute - img['alt'] = img.get('alt', '') + img["alt"] = img.get("alt", "") - self._clean_path(img, 'src') + self._clean_path(img, "src") def _scrub_tag_font(self, node): attrs = {} - if hasattr(node, 'attrs') and isinstance(node.attrs, dict): + if hasattr(node, "attrs") and isinstance(node.attrs, dict): for k, v in list(node.attrs.items()): - if k.lower() == 'size' and v.startswith('+'): + if k.lower() == "size" and v.startswith("+"): # Remove "size=+0" continue attrs[k] = v @@ -277,49 +376,59 @@ class Scrubber(object): html = str(soup) return self._scrub_html_post(html) + class UnapprovedJavascript(ScrubberWarning): def __init__(self, src): self.src = src - self.path = src[:src.rfind('/')] + self.path = src[: src.rfind("/")] + class SelectiveScriptScrubber(Scrubber): - allowed_tags = Scrubber.allowed_tags | set(('script', 'noscript', 'iframe')) - allowed_attributes = Scrubber.allowed_attributes | set(('scrolling', 'frameborder')) + allowed_tags = Scrubber.allowed_tags | set(("script", "noscript", "iframe")) + allowed_attributes = Scrubber.allowed_attributes | set(("scrolling", "frameborder")) def __init__(self, *args, **kwargs): super(SelectiveScriptScrubber, self).__init__(*args, **kwargs) - self.allowed_script_srcs = set(( - 'http://www.statcounter.com/counter/counter_xhtml.js', - # 'http://www.google-analytics.com/urchin.js', - 'http://pub.mybloglog.com/', - 'http://rpc.bloglines.com/blogroll', - 'http://widget.blogrush.com/show.js', - 'http://re.adroll.com/', - 'http://widgetserver.com/', - 
'http://pagead2.googlesyndication.com/pagead/show_ads.js', # are there pageadX for all kinds of numbers? - )) + self.allowed_script_srcs = set( + ( + "http://www.statcounter.com/counter/counter_xhtml.js", + # 'http://www.google-analytics.com/urchin.js', + "http://pub.mybloglog.com/", + "http://rpc.bloglines.com/blogroll", + "http://widget.blogrush.com/show.js", + "http://re.adroll.com/", + "http://widgetserver.com/", + "http://pagead2.googlesyndication.com/pagead/show_ads.js", # are there pageadX for all kinds of numbers? + ) + ) - self.allowed_script_line_res = set(re.compile(text) for text in ( - r"^(var )?sc_project\=\d+;$", - r"^(var )?sc_invisible\=\d;$", - r"^(var )?sc_partition\=\d+;$", - r'^(var )?sc_security\="[A-Za-z0-9]+";$', - # """^_uacct \= "[^"]+";$""", - # """^urchinTracker\(\);$""", - r'^blogrush_feed = "[^"]+";$', - # """^!--$""", - # """^//-->$""", - )) + self.allowed_script_line_res = set( + re.compile(text) + for text in ( + r"^(var )?sc_project\=\d+;$", + r"^(var )?sc_invisible\=\d;$", + r"^(var )?sc_partition\=\d+;$", + r'^(var )?sc_security\="[A-Za-z0-9]+";$', + # """^_uacct \= "[^"]+";$""", + # """^urchinTracker\(\);$""", + r'^blogrush_feed = "[^"]+";$', + # """^!--$""", + # """^//-->$""", + ) + ) - self.allowed_iframe_srcs = set(re.compile(text) for text in ( - r'^http://www\.google\.com/calendar/embed\?[\w&;=\%]+$', # Google Calendar - r'^https?://www\.youtube\.com/', # YouTube - r'^http://player\.vimeo\.com/', # Vimeo - )) + self.allowed_iframe_srcs = set( + re.compile(text) + for text in ( + r"^http://www\.google\.com/calendar/embed\?[\w&;=\%]+$", # Google Calendar + r"^https?://www\.youtube\.com/", # YouTube + r"^http://player\.vimeo\.com/", # Vimeo + ) + ) def _scrub_tag_script(self, script): - src = script.get('src', None) + src = script.get("src", None) if src: for asrc in self.allowed_script_srcs: # TODO: It could be dangerous to only check "start" of string @@ -330,7 +439,7 @@ class SelectiveScriptScrubber(Scrubber): else: self.warnings.append(UnapprovedJavascript(src)) script.extract() - elif script.get('type', '') != 'text/javascript': + elif script.get("type", "") != "text/javascript": script.extract() else: for line in script.string.splitlines(): @@ -345,6 +454,6 @@ class SelectiveScriptScrubber(Scrubber): break def _scrub_tag_iframe(self, iframe): - src = iframe.get('src', None) + src = iframe.get("src", None) if not src or not any(asrc.match(src) for asrc in self.allowed_iframe_srcs): iframe.extract() diff --git a/utils/story_functions.py b/utils/story_functions.py index 57172d429..dc5092f3e 100644 --- a/utils/story_functions.py +++ b/utils/story_functions.py @@ -23,32 +23,37 @@ from binascii import hexlify from hashlib import sha1 # COMMENTS_RE = re.compile('\') -COMMENTS_RE = re.compile('\ Following %s \t[%s]" % (hostname, address)) - if hostname in found: return - s = subprocess.Popen(["ssh", "-l", NEWSBLUR_USERNAME, - "-i", os.path.expanduser("/srv/secrets-newsblur/keys/docker.key"), - address, "%s %s" % (command, path)], stdout=subprocess.PIPE) + if hostname in found: + return + s = subprocess.Popen( + [ + "ssh", + "-l", + NEWSBLUR_USERNAME, + "-i", + os.path.expanduser("/srv/secrets-newsblur/keys/docker.key"), + address, + "%s %s" % (command, path), + ], + stdout=subprocess.PIPE, + ) s.name = hostname streams.append(s) found.add(hostname) + def read_streams(streams): while True: - r, _, _ = select.select( - [stream.stdout.fileno() for stream in streams], [], []) + r, _, _ = select.select([stream.stdout.fileno() for stream in streams], [], []) 
for fileno in r:
             for stream in streams:
                 if stream.stdout.fileno() != fileno:
@@ -137,11 +152,12 @@ def read_streams(streams):
                     sys.stdout.flush()
                     break
 
+
 if __name__ == "__main__":
-    parser = argparse.ArgumentParser(description='Tail logs from multiple hosts.')
-    parser.add_argument('hostnames', help='Comma-separated list of hostnames', nargs='?')
-    parser.add_argument('roles', help='Comma-separated list of roles', nargs='?')
-    parser.add_argument('--command', help='Command to run on the remote host')
-    parser.add_argument('--path', help='Path to the log file')
+    parser = argparse.ArgumentParser(description="Tail logs from multiple hosts.")
+    parser.add_argument("hostnames", help="Comma-separated list of hostnames", nargs="?")
+    parser.add_argument("roles", help="Comma-separated list of roles", nargs="?")
+    parser.add_argument("--command", help="Command to run on the remote host")
+    parser.add_argument("--path", help="Path to the log file")
     args = parser.parse_args()
 
     main(args.hostnames, command=args.command, path=args.path)
diff --git a/utils/tlnbt.py b/utils/tlnbt.py
index 6d9f0f8f2..5a4a8f090 100755
--- a/utils/tlnbt.py
+++ b/utils/tlnbt.py
@@ -8,4 +8,3 @@ if __name__ == "__main__":
     if len(sys.argv) > 1:
         role = sys.argv[1]
     tlnb.main(roles=[role])
-    
\ No newline at end of file
diff --git a/utils/tlnbw.py b/utils/tlnbw.py
index 3a6b85dec..9fafbccf2 100755
--- a/utils/tlnbw.py
+++ b/utils/tlnbw.py
@@ -8,4 +8,3 @@ if __name__ == "__main__":
     if len(sys.argv) > 1:
         role = sys.argv[1]
     tlnb.main(roles=[role])
-    
\ No newline at end of file
diff --git a/utils/tornado_escape.py b/utils/tornado_escape.py
index cf627a1cf..cd4b2253f 100644
--- a/utils/tornado_escape.py
+++ b/utils/tornado_escape.py
@@ -21,47 +21,52 @@
 have crept in over time.
 """
 
-
 import html.entities
 import re
 import sys
-import urllib.parse
+import urllib.parse
 
 from urllib.parse import parse_qs
 
+
 # json module is in the standard library as of python 2.6; fall back to
 # simplejson if present for older versions.
 try:
     import json
+
     assert hasattr(json, "loads") and hasattr(json, "dumps")
     _json_decode = json.loads
     _json_encode = json.dumps
 except Exception:
     try:
         import simplejson
+
         _json_decode = lambda s: simplejson.loads(_unicode(s))
         _json_encode = lambda v: simplejson.dumps(v)
     except ImportError:
         try:
             # For Google AppEngine
             from django.utils import simplejson
+
             _json_decode = lambda s: simplejson.loads(_unicode(s))
             _json_encode = lambda v: simplejson.dumps(v)
         except ImportError:
+
             def _json_decode(s):
                 raise NotImplementedError(
                     "A JSON parser is required, e.g., simplejson at "
-                    "http://pypi.python.org/pypi/simplejson/")
+                    "http://pypi.python.org/pypi/simplejson/"
+                )
+
             _json_encode = _json_decode
 
 
 _XHTML_ESCAPE_RE = re.compile('[&<>"]')
-_XHTML_ESCAPE_DICT = {'&': '&amp;', '<': '&lt;', '>': '&gt;', '"': '&quot;'}
+_XHTML_ESCAPE_DICT = {"&": "&amp;", "<": "&lt;", ">": "&gt;", '"': "&quot;"}
 
 
 def xhtml_escape(value):
     """Escapes a string so it is valid within XML or XHTML."""
-    return _XHTML_ESCAPE_RE.sub(lambda match: _XHTML_ESCAPE_DICT[match.group(0)],
-                                to_basestring(value))
+    return _XHTML_ESCAPE_RE.sub(lambda match: _XHTML_ESCAPE_DICT[match.group(0)], to_basestring(value))
 
 
 def xhtml_unescape(value):
@@ -94,11 +99,13 @@ def url_escape(value):
     """Returns a valid URL-encoded version of the given value."""
     return urllib.parse.quote_plus(utf8(value))
 
+
 # python 3 changed things around enough that we need two separate
 # implementations of url_unescape. We also need our own implementation
 # of parse_qs since python 3's version insists on decoding everything.
if sys.version_info[0] < 3:
-    def url_unescape(value, encoding='utf-8'):
+
+    def url_unescape(value, encoding="utf-8"):
         """Decodes the given value from a URL.
 
         The argument may be either a byte or unicode string.
 
@@ -113,7 +120,8 @@ if sys.version_info[0] < 3:
 
     parse_qs_bytes = parse_qs
 else:
-    def url_unescape(value, encoding='utf-8'):
+
+    def url_unescape(value, encoding="utf-8"):
         """Decodes the given value from a URL.
 
         The argument may be either a byte or unicode string.
 
@@ -136,11 +144,10 @@ else:
         """
         # This is gross, but python3 doesn't give us another way.
         # Latin1 is the universal donor of character encodings.
-        result = parse_qs(qs, keep_blank_values, strict_parsing,
-                          encoding='latin1', errors='strict')
+        result = parse_qs(qs, keep_blank_values, strict_parsing, encoding="latin1", errors="strict")
         encoded = {}
         for k, v in result.items():
-            encoded[k] = [i.encode('latin1') for i in v]
+            encoded[k] = [i.encode("latin1") for i in v]
         return encoded
 
 
@@ -158,6 +165,7 @@ def utf8(value):
     assert isinstance(value, str)
     return value.encode("utf-8")
 
+
 _TO_UNICODE_TYPES = (str, type(None))
 
 
@@ -172,6 +180,7 @@ def to_unicode(value):
     assert isinstance(value, bytes)
     return value.decode("utf-8")
 
+
 # to_unicode was previously named _unicode not because it was private,
 # but to avoid conflicts with the built-in unicode() function/type
 _unicode = to_unicode
@@ -217,16 +226,20 @@ def recursive_unicode(obj):
     else:
         return obj
 
+
 # I originally used the regex from
 # http://daringfireball.net/2010/07/improved_regex_for_matching_urls
 # but it gets all exponential on certain patterns (such as too many trailing
 # dots), causing the regex matcher to never return.
 # This regex should avoid those problems.
-_URL_RE = re.compile(r"""\b((?:([\w-]+):(/{1,3})|www[.])(?:(?:(?:[^\s&()]|&amp;|&quot;)*(?:[^!"#$%&'()*+,.:;<=>?@\[\]^`{|}~\s]))|(?:\((?:[^\s&()]|&amp;|&quot;)*\)))+)""")
+_URL_RE = re.compile(
+    r"""\b((?:([\w-]+):(/{1,3})|www[.])(?:(?:(?:[^\s&()]|&amp;|&quot;)*(?:[^!"#$%&'()*+,.:;<=>?@\[\]^`{|}~\s]))|(?:\((?:[^\s&()]|&amp;|&quot;)*\)))+)"""
+)
 
 
-def linkify(text, shorten=False, extra_params="",
-            require_protocol=False, permitted_protocols=["http", "https"]):
+def linkify(
+    text, shorten=False, extra_params="", require_protocol=False, permitted_protocols=["http", "https"]
+):
     """Converts plain text into HTML with links.
 
     For example: ``linkify("Hello http://tornadoweb.org!")`` would return
@@ -269,7 +282,7 @@ def linkify(text, shorten=False, extra_params="",
         href = m.group(1)
 
         if not proto:
-            href = "http://" + href # no proto specified, use http
+            href = "http://" + href  # no proto specified, use http
 
         if callable(extra_params):
             params = " " + extra_params(href).strip()
@@ -291,14 +304,13 @@ def linkify(text, shorten=False, extra_params="",
                 # The path is usually not that interesting once shortened
                 # (no more slug, etc), so it really just provides a little
                 # extra indication of shortening.
- url = url[:proto_len] + parts[0] + "/" + \ - parts[1][:8].split('?')[0].split('.')[0] + url = url[:proto_len] + parts[0] + "/" + parts[1][:8].split("?")[0].split(".")[0] if len(url) > max_len * 1.5: # still too long url = url[:max_len] if url != before_clip: - amp = url.rfind('&') + amp = url.rfind("&") # avoid splitting html char entities if amp > max_len - 5: url = url[:amp] @@ -338,4 +350,5 @@ def _build_unicode_map(): unicode_map[name] = chr(value) return unicode_map + _HTML_UNICODE_MAP = _build_unicode_map() diff --git a/utils/twitter_fetcher.py b/utils/twitter_fetcher.py index 356a3eb99..ee33d6cac 100644 --- a/utils/twitter_fetcher.py +++ b/utils/twitter_fetcher.py @@ -14,13 +14,13 @@ from apps.social.models import MSocialServices from apps.reader.models import UserSubscription from utils import log as logging + class TwitterFetcher: - def __init__(self, feed, options=None): self.feed = feed self.address = self.feed.feed_address self.options = options or {} - + def fetch(self, address=None): data = {} if not address: @@ -28,133 +28,144 @@ class TwitterFetcher: self.address = address twitter_user = None - if '/lists/' in address: + if "/lists/" in address: list_id = self.extract_list_id() if not list_id: return - + tweets, list_info = self.fetch_list_timeline(list_id) if not tweets: return - - data['title'] = "%s on Twitter" % list_info.full_name - data['link'] = "https://twitter.com%s" % list_info.uri - data['description'] = "%s on Twitter" % list_info.full_name - elif '/search' in address: + + data["title"] = "%s on Twitter" % list_info.full_name + data["link"] = "https://twitter.com%s" % list_info.uri + data["description"] = "%s on Twitter" % list_info.full_name + elif "/search" in address: search_query = self.extract_search_query() if not search_query: return - + tweets = self.fetch_search_query(search_query) if not tweets: return - - data['title'] = "\"%s\" on Twitter" % search_query - data['link'] = "%s" % address - data['description'] = "Searching \"%s\" on Twitter" % search_query + + data["title"] = '"%s" on Twitter' % search_query + data["link"] = "%s" % address + data["description"] = 'Searching "%s" on Twitter' % search_query else: username = self.extract_username() if not username: - logging.debug(u' ***> [%-30s] ~FRTwitter fetch failed: %s: No active user API access' % - (self.feed.log_title[:30], self.address)) + logging.debug( + " ***> [%-30s] ~FRTwitter fetch failed: %s: No active user API access" + % (self.feed.log_title[:30], self.address) + ) return - + twitter_user = self.fetch_user(username) if not twitter_user: return tweets = self.user_timeline(twitter_user) - - data['title'] = "%s on Twitter" % username - data['link'] = "https://twitter.com/%s" % username - data['description'] = "%s on Twitter" % username - data['lastBuildDate'] = datetime.datetime.utcnow() - data['generator'] = 'NewsBlur Twitter API Decrapifier - %s' % settings.NEWSBLUR_URL - data['docs'] = None - data['feed_url'] = address + data["title"] = "%s on Twitter" % username + data["link"] = "https://twitter.com/%s" % username + data["description"] = "%s on Twitter" % username + + data["lastBuildDate"] = datetime.datetime.utcnow() + data["generator"] = "NewsBlur Twitter API Decrapifier - %s" % settings.NEWSBLUR_URL + data["docs"] = None + data["feed_url"] = address rss = feedgenerator.Atom1Feed(**data) - + for tweet in tweets: story_data = self.tweet_story(tweet.__dict__) rss.add_item(**story_data) - - return rss.writeString('utf-8') - + + return rss.writeString("utf-8") + def 
extract_username(self): username = None try: - address = qurl(self.address, remove=['_']) - username_groups = re.search('twitter.com/(\w+)/?$', address) + address = qurl(self.address, remove=["_"]) + username_groups = re.search("twitter.com/(\w+)/?$", address) if not username_groups: return username = username_groups.group(1) except IndexError: return - + return username def extract_list_id(self): list_id = None try: - list_groups = re.search('twitter.com/i/lists/(\w+)/?', self.address) + list_groups = re.search("twitter.com/i/lists/(\w+)/?", self.address) if not list_groups: return list_id = list_groups.group(1) except IndexError: return - + return list_id def extract_search_query(self): search_query = None - address = qurl(self.address, remove=['_']) + address = qurl(self.address, remove=["_"]) query = urlparse(address).query query_dict = parse_qs(query) - if 'q' in query_dict: - search_query = query_dict['q'][0] - + if "q" in query_dict: + search_query = query_dict["q"][0] + return search_query def twitter_api(self, include_social_services=False): twitter_api = None social_services = None - if self.options.get('requesting_user_id', None): - social_services = MSocialServices.get_user(self.options.get('requesting_user_id')) + if self.options.get("requesting_user_id", None): + social_services = MSocialServices.get_user(self.options.get("requesting_user_id")) try: twitter_api = social_services.twitter_api() except tweepy.error.TweepError as e: - logging.debug(' ***> [%-30s] ~FRTwitter fetch failed: %s: %s' % - (self.feed.log_title[:30], self.address, e)) + logging.debug( + " ***> [%-30s] ~FRTwitter fetch failed: %s: %s" + % (self.feed.log_title[:30], self.address, e) + ) return else: usersubs = UserSubscription.objects.filter(feed=self.feed) if not usersubs: - logging.debug(' ***> [%-30s] ~FRTwitter fetch failed: %s: No subscriptions' % - (self.feed.log_title[:30], self.address)) + logging.debug( + " ***> [%-30s] ~FRTwitter fetch failed: %s: No subscriptions" + % (self.feed.log_title[:30], self.address) + ) return for sub in usersubs: social_services = MSocialServices.get_user(sub.user_id) - if not social_services.twitter_uid: continue + if not social_services.twitter_uid: + continue try: twitter_api = social_services.twitter_api() - if not twitter_api: + if not twitter_api: continue else: break except tweepy.error.TweepError as e: - logging.debug(' ***> [%-30s] ~FRTwitter fetch failed: %s: %s' % - (self.feed.log_title[:30], self.address, e)) + logging.debug( + " ***> [%-30s] ~FRTwitter fetch failed: %s: %s" + % (self.feed.log_title[:30], self.address, e) + ) continue - + if not twitter_api: - logging.debug(' ***> [%-30s] ~FRTwitter fetch failed: %s: No twitter API for %s' % - (self.feed.log_title[:30], self.address, usersubs[0].user.username)) + logging.debug( + " ***> [%-30s] ~FRTwitter fetch failed: %s: No twitter API for %s" + % (self.feed.log_title[:30], self.address, usersubs[0].user.username) + ) return - + if include_social_services: return twitter_api, social_services return twitter_api - + def disconnect_twitter(self): _, social_services = self.twitter_api(include_social_services=True) social_services.disconnect_twitter() @@ -163,298 +174,364 @@ class TwitterFetcher: twitter_api = self.twitter_api() if not twitter_api: return - + try: twitter_user = twitter_api.get_user(username) except TypeError as e: - logging.debug(' ***> [%-30s] ~FRTwitter fetch failed, disconnecting twitter: %s: %s' % - (self.feed.log_title[:30], self.address, e)) + logging.debug( + " ***> [%-30s] ~FRTwitter 
fetch failed, disconnecting twitter: %s: %s" + % (self.feed.log_title[:30], self.address, e) + ) self.feed.save_feed_history(560, "Twitter Error: %s" % (e)) return except tweepy.error.TweepError as e: message = str(e).lower() - if 'suspended' in message: - logging.debug(' ***> [%-30s] ~FRTwitter user suspended, disconnecting twitter: %s: %s' % - (self.feed.log_title[:30], self.address, e)) + if "suspended" in message: + logging.debug( + " ***> [%-30s] ~FRTwitter user suspended, disconnecting twitter: %s: %s" + % (self.feed.log_title[:30], self.address, e) + ) self.feed.save_feed_history(562, "Twitter Error: User suspended") # self.disconnect_twitter() return - elif 'expired token' in message: - logging.debug(' ***> [%-30s] ~FRTwitter user expired, disconnecting twitter: %s: %s' % - (self.feed.log_title[:30], self.address, e)) + elif "expired token" in message: + logging.debug( + " ***> [%-30s] ~FRTwitter user expired, disconnecting twitter: %s: %s" + % (self.feed.log_title[:30], self.address, e) + ) self.feed.save_feed_history(563, "Twitter Error: Expired token") self.disconnect_twitter() return - elif 'not found' in message: - logging.debug(' ***> [%-30s] ~FRTwitter user not found, disconnecting twitter: %s: %s' % - (self.feed.log_title[:30], self.address, e)) + elif "not found" in message: + logging.debug( + " ***> [%-30s] ~FRTwitter user not found, disconnecting twitter: %s: %s" + % (self.feed.log_title[:30], self.address, e) + ) self.feed.save_feed_history(564, "Twitter Error: User not found") return - elif 'not authenticate you' in message: - logging.debug(' ***> [%-30s] ~FRTwitter user not found, (not) disconnecting twitter: %s: %s' % - (self.feed.log_title[:30], self.address, e)) + elif "not authenticate you" in message: + logging.debug( + " ***> [%-30s] ~FRTwitter user not found, (not) disconnecting twitter: %s: %s" + % (self.feed.log_title[:30], self.address, e) + ) self.feed.save_feed_history(565, "Twitter Error: API not authorized") return - elif 'over capacity' in message or 'Max retries' in message: - logging.debug(' ***> [%-30s] ~FRTwitter over capacity, ignoring... %s: %s' % - (self.feed.log_title[:30], self.address, e)) + elif "over capacity" in message or "Max retries" in message: + logging.debug( + " ***> [%-30s] ~FRTwitter over capacity, ignoring... %s: %s" + % (self.feed.log_title[:30], self.address, e) + ) self.feed.save_feed_history(460, "Twitter Error: Over capacity") return - elif '503' in message: - logging.debug(' ***> [%-30s] ~FRTwitter throwing a 503, ignoring... %s: %s' % - (self.feed.log_title[:30], self.address, e)) + elif "503" in message: + logging.debug( + " ***> [%-30s] ~FRTwitter throwing a 503, ignoring... 
%s: %s" + % (self.feed.log_title[:30], self.address, e) + ) self.feed.save_feed_history(463, "Twitter Error: Twitter's down") return else: raise e - + return twitter_user - + def user_timeline(self, twitter_user): try: - tweets = twitter_user.timeline(tweet_mode='extended') + tweets = twitter_user.timeline(tweet_mode="extended") except tweepy.error.TweepError as e: message = str(e).lower() - if 'not authorized' in message: - logging.debug(' ***> [%-30s] ~FRTwitter timeline failed, disconnecting twitter: %s: %s' % - (self.feed.log_title[:30], self.address, e)) + if "not authorized" in message: + logging.debug( + " ***> [%-30s] ~FRTwitter timeline failed, disconnecting twitter: %s: %s" + % (self.feed.log_title[:30], self.address, e) + ) self.feed.save_feed_history(565, "Twitter Error: Not authorized") return [] - elif 'user not found' in message: - logging.debug(' ***> [%-30s] ~FRTwitter user not found, disconnecting twitter: %s: %s' % - (self.feed.log_title[:30], self.address, e)) + elif "user not found" in message: + logging.debug( + " ***> [%-30s] ~FRTwitter user not found, disconnecting twitter: %s: %s" + % (self.feed.log_title[:30], self.address, e) + ) self.feed.save_feed_history(566, "Twitter Error: User not found") return [] - elif '429' in message: - logging.debug(' ***> [%-30s] ~FRTwitter rate limited: %s: %s' % - (self.feed.log_title[:30], self.address, e)) + elif "429" in message: + logging.debug( + " ***> [%-30s] ~FRTwitter rate limited: %s: %s" + % (self.feed.log_title[:30], self.address, e) + ) self.feed.save_feed_history(567, "Twitter Error: Rate limited") return [] - elif 'blocked from viewing' in message: - logging.debug(' ***> [%-30s] ~FRTwitter user blocked, ignoring: %s' % - (self.feed.log_title[:30], e)) + elif "blocked from viewing" in message: + logging.debug( + " ***> [%-30s] ~FRTwitter user blocked, ignoring: %s" % (self.feed.log_title[:30], e) + ) self.feed.save_feed_history(568, "Twitter Error: Blocked from viewing") return [] - elif 'over capacity' in message: - logging.debug(u' ***> [%-30s] ~FRTwitter over capacity, ignoring: %s' % - (self.feed.log_title[:30], e)) + elif "over capacity" in message: + logging.debug( + " ***> [%-30s] ~FRTwitter over capacity, ignoring: %s" % (self.feed.log_title[:30], e) + ) self.feed.save_feed_history(569, "Twitter Error: Over capacity") return [] else: raise e - + if not tweets: return [] return tweets - + def fetch_list_timeline(self, list_id): twitter_api = self.twitter_api() if not twitter_api: return None, None - + try: - list_timeline = twitter_api.list_timeline(list_id=list_id, tweet_mode='extended') + list_timeline = twitter_api.list_timeline(list_id=list_id, tweet_mode="extended") except TypeError as e: - logging.debug(' ***> [%-30s] ~FRTwitter list fetch failed, disconnecting twitter: %s: %s' % - (self.feed.log_title[:30], self.address, e)) + logging.debug( + " ***> [%-30s] ~FRTwitter list fetch failed, disconnecting twitter: %s: %s" + % (self.feed.log_title[:30], self.address, e) + ) self.feed.save_feed_history(570, "Twitter Error: %s" % (e)) return None, None except tweepy.error.TweepError as e: message = str(e).lower() - if 'suspended' in message: - logging.debug(' ***> [%-30s] ~FRTwitter user suspended, disconnecting twitter: %s: %s' % - (self.feed.log_title[:30], self.address, e)) + if "suspended" in message: + logging.debug( + " ***> [%-30s] ~FRTwitter user suspended, disconnecting twitter: %s: %s" + % (self.feed.log_title[:30], self.address, e) + ) self.feed.save_feed_history(572, "Twitter Error: User suspended") 
                 # self.disconnect_twitter()
                 return None, None
-            elif 'expired token' in message:
-                logging.debug(' ***> [%-30s] ~FRTwitter user expired, disconnecting twitter: %s: %s' %
-                              (self.feed.log_title[:30], self.address, e))
+            elif "expired token" in message:
+                logging.debug(
+                    " ***> [%-30s] ~FRTwitter user expired, disconnecting twitter: %s: %s"
+                    % (self.feed.log_title[:30], self.address, e)
+                )
                 self.feed.save_feed_history(573, "Twitter Error: Expired token")
                 self.disconnect_twitter()
                 return None, None
-            elif 'not found' in message:
-                logging.debug(' ***> [%-30s] ~FRTwitter user not found, disconnecting twitter: %s: %s' %
-                              (self.feed.log_title[:30], self.address, e))
+            elif "not found" in message:
+                logging.debug(
+                    " ***> [%-30s] ~FRTwitter user not found, disconnecting twitter: %s: %s"
+                    % (self.feed.log_title[:30], self.address, e)
+                )
                 self.feed.save_feed_history(574, "Twitter Error: User not found")
                 return None, None
-            elif 'not authenticate you' in message:
-                logging.debug(' ***> [%-30s] ~FRTwitter user not found, (not) disconnecting twitter: %s: %s' %
-                              (self.feed.log_title[:30], self.address, e))
+            elif "not authenticate you" in message:
+                logging.debug(
+                    " ***> [%-30s] ~FRTwitter user not found, (not) disconnecting twitter: %s: %s"
+                    % (self.feed.log_title[:30], self.address, e)
+                )
                 self.feed.save_feed_history(565, "Twitter Error: API not authorized")
                 return None, None
-            elif 'over capacity' in message or 'Max retries' in message:
-                logging.debug(' ***> [%-30s] ~FRTwitter over capacity, ignoring... %s: %s' %
-                              (self.feed.log_title[:30], self.address, e))
+            elif "over capacity" in message or "Max retries" in message:
+                logging.debug(
+                    " ***> [%-30s] ~FRTwitter over capacity, ignoring... %s: %s"
+                    % (self.feed.log_title[:30], self.address, e)
+                )
                 self.feed.save_feed_history(470, "Twitter Error: Over capacity")
                 return None, None
             else:
                 raise e
-        
+
         list_info = twitter_api.get_list(list_id=list_id)
-        
+
         if not list_timeline:
             return [], list_info
         return list_timeline, list_info
-    
+
     def fetch_search_query(self, search_query):
         twitter_api = self.twitter_api()
         if not twitter_api:
             return None
-        
+
         try:
-            list_timeline = twitter_api.search(search_query, tweet_mode='extended')
+            list_timeline = twitter_api.search(search_query, tweet_mode="extended")
         except TypeError as e:
-            logging.debug(' ***> [%-30s] ~FRTwitter list fetch failed, disconnecting twitter: %s: %s' %
-                          (self.feed.log_title[:30], self.address, e))
+            logging.debug(
+                " ***> [%-30s] ~FRTwitter list fetch failed, disconnecting twitter: %s: %s"
+                % (self.feed.log_title[:30], self.address, e)
+            )
             self.feed.save_feed_history(570, "Twitter Error: %s" % (e))
             return None
         except tweepy.error.TweepError as e:
             message = str(e).lower()
-            if 'suspended' in message:
-                logging.debug(' ***> [%-30s] ~FRTwitter user suspended, disconnecting twitter: %s: %s' %
-                              (self.feed.log_title[:30], self.address, e))
+            if "suspended" in message:
+                logging.debug(
+                    " ***> [%-30s] ~FRTwitter user suspended, disconnecting twitter: %s: %s"
+                    % (self.feed.log_title[:30], self.address, e)
+                )
                 self.feed.save_feed_history(572, "Twitter Error: User suspended")
                 # self.disconnect_twitter()
                 return None
-            elif 'expired token' in message:
-                logging.debug(' ***> [%-30s] ~FRTwitter user expired, disconnecting twitter: %s: %s' %
-                              (self.feed.log_title[:30], self.address, e))
+            elif "expired token" in message:
+                logging.debug(
+                    " ***> [%-30s] ~FRTwitter user expired, disconnecting twitter: %s: %s"
+                    % (self.feed.log_title[:30], self.address, e)
+                )
                 self.feed.save_feed_history(573, "Twitter Error: Expired token")
                 self.disconnect_twitter()
                 return None
-            elif 'not found' in message:
-                logging.debug(' ***> [%-30s] ~FRTwitter user not found, disconnecting twitter: %s: %s' %
-                              (self.feed.log_title[:30], self.address, e))
+            elif "not found" in message:
+                logging.debug(
+                    " ***> [%-30s] ~FRTwitter user not found, disconnecting twitter: %s: %s"
+                    % (self.feed.log_title[:30], self.address, e)
+                )
                 self.feed.save_feed_history(574, "Twitter Error: User not found")
                 return None
-            elif 'not authenticate you' in message:
-                logging.debug(' ***> [%-30s] ~FRTwitter user not found, (not) disconnecting twitter: %s: %s' %
-                              (self.feed.log_title[:30], self.address, e))
+            elif "not authenticate you" in message:
+                logging.debug(
+                    " ***> [%-30s] ~FRTwitter user not found, (not) disconnecting twitter: %s: %s"
+                    % (self.feed.log_title[:30], self.address, e)
+                )
                 self.feed.save_feed_history(565, "Twitter Error: API not authorized")
                 return None
-            elif 'over capacity' in message or 'Max retries' in message:
-                logging.debug(' ***> [%-30s] ~FRTwitter over capacity, ignoring... %s: %s' %
-                              (self.feed.log_title[:30], self.address, e))
+            elif "over capacity" in message or "Max retries" in message:
+                logging.debug(
+                    " ***> [%-30s] ~FRTwitter over capacity, ignoring... %s: %s"
+                    % (self.feed.log_title[:30], self.address, e)
+                )
                 self.feed.save_feed_history(470, "Twitter Error: Over capacity")
                 return None
             else:
                 raise e
-        
+
         if not list_timeline:
             return []
         return list_timeline
-    
+
     def tweet_story(self, user_tweet):
         categories = set()
-        
-        if user_tweet['full_text'].startswith('RT @'):
-            categories.add('retweet')
-        elif user_tweet['in_reply_to_status_id'] or user_tweet['full_text'].startswith('@'):
-            categories.add('reply')
+
+        if user_tweet["full_text"].startswith("RT @"):
+            categories.add("retweet")
+        elif user_tweet["in_reply_to_status_id"] or user_tweet["full_text"].startswith("@"):
+            categories.add("reply")
         else:
-            categories.add('tweet')
-        if user_tweet['full_text'].startswith('RT @'):
-            categories.add('retweet')
-        if user_tweet['favorite_count']:
-            categories.add('liked')
-        if user_tweet['retweet_count']:
-            categories.add('retweeted')
-        if 'http' in user_tweet['full_text']:
-            categories.add('link')
-        
+            categories.add("tweet")
+        if user_tweet["full_text"].startswith("RT @"):
+            categories.add("retweet")
+        if user_tweet["favorite_count"]:
+            categories.add("liked")
+        if user_tweet["retweet_count"]:
+            categories.add("retweeted")
+        if "http" in user_tweet["full_text"]:
+            categories.add("link")
+
         story = {}
         content_tweet = user_tweet
         entities = ""
-        author = user_tweet.get('author') or user_tweet.get('user')
-        if not isinstance(author, dict): author = author.__dict__
-        author_screen_name = author['screen_name']
-        author_name = author['name']
-        author_fullname = "%s (%s)" % (author_name, author_screen_name) if author_screen_name != author_name else author_screen_name
+        author = user_tweet.get("author") or user_tweet.get("user")
+        if not isinstance(author, dict):
+            author = author.__dict__
+        author_screen_name = author["screen_name"]
+        author_name = author["name"]
+        author_fullname = (
+            "%s (%s)" % (author_name, author_screen_name)
+            if author_screen_name != author_name
+            else author_screen_name
+        )
        original_author_screen_name = author_screen_name
-        if user_tweet['in_reply_to_user_id'] == author['id']:
-            categories.add('reply-to-self')
+        if user_tweet["in_reply_to_user_id"] == author["id"]:
+            categories.add("reply-to-self")
         retweet_author = ""
-        tweet_link = "https://twitter.com/%s/status/%s" % (original_author_screen_name, user_tweet['id'])
-        if 'retweeted_status' in user_tweet:
+        tweet_link = "https://twitter.com/%s/status/%s" % (original_author_screen_name, user_tweet["id"])
+        if "retweeted_status" in user_tweet:
             retweet_author = """Retweeted by %s on %s""" % (
                 author_screen_name,
-                author['profile_image_url_https'],
+                author["profile_image_url_https"],
                 author_screen_name,
                 author_fullname,
-                DateFormat(user_tweet['created_at']).format('l, F jS, Y g:ia').replace('.',''),
-            )
-            content_tweet = user_tweet['retweeted_status'].__dict__
-            author = content_tweet['author']
-            if not isinstance(author, dict): author = author.__dict__
-            author_screen_name = author['screen_name']
-            author_name = author['name']
-            author_fullname = "%s (%s)" % (author_name, author_screen_name) if author_screen_name != author_name else author_screen_name
-            tweet_link = "https://twitter.com/%s/status/%s" % (author_screen_name, user_tweet['retweeted_status'].id)
-        
-        tweet_title = user_tweet['full_text']
-        tweet_text = linebreaks(content_tweet['full_text'])
-        
+                DateFormat(user_tweet["created_at"]).format("l, F jS, Y g:ia").replace(".", ""),
+            )
+            content_tweet = user_tweet["retweeted_status"].__dict__
+            author = content_tweet["author"]
+            if not isinstance(author, dict):
+                author = author.__dict__
+            author_screen_name = author["screen_name"]
+            author_name = author["name"]
+            author_fullname = (
+                "%s (%s)" % (author_name, author_screen_name)
+                if author_screen_name != author_name
+                else author_screen_name
+            )
+            tweet_link = "https://twitter.com/%s/status/%s" % (
+                author_screen_name,
+                user_tweet["retweeted_status"].id,
+            )
+
+        tweet_title = user_tweet["full_text"]
+        tweet_text = linebreaks(content_tweet["full_text"])
+
         replaced = {}
-        entities_media = content_tweet['entities'].get('media', [])
-        if 'extended_entities' in content_tweet:
-            entities_media = content_tweet['extended_entities'].get('media', [])
+        entities_media = content_tweet["entities"].get("media", [])
+        if "extended_entities" in content_tweet:
+            entities_media = content_tweet["extended_entities"].get("media", [])
         for media in entities_media:
-            if 'media_url_https' not in media: continue
-            if media['type'] == 'photo':
-                if media.get('url') and media['url'] in tweet_text:
-                    tweet_title = tweet_title.replace(media['url'], media['display_url'])
-                    replacement = "<a href=\"%s\">%s</a>" % (media['expanded_url'], media['display_url'])
-                    if not replaced.get(media['url']):
-                        tweet_text = tweet_text.replace(media['url'], replacement)
-                        replaced[media['url']] = True
-                entities += "<img src=\"%s\">" % media['media_url_https']
-                categories.add('photo')
-            if media['type'] == 'video' or media['type'] == 'animated_gif':
-                if media.get('url') and media['url'] in tweet_text:
-                    tweet_title = tweet_title.replace(media['url'], media['display_url'])
-                    replacement = "<a href=\"%s\">%s</a>" % (media['expanded_url'], media['display_url'])
-                    if not replaced.get(media['url']):
-                        tweet_text = tweet_text.replace(media['url'], replacement)
-                        replaced[media['url']] = True
+            if "media_url_https" not in media:
+                continue
+            if media["type"] == "photo":
+                if media.get("url") and media["url"] in tweet_text:
+                    tweet_title = tweet_title.replace(media["url"], media["display_url"])
+                    replacement = '<a href="%s">%s</a>' % (media["expanded_url"], media["display_url"])
+                    if not replaced.get(media["url"]):
+                        tweet_text = tweet_text.replace(media["url"], replacement)
+                        replaced[media["url"]] = True
+                entities += '<img src="%s">' % media["media_url_https"]
+                categories.add("photo")
+            if media["type"] == "video" or media["type"] == "animated_gif":
+                if media.get("url") and media["url"] in tweet_text:
+                    tweet_title = tweet_title.replace(media["url"], media["display_url"])
+                    replacement = '<a href="%s">%s</a>' % (media["expanded_url"], media["display_url"])
+                    if not replaced.get(media["url"]):
+                        tweet_text = tweet_text.replace(media["url"], replacement)
+                        replaced[media["url"]] = True
                 bitrate = 0
                 chosen_variant = None
-                for variant in media['video_info']['variants']:
+                for variant in media["video_info"]["variants"]:
                     if not chosen_variant:
                         chosen_variant = variant
-                    if variant.get('bitrate', 0) > bitrate:
-                        bitrate = variant['bitrate']
+                    if variant.get("bitrate", 0) > bitrate:
+                        bitrate = variant["bitrate"]
                         chosen_variant = variant
                 if chosen_variant:
-                    entities += "