NewsBlur/settings.py


import sys
import logging
import os
import datetime
import redis
import boto
import raven
from mongoengine import connect
from vendor.dynamodb_mapper.model import ConnectionBorg
from boto.s3.connection import S3Connection
from utils import jammit
# ===================
# = Server Settings =
# ===================
ADMINS = (
    ('Samuel Clay', 'samuel@newsblur.com'),
)
SERVER_NAME = 'local'
SERVER_EMAIL = 'server@newsblur.com'
HELLO_EMAIL = 'hello@newsblur.com'
NEWSBLUR_URL = 'http://www.newsblur.com'
# ===========================
# = Directory Declarations =
# ===========================
CURRENT_DIR = os.path.dirname(__file__)
NEWSBLUR_DIR = CURRENT_DIR
TEMPLATE_DIRS = (os.path.join(CURRENT_DIR, 'templates'),)
MEDIA_ROOT = os.path.join(CURRENT_DIR, 'media')
STATIC_ROOT = os.path.join(CURRENT_DIR, 'static')
UTILS_ROOT = os.path.join(CURRENT_DIR, 'utils')
VENDOR_ROOT = os.path.join(CURRENT_DIR, 'vendor')
LOG_FILE = os.path.join(CURRENT_DIR, 'logs/newsblur.log')
IMAGE_MASK = os.path.join(CURRENT_DIR, 'media/img/mask.png')
# ==============
# = PYTHONPATH =
# ==============
if '/utils' not in ' '.join(sys.path):
    sys.path.append(UTILS_ROOT)
if '/vendor' not in ' '.join(sys.path):
    sys.path.append(VENDOR_ROOT)
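# The appends above keep the repository's utils/ and vendor/ packages
# importable without installing them system-wide; vendor/ carries the
# third-party code that ships with the checkout.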
# ===================
# = Global Settings =
# ===================
DEBUG = False
TEST_DEBUG = False
SEND_BROKEN_LINK_EMAILS = False
MANAGERS = ADMINS
PAYPAL_RECEIVER_EMAIL = 'samuel@ofbrooklyn.com'
TIME_ZONE = 'GMT'
LANGUAGE_CODE = 'en-us'
SITE_ID = 1
USE_I18N = False
LOGIN_REDIRECT_URL = '/'
LOGIN_URL = '/reader/login'
# URL prefix for admin media -- CSS, JavaScript and images. Make sure to use a
# trailing slash.
# Examples: "http://foo.com/media/", "/media/".
ADMIN_MEDIA_PREFIX = '/media/admin/'
SECRET_KEY = 'YOUR_SECRET_KEY'
EMAIL_BACKEND = 'django_ses.SESBackend'
CIPHER_USERNAMES = False
DEBUG_ASSETS = DEBUG
HOMEPAGE_USERNAME = 'popular'
# ===============
# = Environment =
# ===============
PRODUCTION = NEWSBLUR_DIR.startswith('/home/conesus/newsblur')
STAGING = NEWSBLUR_DIR.startswith('/home/conesus/staging')
DEVELOPMENT = NEWSBLUR_DIR.startswith('/Users/')
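# The environment is inferred from where this checkout lives on disk, so no
# extra configuration is needed to distinguish production, staging, and a
# local development machine.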
# ===========================
# = Django-specific Modules =
# ===========================
TEMPLATE_LOADERS = (
    'django.template.loaders.filesystem.Loader',
    'django.template.loaders.app_directories.Loader',
)
TEMPLATE_CONTEXT_PROCESSORS = (
    'django.contrib.auth.context_processors.auth',
    'django.core.context_processors.debug',
    'django.core.context_processors.media',
    'django.core.context_processors.request',
)
MIDDLEWARE_CLASSES = (
    'django.middleware.gzip.GZipMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'apps.profile.middleware.TimingMiddleware',
    'apps.profile.middleware.LastSeenMiddleware',
    'apps.profile.middleware.SQLLogToConsoleMiddleware',
    'subdomains.middleware.SubdomainMiddleware',
    'apps.profile.middleware.SimpsonsMiddleware',
    # 'debug_toolbar.middleware.DebugToolbarMiddleware',
)
# ===========
# = Logging =
# ===========
LOGGING = {
    'version': 1,
    'disable_existing_loggers': False,
    'formatters': {
        'verbose': {
            'format': '[%(asctime)-12s] %(message)s',
            'datefmt': '%b %d %H:%M:%S'
        },
        'simple': {
            'format': '%(message)s'
        },
    },
    'handlers': {
        'null': {
            'level': 'DEBUG',
            'class': 'django.utils.log.NullHandler',
        },
        'console': {
            'level': 'DEBUG',
            'class': 'logging.StreamHandler',
            'formatter': 'verbose'
        },
        'log_file': {
            'level': 'DEBUG',
            'class': 'logging.handlers.RotatingFileHandler',
            'filename': LOG_FILE,
            'maxBytes': 16777216,  # 16 MB
            'formatter': 'verbose'
        },
        'mail_admins': {
            'level': 'ERROR',
            'class': 'django.utils.log.AdminEmailHandler',
            'include_html': True,
        },
    },
    'loggers': {
        'django.request': {
            'handlers': ['mail_admins'],
            'level': 'ERROR',
            'propagate': True,
        },
        'django.db.backends': {
            'handlers': ['null'],
            'level': 'DEBUG',
            'propagate': False,
        },
        'newsblur': {
            'handlers': ['console', 'log_file'],
            'level': 'DEBUG',
            'propagate': False,
        },
        'apps': {
            'handlers': ['log_file'],
            'level': 'INFO',
            'propagate': True,
        },
    },
}
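# A hedged usage sketch: application code would pick up the project logger
# configured above through the standard library, e.g.
#
#     import logging
#     logging.getLogger('newsblur').debug('Refreshed %s feeds', feed_count)
#
# which writes to both the console and the rotating log file.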
# ==========================
# = Miscellaneous Settings =
# ==========================
DAYS_OF_UNREAD = 14
SUBSCRIBER_EXPIRE = 2
AUTH_PROFILE_MODULE = 'newsblur.UserProfile'
TEST_DATABASE_COLLATION = 'utf8_general_ci'
TEST_DATABASE_NAME = 'newsblur_test'
ROOT_URLCONF = 'urls'
INTERNAL_IPS = ('127.0.0.1',)
LOGGING_LOG_SQL = True
APPEND_SLASH = False
SOUTH_TESTS_MIGRATE = False
SESSION_ENGINE = "django.contrib.sessions.backends.db"
TEST_RUNNER = "utils.testrunner.TestRunner"
SESSION_COOKIE_NAME = 'newsblur_sessionid'
SESSION_COOKIE_AGE = 60*60*24*365*2 # 2 years
SESSION_COOKIE_DOMAIN = '.newsblur.com'
SENTRY_DSN = 'https://XXXNEWSBLURXXX@app.getsentry.com/99999999'
# ==============
# = Subdomains =
# ==============
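# The subdomains middleware above picks a urlconf per subdomain; here the bare
# domain and www both fall through to the main urls module.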
SUBDOMAIN_URLCONFS = {
    None: 'urls',
    'www': 'urls',
}
REMOVE_WWW_FROM_DOMAIN = True
# ===========
# = Logging =
# ===========
LOG_LEVEL = logging.DEBUG
LOG_TO_STREAM = False
# ===============
# = Django Apps =
# ===============
INSTALLED_APPS = (
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.sites',
    'django.contrib.admin',
    'django_extensions',
    'djcelery',
    'django_ses',
    'apps.rss_feeds',
    'apps.reader',
    'apps.analyzer',
    'apps.feed_import',
    'apps.profile',
    'apps.recommendations',
    'apps.statistics',
    'apps.static',
    'apps.mobile',
    'apps.push',
    'apps.social',
    'apps.oauth',
    'apps.categories',
    'south',
    'utils',
    'vendor',
    'vendor.typogrify',
    'vendor.paypal.standard.ipn',
    'vendor.zebra',
)
if not DEVELOPMENT:
    INSTALLED_APPS += (
        'gunicorn',
        'raven.contrib.django',
    )
# ==========
# = Stripe =
# ==========
STRIPE_SECRET = "YOUR-SECRET-API-KEY"
STRIPE_PUBLISHABLE = "YOUR-PUBLISHABLE-API-KEY"
ZEBRA_ENABLE_APP = True
# ==========
# = Celery =
# ==========
import djcelery
djcelery.setup_loader()
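# Tasks are routed by task name to the dedicated queues below; each queue binds
# to its own direct exchange, and anything without an explicit route lands in
# work_queue (the CELERY_DEFAULT_QUEUE further down).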
CELERY_ROUTES = {
    "work-queue": {
        "queue": "work_queue",
        "binding_key": "work_queue",
    },
    "new-feeds": {
        "queue": "new_feeds",
        "binding_key": "new_feeds",
    },
    "push-feeds": {
        "queue": "push_feeds",
        "binding_key": "push_feeds",
    },
    "update-feeds": {
        "queue": "update_feeds",
        "binding_key": "update_feeds",
    },
    "beat-tasks": {
        "queue": "beat_tasks",
        "binding_key": "beat_tasks",
    },
}
CELERY_QUEUES = {
    "work_queue": {
        "exchange": "work_queue",
        "exchange_type": "direct",
        "binding_key": "work_queue",
    },
    "new_feeds": {
        "exchange": "new_feeds",
        "exchange_type": "direct",
        "binding_key": "new_feeds",
    },
    "push_feeds": {
        "exchange": "push_feeds",
        "exchange_type": "direct",
        "binding_key": "push_feeds",
    },
    "update_feeds": {
        "exchange": "update_feeds",
        "exchange_type": "direct",
        "binding_key": "update_feeds",
    },
    "beat_tasks": {
        "exchange": "beat_tasks",
        "exchange_type": "direct",
        "binding_key": "beat_tasks",
    },
}
CELERY_DEFAULT_QUEUE = "work_queue"
BROKER_BACKEND = "redis"
BROKER_URL = "redis://db01:6379/0"
CELERY_REDIS_HOST = "db01"
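# A prefetch multiplier of 1 makes each worker process reserve only one task
# at a time, so a slow feed fetch does not sit on a backlog of unacked work.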
CELERYD_PREFETCH_MULTIPLIER = 1
CELERY_IMPORTS = ("apps.rss_feeds.tasks",
                  "apps.social.tasks",
                  "apps.reader.tasks",
                  "apps.feed_import.tasks",)
CELERYD_CONCURRENCY = 4
CELERY_IGNORE_RESULT = True
CELERY_ACKS_LATE = True  # Acknowledge after the task finishes, so it is redelivered if a worker dies mid-task
CELERYD_MAX_TASKS_PER_CHILD = 10
CELERYD_TASK_TIME_LIMIT = 12 * 30  # 6 minutes, in seconds
CELERY_DISABLE_RATE_LIMITS = True
SECONDS_TO_DELAY_CELERY_EMAILS = 60
CELERYBEAT_SCHEDULE = {
    'freshen-homepage': {
        'task': 'freshen-homepage',
        'schedule': datetime.timedelta(hours=1),
        'options': {'queue': 'beat_tasks'},
    },
    'task-feeds': {
        'task': 'task-feeds',
        'schedule': datetime.timedelta(minutes=1),
        'options': {'queue': 'beat_tasks'},
    },
    'collect-stats': {
        'task': 'collect-stats',
        'schedule': datetime.timedelta(minutes=1),
        'options': {'queue': 'beat_tasks'},
    },
    'collect-feedback': {
        'task': 'collect-feedback',
        'schedule': datetime.timedelta(minutes=1),
        'options': {'queue': 'beat_tasks'},
    },
    'share-popular-stories': {
        'task': 'share-popular-stories',
        'schedule': datetime.timedelta(hours=1),
        'options': {'queue': 'beat_tasks'},
    },
    'clean-analytics': {
        'task': 'clean-analytics',
        'schedule': datetime.timedelta(hours=12),
        'options': {'queue': 'beat_tasks'},
    },
}
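# The schedule refers to tasks by their registered names, presumably declared
# in the modules listed in CELERY_IMPORTS above; every periodic job is pinned
# to beat_tasks so it stays off the feed-fetching queues.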
# =========
# = Mongo =
# =========
MONGO_DB = {
    'host': '127.0.0.1:27017',
    'name': 'newsblur',
}
MONGO_ANALYTICS_DB = {
    'host': '127.0.0.1:27017',
    'name': 'nbanalytics',
}
# ====================
# = Database Routers =
# ====================
class MasterSlaveRouter(object):
    """A router that sets up a simple master/slave configuration"""

    def db_for_read(self, model, **hints):
        "Point all read operations to a random slave"
        return 'slave'

    def db_for_write(self, model, **hints):
        "Point all write operations to the master"
        return 'default'

    def allow_relation(self, obj1, obj2, **hints):
        "Allow any relation between two objects in the db pool"
        db_list = ('slave', 'default')
        if obj1._state.db in db_list and obj2._state.db in db_list:
            return True
        return None

    def allow_syncdb(self, db, model):
        "Explicitly put all models on all databases."
        return True
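# This router only takes effect if it is referenced from DATABASE_ROUTERS,
# e.g. DATABASE_ROUTERS = ['settings.MasterSlaveRouter'], alongside 'default'
# and 'slave' entries in DATABASES (both presumably supplied by local_settings).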
# =========
# = Redis =
# =========
REDIS = {
    'host': 'db01',
}
# ===============
# = Social APIs =
# ===============
FACEBOOK_APP_ID = '111111111111111'
FACEBOOK_SECRET = '99999999999999999999999999999999'
FACEBOOK_NAMESPACE = 'newsblur'
TWITTER_CONSUMER_KEY = 'ooooooooooooooooooooo'
TWITTER_CONSUMER_SECRET = 'XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX'
# ===============
# = AWS Backing =
# ===============
BACKED_BY_AWS = {
    'pages_on_s3': False,
    'icons_on_s3': False,
    'stories_on_dynamodb': False,
}
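# These flags gate the S3 and DynamoDB wiring at the bottom of this file; with
# everything False, no AWS connections are opened at import time.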
PROXY_S3_PAGES = True
S3_BACKUP_BUCKET = 'newsblur_backups'
S3_PAGES_BUCKET_NAME = 'pages.newsblur.com'
S3_ICONS_BUCKET_NAME = 'icons.newsblur.com'
# ==================
# = Configurations =
# ==================
try:
    from gunicorn_conf import *
except ImportError:
    pass
from local_settings import *
COMPRESS = not DEBUG
TEMPLATE_DEBUG = DEBUG
ACCOUNT_ACTIVATION_DAYS = 30
AWS_ACCESS_KEY_ID = S3_ACCESS_KEY
AWS_SECRET_ACCESS_KEY = S3_SECRET
os.environ["AWS_ACCESS_KEY_ID"] = AWS_ACCESS_KEY_ID
os.environ["AWS_SECRET_ACCESS_KEY"] = AWS_SECRET_ACCESS_KEY
def custom_show_toolbar(request):
    return DEBUG

DEBUG_TOOLBAR_CONFIG = {
    'INTERCEPT_REDIRECTS': True,
    'SHOW_TOOLBAR_CALLBACK': custom_show_toolbar,
    'HIDE_DJANGO_SQL': False,
}
RAVEN_CLIENT = raven.Client(SENTRY_DSN)
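# A hedged usage example: exception handlers can report to Sentry with
# RAVEN_CLIENT.captureException() or RAVEN_CLIENT.captureMessage('...').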
# =========
# = Mongo =
# =========
MONGO_DB_DEFAULTS = {
    'name': 'newsblur',
    'host': 'db02:27017',
    'alias': 'default',
}
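# The MONGO_DB defined earlier (possibly overridden by local_settings) is
# merged over these defaults, so only the keys it cares about need to be set;
# dict(defaults, **overrides) lets the override values win.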
MONGO_DB = dict(MONGO_DB_DEFAULTS, **MONGO_DB)
# if MONGO_DB.get('read_preference', pymongo.ReadPreference.PRIMARY) != pymongo.ReadPreference.PRIMARY:
#     MONGO_PRIMARY_DB = MONGO_DB.copy()
#     MONGO_PRIMARY_DB.update(read_preference=pymongo.ReadPreference.PRIMARY)
#     MONGOPRIMARYDB = connect(MONGO_PRIMARY_DB.pop('name'), **MONGO_PRIMARY_DB)
# else:
#     MONGOPRIMARYDB = MONGODB
MONGODB = connect(MONGO_DB.pop('name'), **MONGO_DB)
MONGO_ANALYTICS_DB_DEFAULTS = {
    'name': 'nbanalytics',
    'host': 'db02:27017',
    'alias': 'nbanalytics',
}
MONGO_ANALYTICS_DB = dict(MONGO_ANALYTICS_DB_DEFAULTS, **MONGO_ANALYTICS_DB)
MONGOANALYTICSDB = connect(MONGO_ANALYTICS_DB.pop('name'), **MONGO_ANALYTICS_DB)
# =========
# = Redis =
# =========
REDIS_POOL = redis.ConnectionPool(host=REDIS['host'], port=6379, db=0)
REDIS_STORY_POOL = redis.ConnectionPool(host=REDIS['host'], port=6379, db=1)
REDIS_ANALYTICS_POOL = redis.ConnectionPool(host=REDIS['host'], port=6379, db=2)
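# Callers share these pools rather than opening new connections each time,
# e.g. redis.Redis(connection_pool=REDIS_POOL); the three pools point at Redis
# databases 0, 1, and 2 on the same host.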
JAMMIT = jammit.JammitAssets(NEWSBLUR_DIR)
if DEBUG:
    MIDDLEWARE_CLASSES += ('utils.mongo_raw_log_middleware.SqldumpMiddleware',)
    MIDDLEWARE_CLASSES += ('utils.redis_raw_log_middleware.SqldumpMiddleware',)
    MIDDLEWARE_CLASSES += ('utils.request_introspection_middleware.DumpRequestMiddleware',)
    MIDDLEWARE_CLASSES += ('utils.exception_middleware.ConsoleExceptionMiddleware',)
# =======
# = AWS =
# =======
S3_CONN = None
if BACKED_BY_AWS.get('pages_on_s3') or BACKED_BY_AWS.get('icons_on_s3'):
    S3_CONN = S3Connection(S3_ACCESS_KEY, S3_SECRET)
    S3_PAGES_BUCKET = S3_CONN.get_bucket(S3_PAGES_BUCKET_NAME)
    S3_ICONS_BUCKET = S3_CONN.get_bucket(S3_ICONS_BUCKET_NAME)

if BACKED_BY_AWS.get('stories_on_dynamodb'):
    try:
        DDB = ConnectionBorg().get_table('stories')
    except boto.exception.DynamoDBResponseError:
        DDB = None