# NewsBlur fabfile.py — Fabric deployment and S3 backup tasks.
from fabric.api import env, run, require, sudo, settings
from boto.s3.connection import S3Connection
from boto.s3.key import Key
from django.conf import settings as django_settings
# =========
# = Roles =
# =========
# Default SSH identity and the full host inventory; role definitions let
# tasks target a subset of hosts with fab's -R flag.
env.user = 'conesus'
env.hosts = [
    'www.newsblur.com',
    'db01.newsblur.com',
    'db02.newsblur.com',
    'db03.newsblur.com',
]
env.roledefs = {
    'web': ['www.newsblur.com'],
    'db': ['db01.newsblur.com'],
    'task': ['db02.newsblur.com', 'db03.newsblur.com'],
}
"""
Base configuration
"""
env.project_name = '$(project)'
env.database_password = '$(db_password)'
env.site_media_prefix = "site_media"
env.admin_media_prefix = "admin_media"
env.newsapps_media_prefix = "na_media"
env.path = '/home/conesus/%(project_name)s' % env
2010-09-08 18:30:33 -07:00
env.python = 'python2.6'
2010-09-08 18:30:33 -07:00
"""
Environments
"""
def production():
    """
    Target the production environment for subsequent tasks.
    """
    env.settings = 'production'
    env.user = '$(production_user)'
    env.hosts = ['$(production_domain)']
    env.s3_bucket = '$(production_s3)'
def staging():
    """
    Target the staging environment for subsequent tasks.
    """
    env.settings = 'staging'
    env.user = '$(staging_user)'
    env.hosts = ['$(staging_domain)']
    env.s3_bucket = '$(staging_s3)'
"""
Branches
"""
def stable():
    """
    Deploy from the stable branch.
    """
    env.branch = 'stable'
def master():
    """
    Deploy from the master (development) branch.
    """
    env.branch = 'master'
def branch(branch_name):
    """
    Deploy from an arbitrary branch given by name.
    """
    env.branch = branch_name

# ======
# = S3 =
# ======
# S3 credentials and backup bucket name, read from Django settings.
ACCESS_KEY = django_settings.S3_ACCESS_KEY
SECRET = django_settings.S3_SECRET
BUCKET_NAME = django_settings.S3_BACKUP_BUCKET # Note that you need to create this bucket first
def save_file_in_s3(filename):
    """
    Upload the local file at *filename* to the S3 backup bucket,
    stored under the same name.
    """
    bucket = S3Connection(ACCESS_KEY, SECRET).get_bucket(BUCKET_NAME)
    key = Key(bucket)
    key.key = filename
    key.set_contents_from_filename(filename)
def get_file_from_s3(filename):
    """
    Download *filename* from the S3 backup bucket to the same
    local path.
    """
    bucket = S3Connection(ACCESS_KEY, SECRET).get_bucket(BUCKET_NAME)
    key = Key(bucket)
    key.key = filename
    key.get_contents_to_filename(filename)
def list_backup_in_s3():
conn = S3Connection(ACCESS_KEY, SECRET)
bucket = conn.get_bucket(BUCKET_NAME)
2010-09-08 18:30:33 -07:00
for i, key in enumerate(bucket.get_all_keys()):
print "[%s] %s" % (i, key.name)
2010-09-08 18:30:33 -07:00
def delete_all_backups():
#FIXME: validate filename exists
conn = S3Connection(ACCESS_KEY, SECRET)
bucket = conn.get_bucket(BUCKET_NAME)
2010-09-08 18:30:33 -07:00
for i, key in enumerate(bucket.get_all_keys()):
print "deleting %s" % (key.name)
key.delete()