2to3 the rest of utils/

This commit is contained in:
Jonathan Math 2020-06-19 02:29:40 -04:00
parent 3a0a2c28b8
commit 2e8deade58
10 changed files with 32 additions and 32 deletions

View file

@@ -8,6 +8,6 @@ while True:
req = requests.get(url)
content = req.content
end = time.time()
print(" ---> [%s] Retrieved %s bytes - %s %s" % (str(end - start)[:4], len(content), req.status_code, req.reason))
print((" ---> [%s] Retrieved %s bytes - %s %s" % (str(end - start)[:4], len(content), req.status_code, req.reason)))
time.sleep(5)

View file

@@ -78,7 +78,7 @@ def find_entries():
entries = []
while True:
print " ---> Found %s entries so far. Now on page: %s" % (len(entries), page)
print(" ---> Found %s entries so far. Now on page: %s" % (len(entries), page))
knight_url = "http://newschallenge.tumblr.com/page/%s" % (page)
html = requests.get(knight_url).content
@@ -135,16 +135,16 @@ def find_entries():
for i, entry in enumerate(entries):
is_winner = entry['url'] in winners
if is_winner: winner_count += 1
print " * %s#%s: %s likes - [%s](%s)%s" % (
print(" * %s#%s: %s likes - [%s](%s)%s" % (
"**" if is_winner else "",
i + 1,
entry['likes'], entry['title'],
entry['url'],
"**" if is_winner else "")
"**" if is_winner else ""))
found_entries.append(entry)
print " ***> Found %s active entries among %s total applications with %s/%s winners." % (
active_entry_count, total_entry_count, winner_count, len(winners))
print(" ***> Found %s active entries among %s total applications with %s/%s winners." % (
active_entry_count, total_entry_count, winner_count, len(winners)))
return found_entries
if __name__ == '__main__':

View file

@@ -7,7 +7,7 @@ from settings import CACHE_BACKEND
verbose = False
if not CACHE_BACKEND.startswith( 'memcached://' ):
print "you are not configured to use memcched as your django cache backend"
print("you are not configured to use memcched as your django cache backend")
else:
m = re.search( r'//(.+:\d+)', CACHE_BACKEND )
cache_host = m.group(1)
@@ -24,7 +24,7 @@ else:
while l.find( 'END' ) < 0 :
l = h.readline()
if verbose:
print l
print(l)
m = pat.match( l )
if m :
stats[ m.group(1) ] = m.group(2)
@@ -33,15 +33,15 @@ else:
h.close_socket()
if verbose:
print stats
print(stats)
items = int( stats[ 'curr_items' ] )
bytes = int( stats[ 'bytes' ] )
limit_maxbytes = int( stats[ 'limit_maxbytes' ] ) or bytes
current_conns = int( stats[ 'curr_connections' ] )
print "MemCache status for %s" % ( CACHE_BACKEND )
print "%d items using %d of %d" % ( items, bytes, limit_maxbytes )
print "%5.2f%% full" % ( 100.0 * bytes / limit_maxbytes )
print "%d connections being handled" % ( current_conns )
print
print("MemCache status for %s" % ( CACHE_BACKEND ))
print("%d items using %d of %d" % ( items, bytes, limit_maxbytes ))
print("%5.2f%% full" % ( 100.0 * bytes / limit_maxbytes ))
print("%d connections being handled" % ( current_conns ))
print()

View file

@@ -24,17 +24,17 @@ os.mkdir(dir_name)
for collection in collections:
cmd = 'mongodump --db %s --collection %s -o %s' % (db_name, collection, dir_name)
print "Dumping %s: %s" % (collection, cmd)
print("Dumping %s: %s" % (collection, cmd))
os.system(cmd)
print "Compressing %s..." % filename
print("Compressing %s..." % filename)
cmd = 'tar -zcf %s %s' % (filename, dir_name)
os.system(cmd)
print 'Uploading %s to S3...' % filename
print('Uploading %s to S3...' % filename)
try:
s3.save_file_in_s3(filename, name="mongo/%s" % (filename))
except Exception, e:
print " ****> Exceptions: %s" % e
except Exception as e:
print(" ****> Exceptions: %s" % e)
shutil.rmtree(dir_name)
os.remove(filename)

View file

@@ -16,9 +16,9 @@ db_pass = settings.DATABASES['default']['PASSWORD']
os.environ['PGPASSWORD'] = db_pass
filename = 'backup_postgresql_%s.sql.gz' % time.strftime('%Y-%m-%d-%H-%M')
cmd = '/usr/lib/postgresql/9.4/bin/pg_dump -U newsblur -h 127.0.0.1 -Fc %s > %s' % (db_name, filename)
print 'Backing up PostgreSQL: %s' % cmd
print('Backing up PostgreSQL: %s' % cmd)
os.system(cmd)
print 'Uploading %s to S3...' % filename
print('Uploading %s to S3...' % filename)
s3.save_file_in_s3(filename, name="postgres/%s" % filename)
os.remove(filename)

View file

@@ -12,5 +12,5 @@ from django.conf import settings
filename = 'backup_redis/backup_redis_%s.rdb.gz' % time.strftime('%Y-%m-%d-%H-%M')
path = '/var/lib/redis/dump.rdb'
print 'Uploading %s (from %s) to S3...' % (filename, path)
print('Uploading %s (from %s) to S3...' % (filename, path))
s3.save_file_in_s3(path, name=filename)

View file

@@ -12,5 +12,5 @@ from django.conf import settings
filename = 'backup_redis_sessions/backup_redis_sessions_%s.rdb.gz' % time.strftime('%Y-%m-%d-%H-%M')
path = '/var/lib/redis/dump.rdb'
print 'Uploading %s (from %s) to S3...' % (filename, path)
print('Uploading %s (from %s) to S3...' % (filename, path))
s3.save_file_in_s3(path, name=filename)

View file

@@ -12,5 +12,5 @@ from django.conf import settings
filename = 'redis_story/backup_redis_story_%s.rdb.gz' % time.strftime('%Y-%m-%d-%H-%M')
path = '/var/lib/redis/dump.rdb'
print 'Uploading %s (from %s) to S3...' % (filename, path)
print('Uploading %s (from %s) to S3...' % (filename, path))
s3.save_file_in_s3(path, name=filename)

View file

@@ -35,9 +35,9 @@ for collection in collections:
for item in items:
if item.get('_id') != latest_item_id:
latest_item_id = item['_id']
print " ---> Inserted %s items in %s (at: %s) (%2s%%)" % (
print(" ---> Inserted %s items in %s (at: %s) (%2s%%)" % (
i, collection, item['_id'], (round(i/float(total), 4)*100)
)
))
sys.stdout.flush()
db02.newsblur[collection].insert(item)
i += 1
@@ -57,7 +57,7 @@ for f, feed in enumerate(feeds):
total_inserted += 1
feed_inserted += 1
if feed_inserted:
print " ---> Inserted %s items (total: %s) in stories (at: %s -- %s/month) (%2s%%)" % (
print(" ---> Inserted %s items (total: %s) in stories (at: %s -- %s/month) (%2s%%)" % (
feed_inserted, total_inserted, latest_feed_id, feed.average_stories_per_month, (round(f/float(feed_count), 4)*100)
)
))
sys.stdout.flush()

View file

@@ -35,7 +35,7 @@ def list_backup_in_s3():
bucket = conn.get_bucket(BUCKET_NAME)
for i, key in enumerate(bucket.get_all_keys()):
print "[%s] %s" % (i, key.name)
print("[%s] %s" % (i, key.name))
def delete_all_backups():
#FIXME: validate filename exists
@@ -43,13 +43,13 @@ def delete_all_backups():
bucket = conn.get_bucket(BUCKET_NAME)
for i, key in enumerate(bucket.get_all_keys()):
print "deleting %s" % (key.name)
print("deleting %s" % (key.name))
key.delete()
if __name__ == '__main__':
import sys
if len(sys.argv) < 3:
print 'Usage: %s <get/set/list/delete> <backup_filename>' % (sys.argv[0])
print('Usage: %s <get/set/list/delete> <backup_filename>' % (sys.argv[0]))
else:
if sys.argv[1] == 'set':
save_file_in_s3(sys.argv[2])
@@ -60,4 +60,4 @@ if __name__ == '__main__':
elif sys.argv[1] == 'delete':
delete_all_backups()
else:
print 'Usage: %s <get/set/list/delete> <backup_filename>' % (sys.argv[0])
print('Usage: %s <get/set/list/delete> <backup_filename>' % (sys.argv[0]))