import logging

from fabric.api import task, hosts
from fabric_rundeck import cron

# find_hosts is assumed to come from a project-local helper module; its
# actual location is not shown in this excerpt.
from fabfile.utils import find_hosts
from infra import awscli

logger = logging.getLogger(__name__)


class ArgumentError(ValueError):
    pass


# archive_bucketed_logs.py \
#     balanced.log /mnt/logs/ --aws-creds=/home/deploy/.aws_creds --verbose
aws = awscli._AWSCli()


@cron('30 0 * * *')
@hosts(*find_hosts('log-prod'))
@task
def archive(s3_bucket_name='balanced.log',
            paths='/mnt/log/',
            reap_threshold='30',
            ripe_threshold='1',
            aws_credentials=None,
            verbose=False):
    """
    Archives {host}/{date} bucketed logs to S3. Thresholds are in day units.
    """
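
# A minimal sketch of how the ripe/reap day thresholds might partition the
# {host}/{date} buckets: buckets older than ripe_threshold days are due for
# upload to S3, and buckets older than reap_threshold days can be removed
# locally. This helper is hypothetical and not part of the original task;
# the int() calls mirror the string defaults Fabric passes in from the
# command line.
def _classify_buckets(bucket_dates, ripe_threshold='1', reap_threshold='30',
                      today=None):
    import datetime

    today = today or datetime.date.today()
    ripe, reapable = [], []
    for bucket_date in bucket_dates:
        age_days = (today - bucket_date).days
        if age_days >= int(reap_threshold):
            reapable.append(bucket_date)   # past reap threshold: delete
        elif age_days >= int(ripe_threshold):
            ripe.append(bucket_date)       # past ripe threshold: archive
    return ripe, reapable
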
import hashlib
import logging

import requests
from fabric.api import task, run, hosts, local
from fabfile.utils import schedule
from fabric_rundeck import cron
from infra import awscli

logger = logging.getLogger(__name__)

GEO_DATABASE_URL = "http://geolite.maxmind.com/download/geoip/database/GeoLiteCity.dat.gz"
S3_BUCKET_NAME = "balanced.geoip"

aws = awscli._AWSCli(executor=local)


def md5sum(filename, blocksize=65536):
    """Return the hex MD5 digest of a file, read in fixed-size blocks."""
    digest = hashlib.md5()
    with open(filename, 'rb') as f:  # binary read; the digest is over raw bytes
        for block in iter(lambda: f.read(blocksize), b''):
            digest.update(block)
    return digest.hexdigest()


@cron('0 4 7 * *')
@hosts('localhost')
@task
def update(url=GEO_DATABASE_URL,
           s3_bucket=S3_BUCKET_NAME,
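
# A minimal, hypothetical self-check for md5sum (not part of the original
# module): stream-hash a temporary file and compare against hashlib's
# one-shot digest over the same bytes.
def _md5sum_selfcheck():
    import tempfile

    payload = b"balanced geoip test payload"
    with tempfile.NamedTemporaryFile(delete=False) as tmp:
        tmp.write(payload)
    assert md5sum(tmp.name) == hashlib.md5(payload).hexdigest()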