Example #1
def main():
    opt_parser = OptionParser(usage=USAGE)
    opt_parser.add_option("-v", "--verbose", action="store_true", default=False)
    opt_parser.add_option("-a", "--aws-creds", default=None)
    opts, args = opt_parser.parse_args()
    if not args:
        raise Exception(USAGE)

    if opts.verbose:
        logger.setLevel(logging.DEBUG)
    else:
        logger.setLevel(logging.WARNING)
    logger.addHandler(logging.StreamHandler(sys.stderr))

    if opts.aws_creds:
        aws_access_key, aws_secret_key = get_aws_creds_file(opts.aws_creds)
    else:
        aws_access_key, aws_secret_key = get_aws_creds_env()

    s3_bucket_name = args[0]
    base_paths = args[1:]
    s3_cxn = S3Connection(aws_access_key, aws_secret_key)
    s3_bucket = Bucket(s3_cxn, s3_bucket_name)
    for base_path in base_paths:
        logger.debug("getting rotated ossec logs in %s", base_path)
        for log in get_logs(base_path):
            if not log.is_archived(s3_bucket):
                log.archive(s3_bucket)
            elif log.expired:
                log.remove()
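
All of the examples on this page defer credential lookup to get_aws_creds_file and get_aws_creds_env, and assume module-level logger, USAGE, and boto imports from their originating scripts. A minimal sketch of the two helpers, assuming a one-line ACCESS_KEY:SECRET_KEY file format and the standard AWS environment variables:

import os

def get_aws_creds_file(path):
    # assumed format: a single "ACCESS_KEY:SECRET_KEY" line
    with open(path) as fo:
        access_key, secret_key = fo.read().strip().split(':', 1)
    return access_key, secret_key

def get_aws_creds_env():
    return (os.environ['AWS_ACCESS_KEY_ID'],
            os.environ['AWS_SECRET_ACCESS_KEY'])
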
Example #2
def main():
    # command line
    arg_parser = create_arg_parser()
    args = arg_parser.parse_args()

    # logging
    # --spew routes configuration to the root logger so library logging
    # shows too; otherwise configure the module-level logger
    log = logging.getLogger() if args.spew else logger
    if args.verbose:
        log.setLevel(logging.DEBUG)
    else:
        log.setLevel(logging.WARNING)
    log.addHandler(logging.StreamHandler(sys.stderr))

    # connection
    if args.aws_creds:
        aws_access_key, aws_secret_key = get_aws_creds_file(args.aws_creds)
    else:
        aws_access_key, aws_secret_key = get_aws_creds_env()
    log.debug('creating connection')
    s3_cxn = S3Connection(aws_access_key, aws_secret_key)

    # bucket
    if args.command.__name__ == 'upload_command' and args.create_bucket:
        log.debug('getting/creating bucket %s', args.s3_bucket_name[0])
        s3_bucket = s3_cxn.create_bucket(args.s3_bucket_name[0])
    else:
        log.debug('getting bucket %s', args.s3_bucket_name[0])
        s3_bucket = s3_cxn.get_bucket(args.s3_bucket_name[0])

    # do it
    args.command(s3_bucket, args)
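
create_arg_parser is not shown either. Given args.spew, args.command, args.create_bucket, and args.s3_bucket_name[0], it is presumably an argparse parser whose subcommands bind their handler via set_defaults; a sketch under those assumptions (the upload_command body here is a hypothetical stand-in):

import argparse

def upload_command(s3_bucket, args):
    # hypothetical handler; bound to the 'upload' subcommand below
    pass

def create_arg_parser():
    parser = argparse.ArgumentParser()
    parser.add_argument('-v', '--verbose', action='store_true', default=False)
    parser.add_argument('--spew', action='store_true', default=False)
    parser.add_argument('-a', '--aws-creds', default=None)

    commands = parser.add_subparsers()
    upload = commands.add_parser('upload')
    # nargs=1 explains the args.s3_bucket_name[0] indexing above
    upload.add_argument('s3_bucket_name', nargs=1)
    upload.add_argument('--create-bucket', action='store_true', default=False)
    upload.set_defaults(command=upload_command)
    return parser
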
Example #3
def main():
    opt_parser = OptionParser(usage=USAGE)
    opt_parser.add_option('-v',
                          '--verbose',
                          action='store_true',
                          default=False)
    opt_parser.add_option('-a', '--aws-creds', default=None)
    opts, args = opt_parser.parse_args()
    if not args:
        raise Exception(USAGE)

    if opts.verbose:
        logger.setLevel(logging.DEBUG)
    else:
        logger.setLevel(logging.WARNING)
    logger.addHandler(logging.StreamHandler(sys.stderr))

    if opts.aws_creds:
        aws_access_key, aws_secret_key = get_aws_creds_file(opts.aws_creds)
    else:
        aws_access_key, aws_secret_key = get_aws_creds_env()

    s3_bucket_name = args[0]
    base_paths = args[1:]
    s3_cxn = S3Connection(aws_access_key, aws_secret_key)
    s3_bucket = Bucket(s3_cxn, s3_bucket_name)
    for base_path in base_paths:
        logger.debug('getting rotated ossec logs in %s', base_path)
        for log in get_logs(base_path):
            if not log.is_archived(s3_bucket):
                log.archive(s3_bucket)
            elif log.expired:
                log.remove()
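
get_logs is shared by several of these examples but never shown. A minimal sketch, assuming rotated logs are discovered by walking base_path; the BucketedLog wrapper it yields is sketched after the next example:

import os

def get_logs(base_path):
    for dir_path, _, file_names in os.walk(base_path):
        for file_name in file_names:
            yield BucketedLog(os.path.join(dir_path, file_name))
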
Example #4
def main():
    opt_parser = OptionParser(usage=USAGE)
    opt_parser.add_option('-v',
                          '--verbose',
                          action='store_true',
                          default=False)
    opt_parser.add_option('-a', '--aws-creds', default=None)
    opt_parser.add_option('--reap-threshold',
                          default=None,
                          type="int",
                          help='Age in days after which to remove bucket.')
    opt_parser.add_option('--ripe-threshold',
                          default=None,
                          type="int",
                          help='Age in days after which to archive bucket.')
    opts, args = opt_parser.parse_args()
    if not args:
        raise Exception(USAGE)

    if opts.reap_threshold is not None:
        BucketedLog.REAP_THRESHOLD = timedelta(days=opts.reap_threshold)
    if opts.ripe_threshold is not None:
        BucketedLog.RIPE_THRESHOLD = timedelta(days=opts.ripe_threshold)

    if opts.verbose:
        logger.setLevel(logging.DEBUG)
    else:
        logger.setLevel(logging.WARNING)
    logger.addHandler(logging.StreamHandler(sys.stderr))

    if opts.aws_creds:
        aws_access_key, aws_secret_key = get_aws_creds_file(opts.aws_creds)
    else:
        aws_access_key, aws_secret_key = get_aws_creds_env()

    s3_bucket_name = args[0]
    base_paths = args[1:]
    s3_cxn = S3Connection(aws_access_key, aws_secret_key)
    s3_bucket = Bucket(s3_cxn, s3_bucket_name)
    for base_path in base_paths:
        logger.debug('getting buckets in %s', base_path)
        for log in get_logs(base_path):
            if not log.ripe:
                logger.debug('%s is not ripe, skipping', log.path)
            elif not log.is_archived(s3_bucket):
                log.archive(s3_bucket)
            elif log.expired:
                logger.debug('%s is expired, removing', log.path)
                log.remove()
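
Example #4 overrides BucketedLog.RIPE_THRESHOLD and BucketedLog.REAP_THRESHOLD as class attributes, and the loop expects ripe, expired, is_archived, archive, and remove. A sketch of a class with that interface; the mtime-based age test and the basename key-naming scheme are assumptions:

import os
from datetime import datetime, timedelta

from boto.s3.key import Key

class BucketedLog(object):
    # defaults; overridden from the command line above
    RIPE_THRESHOLD = timedelta(days=1)
    REAP_THRESHOLD = timedelta(days=30)

    def __init__(self, path):
        self.path = path

    @property
    def age(self):
        mtime = datetime.fromtimestamp(os.path.getmtime(self.path))
        return datetime.now() - mtime

    @property
    def ripe(self):
        return self.age >= self.RIPE_THRESHOLD

    @property
    def expired(self):
        return self.age >= self.REAP_THRESHOLD

    def is_archived(self, s3_bucket):
        return s3_bucket.get_key(os.path.basename(self.path)) is not None

    def archive(self, s3_bucket):
        key = Key(s3_bucket, os.path.basename(self.path))
        key.set_contents_from_filename(self.path)

    def remove(self):
        os.remove(self.path)
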
Example #5
def main():
    opt_parser = OptionParser(usage=USAGE)
    opt_parser.add_option('-v',
                          '--verbose',
                          action='store_true',
                          default=False)
    opt_parser.add_option('-d', '--dry', action='store_true', default=False)
    # TODO: if you do more intelligent backup naming this is not needed
    #       e.g. hrmod48, daymod14, year+month, etc.
    opt_parser.add_option('-c',
                          '--capacity-count',
                          type='int',
                          default=24 * 365)
    opt_parser.add_option('--host', default='localhost')
    opt_parser.add_option('-a', '--aws-creds', default=None)
    opts, args = opt_parser.parse_args()
    if args:
        lines = [' '.join(args)]
    else:
        lines = sys.stdin

    if opts.verbose:
        logger.setLevel(logging.DEBUG)
    else:
        logger.setLevel(logging.WARNING)
    logger.addHandler(logging.StreamHandler(sys.stderr))

    if opts.aws_creds:
        aws_access_key, aws_secret_key = get_aws_creds_file(opts.aws_creds)
    else:
        aws_access_key, aws_secret_key = get_aws_creds_env()

    s3_cxn = S3Connection(aws_access_key, aws_secret_key)
    for line in lines:
        parts = line.strip().split()
        if len(parts) == 3:
            s3_bucket_name, db, username = parts
        else:
            raise Exception(USAGE)
        with DumpDB(opts.host, db, username) as dump:
            s3_bucket = Bucket(s3_cxn, s3_bucket_name)
            if os.path.getsize(dump.tmp_path) > MAX_UPLOAD_SIZE:
                archive_in_parts(s3_bucket, dump.tmp_path,
                                 dump.timestamp + '.sql')
            else:
                archive(s3_bucket, dump.tmp_path, dump.timestamp + '.sql')
            reap(s3_bucket, opts.capacity_count, opts.dry)
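
reap in Example #5 evidently enforces a retention cap of capacity_count dumps per bucket, with --dry as a no-op preview. A minimal sketch, assuming the timestamped key names sort chronologically:

def reap(s3_bucket, capacity_count, dry):
    # oldest keys first, since timestamped names sort chronologically
    keys = sorted(s3_bucket.get_all_keys(), key=lambda k: k.name)
    excess = len(keys) - capacity_count
    for key in keys[:max(excess, 0)]:
        logger.debug('reaping %s', key.name)
        if not dry:
            key.delete()
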
Example #6
def main():
    opt_parser = OptionParser(usage=USAGE)
    opt_parser.add_option(
        '-v', '--verbose', action='store_true', default=False)
    opt_parser.add_option(
        '-a', '--aws-creds', default=None)
    opt_parser.add_option(
        '--reap-threshold', default=None, type="int",
        help='Age in days after which to remove bucket.')
    opt_parser.add_option(
        '--ripe-threshold', default=None, type="int",
        help='Age in days after which to archive bucket.')
    opts, args = opt_parser.parse_args()
    if not args:
        raise Exception(USAGE)

    if opts.reap_threshold is not None:
        BucketedLog.REAP_THRESHOLD = timedelta(days=opts.reap_threshold)
    if opts.ripe_threshold is not None:
        BucketedLog.RIPE_THRESHOLD = timedelta(days=opts.ripe_threshold)

    if opts.verbose:
        logger.setLevel(logging.DEBUG)
    else:
        logger.setLevel(logging.WARNING)
    logger.addHandler(logging.StreamHandler(sys.stderr))

    if opts.aws_creds:
        aws_access_key, aws_secret_key = get_aws_creds_file(opts.aws_creds)
    else:
        aws_access_key, aws_secret_key = get_aws_creds_env()

    s3_bucket_name = args[0]
    base_paths = args[1:]
    s3_cxn = S3Connection(aws_access_key, aws_secret_key)
    s3_bucket = Bucket(s3_cxn, s3_bucket_name)
    for base_path in base_paths:
        logger.debug('getting buckets in %s', base_path)
        for log in get_logs(base_path):
            if not log.ripe:
                logger.debug('%s is not ripe, skipping', log.path)
            elif not log.is_archived(s3_bucket):
                log.archive(s3_bucket)
            elif log.expired:
                logger.debug('%s is expired, removing', log.path)
                log.remove()
Example #7
def main():
    opt_parser = OptionParser(usage=USAGE)
    opt_parser.add_option(
        '-v', '--verbose', action='store_true', default=False)
    opt_parser.add_option(
        '-d', '--dry', action='store_true', default=False)
    # TODO: if you do more intelligent backup naming this is not needed
    #       e.g. hrmod48, daymod14, year+month, etc.
    opt_parser.add_option(
        '-c', '--capacity-count', type='int', default=24 * 365)
    opt_parser.add_option(
        '--host', default='localhost')
    opt_parser.add_option(
        '-a', '--aws-creds', default=None)
    opts, args = opt_parser.parse_args()
    if args:
        lines = [' '.join(args)]
    else:
        lines = sys.stdin

    if opts.verbose:
        logger.setLevel(logging.DEBUG)
    else:
        logger.setLevel(logging.WARNING)
    logger.addHandler(logging.StreamHandler(sys.stderr))

    if opts.aws_creds:
        aws_access_key, aws_secret_key = get_aws_creds_file(opts.aws_creds)
    else:
        aws_access_key, aws_secret_key = get_aws_creds_env()

    s3_cxn = S3Connection(aws_access_key, aws_secret_key)
    for line in lines:
        parts = line.strip().split()
        if len(parts) == 3:
            s3_bucket_name, db, username = parts
        else:
            raise Exception(USAGE)
        with DumpDB(opts.host, db, username) as dump:
            s3_bucket = Bucket(s3_cxn, s3_bucket_name)
            if os.path.getsize(dump.tmp_path) > MAX_UPLOAD_SIZE:
                archive_in_parts(s3_bucket, dump.tmp_path, dump.timestamp + '.sql')
            else:
                archive(s3_bucket, dump.tmp_path, dump.timestamp + '.sql')
            reap(s3_bucket, opts.capacity_count, opts.dry)
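
Neither archive nor archive_in_parts appears on this page. For dumps above MAX_UPLOAD_SIZE, boto's multipart-upload API is the natural fit; a minimal sketch, with the 50 MB part size being an assumption:

import os

from boto.s3.key import Key

PART_SIZE = 50 * 1024 * 1024  # assumed; S3 parts other than the last must be >= 5 MB

def archive(s3_bucket, path, key_name):
    Key(s3_bucket, key_name).set_contents_from_filename(path)

def archive_in_parts(s3_bucket, path, key_name):
    upload = s3_bucket.initiate_multipart_upload(key_name)
    try:
        remaining = os.path.getsize(path)
        part_num = 0
        with open(path, 'rb') as fo:
            while remaining > 0:
                part_num += 1
                size = min(PART_SIZE, remaining)
                upload.upload_part_from_file(fo, part_num, size=size)
                remaining -= size
        upload.complete_upload()
    except Exception:
        upload.cancel_upload()
        raise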