Example #1
import datetime
import sys
from datetime import timedelta as td  # td alias inferred from its usage below

import boto

import ec2  # project-local helper wrapping the S3 connection (~/aws.json)


def sharded_iterator(start=None, stop=None, test=False, all=False):
    # Default the window end to now; a timedelta stop means "this long ago".
    if not stop:
        stop = datetime.datetime.utcnow()
    if isinstance(stop, td):
        stop = datetime.datetime.utcnow() - stop

    if not start:
        if all:
            # Full history: fact data begins on 2011-09-15. Test mode is
            # meaningless over the whole range.
            assert not test
            start = datetime.datetime(year=2011, month=9, day=15)
        else:
            # Default to a 7-day window, or the last 120 minutes in test mode.
            start = stop - (td(7) if not test else td(minutes=120))

    try:
        s3_conn = ec2.s3_connection()
    except boto.exception.NoAuthHandlerFound as e:
        print(e)
        sys.exit("You need a ~/aws.json file. Ask Timothy for it.")
Example #4
import cStringIO  # Python 2 stdlib, matching the original codebase
import gzip
import os

from boto.s3.key import Key

import ec2       # project-local S3 connection helper
import settings  # project settings module providing FACT_BUCKET
# nginx_unescape is a project-local helper; its import is not shown
# in the original.


def get_data_from_key(key_name):
    s3_conn = ec2.s3_connection()
    bucket = s3_conn.get_bucket(settings.FACT_BUCKET)
    key = Key(bucket)
    key.name = key_name

    # Serve from the on-disk cache when possible; otherwise fetch from
    # S3 and populate the cache.
    cache_filename = os.path.join('/var/canvas/analytics/cache/', key.name)
    if os.path.exists(cache_filename):
        with open(cache_filename, 'rb') as f:
            gzip_data = f.read()
    else:
        try:
            os.makedirs(os.path.dirname(cache_filename))
        except OSError:
            pass  # cache directory already exists
        gzip_data = key.get_contents_as_string()
        with open(cache_filename, 'wb') as f:
            f.write(gzip_data)

    try:
        return nginx_unescape(
            gzip.GzipFile(fileobj=cStringIO.StringIO(gzip_data)).read())
    except IOError:
        # Corrupt or non-gzip data: treat the shard as empty.
        return ""