Example #1
def insert_distro(self, dt=None):
    if dt is None:
        dt = datetime.utcnow() + timedelta(hours=5)
    elif dt == -1:
        dt = None
    insert_distribution('http://some_url', 1, False, dt)
    return dt
Example #2
def insert_distro(self, channel_id=1, deployed=False, dt=None):
    if dt is None:
        dt = datetime.utcnow() + timedelta(hours=5)
    elif dt == -1:
        dt = None
    insert_distribution('http://some_url', channel_id, deployed, dt)
    return dt
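Both helpers above encode the same convention for dt: None means "schedule five hours from now", -1 means "no scheduled date at all", and any other value is passed through unchanged (datetime, timedelta and insert_distribution are assumed to be imported in the original test module). A minimal standalone sketch of that convention, using a hypothetical normalize_schedule helper:

from datetime import datetime, timedelta

def normalize_schedule(dt=None):
    # Mirrors the dt handling in insert_distro above:
    # None -> default schedule five hours out, -1 -> no schedule at all.
    if dt is None:
        return datetime.utcnow() + timedelta(hours=5)
    if dt == -1:
        return None
    return dt

# The three supported forms of the dt argument:
print(normalize_schedule())                       # a datetime roughly five hours from now
print(normalize_schedule(-1))                     # None, i.e. no scheduled date
print(normalize_schedule(datetime(2030, 1, 1)))   # explicit datetimes pass through unchanged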
Example #4
# Imports this snippet relies on (boto 2, furl, stdlib datetime); the
# splice-internal names (command_logger, generate_artifacts,
# insert_distribution, ScheduleError) are assumed to be defined elsewhere
# in the original module.
from datetime import datetime

from boto.s3.cors import CORSConfiguration
from boto.s3.key import Key
from furl import furl


def distribute(data, channel_id, deploy, scheduled_dt=None):
    """Upload tile data to S3
    :data: tile data
    :channel_id: channel id for which to distribute tile data
    :deploy: whether to deploy tiles to Firefox immediately
    :scheduled_dt: an optional scheduled date in the future for deployment; overrides deploy
    """
    command_logger.info("Generating Data")

    from splice.models import Channel
    from splice.environment import Environment

    env = Environment.instance()

    if scheduled_dt:
        now = datetime.utcnow()
        if now > scheduled_dt:
            raise ScheduleError("scheduled date needs to be in the future")
        elif deploy:
            raise ScheduleError("cannot specify deploy and schedule at the same time")

    channel = (
        env.db.session
        .query(Channel)
        .filter(Channel.id == channel_id)
        .one())

    artifacts = generate_artifacts(data, channel.name, deploy)

    command_logger.info("Uploading to S3 for channel {0}".format(channel.name))

    bucket = env.s3.get_bucket(env.config.S3["bucket"])
    cors = CORSConfiguration()
    cors.add_rule("GET", "*", allowed_header="*")
    bucket.set_cors(cors)

    distributed = []

    headers = {
        'Cache-Control': 'public, max-age=31536000',
        'Content-Disposition': 'inline',
    }

    # upload individual files
    for file in artifacts:
        if "mime" in file:
            headers['Content-Type'] = file["mime"]
        else:
            # default to JSON for artifacts
            headers['Content-Type'] = "application/json"

        key = bucket.get_key(file["key"])
        uploaded = False

        if key is None or file.get("force_upload"):
            key = Key(bucket)
            key.name = file["key"]
            key.set_contents_from_string(file["data"], headers=headers)
            key.set_acl("public-read")
            uploaded = True

        url = key.generate_url(expires_in=0, query_auth=False)

        # remove x-amz-security-token, which is inserted even if query_auth=False
        # ref: https://github.com/boto/boto/issues/1477
        uri = furl(url)
        try:
            uri.args.pop('x-amz-security-token')
        except KeyError:
            pass
        url = uri.url

        if uploaded:
            command_logger.info("UPLOADED {0}".format(url))
        else:
            command_logger.info("SKIPPED {0}".format(url))
        distributed.append([url, uploaded])

        if file.get("dist", False):
            insert_distribution(url, channel_id, deploy, scheduled_dt)

    return distributed
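As the docstring notes, deploy and scheduled_dt are mutually exclusive: a future scheduled_dt defers deployment, a past date raises ScheduleError, and combining a schedule with deploy=True also raises ScheduleError. A hypothetical invocation sketch (tile_payload stands in for tile data built elsewhere; the splice environment and S3 configuration are assumed to be set up already):

from datetime import datetime, timedelta

# Schedule the distribution one hour out instead of deploying immediately;
# deploy must be False whenever scheduled_dt is given.
results = distribute(
    tile_payload,                                   # assumed tile data, built elsewhere
    channel_id=1,
    deploy=False,
    scheduled_dt=datetime.utcnow() + timedelta(hours=1),
)

# distribute() returns [url, uploaded] pairs for every artifact it touched.
for url, uploaded in results:
    print("UPLOADED" if uploaded else "SKIPPED", url)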