Example 1
0
    def test_distribute_suggested(self):
        """Suggested tiles keep their frecent_sites (sorted) in the AG index payload."""
        suggested_tile = {
            "imageURI": "data:image/png;base64,somedata",
            "enhancedImageURI": "data:image/png;base64,somemoredata",
            "url": "https://somewhere.com",
            "title": "Some Title",
            "type": "organic",
            "bgColor": "#FFFFFF",
            "frecent_sites": ['http://xyz.com', 'http://abc.com'],
        }

        plain_tile = {
            "imageURI": "data:image/png;base64,somedata",
            "url": "https://somewhere.com",
            "title": "Some Other Title",
            "type": "organic",
            "bgColor": "#FFFFFF",
        }

        channel_id = self.channels[0].id
        ingested = ingest_links({
            "STAR/en-US": [suggested_tile],
            "CA/en-US": [plain_tile],
        }, channel_id)
        distribute(ingested, channel_id, True)

        # in this case, the 3rd element should be the mock of the s3 upload for the 'ag' index
        uploaded_body = self.key_mock.set_contents_from_string.mock_calls[3][1][0]
        frecents = json.loads(uploaded_body)['suggested'][0]['frecent_sites']
        # ingest normalizes the frecent_sites list into sorted order
        assert_equal(frecents, ['http://abc.com', 'http://xyz.com'])
Example 2
0
    def test_distribute(self):
        """Upload counts: 6 artifacts for one country/locale, 8 once a second is added."""
        star_tiles = [{
            "imageURI": "data:image/png;base64,somedata",
            "enhancedImageURI": "data:image/png;base64,somemoredata",
            "url": "https://somewhere.com",
            "title": "Some Title",
            "type": "organic",
            "bgColor": "#FFFFFF",
        }]

        ca_tiles = [{
            "imageURI": "data:image/png;base64,somedata",
            "url": "https://somewhere.com",
            "title": "Some Title",
            "type": "organic",
            "bgColor": "#FFFFFF",
        }]

        channel_id = self.channels[0].id
        distribute(ingest_links({"STAR/en-US": star_tiles}, channel_id),
                   channel_id, True)
        # 6 files are uploaded, mirrors generate artifacts
        assert_equal(6, self.key_mock.set_contents_from_string.call_count)

        # reset the upload counter before the second distribution
        self.key_mock.set_contents_from_string = Mock()
        payload = {
            "STAR/en-US": star_tiles,
            "CA/en-US": ca_tiles,
        }
        distribute(ingest_links(payload, channel_id), channel_id, True)
        #  includes two more upload: the locate data payload (for both versions)
        assert_equal(8, self.key_mock.set_contents_from_string.call_count)
Example 3
0
    def test_distribute_adgroup_explanation(self):
        """adgroup_name/explanation reach AG payloads but are dropped from legacy ones."""
        directory_tile = {
            "imageURI": "data:image/png;base64,somedata",
            "url": "https://somewhere_else.com",
            "title": "Some Title US",
            "type": "organic",
            "bgColor": "#FFFFFF",
            "adgroup_name": "Teçhnology".decode('utf-8'),
            "explanation": "推荐 for %1$S fans who also like %2$S".decode('utf-8')
        }

        suggested_tile = {
            "imageURI": "data:image/png;base64,somedata",
            "url": "https://somewhere.com",
            "title": "Some Title US Suggested",
            "type": "organic",
            "bgColor": "#FFFFFF",
            "frecent_sites": ['http://xyz.com', 'http://abc.com'],
            "adgroup_name": "Technology",
            "explanation": "Suggested for %1$S fans who also like %2$S"
        }

        distribution = {
            "US/en-US": [directory_tile, suggested_tile],
            "GB/en-US": [directory_tile],
        }

        ingested = ingest_links(distribution, self.channels[0].id)
        distribute(ingested, self.channels[0].id, True)
        # one image, 2 AG distributions, 2 legacy distributions, one index, one input distribution
        assert_equal(7, self.key_mock.set_contents_from_string.call_count)

        num_tiles_checked = 0
        for idx, key_name in enumerate(self.key_names):
            ag_match = AG_DIST_PATHNAME.match(key_name)
            legacy_match = LEGACY_DIST_PATHNAME.match(key_name)
            if ag_match:
                country_locale, locale = ag_match.groups()
                payload = json.loads(self.key_contents[idx])
                # 'directory' tiles come from source index 0; 'suggested'
                # tiles (US/en-US only) come from source index 1
                for bucket, source_index in (('directory', 0), ('suggested', 1)):
                    source_tiles = distribution[country_locale]
                    for tile in payload[bucket]:
                        assert_equal(source_tiles[source_index]['adgroup_name'], tile.get('adgroup_name'))
                        assert_equal(source_tiles[source_index]['explanation'], tile.get('explanation'))
                        num_tiles_checked += 1

            elif legacy_match:
                country_locale, locale = legacy_match.groups()
                payload = json.loads(self.key_contents[idx])
                assert_equal(1, len(payload[locale]))
                legacy_tile = payload[locale][0]
                # the legacy format carries no adgroup metadata at all
                assert_equal(None, legacy_tile.get('adgroup_name'))
                assert_equal(None, legacy_tile.get('explanation'))
                num_tiles_checked += 1

        assert_equal(5, num_tiles_checked)
Example 4
0
def deploy_scheduled(console_out, deploy_flag, leniency, verbose, quiet, *args,
                     **kwargs):
    """
    Find scheduled distributions and deploy.

    Looks up distributions scheduled within the `leniency` window of the
    current UTC time, fetches each payload over HTTP, and — when
    `deploy_flag` is set — ingests and distributes it, then removes the
    schedule entry.
    """
    # Logging verbosity: verbose wins over quiet; default is INFO.
    if verbose:
        logger = setup_command_logger(logging.DEBUG)
    elif quiet:
        logger = setup_command_logger(logging.ERROR)
    else:
        logger = setup_command_logger(logging.INFO)

    # Deferred imports keep command start-up cheap.
    from splice.queries import get_scheduled_distributions, unschedule_distribution
    import requests

    dt = datetime.utcnow()
    distributions = get_scheduled_distributions(leniency, dt)
    logger.info("{0} - found {1} distributions".format(dt, len(distributions)))

    # Fetch each distribution payload; non-200 responses are logged and skipped.
    dist_data = []
    for dist in distributions:
        logger.info("fetching {0}".format(dist.url))
        r = requests.get(dist.url)
        if r.status_code == 200:
            dist_data.append((r.json(), dist.channel_id, dist.id))
        else:
            logger.error("FETCH_ERROR status_code:{0} url:{1}".format(
                r.status_code, dist.url))

    from splice.ingest import ingest_links, distribute, IngestError

    if deploy_flag:
        # Ingest, optionally echo to stdout, distribute, then drop the
        # schedule entry so it is not deployed twice.
        for rawdata, channel_id, dist_id in dist_data:
            try:
                new_data = ingest_links(rawdata, channel_id)

                if console_out:
                    print json.dumps(new_data, sort_keys=True, indent=2)

                distribute(new_data, channel_id, deploy_flag)
                unschedule_distribution(dist_id)
            except IngestError, e:
                raise InvalidCommand(e.message)
            # NOTE(review): excerpt is truncated here — the handler body for
            # this bare `except:` is not visible in this snippet.
            except:
Example 5
0
def deploy_scheduled(console_out, deploy_flag, leniency, verbose, quiet, *args, **kwargs):
    """
    Find scheduled distributions and deploy.

    Looks up distributions scheduled within the `leniency` window of the
    current UTC time, fetches each payload over HTTP, and — when
    `deploy_flag` is set — ingests and distributes it, then removes the
    schedule entry.
    """
    # Logging verbosity: verbose wins over quiet; default is INFO.
    if verbose:
        logger = setup_command_logger(logging.DEBUG)
    elif quiet:
        logger = setup_command_logger(logging.ERROR)
    else:
        logger = setup_command_logger(logging.INFO)

    # Deferred imports keep command start-up cheap.
    from splice.queries import get_scheduled_distributions, unschedule_distribution
    import requests

    dt = datetime.utcnow()
    distributions = get_scheduled_distributions(leniency, dt)
    logger.info("{0} - found {1} distributions".format(dt, len(distributions)))

    # Fetch each distribution payload; non-200 responses are logged and skipped.
    dist_data = []
    for dist in distributions:
        logger.info("fetching {0}".format(dist.url))
        r = requests.get(dist.url)
        if r.status_code == 200:
            dist_data.append((r.json(), dist.channel_id, dist.id))
        else:
            logger.error("FETCH_ERROR status_code:{0} url:{1}".format(r.status_code, dist.url))

    from splice.ingest import ingest_links, distribute, IngestError

    if deploy_flag:
        # Ingest, optionally echo to stdout, distribute, then drop the
        # schedule entry so it is not deployed twice.
        for rawdata, channel_id, dist_id in dist_data:
            try:
                new_data = ingest_links(rawdata, channel_id)

                if console_out:
                    print json.dumps(new_data, sort_keys=True, indent=2)

                distribute(new_data, channel_id, deploy_flag)
                unschedule_distribution(dist_id)
            except IngestError, e:
                raise InvalidCommand(e.message)
            # NOTE(review): excerpt is truncated here — the handler body for
            # this bare `except:` is not visible in this snippet.
            except:
Example 6
0
def ingest_tiles(in_file, channel_id, out_path, console_out, deploy_flag,
                 verbose, *args, **kwargs):
    """
    Load a set of links for all country/locale combinations into data warehouse and optionally deploy
    """
    if verbose:
        logger = setup_command_logger(logging.DEBUG)
    else:
        logger = setup_command_logger(logging.INFO)

    rawdata = None
    with open(in_file, 'r') as f:
        rawdata = json.load(f)

    from splice.ingest import ingest_links, distribute, IngestError

    try:
        new_data = ingest_links(rawdata, channel_id)

        if console_out:
            print json.dumps(new_data, sort_keys=True, indent=2)

        if out_path:
            directory, _ = os.path.split(out_path)
            if not os.path.exists(directory):
                os.makedirs(directory)

            with open(out_path, "w") as f:
                json.dump(new_data, f, sort_keys=True, indent=2)
                logger.info("wrote {0}".format(out_path))

        if deploy_flag:
            logger.info("Distributing AND Deploying data")
        else:
            logger.info("Distributing data (NO deploy)")

        distribute(new_data, channel_id, deploy_flag)
    except IngestError, e:
        raise InvalidCommand(e.message)
Example 7
0
def ingest_tiles(in_file, channel_id, out_path, console_out, deploy_flag, verbose, *args, **kwargs):
    """
    Load a set of links for all country/locale combinations into data warehouse and optionally deploy
    """
    if verbose:
        logger = setup_command_logger(logging.DEBUG)
    else:
        logger = setup_command_logger(logging.INFO)

    rawdata = None
    with open(in_file, 'r') as f:
        rawdata = json.load(f)

    from splice.ingest import ingest_links, distribute, IngestError

    try:
        new_data = ingest_links(rawdata, channel_id)

        if console_out:
            print json.dumps(new_data, sort_keys=True, indent=2)

        if out_path:
            directory, _ = os.path.split(out_path)
            if not os.path.exists(directory):
                os.makedirs(directory)

            with open(out_path, "w") as f:
                json.dump(new_data, f, sort_keys=True, indent=2)
                logger.info("wrote {0}".format(out_path))

        if deploy_flag:
            logger.info("Distributing AND Deploying data")
        else:
            logger.info("Distributing data (NO deploy)")

        distribute(new_data, channel_id, deploy_flag)
    except IngestError, e:
        raise InvalidCommand(e.message)
Example 8
0
    def test_deploy_always_generates_tile_index(self):
        """A tiles index file should always be generated"""

        # a dict (not a bare int) so the nested closure can mutate it
        # https://docs.python.org/2/tutorial/classes.html#python-scopes-and-namespaces
        index_uploaded = {'count': 0}

        def key_set_name(name):
            index_name = "{0}_tile_index.v3.json".format(self.channels[0].name)
            if name == index_name:
                index_uploaded['count'] += 1
        name_mock = PropertyMock(side_effect=key_set_name)
        type(self.key_mock).name = name_mock

        with open(self.get_fixture_path("mozilla-tiles.fennec.json"), 'r') as fixture:
            tiles = json.load(fixture)

        # every deploy run, including repeats of the same data, must upload
        # the index file again
        for expected_uploads in (1, 2):
            ingested = ingest_links(tiles, self.channels[0].id)
            distribute(ingested, self.channels[0].id, True)
            assert_equal(expected_uploads, index_uploaded['count'])
Example 9
0
def all_tiles():
    """
    Ingest and distribute a tile payload posted to this endpoint.

    Query string: `deploy` ('1' to deploy), `channelId` (required),
    `scheduledTS` (optional UNIX timestamp in seconds for a scheduled
    distribution).
    """
    deploy_flag = request.args.get('deploy')
    channel_id = request.args.get('channelId')
    scheduled_ts = request.args.get('scheduledTS')
    scheduled_dt = None

    # only the literal string '1' enables deployment
    deploy = deploy_flag == '1'

    if channel_id is None:
        msg = "channel_id not provided"
        env.log("INGEST_ERROR msg: {0}".format(msg))
        return jsonify({"err": [{"msg": msg}]}), 400

    try:
        # force=True: parse the body as JSON regardless of Content-Type
        data = request.get_json(force=True)
        new_data = ingest_links(data, channel_id)
        if scheduled_ts:
            # scheduled_ts assumed to be in seconds
            scheduled_dt = datetime.utcfromtimestamp(int(scheduled_ts))
        urls = distribute(new_data, channel_id, deploy, scheduled_dt)
    except NoResultFound, e:
        msg = "channel_id {0} does not exist".format(channel_id)
        env.log("INGEST_ERROR msg: {0}".format(msg))
        return jsonify({"err": [{"msg": msg}]}), 404
    # NOTE(review): excerpt is truncated — the success response and any
    # further except clauses are not visible in this snippet.
Example 10
0
def all_tiles():
    """
    Ingest and distribute a tile payload posted to this endpoint.

    Query string: `deploy` ('1' to deploy), `channelId` (required),
    `scheduledTS` (optional UNIX timestamp in seconds for a scheduled
    distribution).
    """
    deploy_flag = request.args.get('deploy')
    channel_id = request.args.get('channelId')
    scheduled_ts = request.args.get('scheduledTS')
    scheduled_dt = None

    # only the literal string '1' enables deployment
    deploy = deploy_flag == '1'

    if channel_id is None:
        msg = "channel_id not provided"
        env.log("INGEST_ERROR msg: {0}".format(msg))
        return jsonify({"err": [{"msg": msg}]}), 400

    try:
        # force=True: parse the body as JSON regardless of Content-Type
        data = request.get_json(force=True)
        new_data = ingest_links(data, channel_id)
        if scheduled_ts:
            # scheduled_ts assumed to be in seconds
            scheduled_dt = datetime.utcfromtimestamp(int(scheduled_ts))
        urls = distribute(new_data, channel_id, deploy, scheduled_dt)
    except NoResultFound, e:
        msg = "channel_id {0} does not exist".format(channel_id)
        env.log("INGEST_ERROR msg: {0}".format(msg))
        return jsonify({"err": [{"msg": msg}]}), 404
    # NOTE(review): excerpt is truncated — the success response and any
    # further except clauses are not visible in this snippet.
Example 11
0
    def test_distribute_frequency_cap(self):
        """
        Tests if frequency cap makes it in distributions
        """
        gb_tile = {
            "imageURI": "data:image/png;base64,somedata",
            "url": "https://somewhere.com",
            "title": "Some Title CA",
            "type": "organic",
            "bgColor": "#FFFFFF",
            "frequency_caps": {
                "daily": 3,
                "total": 10
            }
        }

        us_tile = {
            "imageURI": "data:image/png;base64,somedata",
            "url": "https://somewhere_else.com",
            "title": "Some Title US",
            "type": "organic",
            "bgColor": "#FFFFFF",
            "frequency_caps": {
                "daily": 5,
                "total": 15
            }
        }

        us_suggested_tile = {
            "imageURI": "data:image/png;base64,somedata",
            "url": "https://somewhere.com",
            "title": "Some Title US Suggested",
            "type": "organic",
            "bgColor": "#FFFFFF",
            "frecent_sites": ['http://xyz.com', 'http://abc.com'],
            "frequency_caps": {
                "daily": 7,
                "total": 20
            }
        }

        distribution = {
            "US/en-US": [us_tile, us_suggested_tile],
            "GB/en-US": [us_tile],
            "GB/en-GB": [gb_tile]
        }

        ingested = ingest_links(distribution, self.channels[0].id)
        distribute(ingested, self.channels[0].id, True)
        # one image, 3 AG distributions, 3 legacy distributions, one index, one input distribution
        assert_equal(9, self.key_mock.set_contents_from_string.call_count)

        num_tiles_checked = 0
        for idx, key_name in enumerate(self.key_names):
            ag_match = AG_DIST_PATHNAME.match(key_name)
            legacy_match = LEGACY_DIST_PATHNAME.match(key_name)
            if ag_match:
                country_locale, locale = ag_match.groups()
                payload = json.loads(self.key_contents[idx])
                # 'directory' tiles come from source index 0; 'suggested'
                # tiles (US/en-US only) come from source index 1
                for bucket, source_index in (('directory', 0), ('suggested', 1)):
                    source_tiles = distribution[country_locale]
                    for tile in payload[bucket]:
                        assert_equal(source_tiles[source_index]['frequency_caps'], tile.get('frequency_caps'))
                        num_tiles_checked += 1

            elif legacy_match:
                country_locale, locale = legacy_match.groups()
                payload = json.loads(self.key_contents[idx])
                assert_equal(1, len(payload[locale]))
                legacy_tile = payload[locale][0]
                # the legacy format carries no frequency-cap metadata
                assert_equal(None, legacy_tile.get('frequency_caps'))
                num_tiles_checked += 1

        assert_equal(7, num_tiles_checked)