Example #1
    def test_distribute(self):
        tiles_star = [
            {
                "imageURI": "data:image/png;base64,somedata",
                "enhancedImageURI": "data:image/png;base64,somemoredata",
                "url": "https://somewhere.com",
                "title": "Some Title",
                "type": "organic",
                "bgColor": "#FFFFFF"
            }
        ]

        tiles_ca = [
            {
                "imageURI": "data:image/png;base64,somedata",
                "url": "https://somewhere.com",
                "title": "Some Title",
                "type": "organic",
                "bgColor": "#FFFFFF"
            }
        ]

        data = ingest_links({"STAR/en-US": tiles_star}, self.channels[0].id)
        distribute(data, self.channels[0].id, True)
        # 6 files are uploaded, mirroring the artifact count from generate_artifacts
        assert_equal(6, self.key_mock.set_contents_from_string.call_count)

        self.key_mock.set_contents_from_string = Mock()
        data = ingest_links({
            "STAR/en-US": tiles_star,
            "CA/en-US": tiles_ca,
        }, self.channels[0].id)
        distribute(data, self.channels[0].id, True)
        # includes two more uploads: the locale data payload (for both versions)
        assert_equal(8, self.key_mock.set_contents_from_string.call_count)
Example #2
    def test_ingest_dbpool(self):
        """
        Test an ingestion of a large number of tiles that could use up connections to the db
        """
        with open(self.get_fixture_path("2014-10-30.ja-pt.json"), 'r') as f:
            tiles = json.load(f)
        ingest_links(tiles, self.channels[0].id)
        num_tiles = self.env.db.session.query(Tile).count()
        assert(num_tiles > 30)
Example #3
    def test_ingest_no_duplicates(self):
        """
        Test that there is no duplication when ingesting tiles
        """
        with open(self.get_fixture_path("tiles_duplicates.json"), 'r') as f:
            tiles = json.load(f)

        num_tiles = self.env.db.session.query(Tile).count()
        ingest_links(tiles, self.channels[0].id)
        new_num_tiles = self.env.db.session.query(Tile).count()
        assert_equal(num_tiles + 1, new_num_tiles)
Example #4
    def test_error_mid_ingestion(self):
        """
        Test an error happening mid-ingestion
        """
        tiles_star = [
            {
                "imageURI": "data:image/png;base64,somedata",
                "url": "https://somewhere.com",
                "title": "Some Title",
                "type": "organic",
                "bgColor": "#FFFFFF"
            },
            {
                "imageURI": "data:image/png;base64,someotherdata",
                "url": "https://somewhereelse.com",
                "title": "Some Other Title",
                "type": "organic",
                "bgColor": "#FFFFFF"
            },
        ]
        tile_count_before = self.env.db.session.query(Tile).count()

        import splice.ingest
        insert_function = splice.ingest.insert_tile

        # put counts in a dict to get around python's
        # non-local scope restrictions on variables
        # for access in mock_ingest
        counts = {
            'call': 0,
            'exception_at': 2,
        }

        def mock_ingest(*args, **kwargs):
            counts['call'] += 1
            if counts['call'] < counts['exception_at']:
                return insert_function(*args, **kwargs)
            else:
                raise Exception('Boom')

        function_mock = Mock(side_effect=mock_ingest)
        splice.ingest.insert_tile = function_mock

        ingest_links({"STAR/en-US": tiles_star}, self.channels[0].id)
        tile_count_after = self.env.db.session.query(Tile).count()

        # only one has been inserted out of two
        assert_equal(1, tile_count_after - tile_count_before)

        # put the module function back to what it was
        splice.ingest.insert_tile = insert_function
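
A side note on the counts dict used above: Python 2 closures cannot rebind names from an enclosing function scope (there is no nonlocal statement), so the test mutates a shared dict instead. A minimal standalone sketch of the same pattern, with illustrative names not taken from splice:

    def make_counter():
        # mutate a container instead of rebinding a name, which a
        # Python 2 closure cannot do for enclosing-scope variables
        counts = {'call': 0}

        def bump():
            counts['call'] += 1
            return counts['call']

        return bump

    counter = make_counter()
    assert counter() == 1
    assert counter() == 2

Under Python 3 the same effect is available directly with nonlocal.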
Example #5
    def test_image_artifact_hash(self):
        """
        Test that identical images are deduplicated and the correct number of image artifacts is produced
        """
        with open(self.get_fixture_path("valid_tile.json"), 'r') as f:
            fixture = json.load(f)

        tile_1 = fixture["STAR/en-US"][0]

        tile_2 = copy.deepcopy(tile_1)
        tile_2['title'] = 'Some Other Title'

        tile_3 = copy.deepcopy(tile_1)
        tile_3['title'] = 'Yet Another Title'

        tiles = {'STAR/en-US': [tile_1, tile_2, tile_3]}
        data = ingest_links(tiles, self.channels[0].id)
        artifacts = generate_artifacts(data, self.channels[0].name, True)

        # even if there are 3 tiles, there should only be 2 images
        image_count = 0
        for a in artifacts:
            mime = a.get('mime')
            if mime and mime == 'image/png':
                image_count += 1

        assert_equal(2, image_count)
Example #6
    def test_distribute_suggested(self):
        tiles_star = [
            {
                "imageURI": "data:image/png;base64,somedata",
                "enhancedImageURI": "data:image/png;base64,somemoredata",
                "url": "https://somewhere.com",
                "title": "Some Title",
                "type": "organic",
                "bgColor": "#FFFFFF",
                "frecent_sites": ['http://xyz.com', 'http://abc.com']
            }
        ]

        tiles_ca = [
            {
                "imageURI": "data:image/png;base64,somedata",
                "url": "https://somewhere.com",
                "title": "Some Other Title",
                "type": "organic",
                "bgColor": "#FFFFFF"
            }
        ]

        data = ingest_links({
            "STAR/en-US": tiles_star,
            "CA/en-US": tiles_ca,
        }, self.channels[0].id)
        distribute(data, self.channels[0].id, True)

        # in this case, the fourth call (index 3) should be the mock of the s3 upload for the 'ag' index
        frecents = json.loads(self.key_mock.set_contents_from_string.mock_calls[3][1][0])['suggested'][0]['frecent_sites']
        assert_equal(frecents, ['http://abc.com', 'http://xyz.com'])
Example #7
    def test_frequency_caps(self):
        """
        A simple test of frequency caps
        """
        tile = {
            "imageURI": "data:image/png;base64,somedata",
            "url": "https://somewhere.com",
            "title": "Some Title",
            "type": "organic",
            "bgColor": "#FFFFFF",
            "frequency_caps": {
                "daily": 3,
                "total": 10
            }
        }
        c = self.env.db.session.query(Adgroup).count()
        assert_equal(30, c)
        data = ingest_links({"US/en-US": [tile]}, self.channels[0].id)
        assert_equal(1, len(data["US/en-US"]))
        c = self.env.db.session.query(Adgroup).count()
        assert_equal(31, c)

        tile = self.env.db.session.query(Tile).filter(Tile.id == 31).one()
        ag = self.env.db.session.query(Adgroup).filter(Adgroup.id == 31).one()
        assert_equal(tile.adgroup_id, ag.id)
        assert_equal(ag.frequency_cap_daily, 3)
        assert_equal(ag.frequency_cap_total, 10)
Example #8
    def test_start_end_dates_optional(self):
        """
        Ensure that start/end dates are optional
        """
        tile_no_start = {
            "imageURI": "data:image/png;base64,somedata",
            "url": "https://somewhere.com",
            "title": "Some Title",
            "type": "organic",
            "bgColor": "#FFFFFF",
            "time_limits": {
                "end": "2014-01-31T00:00:00.000"
            }
        }

        tile_no_end = {
            "imageURI": "data:image/png;base64,somedata",
            "url": "https://somewhereelse.com",
            "title": "Some Other Title",
            "type": "organic",
            "bgColor": "#FFFFFF",
            "time_limits": {
                "start": "2014-01-12T00:00:00.000",
            }
        }

        tile_empty_limits = {
            "imageURI": "data:image/png;base64,somedata",
            "url": "https://yetsomewhereelse.com",
            "title": "Yet Some Other Title",
            "type": "organic",
            "bgColor": "#FFFFFF",
            "time_limits": {}
        }

        dist = {"US/en-US": [tile_no_start, tile_no_end, tile_empty_limits]}
        c = self.env.db.session.query(Adgroup).count()
        assert_equal(30, c)
        data = ingest_links(dist, self.channels[0].id)
        assert_equal(len(dist["US/en-US"]), len(data["US/en-US"]))
        c = self.env.db.session.query(Adgroup).count()
        assert_equal(30 + len(dist["US/en-US"]), c)

        tile_tested = 0
        for i, tile_def in enumerate(dist["US/en-US"]):
            obj_id = 30 + 1 + i
            tile = self.env.db.session.query(Tile).filter(Tile.id == obj_id).one()
            ag = self.env.db.session.query(Adgroup).filter(Adgroup.id == obj_id).one()
            assert_equal(tile.adgroup_id, ag.id)

            if ag.start_date:
                assert_equal(ag.start_date, tile_def['time_limits']['start'])
                tile_tested += 1

            if ag.end_date:
                assert_equal(ag.end_date, tile_def['time_limits']['end'])
                tile_tested += 1

        # one tile not tested because it has neither start nor end dates
        assert_equal(len(dist["US/en-US"]) - 1, tile_tested)
Example #9
    def test_start_end_dates(self):
        """
        a simple start/end date tile
        """
        tile_no_tz = {
            "imageURI": "data:image/png;base64,somedata",
            "url": "https://somewhere.com",
            "title": "Some Title",
            "type": "organic",
            "bgColor": "#FFFFFF",
            "time_limits": {
                "start": "2014-01-12T00:00:00.000",
                "end": "2014-01-31T00:00:00.000"
            }
        }

        dist = {"US/en-US": [tile_no_tz]}
        c = self.env.db.session.query(Adgroup).count()
        assert_equal(30, c)
        data = ingest_links(dist, self.channels[0].id)
        assert_equal(1, len(data["US/en-US"]))
        c = self.env.db.session.query(Adgroup).count()
        assert_equal(31, c)

        tile = self.env.db.session.query(Tile).filter(Tile.id == 31).one()
        ag = self.env.db.session.query(Adgroup).filter(Adgroup.id == 31).one()
        assert_equal(tile.adgroup_id, ag.id)

        assert_equal(ag.start_date, dist["US/en-US"][0]['time_limits']['start'])
        assert_equal(ag.end_date, dist["US/en-US"][0]['time_limits']['end'])
        assert_equal(ag.start_date_dt, du_parse(dist["US/en-US"][0]['time_limits']['start']))
        assert_equal(ag.end_date_dt, du_parse(dist["US/en-US"][0]['time_limits']['end']))
Example #10
def ingest_tiles(in_file, out_path, console_out, deploy_flag, verbose, *args, **kwargs):
    """
    Load a set of links for all country/locale combinations into the data warehouse and optionally deploy
    """
    if verbose:
        logger = setup_command_logger(logging.DEBUG)
    else:
        logger = setup_command_logger(logging.INFO)

    rawdata = None
    with open(in_file, 'r') as f:
        rawdata = json.load(f)

    from splice.ingest import ingest_links, deploy, IngestError

    try:
        new_data = ingest_links(rawdata)

        if console_out:
            print json.dumps(new_data, sort_keys=True, indent=2)

        if out_path:
            directory, _ = os.path.split(out_path)
            if not os.path.exists(directory):
                os.makedirs(directory)

            with open(out_path, "w") as f:
                json.dump(new_data, f, sort_keys=True, indent=2)
                logger.info("wrote {0}".format(out_path))

        if deploy_flag:
            deploy(new_data)
    except IngestError, e:
        raise InvalidCommand(e.message)
Example #11
    def test_distribute_adgroup_explanation(self):
        tile_en_us = {
            "imageURI": "data:image/png;base64,somedata",
            "url": "https://somewhere_else.com",
            "title": "Some Title US",
            "type": "organic",
            "bgColor": "#FFFFFF",
            "adgroup_name": "Teçhnology".decode('utf-8'),
            "explanation": "推荐 for %1$S fans who also like %2$S".decode('utf-8')
        }

        tiles_en_us_suggested = {
            "imageURI": "data:image/png;base64,somedata",
            "url": "https://somewhere.com",
            "title": "Some Title US Suggested",
            "type": "organic",
            "bgColor": "#FFFFFF",
            "frecent_sites": ['http://xyz.com', 'http://abc.com'],
            "adgroup_name": "Technology",
            "explanation": "Suggested for %1$S fans who also like %2$S"
        }

        distribution = {
            "US/en-US": [tile_en_us, tiles_en_us_suggested],
            "GB/en-US": [tile_en_us],
        }

        data = ingest_links(distribution, self.channels[0].id)
        distribute(data, self.channels[0].id, True)
        # one image, 2 AG distributions, 2 legacy distributions, one index, one input distribution
        assert_equal(7, self.key_mock.set_contents_from_string.call_count)

        num_tiles_checked = 0
        for i, key in enumerate(self.key_names):
            ag = AG_DIST_PATHNAME.match(key)
            leg = LEGACY_DIST_PATHNAME.match(key)
            if ag:
                country_locale, locale = ag.groups()
                data = json.loads(self.key_contents[i])
                for tile in data['directory']:
                    # index 0 (the non-suggested tile) expected, present for both country/locales
                    assert_equal(distribution[country_locale][0]['adgroup_name'], tile.get('adgroup_name'))
                    assert_equal(distribution[country_locale][0]['explanation'], tile.get('explanation'))
                    num_tiles_checked += 1
                for tile in data['suggested']:
                    # index 1 expected, only for US/en-US
                    assert_equal(distribution[country_locale][1]['adgroup_name'], tile.get('adgroup_name'))
                    assert_equal(distribution[country_locale][1]['explanation'], tile.get('explanation'))
                    num_tiles_checked += 1

            elif leg:
                country_locale, locale = leg.groups()
                data = json.loads(self.key_contents[i])
                assert_equal(1, len(data[locale]))
                tile = data[locale][0]
                assert_equal(None, tile.get('adgroup_name'))
                assert_equal(None, tile.get('explanation'))
                num_tiles_checked += 1

        assert_equal(5, num_tiles_checked)
Example #12
    def test_ingest_suggested_sites(self):
        """
        Test ingesting suggested site tiles; re-ingesting the same data must be idempotent
        """
        with open(self.get_fixture_path("tiles_suggested.json"), 'r') as f:
            tiles = json.load(f)

        num_tiles = self.env.db.session.query(Tile).count()
        data = ingest_links(tiles, self.channels[0].id)
        assert_equal(len(data['STAR/en-US']), 5)
        new_num_tiles = self.env.db.session.query(Tile).count()
        assert_equal(num_tiles + 4, new_num_tiles)

        # ingesting the same thing a second time should be idempotent
        data = ingest_links(tiles, self.channels[0].id)
        assert_equal(len(data['STAR/en-US']), 5)
        new_num_tiles = self.env.db.session.query(Tile).count()
        assert_equal(num_tiles + 4, new_num_tiles)
Example #13
    def test_explanation_template_sanitization(self):
        # test templates with html tags
        tile = self._make_dist({
            "adgroup_name": "<script>Technology</script>",
            "explanation": "<br/>Suggested for %1$S, %2$S<br/>"})
        ingest_links(tile, self.channels[0].id)
        ag = self.env.db.session.query(Adgroup).filter(Adgroup.id == 31).one()
        assert_equal(ag.name, "Technology")
        assert_equal(ag.explanation, "Suggested for %1$S, %2$S")

        # test templates with tags only and special characters
        tile = self._make_dist({
            "title": "Some Another Title",
            "adgroup_name": "<script><script/>",
            "explanation": "< Suggested for %1$S, %2$S >"})
        ingest_links(tile, self.channels[0].id)
        ag = self.env.db.session.query(Adgroup).filter(Adgroup.id == 32).one()
        assert_equal(ag.name, None)
        assert_equal(ag.explanation, "&lt; Suggested for %1$S, %2$S &gt;")
Example #14
    def test_adgroups_channel_id_uniqueness(self):
        """
        Test that channel_ids in adgroups are part of what makes Tiles unique
        """
        tile = {
            "imageURI": "data:image/png;base64,somedata",
            "url": "https://somewhere.com",
            "title": "Some Title",
            "type": "organic",
            "bgColor": "#FFFFFF",
        }

        dist = {"US/en-US": [tile]}
        c = self.env.db.session.query(Adgroup).count()
        assert_equal(30, c)
        ingest_links(dist, self.channels[0].id)
        ingest_links(dist, self.channels[1].id)
        c = self.env.db.session.query(Adgroup).count()
        assert_equal(32, c)
Example #15
    def test_start_end_dates_uniqueness(self):
        """
        Test that start/end dates are part of what makes tiles unique
        """
        tile = {
            "imageURI": "data:image/png;base64,somedata",
            "url": "https://somewhere.com",
            "title": "Some Title",
            "type": "organic",
            "bgColor": "#FFFFFF",
            "time_limits": {
                "start": "2014-01-12T00:00:00.000",
                "end": "2014-01-31T00:00:00.000"
            }
        }

        tile_no_start = {
            "imageURI": "data:image/png;base64,somedata",
            "url": "https://somewhere.com",
            "title": "Some Title",
            "type": "organic",
            "bgColor": "#FFFFFF",
            "time_limits": {
                "end": "2014-01-31T00:00:00.000"
            }
        }

        tile_no_end = {
            "imageURI": "data:image/png;base64,somedata",
            "url": "https://somewhere.com",
            "title": "Some Title",
            "type": "organic",
            "bgColor": "#FFFFFF",
            "time_limits": {
                "start": "2014-01-12T00:00:00.000",
            }
        }

        tile_empty_limits = {
            "imageURI": "data:image/png;base64,somedata",
            "url": "https://somewhere.com",
            "title": "Some Tile",
            "type": "organic",
            "bgColor": "#FFFFFF",
            "time_limits": {}
        }

        dist = {"US/en-US": [tile, tile_no_start, tile_no_end, tile_empty_limits]}
        c = self.env.db.session.query(Adgroup).count()
        assert_equal(30, c)
        data = ingest_links(dist, self.channels[0].id)
        assert_equal(len(dist["US/en-US"]), len(data["US/en-US"]))
        c = self.env.db.session.query(Adgroup).count()
        assert_equal(30 + len(dist["US/en-US"]), c)
Example #16
    def test_id_not_overwritten(self):
        """
        Test that a tile keeps its existing id when it is ingested again
        """
        tiles_star = [
            {
                "imageURI": "data:image/png;base64,somedata",
                "url": "https://somewhere.com",
                "title": "Some Title",
                "type": "organic",
                "bgColor": "#FFFFFF"
            }
        ]

        data = ingest_links({"STAR/en-US": tiles_star}, self.channels[0].id)
        directory_id = data["STAR/en-US"][0]["directoryId"]
        assert_equal(31, directory_id)

        data = ingest_links({"STAR/en-US": tiles_star}, self.channels[0].id)
        directory_id = data["STAR/en-US"][0]["directoryId"]
        assert_equal(31, directory_id)
Example #17
def all_tiles():

    try:
        data = request.get_json(force=True)
        new_data = ingest_links(data)
        urls = deploy(new_data)
    except ValidationError, e:
        errors = []
        error = {"path": e.path[0], "msg": e.message}
        errors.append(error)
        env.log("VALIDATION_ERROR path:{0} msg:{1}".format(e.path[0], e.message), level=logging.ERROR, name="client_error")
        return jsonify({"err": errors}), 400
Example #18
    def test_generate_artifacts(self):
        """
        Tests that the correct number of artifacts are generated
        """
        with open(self.get_fixture_path("tiles_suggested.json"), 'r') as f:
            fixture = json.load(f)

        tile = fixture["STAR/en-US"][4]

        data = ingest_links({"STAR/en-US": [tile]}, self.channels[0].id)
        artifacts = generate_artifacts(data, self.channels[0].name, True)
        # tile index, v2, v3 and 2 image files are generated
        assert_equal(6, len(artifacts))

        data = ingest_links({
            "STAR/en-US": [tile],
            "CA/en-US": [tile]
        }, self.channels[0].id)
        artifacts = generate_artifacts(data, self.channels[0].name, True)
        # includes two more files: the locale data payload for each version
        assert_equal(8, len(artifacts))
Example #19
    def test_image_content(self):
        with open(self.get_fixture_path("valid_tile.json"), 'r') as f:
            tiles = json.load(f)
        data = ingest_links(tiles, self.channels[0].id)
        artifacts = generate_artifacts(data, self.channels[0].name, True)

        found_image = False
        for file in artifacts:
            if "mime" in file:
                found_image = True
                assert_equal(file["mime"], magic.from_buffer(file["data"], mime=True))

        assert_true(found_image)
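
The assertion above uses python-magic's magic.from_buffer(buf, mime=True), which sniffs a MIME type from raw bytes. As a hedged sketch (not splice's actual parsing code), a tile's imageURI data URI could be decoded and checked the same way; the helper name mime_of_data_uri is illustrative:

    import base64
    import magic

    def mime_of_data_uri(data_uri):
        # split "data:image/png;base64,<payload>" into metadata and payload
        meta, payload = data_uri.split(',', 1)
        raw = base64.b64decode(payload)
        # let libmagic infer the MIME type from the decoded bytes
        return magic.from_buffer(raw, mime=True)

Comparing that result against the declared data:image/<type> prefix is one way to catch mismatched or corrupt image payloads.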
Example #20
    def test_deploy_always_generates_tile_index(self):
        """A tiles index file should always be generated"""

        # this is a dict, because of a quirk in python's namespacing/scoping
        # https://docs.python.org/2/tutorial/classes.html#python-scopes-and-namespaces
        index_uploaded = {'count': 0}

        def key_set_name(name):
            if name == "{0}_tile_index.v3.json".format(self.channels[0].name):
                index_uploaded['count'] += 1
        name_mock = PropertyMock(side_effect=key_set_name)
        type(self.key_mock).name = name_mock

        with open(self.get_fixture_path("mozilla-tiles.fennec.json"), 'r') as f:
            tiles = json.load(f)

        data = ingest_links(tiles, self.channels[0].id)
        distribute(data, self.channels[0].id, True)
        assert_equal(1, index_uploaded['count'])

        data = ingest_links(tiles, self.channels[0].id)
        distribute(data, self.channels[0].id, True)

        assert_equal(2, index_uploaded['count'])
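
The PropertyMock trick above works because a PropertyMock attached to a mock's type acts as a data descriptor: every read of key.name and every assignment to it calls the PropertyMock, so key_set_name sees each name the code under test assigns. A small standalone illustration, with illustrative names:

    from mock import Mock, PropertyMock

    seen = []
    key = Mock()
    # attach to the mock's type (each Mock instance gets its own subclass),
    # so attribute assignments are routed through the PropertyMock
    type(key).name = PropertyMock(side_effect=lambda *args: seen.append(args))

    key.name = "foo_tile_index.v3.json"
    assert seen == [("foo_tile_index.v3.json",)]

This is why the test can count tile index uploads by watching assignments to the key's name rather than by inspecting upload call arguments.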
Example #21
    def test_malformed_data_uri_meta(self):
        """
        Tests that a malformed data uri declaration is rejected
        """
        tiles_star = [
            {
                "imageURI": "data:image/somedata",
                "url": "https://somewhere.com",
                "title": "Some Title",
                "type": "organic",
                "bgColor": "#FFFFFF"
            }
        ]

        data = ingest_links({"STAR/en-US": tiles_star}, self.channels[0].id)
        assert_raises(IngestError, generate_artifacts, data, self.channels[0].name, True)
Example #22
    def test_sorted_suggested_sites(self):
        """
        Ensure suggested sites (frecent_sites) are deduplicated and sorted
        """
        tile = {
            "imageURI": "data:image/png;base64,somedata",
            "url": "https://somewhere.com",
            "title": "Some Title",
            "type": "organic",
            "bgColor": "#FFFFFF",
            "frecent_sites": ["http://lmnop.org", "http://def.com", "http://abc.com", "http://def.com", "https://xyz.com"]
        }
        data = ingest_links({"CA/en-US": [tile]}, self.channels[0].id)
        assert_equal(1, len(data["CA/en-US"]))
        assert_equal(data["CA/en-US"][0]['frecent_sites'],
                     ["http://abc.com", "http://def.com", "http://lmnop.org", "https://xyz.com"])
Example #23
    def test_unknown_mime_type(self):
        """
        Tests that an unknown mime type is rejected
        """
        tiles_star = [
            {
                "imageURI": "data:image/weirdimage;base64,somedata",
                "url": "https://somewhere.com",
                "title": "Some Title",
                "type": "organic",
                "bgColor": "#FFFFFF"
            }
        ]

        data = ingest_links({"STAR/en-US": tiles_star}, self.channels[0].id)
        assert_raises(IngestError, generate_artifacts, data, self.channels[0].name, True)
Example #24
    def test_id_creation(self):
        """
        Test an id is created for a valid tile
        """
        tile = {
            "imageURI": "data:image/png;base64,somedata",
            "url": "https://somewhere.com",
            "title": "Some Title",
            "type": "organic",
            "bgColor": "#FFFFFF"
        }
        data = ingest_links({"STAR/en-US": [tile]}, self.channels[0].id)
        directory_id = data["STAR/en-US"][0]["directoryId"]

        # the biggest ID is 30 - next one should be 31
        assert_equal(31, directory_id)
Example #25
def deploy_scheduled(console_out, deploy_flag, leniency, verbose, quiet, *args,
                     **kwargs):
    """
    Find scheduled distributions and deploy
    """
    if verbose:
        logger = setup_command_logger(logging.DEBUG)
    elif quiet:
        logger = setup_command_logger(logging.ERROR)
    else:
        logger = setup_command_logger(logging.INFO)

    from splice.queries import get_scheduled_distributions, unschedule_distribution
    import requests

    dt = datetime.utcnow()
    distributions = get_scheduled_distributions(leniency, dt)
    logger.info("{0} - found {1} distributions".format(dt, len(distributions)))

    dist_data = []
    for dist in distributions:
        logger.info("fetching {0}".format(dist.url))
        r = requests.get(dist.url)
        if r.status_code == 200:
            dist_data.append((r.json(), dist.channel_id, dist.id))
        else:
            logger.error("FETCH_ERROR status_code:{0} url:{1}".format(
                r.status_code, dist.url))

    from splice.ingest import ingest_links, distribute, IngestError

    if deploy_flag:
        for rawdata, channel_id, dist_id in dist_data:
            try:
                new_data = ingest_links(rawdata, channel_id)

                if console_out:
                    print json.dumps(new_data, sort_keys=True, indent=2)

                distribute(new_data, channel_id, deploy_flag)
                unschedule_distribution(dist_id)
            except IngestError, e:
                raise InvalidCommand(e.message)
            except:
Example #26
def deploy_scheduled(console_out, deploy_flag, leniency, verbose, quiet, *args, **kwargs):
    """
    Find scheduled distributions and deploy
    """
    if verbose:
        logger = setup_command_logger(logging.DEBUG)
    elif quiet:
        logger = setup_command_logger(logging.ERROR)
    else:
        logger = setup_command_logger(logging.INFO)

    from splice.queries import get_scheduled_distributions, unschedule_distribution
    import requests

    dt = datetime.utcnow()
    distributions = get_scheduled_distributions(leniency, dt)
    logger.info("{0} - found {1} distributions".format(dt, len(distributions)))

    dist_data = []
    for dist in distributions:
        logger.info("fetching {0}".format(dist.url))
        r = requests.get(dist.url)
        if r.status_code == 200:
            dist_data.append((r.json(), dist.channel_id, dist.id))
        else:
            logger.error("FETCH_ERROR status_code:{0} url:{1}".format(r.status_code, dist.url))

    from splice.ingest import ingest_links, distribute, IngestError

    if deploy_flag:
        for rawdata, channel_id, dist_id in dist_data:
            try:
                new_data = ingest_links(rawdata, channel_id)

                if console_out:
                    print json.dumps(new_data, sort_keys=True, indent=2)

                distribute(new_data, channel_id, deploy_flag)
                unschedule_distribution(dist_id)
            except IngestError, e:
                raise InvalidCommand(e.message)
            except:
Example #27
    def test_generate_artifacts_tile_count(self):
        """
        Tests that the correct number of tiles are produced
        """

        with open(self.get_fixture_path('mozilla-tiles.fennec.sg.json'), 'r') as f:
            tiles = json.load(f)

        data = ingest_links(tiles, self.channels[0].id)
        artifacts = generate_artifacts(data, self.channels[0].name, True)

        assertions_run = False
        for a in artifacts:
            m = DESKTOP_LOCALE_DISTRO_PATTERN.match(a['key'])
            if m:
                country_locale = m.groups()[0]
                distro_data = json.loads(a['data'])
                assert_equal(len(tiles[country_locale]) - 1, len(distro_data['directory']))
                assert_equal(1, len(distro_data['suggested']))
                assertions_run = True
        assert(assertions_run)
Example #28
def load_links(in_file, country_code, channel_id, out_path, console_out,
               verbose, old_format, *args, **kwargs):
    """
    Load a set of links into the data warehouse
    """
    if verbose:
        logger = setup_command_logger(logging.DEBUG)
    else:
        logger = setup_command_logger(logging.INFO)

    rawdata = None
    with open(in_file, 'r') as f:
        rawdata = json.load(f)

    from splice.ingest import ingest_links, IngestError

    try:
        locale = rawdata.keys()[0]
        country_locale_str = "/".join([country_code, locale])

        new_data = ingest_links({country_locale_str: rawdata[locale]},
                                channel_id)

        if old_format:
            new_data = new_data[new_data.keys()[0]]

        if console_out:
            print json.dumps(new_data, sort_keys=True, indent=2)

        if out_path:
            directory, _ = os.path.split(out_path)
            if not os.path.exists(directory):
                os.makedirs(directory)

            with open(out_path, "w") as f:
                json.dump(new_data, f, sort_keys=True, indent=2)
                logger.info("wrote {0}".format(out_path))

    except IngestError, e:
        raise InvalidCommand(e.message)
Example #29
    def test_id_not_duplicated(self):
        """
        Test that distinct tiles get distinct ids and identical tiles reuse the same id
        """
        tiles_star = [
            {
                "imageURI": "data:image/png;base64,somedata",
                "url": "https://somewhere.com",
                "title": "Some Title",
                "type": "organic",
                "bgColor": "#FFFFFF"
            },
            {
                "imageURI": "data:image/png;base64,someotherdata",
                "url": "https://somewhereelse.com",
                "title": "Some Other Title",
                "type": "organic",
                "bgColor": "#FFFFFF"
            },
        ]

        tiles_ca = [
            {
                "imageURI": "data:image/png;base64,somedata",
                "url": "https://somewhere.com",
                "title": "Some Title",
                "type": "organic",
                "bgColor": "#FFFFFF"
            }
        ]
        data = ingest_links({
            "STAR/en-US": tiles_star,
            "CA/en-US": tiles_ca,
        }, self.channels[0].id)
        directory_id_star = data["STAR/en-US"][0]["directoryId"]
        directory_id_ca = data["CA/en-US"][0]["directoryId"]
        assert_equal(31, directory_id_star)
        assert_not_equal(data["STAR/en-US"][1]["directoryId"], directory_id_star)
        assert_equal(directory_id_ca, directory_id_star)
Example #30
def ingest_tiles(in_file, channel_id, out_path, console_out, deploy_flag,
                 verbose, *args, **kwargs):
    """
    Load a set of links for all country/locale combinations into the data warehouse and optionally deploy
    """
    if verbose:
        logger = setup_command_logger(logging.DEBUG)
    else:
        logger = setup_command_logger(logging.INFO)

    rawdata = None
    with open(in_file, 'r') as f:
        rawdata = json.load(f)

    from splice.ingest import ingest_links, distribute, IngestError

    try:
        new_data = ingest_links(rawdata, channel_id)

        if console_out:
            print json.dumps(new_data, sort_keys=True, indent=2)

        if out_path:
            directory, _ = os.path.split(out_path)
            if not os.path.exists(directory):
                os.makedirs(directory)

            with open(out_path, "w") as f:
                json.dump(new_data, f, sort_keys=True, indent=2)
                logger.info("wrote {0}".format(out_path))

        if deploy_flag:
            logger.info("Distributing AND Deploying data")
        else:
            logger.info("Distributing data (NO deploy)")

        distribute(new_data, channel_id, deploy_flag)
    except IngestError, e:
        raise InvalidCommand(e.message)
Example #31
    def test_suggested_sites(self):
        """
        just a simple suggested site tile
        """
        tile = {
            "imageURI": "data:image/png;base64,somedata",
            "url": "https://somewhere.com",
            "title": "Some Title",
            "type": "organic",
            "bgColor": "#FFFFFF",
            "frecent_sites": ["http://abc.com", "https://xyz.com"]
        }
        c = self.env.db.session.query(Adgroup).count()
        assert_equal(30, c)
        c = self.env.db.session.query(AdgroupSite).count()
        assert_equal(0, c)
        data = ingest_links({"US/en-US": [tile]}, self.channels[0].id)
        assert_equal(1, len(data["US/en-US"]))
        c = self.env.db.session.query(Adgroup).count()
        assert_equal(31, c)
        c = self.env.db.session.query(AdgroupSite).count()
        assert_equal(2, c)
Example #32
def load_links(in_file, country_code, channel_id, out_path, console_out, verbose, old_format, *args, **kwargs):
    """
    Load a set of links into the data warehouse
    """
    if verbose:
        logger = setup_command_logger(logging.DEBUG)
    else:
        logger = setup_command_logger(logging.INFO)

    rawdata = None
    with open(in_file, 'r') as f:
        rawdata = json.load(f)

    from splice.ingest import ingest_links, IngestError

    try:
        locale = rawdata.keys()[0]
        country_locale_str = "/".join([country_code, locale])

        new_data = ingest_links({country_locale_str: rawdata[locale]}, channel_id)

        if old_format:
            new_data = new_data[new_data.keys()[0]]

        if console_out:
            print json.dumps(new_data, sort_keys=True, indent=2)

        if out_path:
            directory, _ = os.path.split(out_path)
            if not os.path.exists(directory):
                os.makedirs(directory)

            with open(out_path, "w") as f:
                json.dump(new_data, f, sort_keys=True, indent=2)
                logger.info("wrote {0}".format(out_path))

    except IngestError, e:
        raise InvalidCommand(e.message)
Example #33
    def test_explanation(self):
        explanation = "Suggested for %1$S fans who visit site %2$S"
        tile = {
            "imageURI": "data:image/png;base64,somedata",
            "url": "https://somewhere.com",
            "title": "Some Title",
            "type": "organic",
            "bgColor": "#FFFFFF",
            "adgroup_name": "Technology",
            "explanation": explanation,
        }
        c = self.env.db.session.query(Adgroup).count()
        assert_equal(30, c)
        data = ingest_links({"US/en-US": [tile]}, self.channels[0].id)
        assert_equal(1, len(data["US/en-US"]))
        c = self.env.db.session.query(Adgroup).count()
        assert_equal(31, c)

        tile = self.env.db.session.query(Tile).filter(Tile.id == 31).one()
        ag = self.env.db.session.query(Adgroup).filter(Adgroup.id == 31).one()
        assert_equal(tile.adgroup_id, ag.id)
        assert_equal(ag.name, "Technology")
        assert_equal(ag.explanation, explanation)
Example #34
def all_tiles():
    deploy_flag = request.args.get('deploy')
    channel_id = request.args.get('channelId')
    scheduled_ts = request.args.get('scheduledTS')
    scheduled_dt = None

    deploy = deploy_flag == '1'

    if channel_id is None:
        msg = "channel_id not provided"
        env.log("INGEST_ERROR msg: {0}".format(msg))
        return jsonify({"err": [{"msg": msg}]}), 400

    try:
        data = request.get_json(force=True)
        new_data = ingest_links(data, channel_id)
        if scheduled_ts:
            # scheduled_ts assumed to be in seconds
            scheduled_dt = datetime.utcfromtimestamp(int(scheduled_ts))
        urls = distribute(new_data, channel_id, deploy, scheduled_dt)
    except NoResultFound, e:
        msg = "channel_id {0} does not exist".format(channel_id)
        env.log("INGEST_ERROR msg: {0}".format(msg))
        return jsonify({"err": [{"msg": msg}]}), 404