 def only_map_valid_ids(self, mapping_filename):
     vrt_ids = utils.all_id_lists(utils.get_json(utils.VRT_FILENAME))
     mapping_ids = utils.all_id_lists(utils.get_json(mapping_filename))
     for id_list in mapping_ids:
         self.assertIn(
             id_list, vrt_ids, 'invalid id in ' + mapping_filename + ' - ' +
             '.'.join(id_list))
 def all_vrt_ids_have_mapping(self, mapping_filename, key):
     vrt = utils.get_json(utils.VRT_FILENAME)
     mapping = utils.get_json(mapping_filename)
     keyed_mapping = utils.key_by_id(mapping['content'])
     for vrt_id_list in utils.all_id_lists(vrt, include_internal=False):
         self.assertTrue(
             utils.has_mapping(keyed_mapping, vrt_id_list, key),
             'no ' + key + ' mapping for ' + '.'.join(vrt_id_list))
 def validate_schema(self, schema_file, data_file):
     schema = utils.get_json(schema_file)
     data = utils.get_json(data_file)
     jsonschema.Draft4Validator.check_schema(schema)
     error = jsonschema.exceptions.best_match(
         jsonschema.Draft4Validator(schema).iter_errors(data))
     if error:
         raise error
 def setUp(self):
     self.vrt_versions = utils.all_versions(utils.VRT_FILENAME)
     self.last_tagged_version = max([
         Version.coerce(x) for x in self.vrt_versions.keys()
         if x != 'current'
     ])
     self.deprecated_json = utils.get_json(
         utils.DEPRECATED_MAPPING_FILENAME)
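
The validate_schema helper above follows the standard jsonschema pattern: check that the schema itself is well-formed, then report the single most relevant validation error. A standalone sketch of the same pattern with an inline schema and document instead of files (the schema and data here are made up for illustration):

import jsonschema

schema = {
    'type': 'object',
    'properties': {'id': {'type': 'string'}},
    'required': ['id'],
}
data = {'id': 123}  # invalid: 'id' should be a string

# raises SchemaError if the schema itself is malformed
jsonschema.Draft4Validator.check_schema(schema)

# best_match picks the most relevant error out of all validation errors
error = jsonschema.exceptions.best_match(
    jsonschema.Draft4Validator(schema).iter_errors(data))
if error:
    print('validation failed:', error.message)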
Example #5
    def get_request_proxy(self, url, host, types):
        """
        use proxy to send requests, and record the proxy cann't use
        @types 1:json, 0:html
        support failured retry
        """

        # refill the proxy pool if it is empty
        if not self.proxylist:
            self.initproxy()

        # 'https' URLs have 's' at index 4; pick a random proxy from the matching pool
        httptype = url[4] == 's'
        index = random.randint(
            0,
            len(self.proxylists if httptype else self.proxylist) - 1)
        if httptype:
            proxies = {'https': self.proxylists[index]}
        else:
            proxies = {'http': self.proxylist[index]}

        try:
            if types:
                json = get_json(url, proxies, host)
                if 'code' in json and json['code'] != 200:
                    ppap = self.retry(url, host, types)
                    if not ppap:
                        return False
                else:
                    return json
            else:
                html = get_html(url, proxies, host)
                if not html or 'code' in html:
                    ppap = self.retry(url, host, types)
                    if not ppap:
                        return False
                else:
                    return html
        except Exception:
            self.cannotuseip.append(proxies[self.typemap[httptype]])
            if httptype:
                if index < len(self.proxylists) and proxies[
                        'https'] == self.proxylists[index]:
                    self.proxylists.remove(proxies['https'])
            else:
                if index < len(self.proxylist
                               ) and proxies['http'] == self.proxylist[index]:
                    self.proxylist.remove(proxies['http'])
            ppap = self.retry(url, host, types)
            if not ppap:
                return False
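
get_request_proxy above delegates to self.retry, which is not part of this snippet. A minimal sketch of what such a helper might look like, assuming a bounded per-URL attempt counter; the retry_counts attribute and the max_attempts default are assumptions, not names from the original class:

    def retry(self, url, host, types, max_attempts=3):
        # Hypothetical helper: re-pick a proxy and try the request again,
        # giving up after max_attempts tries for the same URL.
        # self.retry_counts (a dict) is an assumed attribute, not from the snippet.
        attempts = self.retry_counts.get(url, 0)
        if attempts >= max_attempts:
            self.retry_counts.pop(url, None)
            return False
        self.retry_counts[url] = attempts + 1
        return self.get_request_proxy(url, host, types)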
Example #6
    def judgeurl(self, urls, times):
        """
        use /api/playlist to judge http; use /discover/playlist judge https
        1. don't timeout = 5
        2. response.result.tracks.size() != 1
        """

        http_type = urls[4] == 's'
        proxies = {self.typemap[http_type]: urls}

        test_url = ('https://music.163.com/discover/playlist/?order=hot&limit=35&offset=0'
                    if http_type else
                    'http://music.163.com/api/playlist/detail?id=432853362')
        if http_type:
            try:
                html = get_html(test_url, proxies, test_url[8:21])
                alist = html.find_all('a', class_='s-fc1')
                # the proxy is considered usable only if the page contains exactly 73 such links
                if len(alist) == 73:
                    self.canuseip.append(urls)
                else:
                    self.cannotuseip.append(urls)
            except Exception:
                self.cannotuseip.append(urls)
        else:
            try:
                data = get_json(test_url, proxies, test_url[7:20])
                result = data['result']
                tracks = result['tracks']
                # the test playlist is expected to contain exactly 56 tracks
                if len(tracks) == 56:
                    if times < 2:
                        self.judgeurl(urls, times + 1)
                    else:
                        self.canuseip.append(urls)
                else:
                    self.cannotuseip.append(urls)
            except Exception:
                self.cannotuseip.append(urls)
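
A hedged sketch of how judgeurl might be driven over a list of candidate proxies, one thread per candidate; the check_proxies name and the threading setup are assumptions, only judgeurl, canuseip and cannotuseip come from the snippet above:

    def check_proxies(self, candidates):
        # Hypothetical driver: judge every candidate proxy in its own thread
        # and return the ones that passed (collected in self.canuseip).
        import threading
        threads = [
            threading.Thread(target=self.judgeurl, args=(url, 0))
            for url in candidates
        ]
        for t in threads:
            t.start()
        for t in threads:
            t.join()
        return self.canuseip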
Example #7
import os
import sys
import json
from utils import utils
from artifacts import scw_artifact

artifact_json = utils.get_json(scw_artifact.OUTPUT_FILENAME)
repo_path = os.path.join(utils.THIRD_PARTY_MAPPING_DIR, utils.SCW_DIR,
                         utils.SCW_FILENAME)
print(os.path.abspath(repo_path))
repo_json = utils.get_json(repo_path)

sorted_artifact_json = json.dumps(artifact_json, sort_keys=True)
sorted_repo_json = json.dumps(repo_json, sort_keys=True)

if sorted_artifact_json == sorted_repo_json:
    print('SCW Document is valid!')
    sys.exit(0)
else:
    print(
        'SCW Document is invalid, copy the artifact to the remediation training'
    )
    sys.exit(1)
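
The equality check above relies on json.dumps(..., sort_keys=True), which canonicalizes dictionary key order but leaves list order significant; a standalone illustration:

import json

a = {'b': 2, 'a': [1, 2]}
b = {'a': [1, 2], 'b': 2}
c = {'a': [2, 1], 'b': 2}

# key order does not matter once sort_keys=True is applied
print(json.dumps(a, sort_keys=True) == json.dumps(b, sort_keys=True))  # True
# but the order of elements inside lists still does
print(json.dumps(a, sort_keys=True) == json.dumps(c, sort_keys=True))  # False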
 def test_artifact_loads_valid_json(self):
     self.assertTrue(utils.get_json(self.scw_artifact_path),
                     self.scw_artifact_path + ' is not valid JSON.')
from utils import utils
from artifacts import scw_artifact

url_mapping = {}
current_vrt = utils.get_json(utils.VRT_FILENAME)
scw_artifact.write_artifact_file(
  scw_artifact.generate_urls(current_vrt['content'], url_mapping)
)
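
utils.get_json is used throughout these snippets to load JSON from disk; its actual implementation is not shown here, but a minimal sketch of such a helper could look like this (an assumption, not the project's real code):

import json

def get_json(filename):
    # Hypothetical minimal version of the utils.get_json file loader
    # used in the snippets above.
    with open(filename, encoding='utf-8') as f:
        return json.load(f)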