Example #1
    def _build_id(self, id):
        url = "https://treeherder.mozilla.org/api/project/"+self.repo+"/jobs/?count=2000&result_set_id="+str(id)+"&return_type=list"
        data = utils.fetch_json(url)
        # With return_type=list each job is a flat list, so fields are positional
        builds = [i for i in data["results"] if i[1] == "buildbot"] # build system type
        builds = [i for i in builds if i[25] == "B" or i[25] == "Bo"] # "B"/"Bo" symbols mark build jobs
        builds = [i for i in builds if i[13] == self.treeherder_platform()] # platform
        builds = [i for i in builds if i[5] == "opt"] # opt / debug / pgo

        assert len(builds) == 1

        url = "https://treeherder.mozilla.org/api/project/mozilla-inbound/job-log-url/?job_id="+str(builds[0][10])
        data = utils.fetch_json(url)
        return data[0]["url"].split("/")[-2]
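
Every example on this page calls a fetch_json helper that downloads a URL and returns the parsed JSON. Its implementation is not shown here; a minimal sketch, assuming the requests library and the None-on-failure behavior that Example #5 checks for (the cache_ttl parameter is likewise an assumption, based on the extra argument passed in Examples #7 and #19):

import requests

def fetch_json(url, cache_ttl=None):
    # Sketch only: GET the URL and return the parsed JSON body, or None.
    # cache_ttl (seconds) is accepted but ignored here; some callers pass
    # one, which suggests the real helper caches responses.
    try:
        response = requests.get(url, timeout=30)
        response.raise_for_status()
        return response.json()
    except (requests.RequestException, ValueError):
        return None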
Example #2
    def _build_id(self, id):
        url = "https://treeherder.mozilla.org/api/project/"+self.repo+"/jobs/?count=2000&result_set_id="+str(id)+""
        data = utils.fetch_json(url)
        builds = [i for i in data["results"] if i["build_system_type"] == "buildbot"] # buildbot jobs only
        builds = [i for i in builds if i["job_type_symbol"] == "B" or i["job_type_symbol"] == "Bo"] # "B"/"Bo" symbols mark build jobs
        builds = [i for i in builds if i["platform"] == self.treeherder_platform()] # platform
        builds = [i for i in builds if i["platform_option"] == "opt"] # opt / debug / pgo

        assert len(builds) == 1

        url = "https://treeherder.mozilla.org/api/project/mozilla-inbound/job-log-url/?job_id="+str(builds[0]["id"])
        data = utils.fetch_json(url)
        return data[0]["url"].split("/")[-2]
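
Note the contrast with Example #1: this variant omits return_type=list, so Treeherder returns each job as a dictionary and the filters can use named keys (build_system_type, job_type_symbol, platform, platform_option) instead of positional indices.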
Example #3
def main():
    cnx = mysqllib.get_connection()
    cursor = cnx.cursor()
    url = "http://www.jisilu.cn/jisiludata/newstock.php?qtype=apply"
    jo = utils.fetch_json(url)
    for row in jo['rows']:
        cell = row['cell']
        name = row['id']
        sid = cell['stock_cd']
        apply_dt = transform_date(cell['apply_dt'])
        utils.print_with_time("%s %s %s" % (sid, name, apply_dt))

        try:
            keys = ['code', 'name', 'date']
            keys = ["`" + f + "`" for f in keys]
            vals = [sid, name, apply_dt]
            vals = ["'" + f + "'" for f in vals]
            updates = [keys[i] + "=" + vals[i] for i in range(0, len(keys))]
        except Exception:
            traceback.print_exc()
            return

        sql = "INSERT INTO new_stock (%s) VALUES (%s) ON DUPLICATE KEY UPDATE %s" % (
            ', '.join(keys), ', '.join(vals), ', '.join(updates))
        #         print sql
        cursor.execute(sql)
        cnx.commit()
    cursor.close()
    cnx.close()
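
The SQL above is built by hand-quoting and concatenating strings, which breaks on values containing quotes and is open to SQL injection. A safer sketch using the driver's parameter placeholders (upsert_new_stock is a hypothetical helper; this assumes a MySQLdb/mysql-connector style cursor):

def upsert_new_stock(cursor, sid, name, apply_dt):
    # The driver escapes the parameters, so no hand-quoting is needed.
    sql = ("INSERT INTO new_stock (`code`, `name`, `date`) "
           "VALUES (%s, %s, %s) "
           "ON DUPLICATE KEY UPDATE "
           "`name` = VALUES(`name`), `date` = VALUES(`date`)")
    cursor.execute(sql, (sid, name, apply_dt))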
Example #4
    def _task_id(cls, repo_name, product, platform, buildtype, revision=None):
        '''Return taskId for specific configuration.'''
        assert buildtype in cls.BUILDTYPES
        assert platform in cls.PLATFORMS
        assert product in cls.PRODUCTS

        if revision:
            url = '{}.{}.revision.{}.{}.{}-{}'.format(
                cls._index_url,
                repo_name,
                revision,
                product,
                platform,
                buildtype)
        else:
            if buildtype == 'nightly':
                # Unsigned nightly builds use '-opt' in their name
                platform = platform + '-opt'
                url = '{}.{}.{}.latest.{}.{}'.format(
                    cls._index_url,
                    repo_name,
                    buildtype,
                    product,
                    platform)
            else:
                url = '{}.{}.latest.{}.{}-{}'.format(
                    cls._index_url,
                    repo_name,
                    product,
                    platform,
                    buildtype)

        return utils.fetch_json(url)['taskId']
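
For illustration, a hypothetical call (the owning class name and the exact contents of BUILDTYPES, PLATFORMS and PRODUCTS are not shown in this snippet, so every value below is an assumption):

task_id = BuildIndex._task_id(
    repo_name='mozilla-central',
    product='firefox',
    platform='linux64',
    buildtype='opt')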
Example #5
def extract_stock(code):
    fields = {
              'code' : 'stockcode',
              'name' : 'stockname',
              'fieldcode' : 'fieldcode',
              'fieldname' : 'fieldname',
              'fieldjp' : 'fieldjp',
              'syl' : 'syl',
              'xj' : 'xj',
              }
    cnx = mysqllib.get_connection()
    cursor = cnx.cursor()
    url = "http://stockpage.10jqka.com.cn/spService/%s/Header/realHeader"%(code)
    jo = utils.fetch_json(url)
    if jo is not None:
        try:
            keys = list(fields.keys())
            vals = ["'"+ (jo[fields[k]] or '')+"'" for k in keys]
            updates = [keys[i]+"="+vals[i] for i in range(0, len(keys))]
        except Exception:
            utils.print_with_time("url=%s"%(url))
            traceback.print_exc()
            return
        
        sql = "INSERT INTO stock (%s) VALUES (%s) ON DUPLICATE KEY UPDATE %s"%(', '.join(keys), ', '.join(vals), ', '.join(updates))
#         print sql
        cursor.execute(sql)
        cnx.commit()
    cursor.close()
    cnx.close()
Example #6
    def __init__(self, searchterm):
        pokemondata = fetch_json(POKEAPI + searchterm)

        self.name = pokemondata["name"]
        # PokeAPI reports weight in hectograms; divide by 10 for kilograms
        self.weight = pokemondata["weight"] / 10

        types = [
            i["type"]["name"]
            for i in sorted(pokemondata["types"], key=lambda x: x["slot"])
        ]
        self.primarytype = types[0]
        self.secondarytype = types[1] if len(types) > 1 else None

        stats = {
            i["stat"]["name"]: i["base_stat"]
            for i in pokemondata["stats"]
        }
        self.hp = stats["hp"]
        self.defense = stats["defense"]
        self.attack = stats["attack"]
        self.spattack = stats["special-attack"]
        self.spdefense = stats["special-defense"]
        self.speed = stats["speed"]

        self.typeprofile = TypeProfile(self.primarytype, self.secondarytype)
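
A hypothetical usage, assuming the class is named Pokemon and POKEAPI points at https://pokeapi.co/api/v2/pokemon/:

pikachu = Pokemon("pikachu")
print(pikachu.name, pikachu.hp, pikachu.speed)
print(pikachu.primarytype, pikachu.secondarytype)  # electric, None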
Example #7
def __search_motifs(catalog, keywords='', count=50):
    url = ('https://www.motifinvesting.com/data/search?catalog={0}&offset=0&size=200&keywords={1}'\
        '&featured=true&volatility=&valuation=&dividendyield=&oneyear=&thirtyday=&sort=thirtyday&change='\
        '&order=desc&thirdParty=false&hot=true&dynamicCategories=&popularity=&stockCount=5-30&stockPrice=5-'\
        '&purchased=true&imageAvailable=true&excludeETFs=true').format(catalog, keywords)
    #print(url)
    json = fetch_json(url, 60 * 60 * 24)
    return [motif['urlName'] for motif in json['motifs']]
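
The second argument here (60 * 60 * 24) appears to be a cache lifetime in seconds, i.e. one day; Example #19 passes 60 * 60 * 12 (twelve hours) the same way. The helper's signature is not shown on this page, so treat that reading as an inference.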
Example #8
def main():
    for page in range(1, 57):
        url = "http://q.10jqka.com.cn/interface/stock/fl/zdf/desc/%d/hsa/quote" % (
            page)
        utils.print_with_time(url)
        jo = utils.fetch_json(url)
        time.sleep(settings.sleepTime)
        for st in jo['data']:
            #             print st['stockcode']
            extractor.extract_code(st['stockcode'])
Example #9
    def __init__(self, primarytype, secondarytype):
        self.type_modifiers = {}

        typedata = fetch_json(TYPEAPI + primarytype)
        primary_damage_0 = [
            i['name'] for i in typedata['damage_relations']['no_damage_from']
        ]
        primary_damage_50 = [
            i['name'] for i in typedata['damage_relations']['half_damage_from']
        ]
        primary_damage_200 = [
            i['name']
            for i in typedata['damage_relations']['double_damage_from']
        ]

        if secondarytype:
            secondarytypedata = fetch_json(TYPEAPI + secondarytype)
            secondary_damage_0 = [
                i['name'] for i in secondarytypedata['damage_relations']
                ['no_damage_from']
            ]
            secondary_damage_50 = [
                i['name'] for i in secondarytypedata['damage_relations']
                ['half_damage_from']
            ]
            secondary_damage_200 = [
                i['name'] for i in secondarytypedata['damage_relations']
                ['double_damage_from']
            ]
        else:
            secondary_damage_0 = []
            secondary_damage_50 = []
            secondary_damage_200 = []

        for t in primary_damage_0 + secondary_damage_0:
            self.type_modifiers[t] = self.type_modifiers.get(t, 1) * 0

        for t in primary_damage_50 + secondary_damage_50:
            self.type_modifiers[t] = self.type_modifiers.get(t, 1) * 0.5

        for t in primary_damage_200 + secondary_damage_200:
            self.type_modifiers[t] = self.type_modifiers.get(t, 1) * 2
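
A hypothetical usage: when both defensive types take double damage from the same attacking type, the factors multiply to 4; types that appear in no damage-relation list never enter type_modifiers and so default to 1:

profile = TypeProfile("water", "flying")
print(profile.type_modifiers.get("electric", 1))  # 4 (2 * 2)
print(profile.type_modifiers.get("normal", 1))    # 1 (no relation)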
Example #10
    def retrieve_info(self):
        infoname = self.get_info_filename()

        raw_info = utils.fetch_json(self.url + infoname)

        info = {}
        info["revision"] = raw_info["moz_source_stamp"]
        info["engine_type"] = "firefox"
        info["shell"] = False
        info["binary"] = os.path.abspath(self.get_binary())
        info["folder"] = os.path.abspath(self.folder)

        return info
Example #11
    def urlForRevision(self, cset):
        # Take a detour through Treeherder to find the build_id
        # corresponding to a revision.
        url = "https://treeherder.mozilla.org/api/project/"+self.repo+"/resultset/?full=false&revision="+cset
        data = utils.fetch_json(url)

        # No corresponding build found given revision
        if len(data["results"]) != 1:
            return None

        # The revision was not pushed separately: it is not the top commit
        # of a group of pushes made at the same time.
        if not data["results"][0]["revision"].startswith(cset):
            return None

        build_id = self._build_id(data["results"][0]["id"])
        return [self._url()+str(build_id)+"/", self._archive_url()+str(build_id)+"/"]
Example #12
    def urlForRevision(self, cset):
        # Take a detour through Treeherder to find the build_id
        # corresponding to a revision.
        url = "https://treeherder.mozilla.org/api/project/"+self.repo+"/resultset/?full=false&revision="+cset
        data = utils.fetch_json(url)

        # No corresponding build found given revision
        if len(data["results"]) != 1:
            return None

        # The revision was not pushed separately: it is not the top commit
        # of a group of pushes made at the same time.
        if data["results"][0]["revision"] != cset:
            return None

        build_id = self._build_id(data["results"][0]["id"])
        return [self._url()+str(build_id)+"/", self._archive_url()+str(build_id)+"/"]
Example #13
def main():
    today = datetime.datetime.today()
    ts = today.strftime("%Y-%m-%d")
    #     ts = "2019-08-21"
    stocks = []
    url = "https://www.jisilu.cn/data/new_stock/apply/?___jsl=LST___t"
    jo = utils.fetch_json(url)
    for row in jo['rows']:
        cell = row['cell']
        name = row['id']
        sid = cell['stock_cd']
        apply_dt = transform_date(cell['apply_dt'])
        if apply_dt == ts:
            stocks.append("%s-%s" % (sid, name))

    if len(stocks) > 0:
        ns = "申购新股:%s" % (' '.join(stocks))
        send_mail("申购新股", ns)
        utils.print_with_time(ns)
    utils.print_with_time("Done")
Example #14
def extract_year(code):
    indexs = {
              1 : 'jbmgsy',
              7 : 'mgjzc',
              13 : 'mgxjl',
              }
    cnx = mysqllib.get_connection()
    cursor = cnx.cursor()
    url = "http://stockpage.10jqka.com.cn/basic/%s/main.txt"%(code)
    jo = utils.fetch_json(url)
#     print data; return
    if jo is not None:
        dmap = {}
        for index in indexs.keys():
            title = jo['title'][index]
            title = '_'.join(title)
            years = jo['year'][0]
            values = jo['year'][index]
            for y in range(0, len(years)):
                year = years[y]
                value = values[y]
                if year not in dmap:
                    dmap[year] = {}
                dmap[year][indexs[index]] = value
    #             print '%s\t%s\t%s'%(title, year, value)
        for year, ydata in dmap.items():
            fields = indexs.values()
            values = [ydata[f] or '0' for f in fields]
            updates = [fields[i]+"="+values[i] for i in range(0, len(fields))]
    #         print fields
    #         print values
            sql = "INSERT INTO stock_year (code, year, %s) VALUES ('%s', %s, %s) ON DUPLICATE KEY UPDATE %s"%(', '.join(fields), code, year, ', '.join(values), ', '.join(updates))
    #         print sql
            cursor.execute(sql)
        cnx.commit()
    cursor.close()
    cnx.close()
Example #15
def main():
    global notify_rate
    today = datetime.datetime.today()
    ts = today.strftime("%Y-%m-%d")
    #     ts = "2019-04-19"
    stocks = []
    url = "https://www.jisilu.cn/data/cbnew/pre_list/?___jsl=LST___t"
    jo = utils.fetch_json(url)
    for row in jo['rows']:
        cell = row['cell']
        apply_dt = cell['apply_date']
        pma_rt = 100
        if 'pma_rt' in cell and cell['pma_rt'] is not None:
            pma_rt = float(cell['pma_rt'])
        if (apply_dt == ts and pma_rt >= notify_rate
                and cell['cb_type'] == u'可转债'):  # '可转债' = convertible bond
            stocks.append("%s-%s-%.2f%%" %
                          (cell['bond_nm'], cell['apply_cd'], pma_rt))

    if len(stocks) > 0:
        ns = "申购可转债:%s" % (' '.join(stocks))
        send_mail("申购可转债", ns)
        utils.print_with_time(ns)
    utils.print_with_time("Done")
Example #16
    def _task_id(cls, repo_name, product, platform, buildtype, revision=None):
        '''Return taskId for specific configuration.'''
        assert buildtype in cls.BUILDTYPES
        assert platform in cls.PLATFORMS
        assert product in cls.PRODUCTS

        if revision:
            url = '{}.{}.revision.{}.{}.{}-{}'.format(cls._index_url,
                                                      repo_name, revision,
                                                      product, platform,
                                                      buildtype)
        else:
            if buildtype == 'nightly':
                # Unsigned nightly builds use '-opt' in their name
                platform = platform + '-opt'
                url = '{}.{}.{}.latest.{}.{}'.format(cls._index_url, repo_name,
                                                     buildtype, product,
                                                     platform)
            else:
                url = '{}.{}.latest.{}.{}-{}'.format(cls._index_url, repo_name,
                                                     product, platform,
                                                     buildtype)

        return utils.fetch_json(url)['taskId']
Example #17
def compare(test, buildername, revision, previous_revision):
    "This function will compare between 2 given revisions and return result as percentage"
    repo_name = query_repo_name_from_buildername(buildername)
    # Using TWO_WEEKS as interval, may change it afterwards
    signature_request_url = SIGNATURE_URL % (repo_name, TWO_WEEKS)
    signatures = fetch_json(signature_request_url)
    options_collection_hash_list = fetch_json(OPTION_COLLECTION_HASH)

    for signature, value in signatures.iteritems():
        # Skip subtests; they are identified by a 'test' key in the dictionary.
        if 'test' in value:
            continue

        # Ignoring e10s here.
        # TODO: Revisit this later
        if TBPL_TESTS[test]['testname'].lower() == value['suite'].lower() and \
           TREEHERDER_PLATFORM[value["machine_platform"]] in buildername and \
           'test_options' not in value:
            test_signature = signature
        else:
            continue

        hash_signature = value['option_collection_hash']
        for key in options_collection_hash_list:
            if hash_signature == key["option_collection_hash"]:
                typeOfTest = key["options"][0]["name"]
                break

        if typeOfTest == 'pgo' and typeOfTest not in buildername:
            # pgo results only match builders with 'pgo' in the name
            continue
        elif typeOfTest == 'opt':
            # opt builders carry no marker in the buildername
            break
        else:
            # Talos runs only on pgo and opt builds, so skip anything else
            continue

    # Using TWO_WEEKS as interval, may change it afterwards
    req = fetch_json(PERFORMANCE_DATA % (repo_name, TWO_WEEKS, test_signature))
    performance_data = req[test_signature]
    treeherder_client = TreeherderClient()
    revision_resultset_id = treeherder_client.get_resultsets(repo_name, revision=revision)[0]["id"]
    previous_revision_resultset_id = treeherder_client.get_resultsets(repo_name, revision=previous_revision)[0]["id"]
    revision_perfdata = []
    previous_revision_perfdata = []

    for data in performance_data:
        if data["result_set_id"] == revision_resultset_id:
            revision_perfdata.append(data["value"])
        elif data["result_set_id"] == previous_revision_resultset_id:
            previous_revision_perfdata.append(data["value"])

    if revision_perfdata and previous_revision_perfdata:
        mean_revision_perfdata = sum(revision_perfdata) / float(len(revision_perfdata))
        mean_previous_revision_perfdata = sum(previous_revision_perfdata) / float(len(previous_revision_perfdata))
    else:
        print "previous_revision_perfdata: %s" % previous_revision_perfdata
        print "revision_perfdata: %s" % revision_perfdata
        return 0

    if test in REVERSE_TESTS:
        # lower value results in regression
        return (mean_revision_perfdata - mean_previous_revision_perfdata) * 100.0 / mean_previous_revision_perfdata
    else:
        # higher value results in regression
        return (mean_previous_revision_perfdata - mean_revision_perfdata) * 100.0 / mean_previous_revision_perfdata
Example #18
def chooseGameVideoMenu():
    video_id = vars.params.get("video_id")
    video_type = vars.params.get("video_type")
    seo_name = vars.params.get("seo_name")
    has_away_feed = vars.params.get("has_away_feed", "0") == "1"
    has_condensed_game = vars.params.get("has_condensed_game", "0") == "1"
    start_time = vars.params.get("start_time")
    duration = vars.params.get("duration")
    game_data_json = utils.fetch_json(vars.config['game_data_endpoint'] % seo_name)
    game_state = game_data_json['gameState']
    game_home_team = vars.params.get("home_team")
    game_visitor_team = vars.params.get("visitor_team")
    game_cameras = []
    if 'multiCameras' in game_data_json:
        game_cameras = game_data_json['multiCameras'].split(",")

    nba_config = utils.fetch_json(vars.config['config_endpoint'])
    nba_cameras = {}
    for camera in nba_config['content']['cameras']:
        nba_cameras[camera['number']] = camera['name']

    if has_away_feed:
        # Create the "Home" and "Away" list items
        for ishomefeed in [True, False]:
            listitemname = "Full game, " + ("away feed" if not ishomefeed else "home feed")

            # Show actual team names instead of 'home feed' and 'away feed'
            if game_home_team and game_visitor_team:
                if ishomefeed:
                    listitemname += " (" + game_home_team + ")"
                else:
                    listitemname += " (" + game_visitor_team + ")"

            params = {
                'video_id': video_id,
                'video_type': video_type,
                'video_ishomefeed': 1 if ishomefeed else 0,
                'game_state': game_state,
                'start_time': start_time,
                'duration': duration,
            }
            common.addListItem(listitemname, url="", mode="playgame", iconimage="", customparams=params)
    else:
        #Add a "Home" list item
        params = {
            'video_id': video_id,
            'video_type': video_type,
            'game_state': game_state,
            'start_time': start_time,
            'duration': duration,
        }
        common.addListItem("Full game", url="", mode="playgame", iconimage="", customparams=params)

    if vars.show_cameras:
        utils.log(nba_cameras, xbmc.LOGDEBUG)
        utils.log(game_cameras, xbmc.LOGDEBUG)

        # Add all the cameras available
        for camera_number in game_cameras:
            camera_number = int(camera_number)

            # Skip camera number 0 (broadcast?) - the full game links are the same
            if camera_number == 0:
                continue

            params = {
                'video_id': video_id,
                'video_type': video_type,
                'game_state': game_state,
                'camera_number': camera_number,
                'start_time': start_time,
                'duration': duration,
            }

            name = "Camera %d: %s" % (camera_number, nba_cameras.get(camera_number, 'Unknown'))
            common.addListItem(name, url="", mode="playgame", iconimage="", customparams=params)

    # Live games have no condensed or highlight link
    if video_type != "live":
        # Create the "Condensed" list item
        if has_condensed_game:
            params = {
                'video_id': video_id,
                'video_type': 'condensed',
                'game_state': game_state
            }
            common.addListItem("Condensed game", url="", mode="playgame", iconimage="", customparams=params)

        # Get the highlights video if available
        highlights_url = getHighlightGameUrl(video_id)
        if highlights_url:
            common.addVideoListItem("Highlights", highlights_url, iconimage="")

    xbmcplugin.endOfDirectory(handle=int(sys.argv[1]))
Example #19
def get_stock_data(symbol):
    url = 'https://www.motifinvesting.com/data/stocks/{0}/info?_='.format(symbol.upper())
    #print(url)
    json = fetch_json(url, 60 * 60 * 12)
    return Stock(json)