import json
import os

import requests

# EVENT_INFO_URL, COMMON_HEADERS, logger, and the *_MAPPING/*_RULES tables
# are assumed to be defined at module level alongside these helpers.


def get_event_info(cloudfare_cookie,
                   event_id,
                   save_source=False,
                   output_dir=".",
                   race_type="HORSE_RACING"):
    event_info = requests.get(url=EVENT_INFO_URL.format(event_ids=event_id),
                              # dict.update() returns None, so merge the cookie
                              # into a copy instead of passing update()'s result
                              headers={**COMMON_HEADERS,
                                       '__cfduid': cloudfare_cookie}).json()
    if save_source:
        filename = os.path.join(output_dir, f"{event_id}-event.json")
        with open(filename, "w") as debugfile:
            json.dump(event_info, debugfile, indent=4)
    event_info = event_info['data']['events'][0]
    mapped_event_info = map_data(event_info,
                                 mapping=UPCOMING_RACE_DATA_MAPPING)
    mapped_event_info['type'] = race_type
    format_data(mapped_event_info, UPCOMING_RACE_FORMAT_RULES)
    mapped_event_info['group'] = []
    for runner in event_info['race']['runners']:
        runner_info = map_data(runner,
                               mapping=UPCOMING_RACE_GROUP_MAPPINGS[race_type])
        win_odds = get_odds(event_info, runner["name"], price_type="LP")
        runner_info["indicative odds"] = win_odds
        tote_odds = get_odds(event_info, runner["name"], price_type="WIN_POOL")
        runner_info["tote_odds"] = tote_odds
        format_data(runner_info, UPCOMING_RACE_GROUP_FORMAT_RULES)
        if str(runner_info["scratched"]).lower() != "true":
            mapped_event_info['group'].append(runner_info)
    mapped_event_info['group'] = sorted(mapped_event_info['group'],
                                        key=lambda x: x['position'])
    return mapped_event_info
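Note the header handling above: dict.update() mutates in place and returns None, so the cookie has to be merged into a copy of COMMON_HEADERS rather than passed as the result of update(). A minimal, self-contained sketch of that merge (the header values here are made up):

COMMON_HEADERS = {"User-Agent": "example-scraper/1.0"}  # hypothetical defaults

def build_headers(cloudfare_cookie):
    # {**a, **b} copies COMMON_HEADERS and adds the cookie without
    # mutating the shared module-level dict
    return {**COMMON_HEADERS, "__cfduid": cloudfare_cookie}

assert build_headers("abc")["__cfduid"] == "abc"
assert "__cfduid" not in COMMON_HEADERS  # defaults stay untouched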
Example #2
def get_resulted_event(cloudfare_cookie, event_id, save_source=False, output_dir=".", race_type="HORSE_RACING"):
    event_info = requests.get(url=RESULTED_EVENT_URL.format(event_ids=event_id),
                              headers={**COMMON_HEADERS,
                                       '__cfduid': cloudfare_cookie}).json()
    logger.debug(json.dumps(event_info, indent=4))
    if save_source:
        filename = os.path.join(output_dir, f"{event_id}-results.json")
        with open(filename, "w") as debugfile:
            json.dump(event_info, debugfile, indent=4)
    event_info = event_info['data']['eventResults'][0]
    dereference_outcomes(event_info)
    mapped_event_info = map_data(event_info, mapping=RESULTED_RACE_DATA_MAPPING)
    mapped_event_info['type'] = race_type
    format_data(mapped_event_info, RESULTED_RACE_FORMATTING_RULES)
    mapped_event_info['groups'] = [{"name": "prizes", "records": get_top_positions(event_info)},
                                   {"name": "exotics", "records": get_exotics(event_info)}]
    return mapped_event_info
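A usage sketch for the shape returned above; the cookie and event id are placeholders, and the call still needs the module's URL and mapping constants to be importable:

result = get_resulted_event("cookie-value", 1234567)  # hypothetical arguments
groups = {g["name"]: g["records"] for g in result["groups"]}
prizes, exotics = groups["prizes"], groups["exotics"]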
Example #3
def get_top_positions(event_info):
    try:
        final_positions = event_info['result']['finalPositions']
    except KeyError:
        logger.error("Event data is missing 'result' or 'finalPositions'")
        return []
    records = []
    for i, position in enumerate(sorted(final_positions,
                                        key=lambda x: x["position"])):
        record = format_data(map_data(position, RESULTED_RACE_PRIZES_DATA_MAPPING),
                             RESULTED_RACE_PRIZES_FORMATTING_RULES)
        # merge the prize money for this finishing position into the record
        record.update(get_prize(event_info, i + 1))
        records.append(record)
    return records
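For reference, a hypothetical finalPositions payload in the shape the sort expects; only the position field is read here, and the other keys are made up:

final_positions = [
    {"position": 2, "name": "Runner B"},  # hypothetical fields
    {"position": 1, "name": "Runner A"},
    {"position": 3, "name": "Runner C"},
]
ordered = sorted(final_positions, key=lambda x: x["position"])
assert [x["position"] for x in ordered] == [1, 2, 3]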
Example #4
def get_exotics(event_info):
    records = []
    exotic_pools = [x for x in event_info["pools"]
                    if x["type"] not in ("PLC", "WIN")]
    for pool in exotic_pools:
        dividends = pool.get("dividends", [])
        if dividends:
            dividend = dividends[0]
            record = map_data(dividend, RESULTED_RACE_EXOTIC_POOL_DIVIDEND_DATA_MAPPING)
            record = format_data(record, RESULTED_RACE_EXOTIC_POOL_DIVIDEND_FORMATTING_RULES)
            records.append(record)
    return records
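For reference, a hypothetical pools payload shaped the way get_exotics() reads it; only the type and dividends fields matter, and the pool codes and amounts below are illustrative:

pools = [
    {"type": "WIN", "dividends": [{"amount": 3.2}]},   # filtered out
    {"type": "PLC", "dividends": [{"amount": 1.5}]},   # filtered out
    {"type": "QIN", "dividends": [{"amount": 42.8}]},  # exotic, first dividend used
    {"type": "TRI", "dividends": []},                  # exotic, but nothing to map
]
exotic = [p for p in pools if p["type"] not in ("PLC", "WIN")]
assert [p["type"] for p in exotic] == ["QIN", "TRI"]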
Example #5
def save(self, key, data, timestamp=None, comment=None, machine_comment=None, ip=None, author=None, transaction_id=None):
    timestamp = timestamp or datetime.datetime.utcnow()
    t = self.db.transaction()

    metadata = self.get_metadata(key)

    try:
        typekey = data['type']
        type_id = self._key2id(typekey)

        if metadata:  # already existing object
            revision = None
            thing_id = metadata.id
            olddata = simplejson.loads(self.get(key))
            created = metadata.created
            action = "update"
        else:
            revision = 1
            thing_id = self.new_thing(key=key, type=type_id, latest_revision=1, last_modified=timestamp, created=timestamp)
            olddata = {}
            created = timestamp
            action = "create"

        if transaction_id is None:
            transaction_id = self._add_transaction(action=action, author=author, ip=ip, comment=comment, created=timestamp)
        revision = self._add_version(thing_id=thing_id, revision=revision, transaction_id=transaction_id, created=timestamp)

        self._update_tables(thing_id, key, olddata, dict(data))  #@@ why making copy of data?

        data['created'] = created
        data['revision'] = revision
        data['last_modified'] = timestamp
        data['key'] = key
        data['id'] = thing_id
        data['latest_revision'] = revision

        data = common.format_data(data)

        self.db.update('thing', where='id=$thing_id', last_modified=timestamp, latest_revision=revision, type=type_id, vars=locals())
        self.db.insert('data', seqname=False, thing_id=thing_id, revision=revision, data=simplejson.dumps(data))
    except:
        t.rollback()
        self.cache.clear(local=True)
        raise
    else:
        t.commit()

    web.ctx.new_objects[key] = simplejson.dumps(data)
    return {'key': key, 'revision': revision}
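The try/except/else around the transaction is the standard web.py pattern: roll back (and drop the local cache) on any failure, commit only if the whole block ran clean. A stripped-down, self-contained sketch of the same pattern, assuming a web.py database handle and a hypothetical table:

import web

db = web.database(dbn="sqlite", db="example.db")  # hypothetical connection
t = db.transaction()
try:
    db.insert("thing", seqname=False, key="/example", latest_revision=1)  # hypothetical table/columns
except:
    t.rollback()  # undo everything written since transaction() started
    raise
else:
    t.commit()    # persist only when no exception escaped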
Example #6
def save_many(self, docs, timestamp, comment, data, ip, author, action=None):
    action = action or "bulk_update"
    s = SaveImpl(self.db, self.schema, self.indexer, self.property_manager)

    # Hack to allow processing of json before using. Required for OL legacy.
    s.process_json = process_json

    docs = common.format_data(docs)
    changeset = s.save(docs, timestamp=timestamp, comment=comment, ip=ip, author=author, action=action, data=data)

    # Update the cache from the docs in the result, as they contain the
    # updated revision and last_modified fields.
    for doc in changeset.get('docs', []):
        web.ctx.new_objects[doc['key']] = simplejson.dumps(doc)

    return changeset
Example #7
def format_data(self):
    import common
    return common.format_data(self._get_data())
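A function-local import like this typically defers loading until call time, which is a common way to dodge a circular dependency between modules. A minimal sketch of the deferred-import pattern, with hypothetical module names:

# a.py (hypothetical)
def use_b():
    import b      # deferred until call time, so b may import a freely
    return b.helper()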