Example #1
def parse_replay(replay_player_path, sampled_action_path, reward):
    if os.path.isfile(os.path.join(FLAGS.parsed_replay_path, 'GlobalFeatures', replay_player_path)):
        return

    # Global Info
    with open(os.path.join(FLAGS.parsed_replay_path, 'GlobalInfos', replay_player_path)) as f:
        global_info = json.load(f)
    units_info = static_data.StaticData(Parse(global_info['data_raw'], sc_pb.ResponseData())).units
    feat = features.Features(Parse(global_info['game_info'], sc_pb.ResponseGameInfo()))

    # Sampled Actions
    with open(sampled_action_path) as f:
        sampled_action = json.load(f)
    sampled_action_id = [id // FLAGS.step_mul + 1 for id in sampled_action]

    # Actions
    with open(os.path.join(FLAGS.parsed_replay_path, 'Actions', replay_player_path)) as f:
        actions = json.load(f)
    actions = [None if len(actions[idx]) == 0 else Parse(actions[idx][0], sc_pb.Action())
                for idx in sampled_action_id]

    # Observations
    observations = list(stream.parse(
        os.path.join(FLAGS.parsed_replay_path, 'SampledObservations', replay_player_path),
        sc_pb.ResponseObservation))

    assert len(sampled_action) == len(sampled_action_id) == len(actions) == len(observations)

    states = process_replay(sampled_action, actions, observations, feat, units_info, reward)

    with open(os.path.join(FLAGS.parsed_replay_path, 'GlobalFeatures', replay_player_path), 'w') as f:
        json.dump(states, f)
Example #2
def parse_replay(replay_player_path, sampled_frame_path, reward):
    if os.path.isfile(os.path.join(FLAGS.parsed_replay_path, 'GlobalFeatures', replay_player_path)):
        return

    # Global Info
    with open(os.path.join(FLAGS.parsed_replay_path, 'GlobalInfos', replay_player_path)) as f:
        global_info = json.load(f)
    units_info = static_data.StaticData(Parse(global_info['data_raw'], sc_pb.ResponseData())).units
    feat = features.features_from_game_info(Parse(global_info['game_info'], sc_pb.ResponseGameInfo()))

    # Sampled Frames
    with open(sampled_frame_path) as f:
        sampled_frames = json.load(f)
    sampled_actions_idx = [frame // FLAGS.step_mul - 1 for frame in sampled_frames] # Create index to retrieve actions corresponding to sampled frames

    # Actions
    with open(os.path.join(FLAGS.parsed_replay_path, 'Actions', replay_player_path)) as f:
        actions = json.load(f)
    sampled_actions = [None if len(actions[idx]) == 0 else Parse(actions[idx][0], sc_pb.Action()) 
                for idx in sampled_actions_idx] # Get first action executed after each sampled frame

    # Observations
    observations = list(stream.parse(
        os.path.join(FLAGS.parsed_replay_path, 'SampledObservations', replay_player_path),
        sc_pb.ResponseObservation))

    assert len(sampled_frames) == len(sampled_actions_idx) == len(sampled_actions) == len(observations)

    states = process_replay(sampled_frames, sampled_actions, observations, feat, units_info, reward)

    with open(os.path.join(FLAGS.parsed_replay_path, 'GlobalFeatures', replay_player_path), 'w') as f:
        json.dump(states, f)
Example #3
def get_trials(cnx, trial_id, study_id):
    cursor = cnx.cursor()
    # Use parameterized queries instead of string interpolation (avoids SQL injection)
    if trial_id != "":
        cursor.execute("SELECT * FROM trials WHERE id = %s", (trial_id,))
    else:
        cursor.execute("SELECT * FROM trials WHERE study_id = %s", (study_id,))

    ret = []
    for (id, study_id, parameters, status, objective_value, tags) in cursor:
        params = parameters.split("&\n")
        param_list = []
        for param in params:
            temp = api_pb2.Parameter()
            param_list.append(Parse(param, temp))

        tags = tags.split("&\n")
        tag_list = []
        for tag in tags:
            if tag != "":
                temp = api_pb2.Tag()
                tag_list.append(Parse(tag, temp))

        ret.append(
            api_pb2.Trial(
                trial_id=id,
                study_id=study_id,
                parameter_set=param_list,
                status=status,
                objective_value=objective_value,
                tags=tag_list,
            ))

    cursor.close()
    return ret
Example #4
def _parse_response(response, clazz, is_list=False, resource_name=None):
    """Parse a Marathon response into an object or list of objects."""
    target = response.json()[resource_name] if resource_name else response.json()
    if is_list:
        return [Parse(json.dumps(resource), clazz()) for resource in target]
    else:
        return Parse(json.dumps(target), clazz())
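A hypothetical usage sketch (names invented for illustration, assuming a `requests`-style response and a generated Marathon message class supplied as `clazz`):

# Hypothetical: response.json() returns {"apps": [...]}, and MarathonApp is a
# generated protobuf class supplied by the caller.
apps = _parse_response(response, MarathonApp, is_list=True, resource_name='apps')
one_app = _parse_response(response, MarathonApp)  # whole body as a single message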
Example #5
def schema_mapping(event_data):
    """
    The idea behind this function, is to quickly check if the json provided is
    correct and can be validated through a conversion into a Protocol Buffer.
    The event data is  initially sent as a default protobuf in case there is an error.
    The default Protobuf data should be something that does not affect the behavior or the KPIs in the long term.
    For the sake of simplicity I will use one of the events shared in the challenge description.
    :param event_data: Result of the Decode process, event bytes string (It's converted to this on the first stage
    of the Beam pipeline).
    :return: Protobuf ready to be parsed and written into BigQuery (Or stored
    on Google Cloud Storage in case of an error).
    """
    default_data = {
        "id": "AED96FC7-19F1-46AB-B79F-D412117119BD",
        "received_at": "2018-02-03 18:28:12.378000",
        "anonymous_id": "8E0302A3-2184-4592-851D-B93C32E410AB",
        "context_device_manufacturer": "Apple",
        "context_device_model": "iPhone8,4",
        "context_device_type": "ios",
        "context_library_name": "analytics-ios",
        "context_library_version": "3.6.7",
        "context_locale": "de-DE",
        "context_network_wifi": "true",
        "context_os_name": "iOS",
        "event": "registration_initiated",
        "event_text": "registrationInitiated",
        "original_timestamp": "2018 - 02 - 03T19: 28: 06.291 + 0100",
        "sent_at": "2018 - 02 - 0318: 28: 12.000000",
        "timestamp": "2018 - 02 - 0318: 28: 06.561000",
        "context_network_carrier": "o2 - de",
        "context_traits_taxfix_language": "de-DE"
    }
    tracking_event_pb = pb.TaxfixEvent()
    event_protobuf = Parse(json.dumps(default_data), tracking_event_pb)  # Parse expects a JSON string, not a dict

    try:
        event_dict = json.loads(event_data)  # Validate that the payload is JSON
        event_protobuf = Parse(json.dumps(event_dict), tracking_event_pb)
    except AttributeError as e:
        send_event_data_to_storage(event_data, e)
        raise ProtobufConversionError(
            'Failed to convert to PB attribute: {0}.'.format(e))
    except TypeError as e:
        send_event_data_to_storage(event_data, e)
        raise ProtobufConversionError(
            'Failed to convert to PB type: {0}.'.format(e))
    except ValueError as e:
        send_event_data_to_storage(event_data, e)
        raise ProtobufConversionError(
            'Failed to convert to JSON: {0}.'.format(e))
    return event_protobuf
Example #6
def main():
    parser = argparse.ArgumentParser(description="Locust: Python parsing functionality")
    populate_argument_parser(parser)
    parser.add_argument(
        "-i",
        "--input",
        type=argparse.FileType("r"),
        default=sys.stdin,
        help="Path to git result. If not specified, reads from stdin.",
    )
    parser.add_argument(
        "-o",
        "--output",
        type=argparse.FileType("w"),
        default=sys.stdout,
        help="Path to write parse results to (in JSON format)",
    )

    args = parser.parse_args()

    with args.input as ifp:
        git_result = Parse(ifp.read(), git.GitResult())

    result = run(git_result, args.plugins)

    try:
        with args.output as ofp:
            print(
                json.dumps(MessageToDict(result, preserving_proto_field_name=True)),
                file=ofp,
            )
    except BrokenPipeError:
        pass
Example #7
def test_binary_compatibility(repeat, test_data: TestData) -> None:
    plugin_module, reference_module, json_data = test_data

    for sample in json_data:
        reference_instance = Parse(sample.json, reference_module().Test())
        reference_binary_output = reference_instance.SerializeToString()

        for _ in range(repeat):
            plugin_instance_from_json: betterproto.Message = (
                plugin_module.Test().from_json(sample.json)
            )
            plugin_instance_from_binary = plugin_module.Test.FromString(
                reference_binary_output
            )

            # Generally this can't be relied on, but here we are aiming to match the
            # existing Python implementation and aren't doing anything tricky.
            # https://developers.google.com/protocol-buffers/docs/encoding#implications
            assert bytes(plugin_instance_from_json) == reference_binary_output
            assert bytes(plugin_instance_from_binary) == reference_binary_output

            assert plugin_instance_from_json == plugin_instance_from_binary
            assert dict_replace_nans(
                plugin_instance_from_json.to_dict()
            ) == dict_replace_nans(plugin_instance_from_binary.to_dict())
Example #8
def createPipeline(port=None,
                   session=None,
                   data=None,
                   predictor=None,
                   response=None,
                   task_type=None,
                   task_subtype=None,
                   output_type=None,
                   metric=None):
    stub = get_stub(int(port))

    data_uri = 'file://%s' % (data)

    predictor = json.loads(predictor)
    response = json.loads(response)

    resp = stub.CreatePipelines(
        cpb.PipelineCreateRequest(
            context=Parse(session, cpb.SessionContext()),
            train_features=[
                cpb.Feature(feature_id=pred, data_uri=data_uri)
                for pred in predictor
            ],
            target_features=[
                cpb.Feature(feature_id=targ, data_uri=data_uri)
                for targ in response
            ],
            task=cpb.TaskType.Value(task_type.upper()),
            task_subtype=cpb.TaskSubtype.Value(toConstCase(task_subtype)),
            output=cpb.OutputType.Value(toConstCase(output_type)),
            metrics=[cpb.Metric.Value(toConstCase(metric))],
            task_description='TA2 pipeline creation',
            max_pipelines=5))

    return map(lambda x: json.loads(MessageToJson(x)), resp)
Example #9
def json2proto_bin(vcd_json_file_in, vcd_proto_file_out):
    vcd_message = proto.VCD()
    with open(vcd_json_file_in, "r") as f:
        Parse(f.read(), vcd_message)

    with open(vcd_proto_file_out, "wb") as f:
        f.write(vcd_message.SerializeToString())
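A minimal sketch of the inverse conversion (not from the original source), assuming the same `proto.VCD` message; `MessageToJson` is the standard json_format counterpart to `Parse`:

from google.protobuf.json_format import MessageToJson

def proto_bin2json(vcd_proto_file_in, vcd_json_file_out):
    # Deserialize the binary protobuf, then re-serialize it as JSON text
    vcd_message = proto.VCD()
    with open(vcd_proto_file_in, "rb") as f:
        vcd_message.ParseFromString(f.read())

    with open(vcd_json_file_out, "w") as f:
        f.write(MessageToJson(vcd_message))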
Example #10
def sample_action_from_player(action_path):
    feat = features.Features(screen_size_px=(1, 1), minimap_size_px=(1, 1))
    with open(action_path) as f:
        actions = json.load(f)

    frame_id = 0
    result_frames = []
    for action_strs in actions:
        action_name = None
        for action_str in action_strs:
            action = Parse(action_str, sc_pb.Action())
            try:
                func_id = feat.reverse_action(action).function
                func_name = FUNCTIONS[func_id].name
                if func_name.split('_')[0] in {
                        'Attack', 'Scan', 'Behavior', 'BurrowUp', 'Effect',
                        'Hallucination', 'Harvest', 'Hold', 'Land', 'Lift',
                        'Load', 'Move', 'Patrol', 'Rally', 'Smart', 'TrainWarp',
                        'UnloadAll', 'UnloadAllAt', 'Build', 'Train',
                        'Research', 'Morph', 'Cancel', 'Halt', 'Stop'}:
                    action_name = func_name
                    break
            except Exception:  # reverse_action may fail on unsupported actions
                pass
        if frame_id > 0 and (action_name is not None
                             or frame_id % FLAGS.skip == 0):
            result_frames.append(frame_id - FLAGS.step_mul)

        frame_id += FLAGS.step_mul

    return result_frames
Example #11
def msg_and_back(msg_obj, CORE_OBJ):

    content = MessageToJson(msg_obj, including_default_value_fields=True)
    # print('content as JSON:\n', content)

    req = Parse(content, CORE_OBJ())
    print('\n\nJSON back to request:\n', req)
Example #12
def _load_json_doc(filename):
    """Load a disk file as JSON.

  This function reads the specified filename and parses the contents
  as JSON.

  Args:
      filename: The file whose contents are to be read as JSON data

  Returns:
      A JobControl object populated with the contents from the
      specified JSON file
  """
    contents = None
    log.debug(f"Opening JSON file {filename}")
    try:
        with open(filename, 'r') as json_doc:
            contents = Parse(json_doc.read(), JobControl())
    except FileNotFoundError as file_not_found:
        log.exception(f"Unable to load {filename}: {file_not_found}")
    except Error as json_parse_error:
        log.exception(
            f"Unable to parse JSON contents {filename}: {json_parse_error}")

    return contents
Example #13
def list_likes(query, stub, metadata):
    """ get the likes_summary object in json format """
    req = bl_pb2.LikesQuery(ref_type=bl_pb2.RefType(**query))
    res = stub.ListLikes(req, metadata=metadata)
    like_summary = bl_pb2.LikesSummary()
    for item in res:
        ## There is a protobuf issue where these classes cannot be compared:
        ## https://github.com/protocolbuffers/protobuf/issues/4928. For now we
        ## have to rebuild the Message from JSON.
        # like_summary.likes.extend([item])
        like_summary.likes.extend([Parse(MessageToJson(item), bl_pb2.Like())])
        if item.liked:
            like_summary.total += 1
        else:
            like_summary.total -= 1
    #logger.debug("LikeSummary %s", MessageToJson(like_summary))
    data = json.loads(MessageToJson(like_summary))
    if not data:
        return {
            'code': HTTPStatus.NOT_FOUND.value,
            'status': HTTPStatus.NOT_FOUND.phrase,
            'description': HTTPStatus.NOT_FOUND.description
        }
    return {
        'code': HTTPStatus.OK.value,
        'data': data.get('likes', []),
        'total_count': data.get('total', None),
        'status': HTTPStatus.OK.phrase,
        'description': HTTPStatus.OK.description
    }
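If both messages come from the same generated `bl_pb2.Like` class, a `CopyFrom`-based sketch (an alternative, not the source's workaround) avoids the JSON round-trip; the Parse/MessageToJson detour above is only needed when the classes differ:

# Assumes item is an instance of the very same bl_pb2.Like class:
# append an empty Like to the repeated field and copy the fields over.
like_summary.likes.add().CopyFrom(item)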
Example #14
def load_remote_scene_dataset_json(scene_dataset_json_url, save_dir=None):
    """Fetch scene dataset JSON from remote given its remote uri.

    Parameters
    ----------
    scene_dataset_json_url: str
        Remote scene dataset JSON URI.

    save_dir: str, default: None
        Directory to save dataset JSON.

    Returns
    -------
    dataset: dgp.proto.dataset_pb2.SceneDataset
        SceneDataset proto object.
    """
    logging.info('Fetching scene dataset {}'.format(scene_dataset_json_url))
    bucket_name, s3_base_path = convert_uri_to_bucket_path(
        scene_dataset_json_url)
    dataset_blob = get_string_from_s3_file(s3_bucket(bucket_name),
                                           s3_base_path)
    dataset = Parse(dataset_blob, SceneDataset())
    if save_dir is not None:
        dataset_dir = os.path.join(
            save_dir,
            '{name}_{version}'.format(name=dataset.metadata.name,
                                      version=dataset.metadata.version))
        save_path = os.path.join(dataset_dir, os.path.basename(s3_base_path))
        if not os.path.exists(save_path):
            os.makedirs(dataset_dir, exist_ok=True)
            save_pbobject_as_json(dataset, save_path)
    return dataset
Example #15
    def _send(self, data):
        '''
        _send(data: dict)
        Only called internally. Used to send JSON to the channel. Returns a list of protobuf objects.
        '''
        cmd = api_pb2.Command()  # Create the protobuf
        Parse(json.dumps(data), cmd)
        cmd.session = 0
        cmd.channel = self.id

        # Serialize the protobuf and send it to the websocket
        data = cmd.SerializeToString()
        self.ws.send(data)

        done = False
        got = []  # The protobufs we received
        while not done:
            res = api_pb2.Command()
            res.ParseFromString(self.ws.recv())  # Get a protobuf from the websocket
            if res.channel == self.id:  # Only respond to messages for our channel
                got.append(res)  # Store the message
                # Check if we are done getting data
                done = res.HasField('state') and res.state != 1

        if res.ok:  # The last request was ok, return what we got
            return got
        elif res.error:  # There was an error, yell at somebody
            raise PyReplError(f'Command returned error: {res.error}')
Example #16
def main(_):
    if not os.path.isdir(FLAGS.save_path):
        os.makedirs(FLAGS.save_path)
    replay_infos = glob.glob(os.path.join(FLAGS.infos_path, '*.SC2Replay'))

    run_config = run_configs.get()
    with run_config.start() as controller:
        ping = controller.ping()

    result = {}
    pbar = tqdm(total=len(replay_infos), desc='#Replay')
    for info_path in replay_infos:
        with open(info_path) as f:
            info = json.load(f)

        proto = Parse(info['info'], sc_pb.ResponseReplayInfo())
        if valid_replay(proto, ping):
            players_info = proto.player_info
            races = '_vs_'.join(sorted(sc_pb.Race.Name(player_info.player_info.race_actual)
                                       for player_info in players_info))
            if races not in result:
                result[races] = []
            result[races].append((info['path'], info_path))
        pbar.update()

    for k, v in result.items():
        with open(os.path.join(FLAGS.save_path, k+'.json'), 'w') as f:
            json.dump(v, f)
Example #17
def test_create_schedule():
    fake_response = """
{
  "id": "everyminute",
  "cron": "* * * * *",
  "concurrencyPolicy": "ALLOW",
  "enabled": true,
  "startingDeadlineSeconds": 60,
  "timezone": "America/Chicago"
}
    """
    request = fake_response
    with requests_mock.mock() as m:
        m.post('http://fake_server/v1/jobs/example/schedules',
               text=fake_response,
               status_code=201)
        mock_client = MetronomeClient(servers='http://fake_server')
        schedule = Parse(request, models.ScheduleSpec())
        actual = mock_client.create_schedule(job_id='example',
                                             schedule=schedule)
        expected = models.ScheduleSpec()
        expected.id = 'everyminute'
        expected.cron = '* * * * *'
        expected.concurrencyPolicy = expected.ALLOW
        expected.enabled = True
        expected.startingDeadlineSeconds = 60
        expected.timezone = 'America/Chicago'
        assert expected == actual
Example #18
def main():
    parser = argparse.ArgumentParser(
        description="Locust: rendering functionality")
    populate_argument_parser(parser)
    parser.add_argument(
        "-i",
        "--input",
        type=argparse.FileType("r"),
        default=sys.stdin,
        help="Path to parse result. If not specified, reads from stdin.",
    )
    parser.add_argument(
        "-o",
        "--output",
        type=argparse.FileType("w"),
        default=sys.stdout,
        help="Path to write summary to",
    )

    args = parser.parse_args()

    with args.input as ifp:
        parse_result = Parse(ifp.read(), parse.ParseResult())

    summary = run(parse_result, args.format, args.github, args.metadata)

    try:
        with args.output as ofp:
            print(summary, file=ofp)
    except BrokenPipeError:
        pass
Example #19
    def async_post(self, request):
        workspace = request.get(PAYLOAD)
        if not workspace:
            return {
                PAYLOAD: {
                    'reason':
                    'Request payload must include JSON formatted ARWorkspaceData.'
                },
                STATUS: http.BAD_REQUEST,
            }

        try:
            workspace = Parse(workspace, ARWorkspaceData())
            workspace = set_default_layout(workspace)
            return {
                PAYLOAD: {
                    'workspace':
                    MessageToDict(workspace,
                                  including_default_value_fields=True)
                },
                STATUS: http.OK
            }
        except ParseError as e:
            return {
                PAYLOAD: {
                    'reason': 'Failed to parse workspace payload.',
                    'exception': str(e),
                },
                STATUS: http.BAD_REQUEST
            }
Example #20
def sample_action_from_player(action_path):
    feat = features.Features(screen_size_px=(1, 1), minimap_size_px=(1, 1))
    with open(action_path) as f:
        actions = json.load(f)

    frame_id = 0
    result_frames = []
    for action_strs in actions:
        action_name = None
        for action_str in action_strs:
            action = Parse(action_str, sc_pb.Action())
            try:
                func_id = feat.reverse_action(action).function
                func_name = FUNCTIONS[func_id].name
                if func_name.split('_')[0] in {
                        'Build', 'Train', 'Research', 'Morph', 'Cancel',
                        'Halt', 'Stop'
                }:
                    action_name = func_name
                    break
            except Exception:  # reverse_action may fail on unsupported actions
                pass
        if frame_id > 0 and (action_name is not None
                             or frame_id % FLAGS.skip == 0):
            result_frames.append(frame_id - FLAGS.step_mul)

        frame_id += FLAGS.step_mul

    return result_frames
Example #21
def main(_):
    with open(FLAGS.hq_replay_set) as f:
        replay_list = sorted(json.load(f))

    race_vs_race = os.path.basename(FLAGS.hq_replay_set).split('.')[0]
    global_feature_vec_path = os.path.join(FLAGS.parsed_replay_path,
                                           'GlobalFeatureVector', race_vs_race)
    races = set(race_vs_race.split('_vs_'))
    for race in races:
        path = os.path.join(global_feature_vec_path, race)
        if not os.path.isdir(path):
            os.makedirs(path)

    pbar = tqdm(total=len(replay_list), desc='#Replay')
    for replay_path, replay_info_path in replay_list:
        with open(replay_info_path) as f:
            info = json.load(f)
        info = Parse(info['info'], sc_pb.ResponseReplayInfo())

        replay_name = os.path.basename(replay_path)
        for player_info in info.player_info:
            race = sc_pb.Race.Name(player_info.player_info.race_actual)
            player_id = player_info.player_info.player_id
            reward = player_info.player_result.result

            replay_player_path = os.path.join(
                race_vs_race, race, '{}@{}'.format(player_id, replay_name))
            parse_replay(replay_player_path, reward, race,
                         race if len(races) == 1 else list(races - {race})[0])

        pbar.update()
Example #22
    def to_type_proto(self, rs: 'ResourceSession', options, top_level=False):
        d = {}
        if top_level:
            self.set_boilerplate(d)
        rs.load_data_service(self)
        pub_uuids = [
            v for k, v in rs.type_to_api_path_to_uuid['pub'].items()
            if k[0] == self.data_service_id
        ]
        d['dataFeeds'] = sorted(
            [
                MessageToDict(rs.uuid_to_resource[uuid].to_type_proto(
                    rs, options, top_level=True)) for uuid in pub_uuids
            ],
            key=lambda feed: feed['id'])
        if options.recursive and not getattr(options, 'directory', False):
            uuids = [
                v for k, v in rs.type_to_api_path_to_uuid['project'].items()
                if k[0] == self.data_service_id
            ]
            d['dataflows'] = sorted(
                [
                    MessageToDict(rs.uuid_to_resource[uuid].to_type_proto(
                        rs, options, top_level=True)) for uuid in uuids
                ],
                key=lambda flow: flow['id'])
        message = resource_pb2.DataService()
        Parse(json.dumps(d), message, ignore_unknown_fields=True)
        return message
Example #23
def test_feature_vector_values(proto_json_patch):
    # FeatureVector contains "repeated<feast.types.Value> values" proto field.
    # We want to test that feast.types.Value can take different types in JSON
    # without using additional structure (e.g. 1 instead of {int64_val: 1}).
    feature_vector_str = """{
        "values": [
            1,
            2.0,
            true,
            "foo",
            [1, 2, 3],
            [2.0, 3.0, 4.0, null],
            [true, false, true],
            ["foo", "bar", "foobar"]
        ]
    }"""
    feature_vector_proto = FeatureVector()
    Parse(feature_vector_str, feature_vector_proto)
    assertpy.assert_that(len(feature_vector_proto.values)).is_equal_to(8)
    assertpy.assert_that(
        feature_vector_proto.values[0].int64_val).is_equal_to(1)
    assertpy.assert_that(
        feature_vector_proto.values[1].double_val).is_equal_to(2.0)
    assertpy.assert_that(
        feature_vector_proto.values[2].bool_val).is_equal_to(True)
    assertpy.assert_that(
        feature_vector_proto.values[3].string_val).is_equal_to("foo")
    assertpy.assert_that(
        feature_vector_proto.values[4].int64_list_val.val).is_equal_to(
            [1, 2, 3])
    # Can't directly check equality to [2.0, 3.0, 4.0, float("nan")], because float("nan") != float("nan")
    assertpy.assert_that(
        feature_vector_proto.values[5].double_list_val.val[:3]).is_equal_to(
            [2.0, 3.0, 4.0])
    assertpy.assert_that(
        feature_vector_proto.values[5].double_list_val.val[3]).is_nan()
    assertpy.assert_that(
        feature_vector_proto.values[6].bool_list_val.val).is_equal_to(
            [True, False, True])
    assertpy.assert_that(
        feature_vector_proto.values[7].string_list_val.val).is_equal_to(
            ["foo", "bar", "foobar"])

    # Now convert protobuf back to json and check that
    feature_vector_json = MessageToDict(feature_vector_proto)
    assertpy.assert_that(len(feature_vector_json["values"])).is_equal_to(8)
    assertpy.assert_that(feature_vector_json["values"][0]).is_equal_to(1)
    assertpy.assert_that(feature_vector_json["values"][1]).is_equal_to(2.0)
    assertpy.assert_that(feature_vector_json["values"][2]).is_equal_to(True)
    assertpy.assert_that(feature_vector_json["values"][3]).is_equal_to("foo")
    assertpy.assert_that(feature_vector_json["values"][4]).is_equal_to(
        [1, 2, 3])
    # Can't directly check equality to [2.0, 3.0, 4.0, float("nan")], because float("nan") != float("nan")
    assertpy.assert_that(feature_vector_json["values"][5][:3]).is_equal_to(
        [2.0, 3.0, 4.0])
    assertpy.assert_that(feature_vector_json["values"][5][3]).is_nan()
    assertpy.assert_that(feature_vector_json["values"][6]).is_equal_to(
        [True, False, True])
    assertpy.assert_that(feature_vector_json["values"][7]).is_equal_to(
        ["foo", "bar", "foobar"])
Example #24
def jsonToPB(filename, message):

    with open(filename, 'r') as json_file:  # Avoid shadowing the json module
        Parse(json_file.read(), message)

    with open('{}.pb'.format(filename), 'wb') as output:
        output.write(message.SerializeToString())
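Hypothetical usage, assuming a generated pb2 class whose schema matches the JSON file (`models_pb2.Config` is an invented name):

message = models_pb2.Config()     # hypothetical generated pb2 class
jsonToPB('config.json', message)  # writes config.json.pb next to the input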
Example #25
def _load_yaml_doc(filename):
    """Load a disk file as YAML.

  This function reads the specified filename and parses the contents
  as YAML.

  Args:
      filename: The file whose contents are to be read as YAML data

  Returns:
      A JobControl object populated with the contents from the
      specified YAML file
  """
    log.debug(f"Opening YAML file {filename}")
    contents = None
    try:
        with open(filename, 'r') as yaml_doc:
            contents = yaml.safe_load(yaml_doc.read())
            contents = Parse(json.dumps(contents), JobControl())
    except FileNotFoundError as file_not_found:
        log.exception(f"Unable to load {filename}: {file_not_found}")
    except Error as yaml_parse_error:
        log.exception(
            f"Unable to parse YAML contents {filename}: {yaml_parse_error}")

    return contents
Example #26
def sample_frames(action_path):
    agent_intf = features.AgentInterfaceFormat(
        feature_dimensions=features.Dimensions(screen=(1, 1), minimap=(1, 1)))
    feat = features.Features(agent_intf)

    with open(action_path) as f:
        actions = json.load(f)

    frame_id = 0
    result_frames = []
    for action_step in actions:  # Get actions performed since previous observed frame
        frame_id += FLAGS.step_mul  # Advance to current frame
        action_name = None
        for action_str in action_step:  # Search all actions from step
            action = Parse(action_str, sc_pb.Action())
            try:
                func_id = feat.reverse_action(action).function
                func_name = FUNCTIONS[func_id].name
                if func_name.split('_')[0] in {
                        'Build', 'Train', 'Research', 'Morph', 'Cancel',
                        'Halt', 'Stop'
                }:  # Macro action found in step
                    action_name = func_name
                    break  # Macro step found, no need to process further actions from this step
            except Exception:  # reverse_action may fail on unsupported actions
                pass
        if action_name is not None or frame_id % FLAGS.skip == 0:
            # This is a macro step or a fixed recording step
            result_frames.append(frame_id)

    return result_frames
Example #27
def get_document_boxes(ocr_file, feature):
    """Returns document bounds given an image."""
    boxes = []

    with open(ocr_file) as json_file:
        parsed = image_annotator_pb2.AnnotateImageResponse()
        Parse(json_file.read(), parsed)  # Parse expects the raw JSON string, not a dict

    #texts = parsed.text_annotations
    #for text in texts:
    #    print('\n"{}"'.format(text.description))
    document = parsed.full_text_annotation

    # Collect specified feature bounds by enumerating all document features
    for page in document.pages:
        for block in page.blocks:
            for paragraph in block.paragraphs:
                for word in paragraph.words:
                    for symbol in word.symbols:
                        if (feature == FeatureType.SYMBOL):
                            boxes.append(
                                box(symbol.bounding_box, symbol.confidence))

                    if (feature == FeatureType.WORD):
                        boxes.append(box(word.bounding_box, word.confidence))
    return boxes
Example #28
def sample_frames(replay_path, action_path, sampled_frame_path):
    replay_info = os.path.join(FLAGS.infos_path, replay_path)
    if not os.path.isfile(replay_info):
        return
    with open(replay_info) as f:
        info = json.load(f)

    result = []
    proto = Parse(info['info'], sc_pb.ResponseReplayInfo())
    for p in proto.player_info:  # Sample actions taken by each player
        player_id = p.player_info.player_id
        race = common_pb.Race.Name(p.player_info.race_actual)

        action_file = os.path.join(action_path, race,
                                   '{}@{}'.format(player_id, replay_path))
        if not os.path.isfile(action_file):
            # Skip replays where actions haven't been extracted yet
            print('Unable to locate', action_file)
            return

        # Get the frames where each player took a macro action
        result.append(sample_frames_from_player(action_file))

    assert len(result) == 2
    # Collect all frames where either player took a macro action
    sampled_actions = sorted(set(result[0]) | set(result[1]))

    with open(os.path.join(sampled_frame_path, replay_path), 'w') as f:
        json.dump(sampled_actions, f)
Example #29
def sample_action(replay_path, action_path, sampled_path):
    replay_info = os.path.join(FLAGS.infos_path, replay_path)
    if not os.path.isfile(replay_info):
        return
    with open(replay_info) as f:
        info = json.load(f)

    result = []
    proto = Parse(info['info'], sc_pb.ResponseReplayInfo())
    for p in proto.player_info:
        player_id = p.player_info.player_id
        race = sc_common.Race.Name(p.player_info.race_actual)

        action_file = os.path.join(action_path, race,
                                   '{}@{}'.format(player_id, replay_path))
        if not os.path.isfile(action_file):
            return

        result.append(sample_action_from_player(action_file))

    assert len(result) == 2
    sampled_actions = sorted(set(result[0]) | set(result[1]))

    with open(os.path.join(sampled_path, replay_path), 'w') as f:
        json.dump(sampled_actions, f)
Example #30
File: protobuf.py Project: weihaosky/dgp
def open_pbobject(path, pb_class, verbose=True):
    """Load JSON as a protobuf (pb2) object.

    Any calls to load protobuf objects from JSON in this repository should be through this function.
    Returns `None` if the loading failed.

    Parameters
    ----------
    path: str
        JSON file path to load

    pb_class: pb2 object class
        Protobuf object we want to load into.

    verbose: bool, default: True
        Verbose prints on failure

    Returns
    -------
    pb_object: pb2 object
        Desired pb2 object to be opened.
    """
    assert path.endswith(
        ".json"), 'File extension for {} needs to be json.'.format(path)
    with open(path, 'r') as json_file:
        try:
            pb_object = Parse(json_file.read(), pb_class())
        except Exception as e:
            if verbose:
                print('open_pbobject: Failed to load pbobject {}'.format(e))
            return None
    return pb_object
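For symmetry, a minimal sketch of the `save_pbobject_as_json` counterpart referenced in Example #14; this is an assumed shape, not necessarily dgp's actual implementation:

import json
from google.protobuf.json_format import MessageToDict

def save_pbobject_as_json(pb_object, save_path):
    # Dump a pb2 object to disk as JSON (sketch; the real dgp helper may differ)
    with open(save_path, 'w') as f:
        json.dump(MessageToDict(pb_object), f, indent=2)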