Example #1
def _replace_behv_feature_json(params):
    _replace_behv_feature_proto(Parse(json.dumps(params), ReplaceBehvFeatureMessage()))
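Note on the pattern in Example #1 (and the many similar *_json helpers below): a Python dict is serialized with json.dumps and Parse fills the target message from that JSON string. A minimal, self-contained sketch of the same pattern, using the well-known Struct type in place of the project-specific ReplaceBehvFeatureMessage (whose schema is not shown here):

import json

from google.protobuf.json_format import Parse
from google.protobuf.struct_pb2 import Struct

# Hypothetical parameters; any JSON-serializable dict works with Struct.
params = {"sample_id": "abc123", "feature": "network_activity"}

# Parse populates the given message instance from a JSON string and returns it.
msg = Parse(json.dumps(params), Struct())
print(msg)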
Example #2
def get_execute_pipeline_results(info_str=None):
    """Ask a TA2 to GetExecutePipelineResults via gRPC"""
    if info_str is None:
        info_str = get_test_info_str()

    if info_str is None:
        err_msg = 'UI Str for PipelineExecuteResultsRequest is None'
        return get_failed_precondition_response(err_msg)

    # --------------------------------
    # Is this valid JSON?
    # --------------------------------
    try:
        info_dict = json.loads(info_str, object_pairs_hook=OrderedDict)
    except json.decoder.JSONDecodeError as err_obj:
        err_msg = 'Failed to convert UI Str to JSON: %s' % (err_obj)
        return get_failed_precondition_response(err_msg)

    # --------------------------------
    # convert the JSON string to a gRPC request
    # --------------------------------
    try:
        req = Parse(info_str, core_pb2.PipelineExecuteResultsRequest())
    except ParseError as err_obj:
        err_msg = 'Failed to convert JSON to gRPC: %s' % (err_obj)
        return get_failed_precondition_response(err_msg)

    if settings.TA2_STATIC_TEST_MODE:

        template_info = get_predict_file_info_dict()

        template_str = get_grpc_test_json(
            'test_responses/execute_results_ok.json', template_info)

        embed_util = FileEmbedUtil(template_str)
        if embed_util.has_error:
            return get_failed_precondition_response(embed_util.error_message)

        return embed_util.get_final_results()

        #return get_grpc_test_json('test_responses/execute_results_ok.json',
        #                          dict())

    # --------------------------------
    # Get the connection, return an error if there are channel issues
    # --------------------------------
    core_stub, err_msg = TA2Connection.get_grpc_stub()
    if err_msg:
        return get_failed_precondition_response(err_msg)

    #print('req: %s' % req)

    # --------------------------------
    # Send the gRPC request - returns a stream
    # --------------------------------
    try:
        reply = core_stub.GetExecutePipelineResults(req)
    except grpc.RpcError as ex:
        return get_failed_precondition_response(str(ex))
    except Exception as ex:
        return get_failed_precondition_response(str(ex))

    #print('reply', reply)
    """
    if reply and str(reply) == VAL_GRPC_STATE_CODE_NONE:
        err_msg = ('Unknown gRPC state.'
                   ' (Was an ExecutePipeline request sent?)')
        return get_failed_precondition_response(err_msg)
    """
    try:
        print(MessageToJson(reply))
    except Exception:
        print('failed unary convert to JSON')
    #print('reply: %s' % reply)

    # --------------------------------
    # Convert the reply to JSON and send it on
    # --------------------------------
    results = map(MessageToJson, reply)
    result_str = '[' + ', '.join(results) + ']'

    embed_util = FileEmbedUtil(result_str)
    if embed_util.has_error:
        return get_failed_precondition_response(embed_util.error_message)

    return embed_util.get_final_results()
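Example #2 turns the gRPC response stream into a single JSON array by mapping MessageToJson over each reply (the map/join near the end of the function). A hedged, stream-agnostic sketch of just that conversion step; reply_stream stands for any iterable of protobuf messages:

from google.protobuf.json_format import MessageToJson

def stream_to_json_array(reply_stream):
    """Collapse an iterable of protobuf replies into one JSON array string."""
    return '[' + ', '.join(MessageToJson(msg) for msg in reply_stream) + ']'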
Example #3
    def test_get_model_metadata_rest(self, model_version_policy_models,
                                     start_server_model_ver_policy, model_name,
                                     throw_error):
        """
        <b>Description</b>
        Execute GetModelMetadata request using REST API interface
        hosting multiple models

        <b>input data</b>
        - directory with 2 models in IR format
        - docker image

        <b>fixtures used</b>
        - model downloader
        - input data downloader
        - service launching

        <b>Expected results</b>
        - response contains the correct model metadata for both
        models set in the config file:
        resnet_v1_50, pnasnet_large
        - both served models handle the appropriate input formats

        """
        print("Downloaded model files:", model_version_policy_models)

        print("Getting info about model")
        versions = [1, 2, 3]
        expected_outputs_metadata = [{
            'detection_out': {
                'dtype': 1,
                'shape': [1, 1, 200, 7]
            }
        }, {
            'detection_out': {
                'dtype': 1,
                'shape': [1, 1, 200, 7]
            }
        }, {
            'age': {
                'dtype': 1,
                'shape': [1, 1, 1, 1]
            },
            'gender': {
                'dtype': 1,
                'shape': [1, 2, 1, 1]
            }
        }]
        expected_inputs_metadata = [{
            'data': {
                'dtype': 1,
                'shape': [1, 3, 300, 300]
            }
        }, {
            'data': {
                'dtype': 1,
                'shape': [1, 3, 1024, 1024]
            }
        }, {
            'new_key': {
                'dtype': 1,
                'shape': [1, 3, 62, 62]
            }
        }]
        for x in range(len(versions)):
            print("Getting info about model version:".format(versions[x]))
            expected_input_metadata = expected_inputs_metadata[x]
            expected_output_metadata = expected_outputs_metadata[x]
            rest_url = 'http://localhost:5560/v1/models/{}/' \
                       'versions/{}/metadata'.format(model_name, versions[x])
            result = requests.get(rest_url)
            print(result.text)
            if not throw_error[x]:
                output_json = result.text
                metadata_pb = get_model_metadata_pb2. \
                    GetModelMetadataResponse()
                response = Parse(output_json,
                                 metadata_pb,
                                 ignore_unknown_fields=False)
                input_metadata, output_metadata = model_metadata_response(
                    response=response)

                print(output_metadata)
                assert model_name == response.model_spec.name
                assert expected_input_metadata == input_metadata
                assert expected_output_metadata == output_metadata
            else:
                assert 404 == result.status_code
Example #4
def parse_proto(json_message):
    message = KafkaServingMessage()
    Parse(json_message, message)
    return message
Example #5
def parse_replay(replay_player_path, sampled_action_path, reward, race,
                 enemy_race, stat):
    with open(
            os.path.join(FLAGS.parsed_replay_path, 'GlobalInfos',
                         replay_player_path)) as f:
        global_info = json.load(f)

    feat = SpatialFeatures(
        Parse(global_info['game_info'], sc_pb.ResponseGameInfo()))

    states = [
        obs for obs in stream.parse(
            os.path.join(FLAGS.parsed_replay_path, 'SampledObservations',
                         replay_player_path), sc_pb.ResponseObservation)
    ]

    # Sampled Actions
    with open(sampled_action_path) as f:
        sampled_action = json.load(f)
    sampled_action_id = [id // FLAGS.step_mul + 1 for id in sampled_action]
    # Actions
    with open(
            os.path.join(FLAGS.parsed_replay_path, 'Actions',
                         replay_player_path)) as f:
        actions = json.load(f)
    actions = [
        None if len(actions[idx]) == 0 else Parse(actions[idx][0],
                                                  sc_pb.Action())
        for idx in sampled_action_id
    ]

    assert len(states) == len(actions)

    spatial_states_np, global_states_np = [], []
    for state, action in zip(states, actions):
        action_id = -1
        if action is not None:
            try:
                func_id = feat.reverse_action(action).function
                func_name = FUNCTIONS[func_id].name
                if func_name.split('_')[0] in {
                        'Build', 'Train', 'Research', 'Morph', 'Cancel',
                        'Halt', 'Stop'
                }:
                    action_id = func_id
            except Exception:
                pass

        obs = feat.transform_obs(state.observation)
        spatial_states_np.append(
            np.concatenate([obs['screen'], obs['minimap']], axis=0))

        global_states_np.append(
            np.hstack([
                obs['player'] / (stat['max'] + 1e-5), obs['score'], [reward],
                [stat['action_id'][action_id]]
            ]))

    spatial_states_np = np.asarray(spatial_states_np)
    global_states_np = np.asarray(global_states_np)

    spatial_states_np = spatial_states_np.reshape([len(states), -1])
    sparse.save_npz(
        os.path.join(FLAGS.parsed_replay_path, 'SpatialFeatureTensor',
                     replay_player_path + '@S'),
        sparse.csc_matrix(spatial_states_np))
    sparse.save_npz(
        os.path.join(FLAGS.parsed_replay_path, 'SpatialFeatureTensor',
                     replay_player_path + '@G'),
        sparse.csc_matrix(global_states_np))
Example #6
        )
        subprocess.run(
            f"protoc --plugin=protoc-gen-custom=../plugin.py --custom_out=. {os.path.basename(filename)}",
            shell=True,
        )

    for filename in json_files:
        # Reset the internal symbol database so we can import the `Test` message
        # multiple times. Ugh.
        sym = symbol_database.Default()
        sym.pool = DescriptorPool()

        parts = get_base(filename).split("-")
        out = filename.replace(".json", ".bin")
        print(f"Using {parts[0]}_pb2 to generate {os.path.basename(out)}")

        imported = importlib.import_module(f"{parts[0]}_pb2")
        input_json = open(filename).read()
        parsed = Parse(input_json, imported.Test())
        serialized = parsed.SerializeToString()
        preserve = "casing" not in filename
        serialized_json = MessageToJson(parsed, preserving_proto_field_name=preserve)

        s_loaded = json.loads(serialized_json)
        in_loaded = json.loads(input_json)

        if s_loaded != in_loaded:
            raise AssertionError("Expected JSON to be equal:", s_loaded, in_loaded)

        open(out, "wb").write(serialized)
Example #7
 def from_json(json_data):
     pbdata = xrd_pb2.TokenMetadata()
     Parse(json_data, pbdata)
     return TokenMetadata(pbdata)
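Examples #7, #17, and #29 share the same idiom: Parse the JSON into a generated protobuf type, then hand that object to a thin wrapper class. A round-trip sketch of the idiom with the well-known Struct type standing in for the project-specific message (an assumption, since the xrd/qrl schemas are not shown):

from google.protobuf.json_format import MessageToJson, Parse
from google.protobuf.struct_pb2 import Struct

class TokenLike:
    """Illustrative wrapper; not part of the original codebase."""

    def __init__(self, pbdata):
        self._data = pbdata

    @staticmethod
    def from_json(json_data):
        pbdata = Struct()
        Parse(json_data, pbdata)
        return TokenLike(pbdata)

    def to_json(self):
        return MessageToJson(self._data)

token = TokenLike.from_json('{"symbol": "XRD", "decimals": 9}')
print(token.to_json())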
Example #8
def _update_malwr_info_json(params):
    _update_malwr_info_proto(Parse(json.dumps(params), UpdateMalwrInfoMessage()))
Example #9
def _add_file_type_json(params):
    _add_file_type_proto(Parse(json.dumps(params), AddFileTypeMessage()))
Example #10
def _update_hybrid_info_json(params):
    _update_hybrid_info_proto(Parse(json.dumps(params), UpdateHybridInfoMessage()))
Example #11
def _update_reverseit_info_json(params):
    _update_reverseit_info_proto(Parse(json.dumps(params), UpdateReverseItInfoMessage()))
Example #12
def _update_vt_info_json(params):
    _update_vt_info_proto(Parse(json.dumps(params), UpdateVtInfoMessage()))
Example #13
def _clear_behv_feature_json(params):
    _clear_behv_feature_proto(Parse(json.dumps(params), ClearBehvFeatureMessage()))
Example #14
def _add_host_os_json(params):
    _add_host_os_proto(Parse(json.dumps(params), AddHostOsMessage()))
Example #15
def get_search_solutions_results(raven_json_str, user_obj, websocket_id=None):
    """
    Send a GetSearchSolutionsResultsRequest to the GetSearchSolutionsResults command
    """
    if user_obj is None:
        return err_resp("The user_obj cannot be None")
    if not raven_json_str:
        err_msg = 'No data found for the GetSearchSolutionsResultsRequest'
        return err_resp(err_msg)

    # --------------------------------
    # Make sure it's valid JSON
    # --------------------------------
    raven_json_info = json_loads(raven_json_str)
    if not raven_json_info.success:
        return err_resp(raven_json_info.err_msg)

    # --------------------------------
    # convert the JSON string to a gRPC request
    #   Done for error checking; call repeated in celery task
    # --------------------------------
    try:
        req = Parse(raven_json_str,
                    core_pb2.GetSearchSolutionsResultsRequest())
    except ParseError as err_obj:
        err_msg = 'Failed to convert JSON to gRPC: %s' % (err_obj)
        return err_resp(err_msg)

    # --------------------------------
    # Save the request to the db
    # --------------------------------
    stored_request = StoredRequest(\
                    user=user_obj,
                    workspace='(not specified)',
                    request_type='GetSearchSolutionsResults',
                    is_finished=False,
                    request=raven_json_info.result_obj)
    stored_request.save()

    # In test mode, return canned response
    #
    if settings.TA2_STATIC_TEST_MODE:
        resp_str = get_grpc_test_json(\
                        'test_responses/GetSearchSolutionsResultsResponse_ok.json',
                        dict())

        resp_info = json_loads(resp_str)
        if not resp_info.success:
            return err_resp(resp_info.err_msg)

        # Save the stored response
        #
        StoredResponse.add_response(\
                        stored_request.id,
                        response=resp_info.result_obj)

        StoredRequestUtil.set_finished_ok_status(stored_request.id)
        # Return the stored **request** (not response)
        #
        return ok_resp(stored_request.as_dict())

    stream_and_store_results.delay(raven_json_str,
                                   stored_request.id,
                                   'core_pb2.GetSearchSolutionsResultsRequest',
                                   'GetSearchSolutionsResults',
                                   websocket_id=websocket_id)

    return ok_resp(stored_request.as_dict())
Example #16
def _del_file_type_json(params):
    _del_file_type_proto(Parse(json.dumps(params), DelFileTypeMessage()))
Example #17
 def from_json(json_data):
     pbdata = qrl_pb2.EncryptedEphemeralMessage()
     Parse(json_data, pbdata)
     return EncryptedEphemeralMessage(pbdata)
Example #18
def _clone_host_os_json(params):
    _clone_host_os_proto(Parse(params, CloneHostOsMessage()))
Example #19
def copy_proto(proto):
    newproto = type(proto)()
    Parse(json.dumps(proto_to_dict(proto)), newproto)
    return newproto
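Example #19 deep-copies a message by round-tripping it through a dict and JSON (proto_to_dict is project code). For reference, the same copy can usually be made with the protobuf API directly; a sketch using Struct as a stand-in for an arbitrary message type:

from google.protobuf.json_format import Parse
from google.protobuf.struct_pb2 import Struct

original = Parse('{"key": "value"}', Struct())

# Native deep copy, no JSON round trip.
copy_a = type(original)()
copy_a.CopyFrom(original)

# Equivalent copy via the binary wire format.
copy_b = type(original)()
copy_b.ParseFromString(original.SerializeToString())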
Example #20
def _add_platform_json(params):
    _add_platform_proto(Parse(json.dumps(params), AddPlatformMessage()))
Example #21
def start_session(raven_json_str=None):
    """Start session command
    This command sends a UserAgent and the protocol version
    to the TA2 service
    """
    if raven_json_str is None:
        err_msg = 'No data found.  Please send a "user_agent"'
        return get_failed_precondition_sess_response(err_msg)
        # Default if the user_agent is not from the UI
        #raven_dict = dict(user_agent=settings.TA2_GPRC_USER_AGENT)

    # The UI has sent JSON in string format that contains the user_agent
    try:
        raven_dict = json.loads(raven_json_str)
    except json.decoder.JSONDecodeError as err_obj:
        err_msg = 'Failed to convert UI Str to JSON: %s' % (err_obj)
        return get_failed_precondition_sess_response(err_msg)

    # check for a user_agent
    #
    if KEY_USER_AGENT_FROM_UI not in raven_dict:
        return get_failed_precondition_sess_response(ERR_MSG_NO_USER_AGENT)

    # The protocol version always comes from the latest
    # version we have in the repo (just copied in for now)
    #
    raven_dict['version'] = TA2Connection.get_protocol_version()

    # --------------------------------
    # Convert back to string for TA2 call
    # --------------------------------
    content = json.dumps(raven_dict)

    # --------------------------------
    # convert the JSON string to a gRPC request
    # --------------------------------
    try:
        req = Parse(content, core_pb2.SessionRequest())
    except ParseError as err_obj:
        err_msg = 'Failed to convert JSON to gRPC: %s' % (err_obj)
        return get_failed_precondition_sess_response(err_msg)

    # In test mode, check if the incoming JSON is legit (in line above)
    # -- then return canned response
    #
    if settings.TA2_STATIC_TEST_MODE:
        rnd_session_id = random_info.get_alphanumeric_string(7)
        info_dict = dict(session_id=rnd_session_id,
                         api_version=TA3TA2Util.get_api_version())

        return get_grpc_test_json('test_responses/startsession_ok.json',
                                  info_dict)

        #if random.randint(1,10) == 3:
        #    return get_grpc_test_json('test_responses/startsession_badassertion.json')
        #else:
        #    return get_grpc_test_json('test_responses/startsession_ok.json', d)

    # --------------------------------
    # Get the connection, return an error if there are channel issues
    # --------------------------------
    core_stub, err_msg = TA2Connection.get_grpc_stub()
    if err_msg:
        return get_failed_precondition_sess_response(err_msg)

        #return dict(status=core_pb2.FAILED_PRECONDITION,
        #            details=err_msg)

    # --------------------------------
    # Send the gRPC request
    # --------------------------------
    try:
        reply = core_stub.StartSession(req)
    except Exception as ex:
        return get_failed_precondition_sess_response(str(ex))

    # --------------------------------
    # Convert the reply to JSON and send it back
    # --------------------------------
    return MessageToJson(reply)
Example #22
def _del_platform_json(params):
    _del_platform_proto(Parse(json.dumps(params), DelPlatformMessage()))
Example #23
def pipeline_create(info_str=None):
    """Send the pipeline create request via gRPC"""
    if info_str is None:
        info_str = get_test_info_str()

    if info_str is None:
        err_msg = 'UI Str for %s is None' % PIPELINE_CREATE_REQUEST
        return get_failed_precondition_response(err_msg)

    # --------------------------------
    # Convert info string to dict
    # --------------------------------
    try:
        info_dict = json.loads(info_str, object_pairs_hook=OrderedDict)
    except json.decoder.JSONDecodeError as err_obj:
        err_msg = 'Failed to convert UI Str to JSON: %s' % (err_obj)
        return get_failed_precondition_response(err_msg)

    if KEY_CONTEXT_FROM_UI not in info_dict:
        return get_failed_precondition_response(ERR_NO_CONTEXT)

    if KEY_SESSION_ID_FROM_UI not in info_dict[KEY_CONTEXT_FROM_UI]:
        return get_failed_precondition_response(ERR_NO_SESSION_ID)

    # --------------------------------
    # convert the JSON string to a gRPC request
    # --------------------------------
    try:
        req = Parse(info_str, core_pb2.PipelineCreateRequest())
    except ParseError as err_obj:
        err_msg = 'Failed to convert JSON to gRPC: %s' % (err_obj)
        return get_failed_precondition_response(err_msg)

    if settings.TA2_STATIC_TEST_MODE:

        template_info = get_predict_file_info_dict(info_dict.get('task'))

        template_str = get_grpc_test_json('test_responses/createpipeline_ok.json',
                                          template_info)

        # These next lines embed file uri content into the JSON
        embed_util = FileEmbedUtil(template_str)
        if embed_util.has_error:
            return get_failed_precondition_response(embed_util.error_message)

        return embed_util.get_final_results()
        #return get_grpc_test_json('test_responses/createpipeline_ok.json',
        #                          template_info)

    # --------------------------------
    # Get the connection, return an error if there are channel issues
    # --------------------------------
    core_stub, err_msg = TA2Connection.get_grpc_stub()
    if err_msg:
        return get_failed_precondition_response(err_msg)

    # --------------------------------
    # Send the gRPC request
    # --------------------------------
    messages = []

    try:
        for reply in core_stub.CreatePipelines(req):
            user_msg = MessageToJson(reply)
            print(user_msg)
            messages.append(user_msg)
    except Exception as ex:
        return get_reply_exception_response(str(ex))

    print('end of queue. make message list')

    result_str = '['+', '.join(messages)+']'

    print('embed file contents')
    embed_util = FileEmbedUtil(result_str)
    if embed_util.has_error:
        print('file embed error')
        return get_failed_precondition_response(embed_util.error_message)

    print('return results')
    return embed_util.get_final_results()
Example #24
def _add_sample_relationship_type_json(params):
    _add_sample_relationship_type_proto(Parse(json.dumps(params), AddSampleRelationshipTypeMessage()))
Example #25
def request_generator(message_bodies: List[dict], request_dataclass: Any):
    for message_body in message_bodies:
        yield Parse(json.dumps(message_body), request_dataclass())
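Example #25 builds each streamed request by dumping a dict to JSON and parsing it into a fresh message. A self-contained sketch of how such a generator is typically consumed; Struct stands in for the real request type, and the streaming stub call mentioned in the comment is an assumption:

import json

from google.protobuf.json_format import Parse
from google.protobuf.struct_pb2 import Struct

def request_generator(message_bodies, request_dataclass):
    # Same logic as Example #25, repeated here so the sketch is self-contained.
    for message_body in message_bodies:
        yield Parse(json.dumps(message_body), request_dataclass())

# With a client-streaming RPC the generator would be passed directly to the
# stub, e.g. stub.SomeStreamingCall(request_generator(bodies, SomeRequest));
# here Struct is used so the sketch runs on its own.
for req in request_generator([{"query": "protobuf"}, {"query": "grpc"}], Struct):
    print(req)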
Example #26
def _del_sample_relationship_type_json(params):
    _del_sample_relationship_type_proto(Parse(json.dumps(params), DelSampleRelationshipTypeMessage()))
Example #27
File: route.py Project: zhshw/vpp-agent
 def validate(self):
     route = Route()
     Parse(json.dumps(self.values), route)
     return MessageToJson(route, indent=None)
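Example #27 uses Parse as a schema check: JSON that does not match the Route message raises ParseError, and valid input is re-serialized in normalized form. A hedged sketch of wrapping that behavior in a reusable helper (validate_json, its error handling, and the sample field names are illustrative, not from the vpp-agent code):

import json

from google.protobuf.json_format import MessageToJson, Parse, ParseError
from google.protobuf.struct_pb2 import Struct

def validate_json(values, message_factory):
    """Return normalized JSON if `values` fits the message schema, else None."""
    message = message_factory()
    try:
        Parse(json.dumps(values), message)
    except ParseError:
        return None
    return MessageToJson(message, indent=None)

# Struct accepts any JSON object; a generated type such as Route would reject
# unknown or mistyped fields with ParseError.
print(validate_json({"destination": "10.0.0.0/24", "next_hop": "10.0.0.1"}, Struct))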
Example #28
def _add_overall_malicious_type_json(params):
    _add_overall_malicious_type_proto(Parse(json.dumps(params), AddOverallMaliciousTypeMessage()))
Example #29
File: Block.py Project: grx7/QRL
 def from_json(json_data):
     pbdata = qrl_pb2.Block()
     Parse(json_data, pbdata)
     return Block(pbdata)
Example #30
def _append_behv_feature_json(params):
    _append_behv_feature_proto(Parse(json.dumps(params), AppendBehvFeatureMessage()))