Example #1
    def scan(
            self,
            table_name: str,
            table_namespace: str = "default",
            start_row: Optional[str] = None,
            stop_row: Optional[str] = None,
            prefix: Optional[str] = None,
            columns: Optional[Mapping[str, List[str]]] = None,
            min_time: Optional[int] = None,
            max_time: Optional[int] = None,
            max_versions: Optional[int] = None
    ) -> Iterator[Result]:
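        # proto3 scalar fields cannot represent "unset", so each optional
        # argument is wrapped in StringValue/Int64Value/Int32Value only when
        # a value was actually provided.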

        if start_row is not None:
            _start_row = StringValue(value=start_row)
        else:
            _start_row = None

        if stop_row is not None:
            _stop_row = StringValue(value=stop_row)
        else:
            _stop_row = None

        if prefix is not None:
            _prefix = StringValue(value=prefix)
        else:
            _prefix = None

        if min_time is not None:
            _min_time = Int64Value(value=min_time)
        else:
            _min_time = None

        if max_time is not None:
            _max_time = Int64Value(value=max_time)
        else:
            _max_time = None

        if columns is not None:
            _columns = self._convert_columns(columns)
        else:
            _columns = []

        if max_versions is not None:
            _max_versions = Int32Value(value=max_versions)
        else:
            _max_versions = None

        query = ScanRequest(
            table=Table(name=table_name, namespace=table_namespace),
            start_row=_start_row,
            stop_row=_stop_row,
            prefix=_prefix,
            columns=_columns,
            min_time=_min_time,
            max_time=_max_time,
            max_versions=_max_versions
        )

        return self._client.Scan(query)
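
A minimal usage sketch for the method above; `client` is assumed to be an instance of the (unshown) class that defines scan(), and the table name is hypothetical:

    # Results stream lazily from the server as an Iterator[Result].
    for result in client.scan("events", prefix="user-42", max_versions=1):
        print(result)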
Example #2
    def get(self,
            row: str,
            table_name: str,
            table_namespace: str = "default",
            columns: Optional[Mapping[str, List[str]]] = None,
            min_time: Optional[int] = None,
            max_time: Optional[int] = None,
            max_versions: Optional[int] = None) -> 'Result':
        if min_time is not None:
            _min_time = Int64Value(value=min_time)
        else:
            _min_time = Int64Value()

        if max_time is not None:
            _max_time = Int64Value(value=max_time)
        else:
            _max_time = Int64Value()

        if max_versions is not None:
            _max_versions = Int32Value(value=max_versions)
        else:
            _max_versions = Int32Value()

        query = Get(
            table=Table(name=table_name, namespace=table_namespace),
            row=row,
            columns={f: Columns(columns=c)
                     for f, c in (columns or {}).items()},
            max_versions=_max_versions,
            min_time=_min_time,
            max_time=_max_time)

        return Result(self._client.get(query))
Example #3
    def to_protobuf(self):
        """
        Return the object serialized as a protobuf message
        """
        opts = dict(count=self.count)
        if self.true_count > 0:
            opts["true_count"] = Int64Value(value=self.true_count)
        if self.null_count > 0:
            opts["null_count"] = Int64Value(value=self.null_count)
        return Counters(**opts)
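
The pattern above exists because a plain proto3 int64 field cannot distinguish "unset" from 0, while an Int64Value submessage has explicit presence. A small sketch of the distinction:

    from google.protobuf.wrappers_pb2 import Int64Value

    msg = Int64Value()                   # constructed, carrying the default 0
    assert msg.value == 0
    assert msg == Int64Value(value=0)    # wrapping zero is still an explicit message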
Example #4
    def to_protobuf(self, null_count=0):
        """
        Return the object serialized as a protobuf message
        """
        opts = dict(count=self.count)
        if self.true_count > 0:
            opts["true_count"] = Int64Value(value=self.true_count)

        # TODO: remove this logic once we deprecate null_count from the protobuf schema
        if null_count > 0:
            opts["null_count"] = Int64Value(value=null_count)
        return Counters(**opts)
Example #5
    def scan(self,
             table_name: str,
             table_namespace: str = "default",
             start_row: Optional[str] = None,
             stop_row: Optional[str] = None,
             prefix: Optional[str] = None,
             columns: Optional[Mapping[str, List[str]]] = None,
             min_time: Optional[int] = None,
             max_time: Optional[int] = None,
             max_versions: Optional[int] = None) -> 'Result':

        if start_row is not None:
            _start_row = StringValue(value=start_row)
        else:
            _start_row = None

        if stop_row is not None:
            _stop_row = StringValue(value=stop_row)
        else:
            _stop_row = None

        if prefix is not None:
            _prefix = StringValue(value=prefix)
        else:
            _prefix = None

        if min_time is not None:
            _min_time = Int64Value(value=min_time)
        else:
            _min_time = None

        if max_time is not None:
            _max_time = Int64Value(value=max_time)
        else:
            _max_time = None

        if max_versions is not None:
            _max_versions = Int32Value(value=max_versions)
        else:
            _max_versions = None

        query = Scan(
            table=Table(name=table_name, namespace=table_namespace),
            start_row=_start_row,
            stop_row=_stop_row,
            prefix=_prefix,
            columns={f: Columns(columns=c)
                     for f, c in (columns or {}).items()},
            min_time=_min_time,
            max_time=_max_time,
            max_versions=_max_versions)

        return Result(self._client.scan(query))
Example #6
    def predict(self, idImage: str) -> dict:
        """Run a prediction on TensorFlow Serving over gRPC.

        Args:
            idImage (str): id of the input image

        Returns:
            dict: formatted dictionary ready to be sent as JSON
        """
        # Call pre_process to build the input tensor
        input_tensor = self.pre_process(idImage)
        max_msg_size = 256 * 128 * 128 * 10 * 10  # max data sent over gRPC
        channel = grpc.insecure_channel(
            settings.TENSORFLOW_SERVING_ADDRESS + ':' +
            settings.TENSORFLOW_SERVING_PORT,
            options=[('grpc.max_message_length', max_msg_size),
                     ('grpc.max_send_message_length', max_msg_size),
                     ('grpc.max_receive_message_length', max_msg_size)])
        version = Int64Value(value=1)  # hardcoded version
        model_spec = ModelSpec(version=version,
                               name=self.get_model_name(),
                               signature_name='serving_default')
        grpc_request = PredictRequest(model_spec=model_spec)
        grpc_request.inputs[self.get_input_name()].CopyFrom(input_tensor)
        stub = prediction_service_pb2_grpc.PredictionServiceStub(channel)
        result = stub.Predict(grpc_request, 10)
        # Call post_process to format the raw result
        formatted_result = self.post_process(result)
        return formatted_result
Example #7
def my_service(request, context):
    value = int(request.fields["value"].number_value)
    if value == 666:
        return Status(StatusCode.FAILED_PRECONDITION, "Can't be zero")
    if value == 10:
        raise RuntimeError("Unexpected error")
    reply = Int64Value(value=value)
    return reply
Example #8
    def LoadHistory(self, request: RequestLoadHistory) -> ResponseLoadHistory:
        return ResponseLoadHistory(history=[
            HistoryMessage(mid=None,
                           prev_mid=None,
                           sender_peer=None,
                           message=None,
                           date=0,
                           forward=None,
                           reply=None,
                           edited_at=Int64Value(value=0))
        ])
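
Note that `edited_at=Int64Value(value=0)` is explicitly present even though it carries the default value 0; leaving the wrapper unset would read as "no edit timestamp" rather than "edited at time 0".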
Example #9
    def get(
            self,
            row: str,
            table_name: str,
            table_namespace: str = "default",
            columns: Optional[Mapping[str, List[str]]] = None,
            min_time: Optional[int] = None,
            max_time: Optional[int] = None,
            max_versions: Optional[int] = None
    ) -> 'Result':
        if min_time is not None:
            _min_time = Int64Value(value=min_time)
        else:
            _min_time = Int64Value()

        if max_time is not None:
            _max_time = Int64Value(value=max_time)
        else:
            _max_time = Int64Value()

        if max_versions is not None:
            _max_versions = Int32Value(value=max_versions)
        else:
            _max_versions = Int32Value()

        if columns is not None:
            _columns = self._convert_columns(columns)
        else:
            _columns = []

        query = GetRequest(
            table=Table(name=table_name, namespace=table_namespace),
            row=row,
            columns=_columns,
            max_versions=_max_versions,
            min_time=_min_time,
            max_time=_max_time
        )

        return self._client.Get(query)
Example #10
    def GetSearchStats(self, request: SearchId, context: grpc.ServicerContext) -> SearchStats:
        try:
            search = self._manager.get_search(request)
            retriever_stats = search.retriever.get_stats()
            selector_stats = search.selector.get_stats()
            passed_objects = selector_stats.passed_objects
            return SearchStats(
                totalObjects=retriever_stats.total_objects,
                processedObjects=retriever_stats.dropped_objects + selector_stats.processed_objects,
                droppedObjects=retriever_stats.dropped_objects + selector_stats.dropped_objects,
                passedObjects=Int64Value(value=passed_objects) if passed_objects is not None else None,
                falseNegatives=retriever_stats.false_negatives + selector_stats.false_negatives)
        except Exception as e:
            logger.exception(e)
            raise
Example #11
def get_dataset_range(basedir, dataset, cameras, model):
    filename = os.path.join(basedir, dataset, '{}_2d_dataset'.format(model))
    reader = ProtobufReader(filename)
    begin, end, first_read = 0, 0, True
    while True:
        sequence_id = reader.next(Int64Value())
        if not sequence_id:
            break
        # consume the per-camera annotations for this sequence id
        for camera in cameras:
            _ = reader.next(ObjectAnnotations())
        if first_read:
            begin = sequence_id.value
            first_read = False
        end = sequence_id.value
    return begin, end
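
A quick usage sketch for the function above; the base directory, dataset name, camera ids, and model are hypothetical:

    begin, end = get_dataset_range('/data', '160906_band1', cameras=[0, 1], model='openpose')
    print('sequence ids span {} to {}'.format(begin, end))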
Example #12
def grpc_client(dataset, host, port, model_name, model_label, model_version=0):
    channel = grpc.insecure_channel(f'{host}:{port}')
    stub = prediction_service_pb2_grpc.PredictionServiceStub(channel)

    results = []
    for d in tqdm_notebook(dataset):
        request = predict_pb2.PredictRequest()
        request.model_spec.name = model_name
        request.model_spec.signature_name = "serving_default"
        if not model_version:
            request.model_spec.version_label = model_label
        else:
            tmp_version = Int64Value()
            tmp_version.value = model_version
            request.model_spec.version.CopyFrom(tmp_version)
        request.inputs["dense_input"].CopyFrom(tf.make_tensor_proto(d, shape=(1, 28, 28), dtype="float32"))
        results.append(stub.Predict(request, timeout=10))
    
    return results
Example #13
def main(cxt):
    pathOut = cxt.subtask.output_path
    pathOut_user = cxt.subtask.output_user_path

    with open(pathOut_user, 'r') as fOut_user, open(pathOut, 'r') as fOut:
        diffResult = tolerantDiffAt(fOut_user, fOut)

    logger.info('diff result: {}'.format(diffResult))

    resp = subtask_response_pb2.SubtaskResponse()
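    # resp.meta['lineno'] is a protobuf Any; Pack() serializes the Int64Value
    # into it, and the judge side recovers it with Unpack() (see Example #15).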
    resp.meta['lineno'].Pack(Int64Value(value=diffResult + 1))

    if diffResult >= 0:
        resp.verdict = HojVerdict.WA.value
    else:
        resp.verdict = HojVerdict.AC.value
    return resp
Example #14
    def GetModelMetadata(self, request: GetModelMetadataRequest, context):
        '''
        get model metadata
        '''
        # get model name
        model_spec = request.model_spec
        if model_spec is None:
            raise Exception("model_spec is required")

        model_name = model_spec.name
        logger.info("get model metadata for model: {}".format(model_name))

        # query using process pool
        model_info = self.worker_pool.apply(_get_metadata, args=(model_name, ))

        # response
        return GetModelMetadataResponse(
            model_spec=ModelSpec(name=model_name,
                                 version=Int64Value(
                                     value=int(model_info['version'])),
                                 signature_name='predict'))
Example #15
def judgeSingleSubtask(task, paths, checker_args):
    infile, outfile = paths

    log_file = open(TMP_RUNLOG_PATH, 'w+')
    # the file is possibly not owned by the user executing the task (via sudo),
    # and later writes would fail
    os.chmod(TMP_RUNLOG_PATH, 0o666)
    log_file_fd = log_file.fileno()

    cmd_task_str = cmd_task_tpl.format(
        cwd=shlex.quote(path.realpath(SANDBOX_PATH)),
        time=math.ceil(task.time_limit / 1000),
        mem=math.ceil(task.mem_limit * 1024),
        log_fd=log_file_fd,
        fd_close_from=log_file_fd + 1,
        exec=PROG_EXEC_PATH
    )
    cmd_task = shlex.split(cmd_task_str)

    f_in = open(infile, 'r')
    # No, you really can't trust the user's output
    f_out_user = open(TMP_USEROUT_PATH, 'w+b')

    time_task = time.perf_counter()

    logger.debug('Starting subproc for subtask: %r', cmd_task)

    subp_task, (is_stdout_ole, _) = pipes.run_with_pipes(
        cmd_task,
        cwd=path.dirname(__file__),
        stdin=f_in,
        pipe_stdout=(f_out_user, USER_OUTPUT_LIM),
        stderr=subprocess.DEVNULL,
        pass_fds=(log_file_fd,)
    )

    logger.debug('Ending subproc for subtask after %.0fms', (time.perf_counter() - time_task) * 1000)

    process_failed = (subp_task.returncode != 0)
    if process_failed:
        logger.debug('Subtask {} with return code %d'.format(color('failed', fg='yellow')), subp_task.returncode)

    f_in.close()
    f_out_user.close()

    # parse output and filter out the STATs key-value pair

    # get size of the log
    # TODO: interrupt if the log file is empty. the worker probably fails to start up
    log_file.seek(0, os.SEEK_END)
    sz = log_file.tell()

    log_file.seek(0)
    log_dict = {}
    for ln in log_file:
        mat = re.match(r'\[S\]\[\d+?\] __STAT__:0 (?:\d+?:)?([\w]+)\s+=\s+(.*)', ln)
        if mat is None:
            # TODO: triage the message to separate file
            logger.debug('SANDBOX >>> %s', ln[:-1])
            continue
        log_dict[mat.group(1)] = mat.group(2)
    log_file.close()

    logger.debug('captured stat dict:\n%s', pformat(log_dict))

    log_used_keys = [
        'cgroup_memory_failcnt',
        'cgroup_memory_max_usage',
        'exit_normally',
        'time'
    ]

    for k in log_used_keys:
        if k not in log_dict:
            logger.error('Cannot find key "%s" from the log, which is mandatory', k)
            print(color('===== SER =====', fg='white', style='negative') +
                ' MISSING_KEY')
            return HojVerdict.SERR, log_dict

    time_used = int(log_dict['time'])
    mem_used = int(log_dict['cgroup_memory_max_usage'])
    is_seccomp_violating = (log_dict.get('seccomp_violation', '') != 'false')

    if is_seccomp_violating:
        print(color('===== RF =====', fg='yellow', style='negative'))
        return HojVerdict.RF, log_dict

    if is_stdout_ole:
        # looks like nsjail ignores SIGPIPE and lets children continue to run
        # until TLE, because of the pid namespace :(
        print(color('===== OLE =====', style='negative'))
        return HojVerdict.OLE, log_dict

    # check if the process ends with error
    verdict = None

    if log_dict['cgroup_memory_failcnt'] != '0':
        verdict = HojVerdict.MLE
    elif (log_dict['exit_normally'] == 'false' and time_used >= task.time_limit):
        verdict = HojVerdict.TLE
    elif process_failed:
        verdict = HojVerdict.RE

    if verdict is not None:
        print(color('===== {:3} ====='.format(verdict.name), fg='magenta', style='negative') +
            ' REPORTED_BY_SANDBOX')
        return verdict, log_dict

    cxt = protos.subtask_context_pb2.SubtaskContext(
        # TODO: fill in counter info
        subtask={
            'time_limit': task.time_limit,
            'mem_limit': task.mem_limit,
            'input_path': infile,
            'output_path': outfile,
            'output_user_path': TMP_USEROUT_PATH,
        },
        stat={
            'time_used': time_used,
            'mem_used': mem_used,
        },
        log_dict=log_dict,
    )

    subp_checker = subprocess.run(
        checker_args,
        input=cxt.SerializeToString(),
        stdout=subprocess.PIPE
        # TODO: triage stderr; use alt. way to execute (if possible) to integrate log
    )

    resp = protos.subtask_response_pb2.SubtaskResponse()
    if subp_checker.returncode == 0:
        # the method name is confusing; it is in fact a byte string
        try:
            resp.ParseFromString(subp_checker.stdout)
        except Exception:
            logger.exception('Error occurred when attempting to parse the response from the checker')
            resp.verdict = HojVerdict.SERR.value
    else:
        logger.error('The checker exits abnormally with return code %d', subp_checker.returncode)
        resp.verdict = HojVerdict.SERR.value

    # alternative way for a Python file; faster
    # import runpy
    # tolerantDiff = runpy.run_path(checker_path)
    # resp = tolerantDiff['main'](cxt)

    if resp.verdict == HojVerdict.WA.value:
        lineno_wrap = Int64Value(value=-1)
        # lineno only makes sense in tolerantDiff
        if 'lineno' in resp.meta:
            resp.meta['lineno'].Unpack(lineno_wrap)
        print(color('===== WA  =====', fg='red', style='negative') +
              '  @ line {}'.format(lineno_wrap.value))
        return HojVerdict.WA, log_dict
    elif resp.verdict != HojVerdict.AC.value:
        print(color('===== {:3} ====='.format(HojVerdict(resp.verdict).name), fg='blue', style='negative') +
            ' REPORTED_BY_CHECKER')
        return HojVerdict(resp.verdict), log_dict

    print(color('===== AC  =====', fg='green', style='negative'))
    return HojVerdict(resp.verdict), log_dict
Example #16
        sid = 0
        while True:
            sks = reader.next(Skeletons())
            if not sks:
                log.info("[Done][{}] camera \'{}\' with {} sequences", dataset,
                         camera, sid + 1)
                break

            objs = ObjectAnnotations()
            # panoptic dataset HD cameras resolution
            objs.resolution.width = width
            objs.resolution.height = height
            for sk in sks.skeletons:
                obj = objs.objects.add()
                for part in sk.parts:
                    type_str = SkeletonPartType.Name(part.type)
                    if type_str == 'UNKNOWN' or type_str == 'BACKGROUND':
                        continue
                    keypoint = obj.keypoints.add()
                    keypoint.id = HumanKeypoints.Value(type_str)
                    keypoint.score = part.score
                    keypoint.position.x = part.x
                    keypoint.position.y = part.y

            sequence_id = Int64Value()
            sequence_id.value = sid
            writer.insert(sequence_id)
            writer.insert(objs)
            sid += 1
Example #17
print(f"Pegged at:\n{response.order.pegged_order}")

#####################################################################################
#                        A M E N D   P E G G E D   O R D E R                        #
#####################################################################################

# __prepare_amend_pegged_order:
# Prepare the amend pegged order message
amend = vac.vega.OrderAmendment(
    market_id=marketID,
    party_id=pubkey,
    order_id=orderID,
    size_delta=-25,
    time_in_force=vac.vega.Order.TimeInForce.TIME_IN_FORCE_GTC,
    pegged_reference=vac.vega.PEGGED_REFERENCE_BEST_BID,
    pegged_offset=Int64Value(value=-100))
order = vac.api.trading.PrepareAmendOrderRequest(amendment=amend)
prepared_order = trading_client.PrepareAmendOrder(order)
blob_base64 = base64.b64encode(prepared_order.blob).decode("ascii")
# :prepare_amend_pegged_order__

print(f"Amendment prepared for order ID: {orderID}")

# __sign_tx_pegged_amend:
# Sign the prepared pegged order transaction for amendment
# Note: Setting propagate to true will also submit to a Vega node
response = wallet_client.signtx(blob_base64, pubkey, True)
helpers.check_response(response)
# :sign_tx_pegged_amend__

print("Signed pegged order amendment and sent to Vega")
Example #18
        filename = os.path.join(options['data_folder'], dataset,
                                '{}_2d_detector_{}'.format(model, camera))
        readers[camera] = ProtobufReader(filename)

    filename = os.path.join(options['data_folder'], dataset,
                            '{}_2d_detector'.format(model))
    writer = ProtobufWriter(filename)

    begin, end = get_dataset_range(options['data_folder'], dataset, cameras,
                                   model) if get_range else (-1, -1)

    log.info("[Starting][{}]{} -> {}", dataset, begin, end)
    reading = True
    while reading:
        detections = {}
        sequence_id = Int64Value()
        write = False
        for camera in cameras:
            sequence_id = readers[camera].next(Int64Value())
            if not sequence_id:
                reading = False
                break
            objs = readers[camera].next(ObjectAnnotations())

            if (sequence_id.value >= begin
                    and sequence_id.value <= end) or not get_range:
                write = True
                detections[camera] = objs
                log.info('[{}][{}][{}][{} skeletons]', dataset, camera,
                         sequence_id.value, len(detections[camera].objects))
            elif sequence_id.value > end:
Example #19
def int64Value(field):
    if field is None:
        return None
    else:
        return Int64Value(value=field)
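
A quick check of the None-passthrough behavior of the helper above:

    from google.protobuf.wrappers_pb2 import Int64Value

    assert int64Value(None) is None
    assert int64Value(42) == Int64Value(value=42)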
Example #20
def get_wrapper_int64_value_list(value):
    return Int64Value(value=value)
Example #21
import os
import json

from is_msgs.image_pb2 import ObjectAnnotations
from google.protobuf.wrappers_pb2 import Int64Value
from utils.io import ProtobufReader
from utils.options import load_options
from is_wire.core import Logger

log = Logger()
options = load_options()

with open('panoptic_datasets.json', 'r') as f:
  panoptic_data = json.load(f)
datasets = panoptic_data['datasets']
model = panoptic_data['model']
source = 'detector' # can be either 'detector' or 'dataset'

for dataset in datasets:
  dataset_folder = os.path.join(options['data_folder'], dataset)
  
  # read 3D information
  filename = os.path.join(dataset_folder, '{}_3d_dataset'.format(model))
  reader = ProtobufReader(filename)
  filename = os.path.join(dataset_folder, '{}_3d_{}_grouped'.format(model, source))
  detections_reader = ProtobufReader(filename)
  
  while True:
    sid = reader.next(Int64Value())
    sid_detections = detections_reader.next(Int64Value())
    if not sid or not sid_detections:
      break
    objs = reader.next(ObjectAnnotations())
    objs_detected = detections_reader.next(ObjectAnnotations())
    
    log.info("{} -> {}", sid.value, sid_detections.value)
Example #22
import os
import json

from is_msgs.image_pb2 import ObjectAnnotations
from google.protobuf.wrappers_pb2 import Int64Value
from utils.io import ProtobufReader
from utils.options import load_options
from is_wire.core import Logger

log = Logger()
options = load_options()

with open('panoptic_datasets.json', 'r') as f:
    panoptic_data = json.load(f)
datasets = panoptic_data['datasets']
model = panoptic_data['model']
source = 'dataset'  # can be either 'detector' or 'dataset'

for dataset in datasets:
    dataset_folder = os.path.join(options['data_folder'], dataset)

    # read 3D information
    filename = os.path.join(dataset_folder, '{}_3d_{}'.format(model, source))
    reader = ProtobufReader(filename)
    while True:
        sequence_id = reader.next(Int64Value())
        if not sequence_id:
            break
        objs = reader.next(ObjectAnnotations())
        if not objs:
            break
        n_skeletons = len(objs.objects)
        log.info("[{}] Sequence {} with {} skeletons", dataset, sequence_id,
                 n_skeletons)
Example #23
    def GetReferencedEntitites(self, request: RequestGetReferencedEntitites) -> ResponseGetReferencedEntitites:
        return ResponseGetReferencedEntitites(
            groups=[Group(id=1, access_hash=1, data=None)],
            users=[User(id=1, access_hash=1, data=UserData(nick=StringValue(value="nick")))],
            messages=[HistoryMessage(mid=None, prev_mid=None, sender_peer=None,
                                     message=None, date=0, forward=None, reply=None,
                                     edited_at=Int64Value(value=0))])