Example #1
    def load_from_bytes(self, byt):
        sub_pickle = ploads(byt)
        if 'children' in sub_pickle:
            self.children = sub_pickle['children']
            self.grid = sub_pickle['grid']
        else:
            self.points = sub_pickle['points']
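A minimal sketch of the two payload shapes this loader accepts, assuming `ploads` is pickle.loads; the values below are placeholders, not data from the original project:

import pickle

# Branch node: a dict carrying 'children' and 'grid'.
inner_node_bytes = pickle.dumps({'children': [], 'grid': None})
# Leaf node: a dict carrying only 'points'.
leaf_node_bytes = pickle.dumps({'points': [(0.0, 0.0, 0.0)]})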
Example #2
def loads(pobj):
    """Return the object found in the string 'pobj' or None
    if the string can not be loaded"""
    try:
        obj = ploads(pobj)
    except (ValueError, EOFError):
        obj = None
    return obj
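A usage sketch for this wrapper, assuming `ploads`/`pdumps` are aliases for pickle.loads/pickle.dumps as in the other snippets on this page:

import pickle

ploads, pdumps = pickle.loads, pickle.dumps

def loads(pobj):
    # Same wrapper as above: unpickle, or return None on failure.
    try:
        return ploads(pobj)
    except (ValueError, EOFError):
        return None

assert loads(pdumps({'a': 1})) == {'a': 1}
assert loads(b'') is None  # pickle.loads(b'') raises EOFError, which is caught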
Example #3
    def predict(self, model_cont, x_pred):
        train_status = model_cont['train_status']

        if train_status == "trained":
            x_pred = np.array(x_pred)
            clf = ploads(model_cont['learned_model'])
            y = clf.predict(x_pred)
        else:
            y = False

        return y
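A hypothetical `model_cont` for this method, assuming `ploads` is pickle.loads and that scikit-learn is available; the names and data are illustrative only:

import pickle
import numpy as np
from sklearn.linear_model import LogisticRegression

clf = LogisticRegression().fit(np.array([[0.0], [1.0]]), [0, 1])
model_cont = {'train_status': 'trained', 'learned_model': pickle.dumps(clf)}

# The same unpickle-and-predict step the method performs:
restored = pickle.loads(model_cont['learned_model'])
print(restored.predict(np.array([[0.2], [0.9]])))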
Example #4
   def frompickle(in_pickle_dumps):
      """ Create a new `Root` from a pickled dumps.

      :param in_pickle_dumps: (bytes) a pickled `Root` dumps
      :return: (obj) a new Root object
      :raise Err:
      """
      new_obj = ploads(in_pickle_dumps)
      if new_obj.__class__ is Root:
         return new_obj
      else:
         raise Err('Root.frompickle()', [
            'Error: `in_pickle_dumps` does not seem to be a `Root` object: Got type: <{}>'.format(type(new_obj))
         ])
Example #5
    def frompickle(in_pickle_dumps):
        """ Create a new `LconfRoot` from a pickled dumps.

        :param in_pickle_dumps: (bytes) a pickled `LconfRoot` dumps
        :return: (obj) a new LconfRoot object
        :raise Err:
        """
        new_obj = ploads(in_pickle_dumps)
        if new_obj.__class__ is LconfRoot:
            return new_obj
        else:
            raise Err('LconfRoot.frompickle()', [
                'Error: `in_pickle_dumps` does not seem to be a `LconfRoot` object: Got type: <{}>'
                .format(type(new_obj))
            ])
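The same guard-after-unpickle pattern as Examples #4 and #5, shown with a stand-in class; `Root`, `LconfRoot` and `Err` belong to the original project and are not reproduced here:

import pickle

class StandInRoot(dict):
    pass

dump = pickle.dumps(StandInRoot(key='value'))
new_obj = pickle.loads(dump)
if new_obj.__class__ is StandInRoot:
    print('round-trip preserved the exact class')
else:
    raise TypeError('does not seem to be a StandInRoot: got <{}>'.format(type(new_obj)))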
Example #6
def predict():
    import json
    if 'json_args' in request.form:
        args = pd.read_json(request.form['json_args'])
        if len(args.columns) == 1 and len(args.values) > 1:
            # convert to series
            args = args.squeeze()

    else:
        args = ploads(base64.b64decode(request.form['args']))
    try:
        predictions = app.model.predict(args)
        for v in predictions:
            PREDICTIONS.labels(v).inc()
        return json.dumps(predictions.tolist())
    except ValueError as ve:
        return str(ve)
    except Exception as e:
        return str(e)
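A hypothetical client for the `form['args']` branch of this route; the host, port and route name are assumptions, and `requests` is used only for illustration:

import base64
import pickle
import requests

payload = {'args': base64.b64encode(pickle.dumps([[1.0, 2.0], [3.0, 4.0]])).decode('ascii')}
resp = requests.post('http://localhost:5000/predict', data=payload)
print(resp.status_code, resp.text)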
Example #7
    def _load_from_store(self, name, data):
        if len(data) > 0:
            out = ploads(gzip.decompress(data))
            for n in out:
                spacing = self.root_spacing / math.pow(2, len(n))
                aabb = self.root_aabb
                for i in n:
                    aabb = split_aabb(aabb, int(i))
                node = Node(n, aabb, spacing)
                node.load_from_bytes(out[n])
                self.node_bytes[n] = out[n]
                self.nodes[n] = node
        else:
            spacing = self.root_spacing / math.pow(2, len(name))
            aabb = self.root_aabb
            for i in name:
                aabb = split_aabb(aabb, int(i))
            node = Node(name, aabb, spacing)
            self.nodes[name] = node

        return self.nodes[name]
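A sketch of the store payload `_load_from_store` expects: a gzip-compressed pickle mapping node names to per-node byte strings (the names and contents below are made up):

import gzip
import pickle

per_node = {
    '0': pickle.dumps({'points': [(0.0, 0.0, 0.0)]}),
    '04': pickle.dumps({'points': [(1.0, 1.0, 1.0)]}),
}
data = gzip.compress(pickle.dumps(per_node))
print(list(pickle.loads(gzip.decompress(data)).keys()))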
Example #8
    def predict(self, model_id, x_pred):
        y = None
        try:
            conn = MongoClient(HOST_NAME)

            print('\nConnection Successful')
            mlaas_db = conn[DB_NAME]
            models = mlaas_db[COLL_NAME]
            model_cont = models.find_one({"_id": ObjectId(model_id)})
            assert model_cont, "Invalid model ID"
            model_id = model_cont['_id']

            train_status = model_cont['train_status']

            if train_status == "trained":
                clf = ploads(model_cont['learned_model'])
                y = clf.predict(x_pred)
                print("Predicted Value:", y)
        except ConnectionFailure as conn_e:
            print("\nCould not connect to server. "
                  "Raised the following exception:\n{}".format(conn_e))
        return y
Example #9
def test_py_pickle():
   """ Tests: test_py_pickle frompickle
   """
   print('::: TEST: test_py_pickle()')
   edict_with_all = _get_orig__edict_with_all()
   new_reobj_all__pdumps = pdumps(edict_with_all)
   new_reobj_all = ploads(new_reobj_all__pdumps)

   ok_(edict_with_all == new_reobj_all, msg=None)
   ok_(isinstance(new_reobj_all, Edict), msg=None)
   ok_(edict_with_all.extra_data == new_reobj_all.extra_data, msg=None)
   ok_(new_reobj_all.extra_data['edict extra2'] == 'edict extra_value2', msg=None)

   ok_(edict_with_all['edict1'] == new_reobj_all['edict1'], msg=None)
   ok_(isinstance(new_reobj_all['edict1'], Edict), msg=None)
   ok_(edict_with_all['edict1'].extra_data == new_reobj_all['edict1'].extra_data, msg=None)
   ok_(new_reobj_all['edict1'].extra_data['edict_obj.edict1 extra2'] == 'edict_obj.edict1 extra_value2', msg=None)

   ok_(edict_with_all['rdict1'] == new_reobj_all['rdict1'], msg=None)
   ok_(isinstance(new_reobj_all['rdict1'], Rdict), msg=None)
   ok_(edict_with_all['rdict1'].extra_data == new_reobj_all['rdict1'].extra_data, msg=None)
   ok_(new_reobj_all['rdict1'].extra_data['edict_obj.rdict1 extra2'] == 'edict_obj.rdict1 extra_value2', msg=None)

   ok_(edict_with_all['edictf1'] == new_reobj_all['edictf1'], msg=None)
   ok_(isinstance(new_reobj_all['edictf1'], RdictF), msg=None)
   ok_(edict_with_all['edictf1'].extra_data == new_reobj_all['edictf1'].extra_data, msg=None)
   ok_(new_reobj_all['edictf1'].extra_data['edict_obj.edictf1 extra2'] == 'edict_obj.edictf1 extra_value2', msg=None)

   ok_(edict_with_all['edictio1'] == new_reobj_all['edictio1'], msg=None)
   ok_(isinstance(new_reobj_all['edictio1'], RdictIO), msg=None)
   ok_(edict_with_all['edictio1'].extra_data == new_reobj_all['edictio1'].extra_data, msg=None)
   ok_(new_reobj_all['edictio1'].extra_data['edict_obj.edictio1 extra2'] == 'edict_obj.edictio1 extra_value2', msg=None)
   ok_(edict_with_all['edictio1'].key_order == new_reobj_all['edictio1'].key_order, msg=None)
   ok_(new_reobj_all['edictio1'].key_order == ['edictio_inner1', 'edictio_inner2', 'edictio_inner3'], msg=None)
   ok_(edict_with_all['edictio1'].extra_key_order == new_reobj_all['edictio1'].extra_key_order, msg=None)
   ok_(new_reobj_all['edictio1'].extra_key_order == ['edictio_inner2', 'edictio_inner3', 'edictio_inner1'], msg=None)

   ok_(edict_with_all['edictfo1'] == new_reobj_all['edictfo1'], msg=None)
   ok_(isinstance(new_reobj_all['edictfo1'], RdictFO), msg=None)
   ok_(edict_with_all['edictfo1'].extra_data == new_reobj_all['edictfo1'].extra_data, msg=None)
   ok_(new_reobj_all['edictfo1'].extra_data['edict_obj.edictfo1 extra2'] == 'edict_obj.edictfo1 extra_value2', msg=None)
   ok_(edict_with_all['edictfo1'].key_order == new_reobj_all['edictfo1'].key_order, msg=None)
   ok_(new_reobj_all['edictfo1'].key_order == ['edictfo_inner1', 'edictfo_inner2', 'edictfo_inner3'], msg=None)
   ok_(edict_with_all['edictfo1'].extra_key_order == new_reobj_all['edictfo1'].extra_key_order, msg=None)
   ok_(new_reobj_all['edictfo1'].extra_key_order == ['edictfo_inner2', 'edictfo_inner3', 'edictfo_inner1'], msg=None)

   ok_(edict_with_all['edictfo2_1'] == new_reobj_all['edictfo2_1'], msg=None)
   ok_(isinstance(new_reobj_all['edictfo2_1'], RdictFO2), msg=None)
   ok_(edict_with_all['edictfo2_1'].extra_data == new_reobj_all['edictfo2_1'].extra_data, msg=None)
   ok_(new_reobj_all['edictfo2_1'].extra_data['edict_obj.edictfo2_1 extra2'] == 'edict_obj.edictfo2_1 extra_value2', msg=None)
   ok_(edict_with_all['edictfo2_1'].key_order == new_reobj_all['edictfo2_1'].key_order, msg=None)
   ok_(new_reobj_all['edictfo2_1'].key_order == ['edictfo2_inner1', 'edictfo2_inner2', 'edictfo2_inner3'], msg=None)
   ok_(edict_with_all['edictfo2_1'].extra_key_order == new_reobj_all['edictfo2_1'].extra_key_order, msg=None)
   ok_(new_reobj_all['edictfo2_1'].extra_key_order == ['edictfo2_inner2', 'edictfo2_inner3', 'edictfo2_inner1'], msg=None)

   ok_(edict_with_all['elist1'] == new_reobj_all['elist1'], msg=None)
   ok_(isinstance(new_reobj_all['elist1'], Elist), msg=None)
   ok_(edict_with_all['elist1'].extra_data == new_reobj_all['elist1'].extra_data, msg=None)
   ok_(new_reobj_all['elist1'].extra_data['edict_obj.elist1 extra2'] == 'edict_obj.elist1 extra_value2', msg=None)

   ok_(edict_with_all['rlist1'] == new_reobj_all['rlist1'], msg=None)
   ok_(isinstance(new_reobj_all['rlist1'], Rlist), msg=None)
   ok_(edict_with_all['rlist1'].extra_data == new_reobj_all['rlist1'].extra_data, msg=None)
   ok_(new_reobj_all['rlist1'].extra_data['edict_obj.rlist1 extra2'] == 'edict_obj.rlist1 extra_value2', msg=None)

   ok_(edict_with_all['rlistf1'] == new_reobj_all['rlistf1'], msg=None)
   ok_(isinstance(new_reobj_all['rlistf1'], RlistF), msg=None)
   ok_(edict_with_all['rlistf1'].extra_data == new_reobj_all['rlistf1'].extra_data, msg=None)
   ok_(new_reobj_all['rlistf1'].extra_data['edict_obj.rlistf1 extra2'] == 'edict_obj.rlistf1 extra_value2', msg=None)

   ok_(edict_with_all['etuple1'] == new_reobj_all['etuple1'], msg=None)
   ok_(isinstance(new_reobj_all['etuple1'], Etuple), msg=None)
   ok_(edict_with_all['etuple1'].extra_data == new_reobj_all['etuple1'].extra_data, msg=None)
   ok_(new_reobj_all['etuple1'].extra_data['edict_obj.etuple1 extra2'] == 'edict_obj.etuple1 extra_value2', msg=None)

   ok_(edict_with_all['lmatrix1'] == new_reobj_all['lmatrix1'], msg=None)
   ok_(isinstance(new_reobj_all['lmatrix1'], Lmatrix), msg=None)
   ok_(edict_with_all['lmatrix1'].extra_data == new_reobj_all['lmatrix1'].extra_data, msg=None)
   ok_(new_reobj_all['lmatrix1'].extra_data['edict_obj.lmatrix1 extra2'] == 'edict_obj.lmatrix1 extra_value2', msg=None)

   ok_(edict_with_all['lmatrixf1'] == new_reobj_all['lmatrixf1'], msg=None)
   ok_(isinstance(new_reobj_all['lmatrixf1'], LmatrixF), msg=None)
   ok_(edict_with_all['lmatrixf1'].extra_data == new_reobj_all['lmatrixf1'].extra_data, msg=None)
   ok_(new_reobj_all['lmatrixf1'].extra_data['edict_obj.lmatrixf1 extra2'] == 'edict_obj.lmatrixf1 extra_value2', msg=None)

   # some data checks
   ok_(edict_with_all['edictfo1']['edictfo_inner2'] == new_reobj_all['edictfo1']['edictfo_inner2'] and new_reobj_all['edictfo1']['edictfo_inner2'] == 'edictfo_inner2 value', msg=None)
   ok_(edict_with_all['rlist1'][1] == new_reobj_all['rlist1'][1] and new_reobj_all['rlist1'][1] == 'rlist_inner value2', msg=None)

   ok_(edict_with_all['lmatrixf1'].this_column_values('name') == new_reobj_all['lmatrixf1'].this_column_values('name') and new_reobj_all['lmatrixf1'].this_column_values('name') == ['darkorange', 'flesh', 'firebrick 3'], msg=None)
   ok_(edict_with_all['lmatrixf1'][1][2] == new_reobj_all['lmatrixf1'][1][new_reobj_all['lmatrixf1'].column_names_idx_lookup['green']] and new_reobj_all['lmatrixf1'][1][2] == 125, msg=None)

   # Change original
   edict_with_all['etuple1'].replace_extra_data({'edict_obj.etuple1 UPDATED': 'UPDATED'})

   ok_(edict_with_all['etuple1'] == new_reobj_all['etuple1'], msg=None)
   ok_(isinstance(new_reobj_all['etuple1'], Etuple), msg=None)
   ok_(edict_with_all['etuple1'].extra_data != new_reobj_all['etuple1'].extra_data, msg=None)
   ok_(new_reobj_all['etuple1'].extra_data['INFO'] == 'edict_obj.etuple1 inner', msg=None)
   edict_with_all = {}
   ok_(isinstance(new_reobj_all['etuple1'], Etuple), msg=None)
   ok_(new_reobj_all['etuple1'].extra_data['INFO'] == 'edict_obj.etuple1 inner', msg=None)

   ok_(isinstance(new_reobj_all['lmatrix1'], Lmatrix), msg=None)
   ok_(new_reobj_all['lmatrix1'].extra_data['INFO'] == 'edict_obj.lmatrix1 inner', msg=None)
   ok_(new_reobj_all['lmatrix1'].column_names == ('name', 'red', 'green', 'blue'), msg=None)
Example #10
    def unpickle(self, chunk):
        # python issue #14436: we should receive the record as a dict
        # and add a formatter for the server log!
        dict_ = ploads(chunk)  # bytes -> dict
        dict_["level"] = getattr(logging, dict_["levelname"])
        return logging.LogRecord(**dict_)
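For comparison, a round-trip sketch using the stdlib helper logging.makeLogRecord, which rebuilds a LogRecord from a plain attribute dict; this is not the snippet's own transport format:

import logging
import pickle

rec = logging.LogRecord('demo', logging.INFO, __file__, 1, 'hello %s', ('world',), None)
restored = logging.makeLogRecord(pickle.loads(pickle.dumps(rec.__dict__)))
print(restored.getMessage())  # -> hello world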
Example #11
def main():
    inp = jloads(stdin.readline())
    func, source = ploads(ploads(inp['rdd_pickle']))
    rdd = RDD(prev_func=func, source=source)
    partition_numbers = inp['partition_numbers']
    worker_pool.map(lambda n: do_task_and_print(rdd, n), partition_numbers)