Example #1
    def processPipe(self):
        # Read data from pipe to worker thread, if available
        #
        while self.Sync.pipeCli.poll():
            data = self.Sync.pipeCli.recv()
            if data[0] == mpr.PipeValType.toCli_log:
                # Handle log data -> write to history
                #
                self.log(data)

            elif data[0] == mpr.PipeValType.toCli_displayInfo:
                # Handle display information data -> update GUI
                #
                self.Stage = pickle._loads(data[1])
                self.isLCrUsed = (self.Conf.useLCr
                                  and (self.Stage.scrDevType
                                       == stg.ScrDevType.DLPLCR4500EVM))
                self.updateAll()
                self.updateDisplayInfo()

            elif data[0] == mpr.PipeValType.toCli_IODevInfo:
                self.isIODevReady = data[1][0]
                if self.isIODevReady is None:
                    self.isIODevReady = False
                self.lastIOInfo = data[1]
                self.updateIOInfo()

            else:
                # ***************************
                # ***************************
                # TODO: Other types of data need to be processed
                # ***************************
                # ***************************
                pass
Example #2
 def get_function(self, function_bytes):
     eggroll_serdes.bytes_security_check(function_bytes)
     try:
         return cloudpickle.loads(function_bytes)
     except Exception:
         import pickle
         return pickle._loads(function_bytes)
Example #3
 def _create_functor(self, task_info: processor_pb2.TaskInfo):
     if task_info.function_bytes == b'blank':
         return None
     try:
         return cloudpickle.loads(task_info.function_bytes)
     except Exception:
         import pickle
         return pickle._loads(task_info.function_bytes)
Example #4
def loads(data):
    data = to_bytes(data)
    try:
        return pickle.loads(data)
    except Exception:
        if is_py3:
            return pickle._loads(data)
        raise
Example #5
 def loads(self, s, *, fix_imports=True, encoding="ASCII", errors="strict"):
     if self._pickle_module == "P":
         return pickle._loads(s,
                              fix_imports=fix_imports,
                              encoding=encoding,
                              errors=errors)
     elif self._pickle_module == "C":
         return pickle.loads(s,
                             fix_imports=fix_imports,
                             encoding=encoding,
                             errors=errors)
     raise ValueError("Invalid pickle module")
Example #6
def get_params_redis(shapes):
    i = -1
    params = []
    keys = []
    for s in range(len(shapes)):
        keys.append(s)
    values = asyncio.get_event_loop().run_until_complete(
        multi_get_key_redis(keys))
    for shape in shapes:
        i = i + 1
        param_np = pc._loads(values[i]).reshape(shape)
        param_tensor = torch.nn.Parameter(torch.from_numpy(param_np))
        params.append(param_tensor)
    return params
Example #7
def client_connection_thread(conn, addr):
    # welcomes the new client
    send_message(conn, b'Welcome to the Server, please login\n')
    while True:
        # tells client to send a command
        send_message(conn, b'please input a command')
        data = receive_message(conn)
        # if nothing received then ends link
        if not data:
            break
        # gets the data
        data_split = pickle._loads(data)
        data_split[0] = data_split[0].upper()
        print(data_split[0])
        if data_split[0] == 'DISCONNECT':
            print('Connection from ', addr, ' has disconnected')
            conn.close()
            break
        else:
            server_commands(data_split, conn)
Example #8
    def fromFile(cls, filename):
        """
        Load a suffix array instance from filename, a file created by
        toFile.
        Accept any filename following the _open conventions.
        """
        self = cls.__new__(cls)  #new instance which does not call __init__

        start = _time()

        savedData = _loads(_open(filename, "r").read())

        # load common attributes
        self.string, self.unit, self.voc, self.vocSize, self.SA, features = savedData[:6]
        self.length = len(self.SA)

        # determine token delimiter
        if self.unit == UNIT_WORD:
            self.tokSep = " "
        elif self.unit in (UNIT_CHARACTER, UNIT_BYTE):
            self.tokSep = ""
        else:
            raise Exception("Unknown unit type identifier:", self.unit)

        # recompute tokId based on voc
        self.tokId = dict((char, iChar) for iChar, char in enumerate(self.voc))
        self.nbSentences = self.string.count(self.tokId.get("\n", 0))

        # Load features
        self.features = []
        for featureName, (featureValues,
                          featureDefault) in zip(features, savedData[6:]):
            self.addFeatureSA((lambda _: featureValues),
                              name=featureName,
                              default=featureDefault)

        self.fromFileTime = _time() - start
        if _trace: print("fromFileTime %.2fs" % self.fromFileTime, file=_stderr)
        return self
Example #10
    def load(self, name):
        """
            Labeled images are stored on a persistent database with a unique name. 
            load() overwrites existing.
        """
        obj, self.name, self.owner = LabeledImage.db_cursor.execute(
            'SELECT I.Object,I.name,I.owner FROM LabeledImage I WHERE I.name =?',
            (name, )).fetchone()

        resolved_obj = pickle._loads(obj)

        self.Image = resolved_obj['ImBytes']  # image is loaded
        # next, load the rules in the same order they were added
        self.Rules = [None] * len(resolved_obj['Rules'])

        for rule in resolved_obj['Rules']:
            expr_type = rule['expr_type']
            expr = rule['expr']
            shape_type = rule['shape_type']
            radius = rule['radius']
            action = rule['action']
            points = rule['points']  # another dictionary list
            point_list = [None] * len(points)  # points must also preserve their order
            for point in points:
                x1 = point['x1']
                y1 = point['y1']
                posp = point['position']
                point_list[posp] = (x1, y1)
            match_expr = MatchExpr(expr_type, expr)
            if shape_type == Shape.CIRCLE:
                shape = Shape(shape_type, radius, *point_list)
            else:
                shape = Shape(shape_type, *point_list)
            pos = rule['position']
            self.Rules[pos] = (match_expr, shape, action)
        return self
Example #11
def Main():
    host = socket.gethostname()  # Get local machine name
    port = 12345  # Reserve a port for your service.

    s = socket.socket()  # Create a socket object
    s.bind((host, port))  # Bind to the port once, outside the accept loop
    s.listen(5)  # Now wait for client connections.

    while True:
        c, addr = s.accept()  # Establish connection with a client.
        print('Got connection from ' + str(addr))
        while True:
            data = c.recv(1024)
            if not data:
                break
            commands = pickle._loads(data)
            if commands[0].upper() == 'DISCONNECT':
                c.close()
                break
            else:
                serverCommands(commands)

        c.close()  # Close the connection
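Examples #7 and #11 call pickle._loads on whatever a single recv() returns, which only works while every pickled message fits into one read. A hedged sketch of length-prefixed framing that guarantees a complete pickle before unpickling (the helper names send_obj/recv_exact/recv_obj are mine, not from either example):

import pickle
import struct

def send_obj(sock, obj):
    # Prefix each pickled message with its 4-byte big-endian length.
    payload = pickle.dumps(obj)
    sock.sendall(struct.pack('>I', len(payload)) + payload)

def recv_exact(sock, n):
    # Keep reading until exactly n bytes have arrived (or the peer closes).
    buf = b''
    while len(buf) < n:
        chunk = sock.recv(n - len(buf))
        if not chunk:
            raise ConnectionError('socket closed mid-message')
        buf += chunk
    return buf

def recv_obj(sock):
    (length,) = struct.unpack('>I', recv_exact(sock, 4))
    return pickle._loads(recv_exact(sock, length))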
Example #12
File: nodes.py  Project: xxoolm/Ryven
 def update_event(self, inp=-1):
     self.set_output_val(0, pickle._loads(self.input(0)))
Example #13
 def read(self, buffer):
     return pickle._loads(buffer)
Example #14
 def get_function(self, function_bytes):
     try:
         return cloudpickle.loads(function_bytes)
     except Exception:
         import pickle
         return pickle._loads(function_bytes)
Example #15
def roundtrip(val):
    pickled = pickle_function.dumps(val)
    # use _loads (the python implementation) for better stacktraces
    return pickle._loads(pickled)
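The comment in Example #15 is the usual reason to prefer _loads while debugging: when unpickling fails, the pure-Python implementation raises from ordinary frames inside pickle.py, so the traceback shows where in the unpickling process things went wrong, rather than the single opaque frame reported by the C-accelerated pickle.loads. A small illustration (assuming nothing beyond the standard library):

import pickle
import traceback

good = pickle.dumps([1, 2, 3])
try:
    pickle._loads(good[:-1])  # truncated stream -> unpickling fails
except Exception:
    traceback.print_exc()     # traceback runs through pickle.py's _Unpickler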
Example #16
File: Wrapper.py  Project: SteelHawX/PitE
 def unwrap(data):
     message = pickle._loads(data)
     return message
Example #17
def upload_events_from_pickle_to_sql(project='tvcbook', remark='production'):
    # collect all files
    filelist = []
    dirpath = os.path.join('data_export', project, remark, 'events')
    for maindir, subdir, file_name_list in os.walk(dirpath):
        # print("1:",maindir) # current directory
        # print("2:",subdir) # subdirectories of the current directory
        # print("3:",str(file_name_list))  # files in the current directory
        # file_name_list.sort()
        # subdir.sort()
        for filename in file_name_list:
            apath = os.path.join(maindir, filename)  # join into a full path
            filelist.append(apath)
    #   print(file_name_list)
    filelist.sort()
    # print(filelist)
    for pkl in filelist:
        # print(pkl)
        with open(pkl, "rb") as f2:
            results = pickle._loads(f2.read())
        # p = multiprocessing.Pool(processes = 3)
        for item in results:
            # # print(item)
            try:
                itemdict = json.loads(item)
                all_json = {
                    "properties": itemdict,
                    "distinct_id": itemdict["distinct_id"],
                    "event": itemdict["event"],
                    "type": "track"
                }
                # first_id = itemdict['first_id'] if 'first_id' in itemdict else None
                # second_id = itemdict['second_id'] if 'second_id' in itemdict else None
                # unionid = itemdict['unionid'] if 'unionid' in itemdict else None
                # id = itemdict['id'] if 'id' in itemdict else None
                ip_city, ip_is_good = get_addr(itemdict["$ip"])
                ip_asn, ip_asn_is_good = get_asn(itemdict["$ip"])
                if ip_is_good == 0:
                    ip_city = '{}'
                if ip_asn_is_good == 0:
                    ip_asn = '{}'
                print(all_json)
                created_at = time.mktime(
                    time.strptime(itemdict["time"].split('.')[0],
                                  '%Y-%m-%d %H:%M:%S'))
                # all_json = json.dumps(itemdict,ensure_ascii=False)
                insert_data(project='tvcbook',
                            data_decode=all_json,
                            User_Agent=None,
                            Host=None,
                            Connection=None,
                            Pragma=None,
                            Cache_Control=None,
                            Accept=None,
                            Accept_Encoding=None,
                            Accept_Language=None,
                            ip=itemdict["$ip"] if "$ip" in itemdict else None,
                            ip_city=ip_city,
                            ip_asn=ip_asn,
                            url=None,
                            referrer=itemdict["$referrer"]
                            if "$referrer" in itemdict else None,
                            remark=remark,
                            ua_platform=itemdict["$lib"]
                            if "$lib" in itemdict else None,
                            ua_browser=itemdict["$browser"]
                            if "$browser" in itemdict else None,
                            ua_version=itemdict["$browser_version"]
                            if "$browser_version" in itemdict else None,
                            ua_language=None,
                            ip_is_good=ip_is_good,
                            ip_asn_is_good=ip_asn_is_good,
                            created_at=created_at)
            except Exception:
                error = traceback.format_exc()
                write_to_log(filename='import_from_sa',
                             defname='upload_events_from_pickle_to_sql',
                             result=error)
        # f2.close()
        #   p.apply_async(func=insert_data,kwds={
        #     "project":"tvcbook",
        #     "data_decode":all_json,
        #     "User_Agent":None,
        #     "Host":None,
        #     "Connection":None,
        #     "Pragma":None,
        #     "Cache_Control":None,
        #     "Accept":None,
        #     "Accept_Encoding":None,
        #     "Accept_Language":None,
        #     "ip":itemdict["$ip"] if "$ip" in itemdict else None,
        #     "ip_city":ip_city,
        #     "ip_asn":ip_asn,
        #     "url":None,
        #     "referrer":itemdict["$referrer"] if "$referrer" in itemdict else None,
        #     "remark":'production',
        #     "ua_platform":itemdict["$lib"] if "$lib" in itemdict else None,
        #     "ua_browser":itemdict["$browser"] if "$browser" in itemdict else None,
        #     "ua_version":itemdict["$browser_version"] if "$browser_version" in itemdict else None,
        #     "ua_language":None,
        #     "ip_is_good":ip_is_good,
        #     "ip_asn_is_good":ip_asn_is_good,
        #     "created_at":created_at})
        #   # insert_data
        # p.close()
        # p.join()
        os.remove(pkl)
Example #18
def upload_users_from_pickle_to_sql(project='tvcbook', remark='production'):
    dirpath = os.path.join('data_export', project, remark, 'users')
    filepath = os.path.join(dirpath, 'users_all.pkl')
    with open(filepath, "rb") as f2:
        results = pickle._loads(f2.read())
    a = 1
    for item in results:
        print(a)
        a += 1
        # print('a',item)
        data_rebuild = {
            "properties": {},
            "lib": {},
            "distinct_id": "",
            "event": "",
            "type": "profile_set"
        }
        # print(item)
        try:
            item = json.loads(item)
            # print(item["first_id"])
            if len(item["first_id"]) == 16:
                data_rebuild['lib']['$lib'] = 'js'
            elif len(item['first_id']) >= 39 and len(item['first_id']) <= 46:
                data_rebuild['lib']['$lib'] = 'MiniProgram'
            elif len(item['first_id']) >= 51 and len(item['first_id']) <= 64:
                data_rebuild['lib']['$lib'] = 'js'
            else:
                data_rebuild['lib']['$lib'] = 'unknown'

            if 'second_id' in item:
                data_rebuild["distinct_id"] = item['second_id']
                data_rebuild["map_id"] = item['first_id']
                data_rebuild["original_id"] = item['first_id']
                if 'userid' in item:
                    # data_rebuild["properties"]["user_id"] = item['userid']
                    data_rebuild["properties"]["userId"] = item['userid']
                if 'name' in item:
                    data_rebuild["properties"]["name"] = item['name']
                if 'realname' in item:
                    data_rebuild["properties"]["realname"] = item['realname']
                if 'sex' in item:
                    data_rebuild["properties"]["sex"] = item['sex']
                if 'verification_type' in item:
                    data_rebuild["properties"]["verification_type"] = item[
                        'verification_type']
                if 'company' in item:
                    data_rebuild["properties"]["company"] = item['company']
            # print(item)
            else:
                data_rebuild["distinct_id"] = item['first_id']
                # data_rebuild["map_id"] = item['first_id']
                # data_rebuild["original_id"] = item['first_id']
                if 'userid' in item:
                    # data_rebuild["properties"]["user_id"] = item['userid']
                    data_rebuild["properties"]["userId"] = item['userid']
                if 'name' in item:
                    data_rebuild["properties"]["name"] = item['name']
                if 'realname' in item:
                    data_rebuild["properties"]["realname"] = item['realname']
                if 'sex' in item:
                    data_rebuild["properties"]["sex"] = item['sex']
                if 'verification_type' in item:
                    data_rebuild["properties"]["verification_type"] = item[
                        'verification_type']
                if 'company' in item:
                    data_rebuild["properties"]["company"] = item['company']
                if 'viptype' in item:
                    data_rebuild["properties"]["viptype"] = item['viptype']
            print(data_rebuild)
            insert_user(project='tvcbook',
                        data_decode=data_rebuild,
                        created_at=0)
        except Exception:
            error = traceback.format_exc()
            write_to_log(filename='import_from_sa',
                         defname='upload_users_from_pickle_to_sql',
                         result=error)
    os.remove(filepath)
Example #19
def loads(s, **kwargs):
    return _loads(s)
Example #20
 def loads(s):
     if not isinstance(s, bytes):
         s = s.encode(errors='ignore')
     return pickle._loads(s)
ls = [
    b'C6yOrzSFsfy4bQ172sS2PRmpTmGa8euo+xg',
    b'rTDAVInfyDn+WO72sS2PRmpTmGykx74Kz/HC4='
]
ls.append(str(keyobj)[2:-1])
jsondict = {'pubkey': ls[2], 'data': ls[0], 'key': ls[1]}

#

jd = js.dumps(jsondict)
ds = js.loads(jd)
vpubkey = bytes(ds['pubkey'], 'utf-8')
vpubkeyORIGINAL = vpubkey.decode('unicode-escape').encode('ISO-8859-1')
print(vpubkeyORIGINAL)
VOTER_PUBLIC_KEY = pk._loads(vpubkeyORIGINAL)
print(VOTER_PUBLIC_KEY)
print(VOTER_PUBLIC_KEY == keys['public'])

# jsondict['pubkey'] = str(keyobj)
# #print(jsondict)
#
# news = jsondict['pubkey']
# newnew = news[2:-1]
# b = bytes(newnew,'utf-8')
# print()
# newbie = b.decode('unicode-escape').encode('ISO-8859-1')
# print(newbie)
#
# ch = pk._loads(newbie)
#
Example #22
 def test_pickle(self):
     logger = TestLogger(log_level=logging.ERROR)
     logger = pickle._loads(pickle._dumps(logger))
     self.assertIsInstance(logger._log, logging.Logger)
     self.assertEqual(logger._log.level, logging.ERROR)
Example #23
def deserialize_data(data):
    return pickle._loads(data)
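Example #23 (like most of the snippets above) unpickles whatever bytes it is handed; a pickle stream from an untrusted source can execute arbitrary code while loading. A common mitigation, sketched below along the lines of the restricted-Unpickler pattern from the standard pickle documentation (the whitelist of allowed names is only an illustration):

import builtins
import io
import pickle

SAFE_BUILTINS = {"range", "complex", "set", "frozenset", "slice"}

class RestrictedUnpickler(pickle.Unpickler):
    def find_class(self, module, name):
        # Resolve only a small whitelist of builtins; refuse everything else.
        if module == "builtins" and name in SAFE_BUILTINS:
            return getattr(builtins, name)
        raise pickle.UnpicklingError(f"global '{module}.{name}' is forbidden")

def restricted_loads(data: bytes):
    return RestrictedUnpickler(io.BytesIO(data)).load()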
Example #25
import pickle
import pprint
# pprint prints lists in an easy-to-read layout
# pickle is the module that serializes Python objects to bytes
data = [{
    'name': '한사람',
    'age': 27
}, {
    'name': '두사람',
    'age': 33
}, {
    'name': '세사람',
    'age': 18
}]

print(data)
pprint.pprint(data)

data_string = pickle.dumps(data)
print("pickle : ", data_string)

load_string = pickle._loads(data_string)
print('loaded : ', load_string)