def load(self, filename):
    try:
        # context manager ensures the file is closed even if unpickling fails
        with open(filename, 'rb') as file:
            if len(self.buffer) == 0:
                self.buffer = pickle.loads(pickle.load(file))
            else:
                buf = pickle.loads(pickle.load(file))
                self.merge(buf)
        return True
    except Exception:
        return False
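# Hedged counterpart sketch (not from the source): load() above calls
# pickle.loads(pickle.load(file)), i.e. the file stores a pickled bytes
# object that is itself a pickle of the buffer. A writer consistent with
# that double-pickled layout might look like this; only self.buffer is
# taken from load(), the rest is assumption.
def save(self, filename):
    try:
        with open(filename, 'wb') as file:
            pickle.dump(pickle.dumps(self.buffer), file)
        return True
    except OSError:
        return False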
def main_loop(self):
    running = True
    while running:
        #[address,message] = self.socket_sub.recv_multipart()
        #print_with_rank(self.num, "Waiting for a message to arrive")
        [address, message] = self.psub.recv_multipart()
        message = pickle.loads(message)
        #print_with_rank(self.num,"addr: "+str(address)+", message >"+str(message)+"<")
        if message == "Done":
            running = False
            print_with_rank(self.num, "going home")
        elif message == "Update":
            print_with_rank(self.num, " update")
        elif message[0] == "Init_SV":
            print_with_rank(self.num, "Init_SV received")
            self._process_init_sv(message)
        elif message[0] == "Initialize_GEs":
            self._process_initialize_ges(message)
        elif message[0] == "Update":
            self._process_update(message)
        elif message[0] == "Reply_constellation":
            self._process_reply_constellation(message)
        elif message[0] == "Request_constellation":
            self._process_request_constellation(message)
        elif message[0] == "Add_Front":
            self._add_front(message)
def remote_calc_motion_phases(datafile, id):
    new_data = []
    datafile = load(datafile)
    for d in datafile:
        d = pickle.loads(d)
        contacts, velocities = extract_contact_velocity_info(d)
        normalised_block_fn = normalise_block_function(contacts)
        velocities = np.reshape(velocities, (-1, 3, contacts.shape[1]))
        velocities = np.sqrt(np.sum(velocities ** 2, axis=1))
        sin_block_fn = np.sin(normalised_block_fn)
        cos_block_fn = np.cos(normalised_block_fn)
        sin_vec = sin_block_fn * velocities
        cos_vec = cos_block_fn * velocities
        ttas = calc_tta(contacts)
        for i, f in enumerate(d["frames"]):
            for j, jo in enumerate(f):
                jo["sin_normalised_contact"] = sin_block_fn[j, i]
                jo["cos_normalised_contact"] = cos_block_fn[j, i]
                jo["phase_vec"] = np.asarray([sin_vec[j, i], cos_vec[j, i]])
                jo["tta"] = ttas[j, i]
        new_data.append(pickle.dumps(d))
    return new_data, id
def retrieveData(self):
    if len(self.bufferList) > 0:
        recvData = self.bufferList[0]
        self.bufferList = self.bufferList[1:]
        return pickle.loads(recvData)
    else:
        return None
def fetch_thread(i):
    global outf
    global grad_q
    s3 = boto3.resource('s3')
    print("Fetcher %d started" % i)
    my_bucket = s3.Bucket('camus-pywren-489')
    num = 0
    start_time = time.time()
    while time.time() - start_time < total_time:
        lst = my_bucket.objects.filter(Prefix='gradient_%d/' % i).all()
        for obj in lst:
            s = time.time()
            obj_res = obj.get()
            grad = pickle.loads(obj_res['Body'].read())
            grad_q.put(grad)
            obj.delete()
            num += 1
            print("Fetched: %d, took: %f, thread: %d" % (num, time.time() - s, i))
            if time.time() - start_time > total_time:
                return
def _deserialize_dict(cls, data):
    '''
    deserializes a dictionary

    :param data: data to deserialize
    '''
    return cPickle.loads(zlib.decompress(b64decode(data.encode())))
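# Hedged inverse sketch (assumption): _deserialize_dict() above applies
# b64decode -> zlib.decompress -> cPickle.loads to a str, so a matching
# serializer would run the same steps in reverse order and return a str.
def _serialize_dict(cls, data):
    '''
    serializes a dictionary into a base64 string

    :param data: data to serialize
    '''
    return b64encode(zlib.compress(cPickle.dumps(data))).decode()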
def reconstruct(db: orator.DatabaseManager, ds_info: "DatasetInfo", fms: FMSInterface) -> dict:
    # create group_datasets
    group_datasets = tools.get_items_from_db_table(
        db, "GroupDataset", ["DatasetId", "=", ds_info.id])

    # create items
    items = {}
    for group_dataset in group_datasets:
        # create iota_groups
        iota_groups = tools.get_items_from_db_table(
            db, "IotaGroup", ["GroupId", "=", group_dataset["GroupId"]])

        # create groups
        print("Reconstructing dataset...")
        bar = ProgressBar(len(iota_groups))

        # create group
        group = {}
        for iota_group in iota_groups:
            # create iota
            iota = tools.get_items_from_db_table(
                db, "Iota", ["IotaId", "=", iota_group["IotaId"]])[0]

            # read value and attach to group
            group[iota["Key"]] = pickle.loads(iota["Value"])

            # update progress
            bar.increment()

        # attach completed group to items
        items = {**items, **group}

    return items
def _pickle_values(self, cols, values, serialize=True):
    '''
    Converts blob type columns into pickled blobs. Also error-checks
    submitted columns.

    Inputs:
        cols: list of columns associated with each value
        values: list or tuple of values to potentially convert

    Output:
        list of values with python objects pickled into blobs
    '''
    if not all([c in self.columns for c in cols]):
        raise ValueError('Not all submitted columns are in database.')

    out_values = list()
    for colname, val in zip(cols, values):
        if self.isblob[colname]:
            if serialize:
                out_values.append(pickle.dumps(val))
            else:
                if val is not None:
                    out_values.append(pickle.loads(val))
                else:
                    out_values.append(None)
        else:
            out_values.append(val)
    return out_values
def CMP2D(Atrium, TempAtrium, e, timeperiod, pace_rate):
    t = 0
    while t < timeperiod:
        #TempAtrium[i] = Atrium[i].copy()
        TempAtrium = Pickle.loads(Pickle.dumps(Atrium, -1))
        #TempAtrium = json.loads(json.dumps(Atrium))
        #print(TempAtrium)
        for i in range(L):
            for j in range(L):
                if TempAtrium[i, j][0] != 4 and TempAtrium[i, j][0] != 0:
                    Atrium[i, j][0] = Atrium[i, j][0] + 1
                if TempAtrium[i, j][0] == 0:
                    for s in TempAtrium[i, j][2]:
                        if TempAtrium[s][0] == 4:
                            if TempAtrium[s][1] == False:  # if dysfunctional
                                if e < rnd.uniform(0, 1):
                                    Atrium[s][0] = 0
                            if TempAtrium[s][1] == True:
                                Atrium[s][0] = 0
                    Atrium[i, j][0] = 1
        if t in np.arange(0, timeperiod, pace_rate):
            for i in range(len(Atrium)):
                if TempAtrium[i, 0][0] == 4:
                    if TempAtrium[i, 0][1] == False:  # if dysfunctional
                        if e < rnd.uniform(0, 1):
                            Atrium[i, 0][0] = 0
                    if Atrium[i, 0][1] == True:
                        Atrium[i, 0][0] = 0
        #print(Atrium)
        #print(TempAtrium)
        # print(TempAtrium1)
        t += 1
    return Atrium
def check_connection(n_actors):
    """
    Wait for actors to connect before publishing params.
    """
    ctx = zmq.Context()
    socket = ctx.socket(zmq.ROUTER)
    socket.bind("tcp://*:52002")

    connected = set()
    finished = set()
    while True:
        identity, null, data = socket.recv_multipart()
        actor_id, signal = pickle.loads(data)
        socket.send_multipart((identity, null, b''))
        if signal == 1:
            connected.add(actor_id)
            print("Received handshake signal from actor {}".format(actor_id))
        else:
            finished.add(actor_id)
        # break once every actor has both connected and finished
        if len(connected) == n_actors and len(connected) == len(finished):
            break

    socket.close()
    ctx.term()
    print("Successfully connected with all actors!")
    return True
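# Hedged counterpart sketch (assumption): an actor-side handshake matching
# check_connection() above. The socket type, address and (actor_id, signal)
# payload are inferred from the ROUTER side; signal 1 means "connected",
# anything else is treated as "finished".
import pickle
import zmq

def notify_learner(actor_id, signal=1, addr="tcp://localhost:52002"):
    ctx = zmq.Context()
    socket = ctx.socket(zmq.DEALER)
    socket.connect(addr)
    # the ROUTER receives (identity, null, data); a DEALER sends (null, data)
    socket.send_multipart((b'', pickle.dumps((actor_id, signal))))
    socket.recv_multipart()  # empty acknowledgement from the learner
    socket.close()
    ctx.term()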
def newAttr(*args, **kwargs):  # wrapper
    # print("before print")
    st = time.time()
    out = attr(*args, **kwargs)
    sendData = pickle.dumps(out)
    # print(sendData)
    s = socket.socket()  # create a socket object
    # host = socket.gethostname()  # get the local hostname
    # host = "127.0.0.1"
    # port = 8501  # set the port number
    s.connect((host, port))
    sendData += b"$$$$"
    s.send(sendData)
    recvData = s.recv(1024)
    pred = pickle.loads(recvData)
    # pred = pred.data.max(1, keepdim=True)[1]
    print(pred)
    s.close()
    ed = time.time()
    print('transfer time spent:', ed - st)
    return pred
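# Hedged server-side sketch (assumption): newAttr() above streams a pickled
# payload terminated by b"$$$$" and expects a pickled prediction back. This
# minimal peer mirrors that framing; `model` is a hypothetical callable, and
# the delimiter scan assumes b"$$$$" does not occur inside the pickle itself.
import pickle
import socket as socketlib  # aliased to avoid clashing with the snippet's `socket`

def serve_once(model, host="127.0.0.1", port=8501):
    srv = socketlib.socket()
    srv.bind((host, port))
    srv.listen(1)
    conn, _ = srv.accept()
    buf = b""
    while not buf.endswith(b"$$$$"):   # read until the delimiter arrives
        buf += conn.recv(4096)
    data = pickle.loads(buf[:-4])      # strip the b"$$$$" terminator
    conn.send(pickle.dumps(model(data)))
    conn.close()
    srv.close()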
def genetic_algorithm(database, population, hidden, selection, deletion, crossover, mutation1, mutation2):
    # print("TEST: genetic_algorithm() called")
    birds_data = database.select_bird(population)
    fitness = database.select_fitness(population)
    sample = []
    j = 0
    for i in birds_data:
        sample.append([i[0], pickle.loads(i[1]), fitness[j][1]])
        j += 1

    # sorting by fitness
    sample.sort(key=lambda x: x[2], reverse=True)

    # selection evolution operator
    parents = selection_method(sample, population, selection)

    # crossover evolution operator
    children = crossover_method(parents, population, crossover)

    # mutation evolution operator
    children = mutation_method(children, population, hidden, mutation1, mutation2)

    # reinsertion evolution operator
    sample = reinsertion_method(children, sample, population, hidden, deletion)

    # update the new neural networks in database
    for i in sample:
        database.update_net(pickle.dumps(i[1]), i[0])
    return
def load_binary(self):
    try:
        h_key = self.create_key("binaries")
        s_key = self._storage_engine._sessions_set_key
        YLogger.debug(self, "Loading binary brain from redis [%s]", s_key)

        start = datetime.datetime.now()
        gc.disable()

        YLogger.debug(self, "Fetching binaries [%s]", h_key)
        binaries = self.load(h_key)
        aiml_parser = pickle.loads(binaries)

        gc.enable()
        stop = datetime.datetime.now()
        diff = stop - start
        YLogger.debug(self, "Brain load took a total of %.2f sec", diff.total_seconds())
        return aiml_parser

    except Exception as excep:
        YLogger.exception(self, "Failed to load binary", excep)
        gc.enable()
        return None
def start():
    config = DataConfig()
    histories = sorted(glob.glob(config.history_location + "*.pickle"))
    data = {}
    for hist in histories:
        # context manager closes each history file after reading
        with open(hist, 'rb') as file:
            h = pickle.loads(pickle.load(file))
        for k, v in h.items():
            if k not in data.keys():
                data[k] = []
            for item in v:
                data[k].append(item)

    legend = []
    plt.subplot(2, 1, 1)
    for kv in data.items():
        legend.append(kv[0])
        plt.plot(kv[1])
    plt.legend(legend)

    for i, kv in enumerate(data.items()):
        plt.subplot(2, 3, i + 4)
        plt.title(kv[0])
        plt.plot(kv[1])

    plt.tight_layout()
    plt.show()
def visualize(vis_dir, vis_num, dataset, parts, colors):
    for vis_id, (img_file, annos) in enumerate(dataset, start=1):
        if vis_id >= vis_num:
            break
        image = cv2.cvtColor(
            cv2.imread(img_file.numpy().decode("utf-8"), cv2.IMREAD_COLOR),
            cv2.COLOR_BGR2RGB)
        annos = cpickle.loads(annos.numpy())
        ori_img = image
        vis_img = image.copy()
        kpts_list = annos[0]
        for kpts in kpts_list:
            x, y, v = kpts[0::3], kpts[1::3], kpts[2::3]
            for part_idx in range(0, len(parts)):
                if x[part_idx] < 0 or y[part_idx] < 0:
                    continue
                color = colors[part_idx]
                vis_img = cv2.circle(vis_img,
                                     (int(x[part_idx]), int(y[part_idx])),
                                     radius=6, color=color, thickness=-1)
        fig = plt.figure(figsize=(8, 8))
        a = fig.add_subplot(1, 2, 1)
        a.set_title("original image")
        plt.imshow(ori_img)
        a = fig.add_subplot(1, 2, 2)
        a.set_title("visualized image")
        plt.imshow(vis_img)
        plt.savefig(f"{vis_dir}/{vis_id}_vis_mpii.png")
        plt.close('all')
def new_entity(self, index, class_id, serial):
    """Create new entity.

    :returns: Created entity.
    """
    if self._entities[index]:
        self._entities[index] = None

    baseline = self.parser.instance_baselines[class_id]
    cls = BaseEntity

    # Find an entity class based on which tables are in its baseline.
    # Class IDs don't seem to be consistent across demo files so all
    # you can do is assume if an entity baseline has 'DT_Team' it must
    # be a Team.
    for table in baseline:
        if table in self.class_map:
            cls = self.class_map[table]
            break

    new_baseline = pickle.loads(pickle.dumps(baseline))
    assert baseline == new_baseline

    entity = cls(self.parser, index, class_id, serial, new_baseline)
    self._entities[index] = entity

    return entity
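# Hedged aside (assumption): the pickle round-trip above is used purely as a
# deep copy of the baseline; for picklable baselines copy.deepcopy behaves
# the same way.
import copy

def copy_baseline(baseline):
    # equivalent to pickle.loads(pickle.dumps(baseline)) for picklable data
    return copy.deepcopy(baseline)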
def compute(self):
    code_list = self.get_code_list()
    df = pd.DataFrame()
    for code in code_list:
        df_byte = self.redis.get(CStock.get_redis_name(code))
        if df_byte is None:
            continue
        df = df.append(_pickle.loads(df_byte))
    num = len(df)
    if 0 == num:
        return pd.DataFrame()
    _price = df.price.astype(float).sum() / num
    _volume = df.volume.astype(float).sum() / num
    _amount = df.turnover.astype(float).sum() / num
    ctime = datetime.fromtimestamp(time.time()).strftime('%Y-%m-%d %H:%M:%S')
    data = {
        'code': [self.code],
        'name': [self.get('name')],
        'time': [ctime],
        'price': [_price],
        'amount': [_amount],
        'volume': [_volume]
    }
    df = pd.DataFrame(data)
    df.time = pd.to_datetime(df.time)
    df = df.set_index('time')
    return df
def parse(cls, message_bytes):
    """Parses the raw bytes of the message.

    Parameters
    ----------
    message_bytes : bytes
        raw bytes of the body of the message

    Returns
    -------
    (str, str)
        Method and body of the message
    """
    splitat = cls.method_size
    method, body = message_bytes[:splitat], message_bytes[splitat:]

    # Decode method
    method = method.decode(cls.encoding)

    # Decode body
    if method == "DATA":
        body = pickle.loads(body)
    elif method == "DONE":
        body = body.decode('unicode-escape')
    else:
        body = body.decode(cls.encoding)
        if method == "GRAB":
            body = int(body)

    return (method, body)
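# Hedged inverse sketch (assumption): parse() above splits the first
# cls.method_size bytes off as the method and compares the decoded result
# against four-letter strings without stripping, which suggests methods are
# exactly cls.method_size characters long. A builder consistent with that:
def build(cls, method, body):
    if method == "DATA":
        body_bytes = pickle.dumps(body)
    else:
        body_bytes = str(body).encode(cls.encoding)
    return method.encode(cls.encoding) + body_bytes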
def main():
    if request.method == 'GET' or 'uid' not in session:
        uid = uuid.uuid4()
        session['dialog'] = ""
        session['uid'] = uid
        DialogSystemWrapper(uid, multi)
        print(f"New session started with uuid {uid}.")
    else:
        user_action = request.json['msg'].strip()
        if user_action.lower() == 'reset' or user_action.lower() == 'restart':
            uid = uuid.uuid4()
            session['dialog'] = ""
            session['uid'] = uid
            DialogSystemWrapper(uid, multi)
            print(f"New session started with uuid {uid}.")
        else:
            system = DialogSystemWrapper.get(session['uid'])
            system.run_turn(user_action)

    # return dialog as html
    turn_info = cPickle.loads(zlib.decompress(session['turn_info']))
    return jsonify({
        'sys_utterance': session['sys_act'],
        'turn_info': turn_info
    })
def searchLocationDistance():
    if request.method == 'POST':
        data = request.form
        res = []
        start_time = time.time()
        for i in range(1, 101, 5):
            sql = ("SELECT * FROM(SELECT *,(((acos(sin((" + data['latitude'] +
                   "*(22/7)/180)) * sin((latitude*(22/7)/180))+cos((" + data['latitude'] +
                   "*(22/7)/180)) * cos((latitude*(22/7)/180)) * cos(((" + data['longitude'] +
                   " - longitude)*(22/7)/180))))*180/(22/7))*60*1.1515*1.609344) as distance "
                   "FROM earthquake) t WHERE distance <= " + str(i))
            print(sql)
            hash = hashlib.sha224(sql.encode('utf-8')).hexdigest()
            key = "sql_cache:" + hash
            if R_SERVER.get(key):
                print("This was returned from redis")
                result = cPickle.loads(R_SERVER.get(key))
            else:
                cursor.execute(sql)
                row = cursor.fetchone()
                result = []
                while row:
                    # `col` avoids shadowing the outer loop variable `i`
                    row1 = [str(col) for col in row]
                    result.append(row1)
                    row = cursor.fetchone()
                # Put data into cache for 1 hour
                R_SERVER.set(key, cPickle.dumps(list(result)))
                # R_SERVER.expire(key, TTL);
            res.append({'range': str(i) + " km", 'results': result})
        total_time = time.time() - start_time
        return jsonify(["total_time: " + str(total_time)] + res)
async def speak(self, message, user_conf):
    speech_message = message.content
    for m in message.mentions:
        speech_message = re.sub('<@!' + str(m.id) + '>', str(m.display_name), speech_message)
    speech_message = re.sub(
        r'(http|https):\/\/([\w\-]+\.)+[\w\-]+(\/[\w\-.\/?%&=]*)?',
        'URL', speech_message)
    data = {
        'text': speech_message,
        'speaker': self.talker,
        'speed': str(user_conf['speed']),
        'volume': str(user_conf['volume']),
        'pitch': str(user_conf['pitch'])
    }
    response = requests.post('https://api.VoiceText.jp/v1/tts',
                             data=data, auth=(self.vtext_key, ''))
    f = open('vtext.wav', 'wb')
    f.write(response.content)
    f.close()
    source = discord.FFmpegPCMAudio('vtext.wav')
    message.guild.voice_client.play(source)
    print('play: ' + speech_message)
    wf = wave.open('vtext.wav', 'r')
    await asyncio.sleep(float(wf.getnframes()) / wf.getframerate())
    if self.lines:
        print('pop lines')
        line = self.lines.pop()
        data = pickle.loads(self.redis.hget('active_users', line['user']))
        await self.speak(line['message'], data)
def loads_qs_query(data):
    payload = signing.loads(data)
    model = apps.get_model(payload['model_label'])
    queryset = model.objects.none()
    queryset.query = cpickle.loads(
        zlib.decompress(base64.b64decode(payload['query'])))
    return queryset
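# Hedged inverse sketch (assumption): the dumps side is not shown, but
# loads_qs_query() above expects a signed payload carrying the model label
# and a base64/zlib/pickle encoding of queryset.query, so it would be
# roughly:
def dumps_qs_query(queryset):
    payload = {
        'model_label': queryset.model._meta.label,
        'query': base64.b64encode(
            zlib.compress(cpickle.dumps(queryset.query))).decode(),
    }
    return signing.dumps(payload)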
def peek(self):
    with self._get_conn() as conn:
        cursor = conn.execute(self._peek)
        # sqlite3 fetchone() returns None on an empty result set rather than
        # raising StopIteration; str() would also corrupt the blob on Python 3
        row = cursor.fetchone()
        if row is None:
            return None
        return loads(row[0])
def do_POST(self):
    if self.path == '/record':
        content_length = int(self.headers.get('content-length'))
        game_record = _pickle.loads(self.rfile.read(content_length))

        with reservoir_lock:
            reservoir.push(simplejson.dumps(game_record.to_dict()))

        self.send_response(200)
        self.send_header('Content-type', 'text/html')
        self.end_headers()

        log_file.write('[{}] received a game record\n'.format(
            datetime.datetime.now(datetime.timezone.utc)))
        log_file.flush()

        if update_record_num > 0:
            with new_record_count_lock:
                with reservoir_lock:
                    if reservoir.len() >= RECENT_GAMES:
                        new_record_count[0] += 1
    else:
        self.send_response(400)
        self.send_header('Content-type', 'text/html')
        self.end_headers()
def get(index_type=None, redis=None):
    redis = create_redis_obj() if redis is None else redis
    df_byte = redis.get(ct.COMBINATION_INFO)
    if df_byte is None:
        return pd.DataFrame()
    df = _pickle.loads(df_byte)
    if index_type is None:
        return df
    # boolean row mask needs single brackets; df[[...]] selects columns
    return df[df.cType == index_type]
def get(self, attribute):
    df_byte = self.redis.get(ct.STOCK_INFO)
    if df_byte is None:
        return None
    df = _pickle.loads(df_byte)
    values = df.loc[df.code == self.code][attribute].values
    if len(values) == 0:
        return None
    return values[0]
def update_prios(buffer, data):
    """
    support function to update priorities to buffer
    """
    idxes, prios = pickle.loads(data)
    buffer.update_priorities(idxes, prios)
    idxes, prios = None, None  # drop references so the arrays can be collected
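# Hedged producer-side sketch (assumption): update_prios() above unpacks a
# pickled (idxes, prios) pair, so the learner would build the payload
# symmetrically, e.g. update_prios(buffer, pack_prios([3, 7], [0.5, 1.2])).
def pack_prios(idxes, prios):
    """Serialize updated priorities for transport to the replay buffer."""
    return pickle.dumps((idxes, prios))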
def popleft(self, sleep_wait=True):
    keep_polling = True
    wait = 0.1
    max_wait = 2
    tries = 0
    with self._get_conn() as conn:
        id = None
        while keep_polling:
            conn.execute(self._write_lock)
            cursor = conn.execute(self._popleft_get)
            # fetchone() returns None on an empty queue; it never raises
            # StopIteration, so test explicitly instead of catching it
            row = cursor.fetchone()
            if row is not None:
                id, obj_buffer = row
                keep_polling = False
            else:
                conn.commit()  # unlock the database
                if not sleep_wait:
                    keep_polling = False
                    continue
                tries += 1
                sleep(wait)
                wait = min(max_wait, tries / 10 + wait)
        if id:
            conn.execute(self._popleft_del, (id,))
            return loads(obj_buffer)
    return None
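# Hedged counterpart sketch (assumption): peek()/popleft() above unpickle an
# obj_buffer column, so the enqueue side presumably pickles into a BLOB.
# self._append is a hypothetical INSERT statement mirroring self._popleft_get,
# and dumps is assumed to be imported alongside loads.
def append(self, obj):
    with self._get_conn() as conn:
        conn.execute(self._append, (dumps(obj),))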
def _3d_data_aug_fn(depth_list, cam, ground_truth2d, ground_truth3d):
    """Data augmentation function."""
    # Augment the depth image
    dep_img = sio.loadmat(depth_list)['depthim_incolor']
    dep_img = dep_img / 1000.0  # the depth map is in millimetres
    dep_img = tl.prepro.drop(dep_img, keep=np.random.uniform(0.5, 1.0))  # TODO: could add varying degrees of occlusion

    cam = cPickle.loads(cam)
    annos2d = list(cPickle.loads(ground_truth2d))[:n_pos]
    annos2d = np.array(annos2d)
    annos3d = list(cPickle.loads(ground_truth3d))[:n_pos]
    annos3d = np.array(annos3d) / 100.0  # 3D point coordinates are in centimetres

    # create voxel occupancy grid from the warped depth map
    voxel_grid, voxel_coords2d, voxel_coordsvis, trafo_params = create_voxelgrid(
        cam, dep_img, annos2d, (xdim, ydim, zdim), 1.2)
    voxel_coords3d = (annos3d - trafo_params['root']) / trafo_params['scale']

    # Augment the voxels and keypoints
    coords2d, coords3d, coordsvis = (voxel_coords2d.tolist(),
                                     voxel_coords3d.tolist(),
                                     voxel_coordsvis.tolist())
    rotate_matrix = tl.prepro.transform_matrix_offset_center(
        tl.prepro.affine_rotation_matrix(angle=(-15, 15)), x=xdim, y=xdim)
    voxel_grid = tl.prepro.affine_transform(voxel_grid, rotate_matrix)
    coords2d = keypoints_affine(coords2d, rotate_matrix)
    coords3d = keypoints_affine(coords3d, rotate_matrix)
    if np.random.uniform(0, 1.0) > 0.5:
        voxel_grid = np.flip(voxel_grid, axis=0)
        coords2d, coordsvis = keypoint_flip(coords2d, (xdim, ydim), 0, coordsvis)
        coords3d, coordsvis = keypoint_flip(coords3d, (xdim, ydim, zdim), 0, coordsvis)
    voxel_coords2d, voxel_coords3d, voxel_coordsvis = (np.array(coords2d),
                                                       np.array(coords3d),
                                                       np.array(coordsvis))

    heatmap_kp, voxel_coordsvis = get_kp_heatmap(voxel_coords2d, (xdim, ydim),
                                                 sigma, voxel_coordsvis)
    voxel_kp = np.tile(np.expand_dims(heatmap_kp, 2), [1, 1, zdim, 1])
    voxel_grid = np.expand_dims(voxel_grid, -1)
    input_3d = np.concatenate((voxel_grid, voxel_kp), 3)

    input_3d = np.array(input_3d, dtype=np.float32)
    result_3d = np.array(voxel_coords3d, dtype=np.float32)
    mask_vis = np.array(voxel_coordsvis, dtype=np.float32)
    return input_3d, result_3d, mask_vis
def _data_aug_fn(image, ground_truth):
    """Data augmentation function."""
    ground_truth = cPickle.loads(ground_truth)
    ground_truth = list(ground_truth)
    annos = ground_truth[0]
    mask = ground_truth[1]
    h_mask, w_mask, _ = np.shape(image)

    # mask
    mask_miss = np.ones((h_mask, w_mask), dtype=np.uint8)
    for seg in mask:
        bin_mask = maskUtils.decode(seg)
        bin_mask = np.logical_not(bin_mask)
        mask_miss = np.bitwise_and(mask_miss, bin_mask)

    ## image data augmentation
    # randomly resize height and width independently, scale is changed
    image, annos, mask_miss = keypoint_random_resize(image, annos, mask_miss, zoom_range=(0.8, 1.2))
    # random rotate
    image, annos, mask_miss = keypoint_random_rotate(image, annos, mask_miss, rg=15.0)
    # random left-right flipping
    image, annos, mask_miss = keypoint_random_flip(image, annos, mask_miss, prob=0.5)
    # random resize height and width together
    image, annos, mask_miss = keypoint_random_resize_shortestedge(
        image, annos, mask_miss, min_size=(hin, win), zoom_range=(0.95, 1.6))
    # random crop
    image, annos, mask_miss = keypoint_random_crop(image, annos, mask_miss, size=(hin, win))  # with padding

    # generate result maps including keypoints heatmap, pafs and mask
    height, width, _ = np.shape(image)
    heatmap = get_heatmap(annos, height, width)
    vectormap = get_vectormap(annos, height, width)
    resultmap = np.concatenate((heatmap, vectormap), axis=2)

    image = np.array(image, dtype=np.float32)
    img_mask = mask_miss.reshape(hin, win, 1)
    image = image * np.repeat(img_mask, 3, 2)

    resultmap = np.array(resultmap, dtype=np.float32)
    mask_miss = cv2.resize(mask_miss, (hout, wout), interpolation=cv2.INTER_AREA)
    mask_miss = np.array(mask_miss, dtype=np.float32)
    return image, resultmap, mask_miss
def get_data(key, a=False):
    s3 = boto3.resource('s3')
    t0 = time.time()
    obj = s3.Object('camus-pywren-489', key)
    t1 = time.time()
    body = obj.get()['Body'].read()
    data = pickle.loads(body)
    return data
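# Hedged counterpart sketch (assumption): get_data() above reads a pickled
# S3 object, so the writer side would be the symmetric put into the same
# bucket.
import pickle
import boto3

def put_data(key, data):
    s3 = boto3.resource('s3')
    s3.Object('camus-pywren-489', key).put(Body=pickle.dumps(data))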
def heidi():
    colorList = request.form.getlist('color[]')
    orderDim = request.form.getlist('orderDim')
    otherDim = request.form.getlist('otherDim')

    datasetName = session['filename']
    obj = models.Dataset.query.filter_by(name=datasetName).first()
    cleaned_file = cPickle.loads(obj.content)
    print(orderDim)
    """
    if(len(orderDim)>0):
        sorted_data_index = orderAPI.order(cleaned_file, orderDim, orderMeasure='knn_bfs')
        sorted_data_index = list(sorted_data.index)
    """
    del cleaned_file['classLabel']
    cleaned_file.index = cleaned_file['id']
    del cleaned_file['id']

    paramObj = cPickle.loads(models.sessionvars.query.filter_by(dataset=datasetName).first().paramObj)

    # CODE TO ORDER POINTS BASED ON ORDER DIM (CAN DO LATER)
    # GET HEIDI IMAGE FROM DATABASE
    # REORDER THE POINTS IN IMAGE (CAN DO LATER)
    # CREATE THE COMBINED IMAGE FOR SELECTED SUBSPACES.
    compositeImg = heidi_api.getSelectedSubspaces(datasetName, colorList, orderDim)
    # session['compositeImg'] = cPickle.dumps(compositeImg)

    patterns_df = heidi_classes.getAllPatterns_block(
        0, 1, cPickle.loads(obj.content), paramObj, orderDim)
    session['selectedColors'] = colorList

    obj = models.sessionvars.query.filter_by(dataset=datasetName).first()
    jaccard_matrix = cPickle.loads(obj.jaccardMatrix)
    jaccard_matrix2 = cPickle.loads(obj.jaccardMatrix2)

    return render_template('dimension_new.html', title='visual tool',
                           user=current_user, paramObj=paramObj,
                           image='imgs/compositeImg.png',
                           jaccard_matrix=jaccard_matrix.reset_index().to_json(orient='records'),
                           jaccard_matrix2=jaccard_matrix2.reset_index().to_json(orient='records'))
    # return redirect(url_for('heidi_controllers.interactive_heidi'))
    return "hello" + str(colorList) + str(orderDim) + str(otherDim)
def populate(cls):
    if cls.fonts is not None:
        return
    start = time.time()
    from cube.constants.application import config_directory
    fonts_dir = os.path.join(config_directory(), "fonts")
    if not os.path.exists(fonts_dir):
        os.makedirs(fonts_dir)
    fonts_file = os.path.join(fonts_dir, "fonts.lst")
    if os.path.exists(fonts_file):
        cube.debug("Loading fonts from cache file '%s'" % fonts_file)
        try:
            with open(fonts_file, 'rb') as f:
                cls.fonts = pickle.loads(f.read())
        except Exception as e:
            cube.warn("font cache file '%s' is not valid, it will be removed:" % fonts_file, e)
            os.unlink(fonts_file)
    else:
        cube.info("Finding fonts on your system, this may take a while...")
        cls.fonts = {}
        for font_dir in cls.font_directories():
            for root, dirs, files in os.walk(font_dir):
                for f in files:
                    path = os.path.join(root, f)
                    if font.is_valid(path):
                        try:
                            cls.fonts[path] = font.get_infos(path)
                        except:
                            cube.error("ignoring font file", path)
        if not cls.fonts:
            raise Exception("Couldn't find any font on your system !")
        cube.info(len(cls.fonts), "font infos fetched in %f seconds" % (time.time() - start))
        cube.debug("Saving fonts into cache file '%s'" % fonts_file)
        with open(fonts_file, 'wb') as f:
            f.write(pickle.dumps(cls.fonts))
def loads(s):
    return pkl.loads(s)
import pickle

import matplotlib.pyplot as plt
import numpy as np
import zmq

plt.ion()
fig, ax = plt.subplots()
x, y = [], []
sc = ax.scatter(x, y)
plt.xlim(-200, 200)
plt.ylim(-200, 200)
plt.draw()

# Socket to talk to server
context = zmq.Context()
socket = context.socket(zmq.SUB)
port = "5556"
socket.connect("tcp://localhost:%s" % port)
topicfilter = "10001"
socket.setsockopt_string(zmq.SUBSCRIBE, '')

total_value = 0
while True:
    # recv() returns the raw pickled bytes; recv_pyobj() would already have
    # unpickled them, so wrapping it in pickle.loads would fail
    data = pickle.loads(socket.recv())
    print(np.shape(data))
    obs, d = np.shape(data)
    for i in range(0, obs):
        x.append(data[i, 0])
        y.append(data[i, 1])
    sc.set_offsets(np.c_[x, y])
    fig.canvas.draw_idle()
    plt.pause(0.1)
def run(self, filepath, context):
    start_time = time.time()
    handle = open(filepath, "rb")
    total = pickle.loads(zlib.decompress(handle.read()))
    indexi = 0
    vertices = total["vertices"]
    faces = total["faces"]
    entitys = total["allhistory"]
    origins = total["origins"]
    self.materials = total["materials"]
    self.textures = total["textures"]
    total = None
    extralist = {}
    self.tempdir = os.path.dirname(os.path.realpath(__file__)) + os.sep + "textures"
    try:
        os.makedirs(self.tempdir)
    except Exception:
        print("some dir error, should be ok!")
    filelist = [f for f in os.listdir(self.tempdir)]
    for f in filelist:
        try:
            os.remove(f)
        except Exception:
            print("file removal trouble, no biggie")
    if self.textures:
        for texture in self.textures:
            fileh = open(self.tempdir + os.sep + texture + ".png", "wb")
            fileh.write(base64.b64decode(self.textures[texture]))
        temp = {}
        for material in self.materials:
            if "textures" in self.materials[material] and len(self.materials[material]["textures"]) > 0:
                for texpath in self.materials[material]["textures"]:
                    img = bpy.data.images.load(self.tempdir + os.sep + texpath + ".png")
                    cTex = bpy.data.textures.new("ColorTex", type="IMAGE")
                    cTex.image = img
                    temp[texpath] = cTex
        self.textures = temp
        print(self.textures)
    for mat in vertices:
        if mat in vertices and mat in faces and mat in origins:
            self.createMeshFromData(mat, origins[mat], vertices[mat], faces[mat])
            faces[mat] = None
            vertices[mat] = None
        else:
            print(str(mat) + " not in faces, vertices or origins!")
    for value in entitys:
        aentity = entitys[value]
        if len(aentity["positions"]) > 0:
            firstloc = aentity["positions"][0]["pos"]
            firstloc = firstloc[0], firstloc[1] + 2, firstloc[2]
            headloc = firstloc[0], firstloc[1] + 1, firstloc[2]
            bpy.ops.mesh.primitive_cube_add(location=headloc)
            head = bpy.context.object
            head.rotation_mode = "XYZ"
            head.scale = (0.25, 0.25, 0.25)
            bpy.ops.mesh.primitive_cube_add(location=firstloc)
            ob = bpy.context.object
            ob.rotation_mode = "XYZ"
            ob.scale = (0.25, 0.75, 0.25)
            mat = bpy.data.materials.new("PKHG")
            mobtype = aentity["type"]
            if mobtype == "50":
                ob.name = "creeper"
                mat.diffuse_color = (0.0, 1.0, 0.0)
            elif mobtype == "51":
                ob.name = "skeleton"
                mat.diffuse_color = (1.0, 1.0, 1.0)
            elif mobtype == "52":
                ob.name = "spider"
                mat.diffuse_color = (0.2, 0.1, 0.1)
            elif mobtype == "54":
                ob.name = "zombol"
                mat.diffuse_color = (0.0, 0.3, 0.0)
            elif mobtype == "55":
                ob.name = "slime"
                mat.diffuse_color = (0.5, 1, 0.5)
            elif mobtype == "58":
                ob.name = "enderman"
                mat.diffuse_color = (0.5, 0.0, 0.5)
            elif mobtype == "90":
                ob.name = "pig"
                mat.diffuse_color = (0.5, 0.4, 0.4)
            elif mobtype == "65":
                ob.name = "bat"
                mat.diffuse_color = (1, 0.5, 0.2)
            elif mobtype == "91":
                ob.name = "sheep"
                mat.diffuse_color = (1, 1, 1)
            elif mobtype == "92":
                ob.name = "cow"
                mat.diffuse_color = (1, 0.2, 0.1)
            elif mobtype == "94":
                ob.name = "squid"
                mat.diffuse_color = (0.2, 0.2, 1)
            elif mobtype == "101":
                ob.name = "rabbit"
                mat.diffuse_color = (0.5, 0.1, 0.05)
            elif len(mobtype) > 10 or mobtype == "player":
                if mobtype == "player":
                    ob.name = "player: RECORDER"
                    mat.diffuse_color = (1, 0, 0)
                else:
                    if "type" in aentity:
                        ob.name = "player: " + aentity["type"]
                    else:
                        ob.name = "player: unknown"
                    mat.diffuse_color = (1, 0.6, 0.4)
            else:
                mat.diffuse_color = (0.0, 0.0, 0.0)
                ob.name = str(mobtype)
            ob.active_material = mat
            bpy.ops.object.select_all(action="DESELECT")
            ob.select = True
            head.select = True
            put_on_layers = lambda x: tuple((i in x) for i in range(20))
            bpy.context.scene.objects.active = ob
            bpy.ops.object.parent_set()
            maincam = bpy.data.cameras.new("Camera")
            maincam.clip_start = 1
            maincam.clip_end = 5000
            cam_ob = bpy.data.objects.new("Camera", maincam)
            cam_ob.rotation_euler = (0, math.radians(180), 0)
            selfycam = bpy.data.cameras.new("Camera")
            selfycam.clip_start = 1
            selfycam.clip_end = 5000
            selfy_cam_ob = bpy.data.objects.new("Camera", selfycam)
            selfy_cam_ob.rotation_euler = (0, 0, 0)
            selfy_cam_ob.location = (0, 0, 25)
            selfy_cam_ob.layers[:] = put_on_layers({2})
            cam_ob.layers[:] = put_on_layers({2})
            ob.layers[:] = put_on_layers({2})
            head.layers[:] = put_on_layers({2})
            selfy_cam_ob.parent = head
            cam_ob.parent = head
            bpy.context.scene.objects.link(cam_ob)
            bpy.context.scene.objects.link(selfy_cam_ob)
            for posses in aentity["positions"][1:]:
                frame_num = int((posses["time"] / 20) * 25)
                bpy.context.scene.frame_set(frame_num)
                ob.location = (posses["pos"][0], posses["pos"][2], posses["pos"][1] + 0.75)
                yaw = posses["yawpichhead"][1]
                head.rotation_euler = (math.radians(posses["yawpichhead"][1]), 0, 0)
                ob.rotation_euler = (math.radians(90), 0, math.radians(posses["yawpichhead"][0]))
                ob.hide = not bool(posses["alive"])
                ob.hide_render = not bool(posses["alive"])
                ob.keyframe_insert("hide")
                ob.keyframe_insert("hide_render")
                ob.keyframe_insert(data_path="location")
                ob.keyframe_insert(data_path="rotation_euler")
            if ob.animation_data:
                for fc in ob.animation_data.action.fcurves:
                    fc.extrapolation = "LINEAR"
                    for kp in fc.keyframe_points:
                        kp.interpolation = "LINEAR"
    print("Script finished after {} seconds".format(time.time() - start_time))
    return {"FINISHED"}
def deserialise(data):
    # type: (sqlite3.Binary) -> Any
    return cpickle.loads(bytes(data))
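# Hedged inverse sketch (assumption): deserialise() above converts a
# sqlite3.Binary back into an object, so the writer side would wrap the
# pickle in sqlite3.Binary.
def serialise(obj):
    # type: (Any) -> sqlite3.Binary
    return sqlite3.Binary(cpickle.dumps(obj))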