def prepare(self, image_file, props_file, save_dir=None):
    """Preprocess one image with the dataset properties saved in ``props_file``.

    Crops air away, resamples to the dataset's target spacing, and
    normalizes using the stored statistics. Optionally persists the
    result (npz data + json meta) under ``save_dir``.

    Returns a dict with keys ``image`` and ``meta``.
    """
    props = json_load(str(Path(props_file)))

    print('Cropping image...')
    image, _, meta = load_crop(image_file, None, air=props['air'])

    print('Resample image to target spacing...')
    image, _, meta = resample_normalize(
        image, None, meta,
        spacing=props['resampled_spacing'],
        statstics=props['normalize_statstics'],
    )

    if save_dir:
        out_dir = Path(save_dir)
        out_dir.mkdir(parents=True, exist_ok=True)
        case_id = meta['case_id']
        np.savez(str(out_dir / ('%s_data.npz' % case_id)), image=image)
        json_save(str(out_dir / ('%s_meta.json' % case_id)), meta)

    return {'image': image, 'meta': meta}
def main():
    """Server entry point: load (or seed) the config, bind a TCP socket,
    and hand each accepted connection to a daemon thread running ``serve``.
    """
    global config
    global config_file

    config_file = "config.json"
    if os.path.isfile(config_file):
        config = json_load(config_file)
    else:
        # First run: seed defaults and persist them for next time.
        config['host'] = 'localhost'
        config['port'] = 45000
        config['uoffset'] = 0
        json_save(config_file, config)

    # creating socket for connection
    try:
        ssock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    except socket.error:
        print("failed to create socket\n")
        sys.exit(-1)

    # bind socket
    try:
        ssock.bind((config['host'], config['port']))
    except socket.error:
        print("failed to bind socket\n")
        sys.exit(-1)

    # listen for connections; backlog of 5 pending clients
    ssock.listen(5)

    ccount = 0
    while True:
        conn, addr = ssock.accept()
        # One daemon thread per client so the accept loop never blocks.
        worker = Thread(name="client_" + str(ccount),
                        target=serve, args=(conn, addr))
        worker.daemon = True
        worker.start()
        ccount += 1
def start(self):
    """Client startup: load (or seed) the config, connect to the server,
    spawn the peer-listener thread, and show the initial file prompt.
    """
    global config
    global config_file
    global client_file
    global clients

    config_file = "config.json"
    client_file = "clients.json"

    if os.path.isfile(config_file):
        with open(config_file, "rb") as fh:
            config = json.load(fh)
    else:
        config['host'] = 'localhost'
        config['port'] = 45000
        json_save(config_file, config)

    # connect with server
    self.server = init_conn((config['host'], config['port']))

    # open listening port for peers; `listen` reports the bound
    # (host, port) back through the queue
    addr_queue = Queue.Queue()
    self.lsock = listen(addr_queue)
    self.lhost, self.lport = addr_queue.get()

    lthread = Thread(name="lthread", target=accept_peer, args=(self.lsock, ))
    lthread.daemon = True
    lthread.start()

    greeting = tk.Label(
        text="Please provide at least one file to server.\n\r")
    greeting.pack()

    self.add_file()
def main():
    """Fetch entity lists (coins, exchanges, company descriptions),
    de-duplicate companies against exchanges, and save each list as JSON.
    """
    output_dir = "data/entities"

    coin_names = get_top_coins_and_coin_names_by_mkt_cap()
    exchanges = get_exchanges_list()
    # Drop companies that already appear in the exchanges list.
    companies_descriptions = remove_duplicated_entities(
        exchanges, get_crypto_companies())

    for fname, payload in (
        ("coin_names.json", coin_names),
        ("exchanges.json", exchanges),
        ("companies_descriptions.json", companies_descriptions),
    ):
        utils.json_save(payload, os.path.join(output_dir, fname))
    # --- tail of fetch_news_articles (its `def` line is outside this view) ---
    params = {
        "lang": "EN",
        "lTs": to_ts,  # upper bound: only articles published before this timestamp
        "api_key": api_key,
    }
    response = requests.get(endpoint, params=params).json()
    data = response["Data"]
    # presumably articles come back newest-first, so the last entry is the
    # oldest in this batch — TODO confirm against the API docs
    earliest_ts = data[-1]["published_on"]
    return data, earliest_ts

# Page backwards through time from end_ts until we pass start_ts.
all_news_articles = []
counter = 0
earliest_ts = end_ts
while earliest_ts > start_ts:
    news_articles, earliest_ts = fetch_news_articles(earliest_ts, api_key)
    all_news_articles.extend(news_articles)
    counter += 1
    if counter % 10 == 0:
        # Checkpoint every 10 batches so a crash doesn't lose everything.
        utils.json_save(all_news_articles, output_fp)
        print(counter)
    time.sleep(1)  # crude rate limiting between API calls
        # --- tail of a stats-collection function (its `def` line is outside
        # this view); per-axis aggregates over all case spacings/shapes ---
        'max_spacing': np.max(spacings, axis=0).tolist(),
        'max_shape': np.max(shapes, axis=0).tolist(),
        'min_spacing': np.min(spacings, axis=0).tolist(),
        'min_shape': np.min(shapes, axis=0).tolist(),
        'mean_spacing': np.mean(spacings, axis=0).tolist(),
        'mean_shape': np.mean(shapes, axis=0).tolist(),
        'median_spacing': np.median(spacings, axis=0).tolist(),
        'median_shape': np.median(shapes, axis=0).tolist(),
        'modality_statstics': modality_statstics,
    }
    # Merge the freshly computed properties into an existing props file,
    # new values overwriting old ones on key collision.
    if props_file is not None:
        props_file = Path(props_file)
        props = json_load(str(props_file))
        props = {**props, **new_props}
        json_save(str(props_file), props)
    return new_props


def analyze_raw_cases(image_dir, label_dir, props_file=None, data_range=None):
    # Pair up image and label files from two directories (sorted order must
    # match one-to-one) — the rest of this function continues past this view.
    image_dir = Path(image_dir)
    label_dir = Path(label_dir)
    image_files = [
        path for path in sorted(image_dir.iterdir()) if path.is_file()
    ]
    label_files = [
        path for path in sorted(label_dir.iterdir()) if path.is_file()
    ]
    assert len(image_files) == len(label_files),\
def communicate(conn, client, buffer, prev_cmd):
    """Consume one NUL-terminated message from ``buffer`` and dispatch on its
    command (HELLO / LIST / SEARCH: / BYE).

    Returns ``(remaining_buffer, last_command)``. Messages in this protocol
    end with a newline followed by a NUL byte, which is why the slice below
    stops one character *before* the NUL (dropping the trailing newline).
    """
    global config
    global config_file
    global all_files

    # No complete message buffered yet.
    if "\0" not in buffer:
        return "", prev_cmd
    else:
        idx = buffer.index("\0")
        # Strip the trailing "\n" along with the NUL terminator.
        msg = buffer[:idx - 1]
        buffer = buffer[idx + 1:]

    # message split
    lines = msg.split("\n")
    fields = lines[0].split(" ")
    cmd = fields[0]

    if cmd == "HELLO":
        # Assign the next user id ("u<N>") and persist the counter so ids
        # survive a server restart.
        config['uoffset'] += 1
        json_save(config_file, config)
        conn_clients[client] = "u" + str(config['uoffset'])
        if conn_clients[client] not in clients:
            clients[conn_clients[client]] = {}
        clients[conn_clients[client]]['host'] = fields[1]
        clients[conn_clients[client]]['port'] = fields[2]
        clients[conn_clients[client]]['is_connected'] = 1
        # json_save(clients_file, clients)
        send_msg(conn, "HI {}\n\0".format(conn_clients[client]))
        # Recurse: more messages may already be sitting in the buffer.
        return communicate(conn, client, buffer, "HI")
    elif cmd == "LIST":
        # Remaining lines are the client's file listing.
        if conn_clients[client] not in clients:
            clients[conn_clients[client]] = {}
        clients[conn_clients[client]]['files'] = lines[1:]
        save_files_dict(all_files, lines[1:])
        print("ALL FILES")
        print(all_files)
        json_save(clients_file, clients)
        send_msg(conn, "ACCEPTED\n\0")
        return buffer, "ACCEPTED"
    elif cmd == "SEARCH:":
        filename = fields[1]
        if filename in all_files:
            msg = "FOUND: \n"
            prev_cmd = "FOUND"
            for file in all_files[filename]:
                # Only advertise copies held by currently-connected clients.
                # NOTE(review): `[:-1]` appears to strip a trailing character
                # from the stored client id — confirm against save_files_dict.
                if clients[file['client'][:-1]]['is_connected']:
                    msg += construct_file_str_1(file) + "\n"
            msg += "\0"
        else:
            msg = "NOT_FOUND\n\0"
            prev_cmd = "NOT_FOUND"
        send_msg(conn, msg)
        return buffer, prev_cmd
    elif cmd == "BYE":
        print("should update connected clients\n")
        clients[conn_clients[client]]['is_connected'] = 0
        json_save(clients_file, clients)
        send_msg(conn, "BYE\n\0")
        return buffer, prev_cmd
    else:
        # Unknown command: report and terminate this handler.
        print("invalid command was received\n")
        send_msg(conn, "ERROR\n\0")
        sys.exit(-1)
def main():
    """Scrape job-listing pages until none remain, saving each vacancy to the
    DB and to JSON, and collecting a printable summary per page.

    Fixes over the previous revision:
    - ``json_db = ({{...}})`` built a *set containing a dict*, which raises
      ``TypeError`` (dict is unhashable) on every iteration — now a plain dict.
    - ``description`` was the raw ``find_all`` tag list, so the later
      ``.replace`` call could never work — now flattened to plain text.
    - the ``company`` lookup crashed with ``AttributeError`` when the node was
      missing, while its sibling ``salary`` was guarded — now both are guarded.
    """
    page = 0
    while True:
        page += 1
        payload = {
            'ss': 1,
            'page': page,
        }
        headers = {
            'User-Agent': generate_user_agent(),
        }
        print(f'PAGE: {page}')
        response = requests.get(HOST + ROOT_PATH, params=payload,
                                headers=headers)
        random_sleep()
        response.raise_for_status()

        soup = BeautifulSoup(response.text, 'html.parser')
        class_ = 'card card-hover card-visited wordwrap job-link'
        cards = soup.find_all('div', class_=class_)
        if not cards:
            cards = soup.find_all('div', class_=class_ + ' js-hot-block')
        if not cards:
            # Past the last page of results.
            break

        result = []
        for card in cards:
            tag_a = card.find('h2').find('a')
            title = tag_a.text
            href = tag_a['href']

            detail = requests.get(HOST + href, headers=headers)
            # Use a separate soup for the detail page so the listing soup
            # isn't clobbered mid-loop.
            detail_soup = BeautifulSoup(detail.text, 'html.parser')

            # NOTE(review): class_='' looks like a lost selector — confirm
            # the real class names for the company and salary nodes.
            try:
                company = detail_soup.find(class_='').find('b').text
            except AttributeError:
                company = 'No information'
            try:
                salary = detail_soup.find(class_='').find('b').text
            except AttributeError:
                salary = 'No information'
            try:
                tags = detail_soup.find(id='job-description').find_all(
                    ['p', 'b', 'li'])
                # find_all returns a list of tags; flatten to plain text so
                # downstream string operations work.
                description = ' '.join(
                    tag.get_text(strip=True) for tag in tags)
            except AttributeError:
                description = 'No information'

            result.append([
                f'Ссылка: {href},\n'
                f'Вакансия: {title},\n'
                f'Компания: {company},\n'
                f'Зарплата: {salary},\n'
                f'Описание: {description}\n'
            ])
            save_db(href, title, salary, company, description)
            json_db = {
                'Ссылка': href,
                'Вакансия': title,
                'Компания': company,
                'Зарплата': salary,
                'Описание': description.replace('\n', ''),
            }
            json_save(json_db)
        save_info(result)
def json_save_data(cdata, filename, folder=None):
    """Save ``cdata`` as JSON under the preprocessed-data directory.

    Args:
        cdata: JSON-serializable payload to write.
        filename: Name of the output file.
        folder: Optional sub-folder beneath ``config.preprocessedfolder``.

    Fixes over the previous revision: ``os.path.exits`` and ``os.mkdirs``
    do not exist (AttributeError) — replaced with ``os.makedirs(...,
    exist_ok=True)``, which covers both the existence check and creation.
    The output path also ignored the ``filename`` parameter entirely,
    writing to a hard-coded placeholder instead.
    """
    base = config.preprocessedfolder
    target = f'{base}/{folder}' if folder is not None else f'{base}'
    os.makedirs(target, exist_ok=True)
    utils.json_save(cdata, f'{target}/{filename}')