def textcnn(args):
    """Assemble a multi-branch TextCNN Keras model from string-valued args.

    Parallel Conv1D branches (one per filter size) run over a shared
    embedding, are max-pooled over time, concatenated, and fed through
    dropout into a 2-way softmax classifier.
    """
    print('Build model')
    vocab_size = int(args.vocab_size)
    embedding_size = int(args.embedding_size)
    dropout_prob = float(args.dropout_prob)
    l2_reg_scala = float(args.l2_reg_scala)  # parsed but not used below
    length = int(args.length)
    filter_sizes = str_to_list(args.filter_sizes)
    filter_nums = str_to_list(args.filter_nums)

    tokens = Input(shape=(length, ), dtype='int32')
    # +1 reserves index 0 (commonly padding) beyond the vocabulary.
    embedded = Embedding(vocab_size + 1, embedding_size)(tokens)

    branches = []
    for idx, size in enumerate(filter_sizes):
        branch = Convolution1D(filter_nums[idx], size, padding='valid', strides=1, activation='relu')(embedded)
        # Pool over the entire conv output so each branch yields one vector per filter.
        branch = MaxPool1D(pool_size=length - size + 1)(branch)
        branches.append(branch)

    merged = concatenate(branches, axis=-1)
    features = Flatten()(merged)
    regularized = Dropout(dropout_prob)(features)
    probabilities = Dense(2, activation='softmax')(regularized)

    model = Model(inputs=tokens, outputs=probabilities)
    model.summary()
    return model
def get_page_info_list(comic_folder: str, comic_info: RawConfigParser, delete_scheduled_posts: bool, publish_all_comics: bool) -> Tuple[List[Dict], int]:
    """Collect per-page metadata for every folder under your_content/<comic_folder>comics/.

    Pages whose post date is still in the future are counted as scheduled (and
    optionally deleted from disk) instead of being published, unless
    publish_all_comics forces them through.

    Returns:
        (page_info_list, scheduled_post_count): published pages sorted by post
        date then page name, and how many pages were held back as scheduled.
    """
    date_format = comic_info.get("Comic Settings", "Date format")
    tz_info = timezone(comic_info.get("Comic Settings", "Timezone"))
    local_time = datetime.now(tz=tz_info)
    print(f"Local time is {local_time}")
    page_info_list = []
    scheduled_post_count = 0
    auto_detect_comic_images = get_option(
        comic_info, "Comic Settings", "Auto-detect comic images", option_type=bool, default=False
    )
    theme = get_option(comic_info, "Comic Settings", "Theme", default="default")
    for page_path in glob(f"your_content/{comic_folder}comics/*/"):
        filepath = f"{page_path}info.ini"
        if not os.path.exists(f"{page_path}info.ini"):
            print(f"{page_path} is missing its info.ini file. Skipping")
            continue
        page_info = read_info(filepath, to_dict=True)
        # Post dates in info.ini are naive; interpret them in the configured timezone.
        post_date = tz_info.localize(datetime.strptime(page_info["Post date"], date_format))
        if post_date > local_time and not publish_all_comics:
            scheduled_post_count += 1
            # Post date is in the future, so delete the folder with the resources
            if delete_scheduled_posts:
                print(f"Deleting {page_path}")
                shutil.rmtree(page_path)
        else:
            if not page_info.get("Filename", ""):
                if not auto_detect_comic_images:
                    raise FileNotFoundError(f"Comic image filename must be provided in {page_path}info.ini")
                # Auto-detect mode: exactly one image file (besides thumbnail.jpg) may exist.
                image_files = []
                for filename in os.listdir(page_path):
                    if filename == "thumbnail.jpg":
                        continue
                    if re.search(r"\.(jpg|jpeg|png|tif|tiff|gif|bmp|webp|webv|svg|eps)$", filename):
                        image_files.append(filename)
                if len(image_files) != 1:
                    raise FileNotFoundError(
                        f"Found {len(image_files)} images when attempting to auto-detect image files in {page_path}. "
                        f"({image_files}) When using the 'Auto-detect comic images' option, you must not have any "
                        f"image file in your comic folder other than your comic page and your archive thumbnail "
                        f"(thumbnail.jpg)."
                    )
                page_info["Filename"] = image_files[0]
            page_info["page_name"] = os.path.basename(os.path.normpath(page_path))
            page_info["Storyline"] = page_info.get("Storyline", "")
            page_info["Characters"] = utils.str_to_list(page_info.get("Characters", ""))
            page_info["Tags"] = utils.str_to_list(page_info.get("Tags", ""))
            # Themes may post-process the page info dict; a truthy result replaces it wholesale.
            hook_result = run_hook(theme, "extra_page_info_processing", [comic_folder, comic_info, page_path, page_info])
            if hook_result:
                page_info = hook_result
            print(page_info)
            page_info_list.append(page_info)
    # NOTE(review): bare strptime here (vs datetime.strptime above) — presumably time.strptime; confirm the import.
    page_info_list = sorted(
        page_info_list, key=lambda x: (strptime(x["Post date"], date_format), x["page_name"])
    )
    return page_info_list, scheduled_post_count
def compute(self, msg):
    """Decode a ' ; '-delimited transition string and push it to the buffer.

    Fields, in order: state, action, reward, next state, done flag.
    """
    parts = msg.split(' ; ')
    state = str_to_list(parts[0])
    action = str_to_list(parts[1])
    reward = float(parts[2])
    next_state = str_to_list(parts[3])
    done = int(parts[4])
    self.push(state, action, reward, next_state, done)
def train():
    """Train the keypoint model on TRAIN_CSV, validating on TEST_CSV.

    Uses checkpointing on best val_loss, LR reduction on plateau, and early
    stopping; trains for up to 50 epochs via generators.
    """
    # Reading train and test csv file
    train_df = pd.read_csv(os.path.join(PATH, TRAIN_CSV))
    test_df = pd.read_csv(os.path.join(PATH, TEST_CSV))
    train_df, test_df = str_to_list(train_df), str_to_list(test_df)

    # Merge the x/y point columns into a single 'pts' column, then correct it.
    for df in (train_df, test_df):
        df['pts'] = df.apply(lambda row: combine_list(row.pts_x, row.pts_y), axis=1)
        df.pts = df.pts.apply(correction)
    print(f"train shape : {train_df.shape} and test shape : {test_df.shape}")

    train_gen = DataGenerator(train_df, BATCH_SIZE, path=os.path.join(PATH, TRAIN_FOLDER), is_valid=False)
    valid_gen = DataGenerator(test_df, BATCH_SIZE*2, path=os.path.join(PATH, TEST_FOLDER), is_valid=True)

    # Initialize Model
    print("Loading Model ...")
    model = KeypointModel()
    print(model.summary(110))
    model.compile(optimizer=optimizers.Adam(lr=0.001), loss='mae', metrics=['mse'])

    training_callbacks = [
        ModelCheckpoint(f"./weights/{WEIGHT_FILENAME}", monitor='val_loss', verbose=1,
                        save_best_only=True, mode='min'),
        ReduceLROnPlateau(monitor='val_loss', factor=0.5, patience=3, verbose=1,
                          mode='min', min_delta=0.0001, min_lr=1e-5),
        EarlyStopping(monitor='val_loss', patience=5, verbose=1, restore_best_weights=False),
    ]
    model.fit_generator(
        generator=train_gen,
        steps_per_epoch=len(train_gen),
        epochs=50,
        verbose=1,
        callbacks=training_callbacks,
        validation_data=valid_gen,
        validation_steps=len(valid_gen))
def textcnn(args):
    """Normalize the string-valued TextCNN hyper-parameters in args["config"].

    Converts the filter lists via str_to_list, the float-valued settings via
    float(), and the class count via int(). Mutates the dict in place and
    returns it.
    """
    cfg = args["config"]
    cfg["filters_size"] = str_to_list(cfg["filters_size"])
    cfg["filters_num"] = str_to_list(cfg["filters_num"])
    for key in ("l2_reg_lambda", "dropout_prob", "init_learning_rate", "learning_rate_decay"):
        cfg[key] = float(cfg[key])
    cfg["class_nums"] = int(cfg["class_nums"])
    return cfg
def parse_file_points(filename):
    """Parse a file of 'x-vector;y-vector' lines into [sorted_x, y] pairs.

    Each line holds two ','-separated vectors split by ';'. The x vector is
    sorted; lines whose vectors differ in length are skipped.

    Args:
        filename: path to the points file.

    Returns:
        list of [x, y] pairs, one per well-formed line.
    """
    list_points_parse = []
    # Fix: use a context manager so the handle is closed even if parsing
    # raises mid-file (the original f.close() was skipped on exceptions).
    with open(filename, 'r') as f:
        for p in f.read().splitlines():
            vectors = p.split(';')
            x_str = utils.remove_char(vectors[0]).split(',')
            x = utils.str_to_list(x_str)
            y_str = utils.remove_char(vectors[1]).split(',')
            y = utils.str_to_list(y_str)
            x = utils.quicksort(x)
            if len(x) != len(y):
                continue
            list_points_parse.append([x, y])
    return list_points_parse
def configure(self):
    """Load SMTP, limits and database settings from config_path onto self.

    Returns:
        False when the configuration cannot be read or the database engine
        cannot be created; falls through (returns None) on success, matching
        the original behavior.
    """
    config = ConfigParser.ConfigParser()
    try:
        config.read(config_path)
    except IOError:
        log.error('Error reading configuration from %s' % config_path)
        return False
    log.info("Reading configuration from %s" % config_path)
    self.smtp_server = self.get_config(config, 'smtp', 'server', self.smtp_server)
    self.recipients = self.get_config(config, 'smtp', 'recipients', self.recipients)
    # Config values may come back as a single delimited string; normalize to a list.
    # Fix: isinstance() instead of type(...) == str (handles subclasses, idiomatic).
    if isinstance(self.recipients, str):
        self.recipients = str_to_list(self.recipients)
    # Limits configuration
    self.max_time = int(self.get_config(config, 'limits', 'max_time'))
    self.max_email = int(self.get_config(config, 'limits', 'max_email'))
    self.domain = self.get_config(config, 'limits', 'domain')
    if self.domain is None:
        # Fall back to the host's own domain; bare hostnames get 'localdomain'.
        try:
            self.domain = self.fqdn.split('.', 1)[1]
        except IndexError:
            self.domain = 'localdomain'
    log.info("Domain: %s " % self.domain)
    self.whitelist = self.get_config(config, 'limits', 'whitelist', self.whitelist)
    if isinstance(self.whitelist, str):
        self.whitelist = str_to_list(self.whitelist)
    log.info("Whitelist: %s " % self.whitelist)
    connection_string = self.get_config(config, 'database', 'connection_string')
    try:
        # NOTE(review): 'connection' is never stored on self or returned —
        # confirm whether this should be self.connection.
        connection = create_engine(connection_string, pool_size=25,
                                   max_overflow=25, pool_recycle=30)
    except Exception as e:  # Fix: 'except Exception, e' is Py2-only syntax; 'as' works on Py2.6+ and Py3.
        log.error(
            'Database access error, connection string used: %s. Error: %s' %
            (connection_string, e))
        return False
def show_line(self):
    """Render the data row at self.index into the editor widgets."""
    original, _, pynori_splited = self.data.iloc[self.index]
    tokens = str_to_list(pynori_splited)
    self.original_textedit.setText(original)
    self.splited_textedit.setText(" ".join(tokens))
    self.index_lineedit.setText(str(self.index))
    self.userdict_textedit.setText("\n".join(self.userdict))
def _make_events(self) -> tuple:
    """Pair the pause/unpause callbacks with their event tuples, with any
    events named in the 'ignore_events' setting filtered out."""
    ignored = set(str_to_list(self._cfg['ignore_events']))
    pause_events = tuple(e for e in EVENTS[0] if e not in ignored)
    unpause_events = tuple(e for e in EVENTS[1] if e not in ignored)
    return (pause_events, self._cb_pause), (unpause_events, self._cb_unpause)
def __start(self):
    """Build the plugin black/white lists, run the init/start lifecycle, then drop working state."""
    self._lists = {
        'to_black': set(),
        'black': set(utils.str_to_list(self.cfg.gt('plugins', 'blacklist'))),
        'white': set(utils.str_to_list(self.cfg.gt('plugins', 'whitelist'))),
        'on_failure': self.cfg.gt('plugins', 'blacklist_on_failure'),
    }
    # Plugins to be skipped during this start-up pass, per list.
    self._ignore = {
        'black': set(),
        'white': set(),
    }
    # Order matters: init everything, start it, then persist blacklist changes.
    self._init_all()
    self._start_all()
    self._update_blacklist()
    self._init.clear()
    # Release start-up-only working state once start-up completes.
    self._target, self._lists, self._ignore = None, None, None
def yolo_extractor(input_name):
    """Crop every box listed in yolo.csv out of input_name and save each crop."""
    # Start from a clean output directory.
    if 'yolo-extracted' in os.listdir():
        shutil.rmtree('yolo-extracted')
    os.mkdir('yolo-extracted')
    detections = pd.read_csv('yolo.csv')
    frame = cv2.imread(input_name)
    base_name = input_name.split('.')[0]
    for index, corners in enumerate(detections['corners']):
        # Clamp corner coordinates at zero before slicing.
        x1, y1, x2, y2 = (max(0, v) for v in str_to_list(corners))
        crop = frame[y1:y2, x1:x2]
        out_name = base_name + '.' + str(index) + '.jpg'
        # NOTE(review): 'root' is not defined in this function — presumably a
        # module-level path; confirm it matches the cwd used by os.mkdir above.
        cv2.imwrite(os.path.join(root, 'yolo-extracted', out_name), crop)
def image_extractor(input_name):
    """Crop every bbox listed in annotations.csv out of input_name and save each crop."""
    # Start from a clean output directory.
    if 'extracted' in os.listdir():
        shutil.rmtree('extracted')
    os.mkdir('extracted')
    annotations = pd.read_csv('annotations.csv')
    frame = cv2.imread(input_name)
    base_name = input_name.split('.')[0]
    for index, bbox in enumerate(annotations['bbox']):
        # Clamp coordinates at zero before slicing.
        x, y, w, h = (max(0, v) for v in str_to_list(bbox))
        # NOTE(review): slicing y:h, x:w treats w/h as absolute corners, not
        # widths — confirm the annotation format.
        crop = frame[y:h, x:w]
        out_name = base_name + '.' + str(index) + '.jpg'
        # NOTE(review): 'root' is not defined in this function — presumably a
        # module-level path; confirm it matches the cwd used by os.mkdir above.
        cv2.imwrite(os.path.join(root, 'extracted', out_name), crop)
def append_from_str(self, input_str):
    """Parse 'name address flag' from input_str and record it in the DNS table.

    Entries that do not have exactly three fields are recorded under the
    'ERROR' key. An NS record assigns the TS hostname (first one wins); an A
    record adds a name -> (address, flag) mapping. Both outcomes are logged.
    """
    result = str_to_list(input_str, ' ')
    if len(result) != 3:
        self.__table['ERROR'] = self.addrflag('MALFORMED', 'ENTRY')
    else:
        # '__NONE__' marks "ts_hostname not yet assigned"; only the first NS record sets it.
        if result[2] == DNS_table.flag.NS.value and self.__ts_hostname == '__NONE__':
            self.__ts_hostname = result[0]
            msg = 'ts_hostname assigned as \'{}{}{}\'.'.format(K.color.bold.WHT, result[0], K.NRM)
            log(logstat.LOG, funcname(), msg)
        elif result[2] == DNS_table.flag.A.value:
            self.__table[result[0]] = self.addrflag(result[1], result[2])
            msg = '{}\'{} : ({}, {}){}\' added to table from string {}\'{}\'{}.'.format(K.color.bold.WHT, result[0], result[1], result[2], K.NRM, K.color.bold.WHT, input_str, K.NRM)
            log(logstat.LOG, funcname(), msg)
def append_from_str(self, input_str):
    """Parse 'name address flag' from input_str and record it in the DNS table.

    Print-logging variant: entries without exactly three fields are recorded
    under the 'ERROR' key; an NS record assigns the TS hostname (first one
    wins); an A record adds a name -> (address, flag) mapping.
    """
    result = str_to_list(input_str, ' ')
    if len(result) != 3:
        self.__table['ERROR'] = self.addrflag('MALFORMED', 'ENTRY')
    else:
        # '__NONE__' marks "ts_hostname not yet assigned"; only the first NS record sets it.
        if result[2] == DNS_table.flag.NS.value and self.__ts_hostname == '__NONE__':
            self.__ts_hostname = result[0]
            print('[dns_module]: ts_hostname assigned as \'{}\'.'.format(
                result[0]))
        elif result[2] == DNS_table.flag.A.value:
            self.__table[result[0]] = self.addrflag(result[1], result[2])
            print(
                '[dns_module]: \'{} : ({}, {})\' added to table from string \'{}\'.'
                .format(result[0], result[1], result[2], input_str))
def get(self, request, **kwargs):
    """Render the slab list for the product matching block_num, optionally
    filtered to the slab_ids passed in the query string."""
    cart = Cart(request)
    # Renamed from 'object' to avoid shadowing the builtin.
    product = Product.objects.all()
    block_num = self.kwargs.get('block_num', None)
    slab_ids = self.request.GET.get('slab_ids')
    if block_num:
        product = product.filter(block_num=block_num).first()
    if slab_ids:
        slab_list = product.get_slab_list(slab_ids=str_to_list(slab_ids), object_format=True)
    else:
        slab_list = product.get_slab_list(object_format=True)
    context = {
        'slab_list': slab_list,
        'object': product,
        'slab_ids': cart.cart.get('slab_ids'),
    }
    return render(request, self.template_name, context)
def thread_query(self):
    """Accept one TCP client on localhost:12801 and answer newline-delimited
    state queries with the corresponding action list."""
    server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    server.bind(('localhost', 12801))
    server.listen(5)
    conn, _addr = server.accept()
    with conn:
        buffer = ''
        while True:
            chunk = conn.recv(8192)
            if not chunk:
                break
            buffer += chunk.decode()
            if '\n' not in buffer:
                continue
            # One complete query per newline; keep any trailing partial data.
            state, buffer = buffer.split('\n', 1)
            reply = str(list(self.select_action(str_to_list(state))))
            conn.sendall(reply.encode())
    server.close()
def query_servers(rs_hostname, rs_portno, hostname_list, ts_portno):
    """Resolve each hostname in hostname_list via the RS server over UDP,
    following NS redirects to the TS server.

    For each query: an 'A' reply from RS is collected directly; an 'NS'
    reply names a TS server which is then queried on ts_portno and whose
    reply is collected. Replies with other shapes are reported as malformed.

    Returns:
        list of raw reply strings collected from the servers.
    """
    client_ipaddr = ''
    client_hostname = ''
    cl_sock_rs = 0
    cl_sock_ts = 0
    rs_ipaddr = ''
    ts_hostname = ''
    ts_ipaddr = ''
    ts_binding = ('', '')
    resolved_list = []
    queried_hostname = ''
    msg_in = ''
    msg_out = ''
    data_in = ''
    data_out = ''
    delimiter = ' '
    reply_elems = []
    ts_connected = False
    try:
        cl_sock_rs = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    except EnvironmentError:
        print('[client]: ERROR - client socket open error.\n')
        exit()
    cl_sock_rs.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    print('[client]: Opened new datagram socket.\n')
    rs_binding = (rs_hostname, rs_portno)
    try:
        # gethostbyname is a reachability pre-check before connect().
        socket.gethostbyname(rs_hostname)
        cl_sock_rs.connect(rs_binding)
    except EnvironmentError:
        print(
            '[client]: ERROR - Unable to connect to RS server \'{}\'\n'.format(
                rs_hostname))
        exit()
    client_hostname = socket.gethostname()
    client_ipaddr = socket.gethostbyname(client_hostname)
    print('[client]: Client hostname is \'{}\'.'.format(client_hostname))
    print('[client]: Client IP address is \'{}\'.\n'.format(client_ipaddr))
    for elem in hostname_list:
        ts_not_found = False  # NOTE(review): assigned but never read in this function.
        queried_hostname = elem
        print('{}\n[client]: Querying hostname \'{}\'...\n{}\n'.format(
            CHAIN_LINK, queried_hostname, CHAIN_LINK))
        rs_ipaddr = socket.gethostbyname(rs_hostname)
        # --- Ask the RS server first. ---
        msg_out = queried_hostname
        data_out = msg_out.encode('utf-8')
        cl_sock_rs.send(data_out)
        print(
            '[client]: outgoing to RS server \'{}\' at \'{}\': \'{}\''.format(
                rs_hostname, rs_ipaddr, queried_hostname))
        try:
            data_in = cl_sock_rs.recv(128)
        except EnvironmentError:
            # RS unreachable: give up entirely, returning what we have so far.
            print(
                '[client]: ERROR - RS server by hostname \'{}\' not available.'
                .format(rs_binding[0]))
            return resolved_list
        msg_in = data_in.decode('utf-8')
        print('[client]: incoming from RS server \'{}\' at \'{}\': \'{}\''.format(rs_hostname, rs_ipaddr, msg_in))
        reply_elems = str_to_list(msg_in, delimiter)
        if len(reply_elems) == 3:
            if reply_elems[2] == DNS_table.flag.A.value:
                # Direct answer from RS.
                resolved_list.append(msg_in)
            elif reply_elems[2] == DNS_table.flag.NS.value:
                # RS redirected us: the first field names the TS server to ask.
                ts_hostname = reply_elems[0]
                print(
                    '[client]: Redirecting query \'{}\' to TS server by hostname \'{}\'.'
                    .format(queried_hostname, ts_hostname))
                try:
                    cl_sock_ts = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
                except EnvironmentError:
                    print('[client]: ERROR - client socket open error.\n')
                    continue
                print('[client]: Opened new datagram socket.')
                ts_binding = (ts_hostname, ts_portno)
                try:
                    # NOTE(review): pre-check resolves rs_hostname, not
                    # ts_hostname — looks like a copy/paste slip; confirm.
                    socket.gethostbyname(rs_hostname)
                    cl_sock_ts.connect(ts_binding)
                except EnvironmentError:
                    print(
                        '[client]: ERROR - Unable to connect to TS server \'{}\'.\n'
                        .format(ts_hostname))
                    continue
                ts_connected = True
                ts_ipaddr = socket.gethostbyname(ts_hostname)
                msg_out = queried_hostname
                data_out = msg_out.encode('utf-8')
                cl_sock_ts.send(data_out)
                print(
                    '[client]: outgoing to TS server \'{}\' at \'{}\': \'{}\''.format(ts_hostname, ts_ipaddr, queried_hostname))
                try:
                    data_in = cl_sock_ts.recv(128)
                except EnvironmentError:
                    # NOTE(review): error message reports rs_binding[0] though
                    # this is the TS server — confirm intent.
                    print(
                        '[client]: ERROR - TS server by hostname \'{}\' not available.'
                        .format(rs_binding[0]))
                    ts_connected = False
                if ts_connected:
                    msg_in = data_in.decode('utf-8')
                    print(
                        '[client]: incoming from TS server \'{}\' at \'{}\': \'{}\''
                        .format(ts_hostname, ts_ipaddr, msg_in))
                    reply_elems = str_to_list(msg_in, delimiter)
                    # Well-formed replies: 3 fields with a real address, or a
                    # 5-field 'Error:HOST' message. Anything else is noted but
                    # still collected.
                    if len(reply_elems) == 3 and reply_elems[1] != '-':
                        pass
                    elif len(reply_elems) == 5 and reply_elems[2] == 'Error:HOST':
                        pass
                    else:
                        print(
                            '[client]: NOTE - message from \'{}\' (at \'{}\'), \'{}\' is malformed. Appending to resolved_list anyway.'
                            .format(ts_hostname, ts_ipaddr, msg_in))
                    resolved_list.append(msg_in)
                    cl_sock_ts.close()
        else:
            print(
                '[client]: message from \'{}\' (at \'{}\'), \'{}\' is malformed.'
                .format(rs_hostname, rs_ipaddr, msg_in))
        print('')
    return resolved_list
def get_allow_models(self) -> list:
    """Return the allowed models from the 'models'/'allow' setting as a list."""
    allowed = self.gt('models', 'allow')
    return utils.str_to_list(allowed)
def get_extra_comics_list(comic_info: RawConfigParser) -> List[str]:
    """Return the 'Extra comics' list from [Comic Settings], or [] when the
    option is not present."""
    if not comic_info.has_option("Comic Settings", "Extra comics"):
        return []
    raw_value = comic_info.get("Comic Settings", "Extra comics")
    return utils.str_to_list(raw_value)
def get_requirements(theme: str) -> Set[str]:
    """Read the theme's scripts/requirements.txt into a set of requirement
    strings; returns an empty set when the file does not exist."""
    requirements_path = f"your_content/themes/{theme}/scripts/requirements.txt"
    if not os.path.exists(requirements_path):
        return set()
    with open(requirements_path) as f:
        # Strip carriage returns so Windows line endings don't leak into entries.
        contents = f.read().replace("\r", "")
    return set(str_to_list(contents, delimiter="\n"))
self.p = p def encrypt(self, m): res = [] for i in range(len(m)): j = i % len(self.k) res.append((m[i] + self.k[j]) % self.p) return res def decrypt(self, c): res = [] for i in range(len(c)): j = i % len(self.k) res.append((c[i] - self.k[j]) % self.p) return res if __name__ == "__main__": enc = str_to_list("wegvrjepqnwezbdzqetgzbigvpta") affine = Affine(exeu(5, 26), 13) res = affine.decrypt(enc) res = list_to_str(res) print(res) key = intstr_to_list("20170317") vigenere = Vigenere(key, 10) m = intstr_to_list("314159265358979323846264") res = vigenere.encrypt(m) res = list_to_intstr(res) print(res)
def cart_remove(request):
    """Remove the POSTed 'item' (parsed into a list) from the session cart,
    then redirect back to the cart index page."""
    cart = Cart(request)
    raw_item = request.POST.get('item')
    cart.remove(str_to_list(raw_item))
    return redirect('cart:index')