def train(self, inputExamples, expected, iterations=BARSIZE):
    """Run backpropagation for `iterations` epochs while rendering a progress bar.

    :param inputExamples: training inputs forwarded to the backprop pass
    :param expected: expected outputs used by the cost function
    :param iterations: number of training epochs (defaults to BARSIZE)

    Stores the final cost (rounded, suffixed "TE") in self.trainingStatus.
    Exits the process when the network was never initialized.
    """
    # Guard clause: refuse to train an empty network.
    if not self.initialized:
        print("<Error>: Empty Neural Network, use reset() or loadFromFile(file)")
        exit(1)

    statusBar = ChargingBar("\x1b[4;36m" + "\t>> Training:", max=BARSIZE)
    # Advance the bar once every `interval` epochs; every epoch for short runs.
    interval = int(iterations / BARSIZE) if iterations > 100 else 1
    mean_error = 0
    ticks = 0
    statusBar.start()
    for _epoch in range(1, iterations + 1):
        prediction = self.__backPropagation(inputExamples, expected)
        mean_error = self.cost_fun(prediction, expected)
        if ticks % interval == 0:
            statusBar.next()
            ticks = 0
        ticks += 1
    # Pad the bar toward its maximum, mirroring the original tail loop
    # (which counted from iterations + 1 up to BARSIZE).
    for _ in range(BARSIZE - iterations - 1):
        statusBar.next()
    statusBar.finish()
    self.trainingStatus = str(round(mean_error, 4)) + "TE"
    print("\x1b[1;33m" + "\t>> Error (Media) Cost: ", round(mean_error, 4))
    print("\x1b[0;37m" + "=-" * 35 + "=")
def upload_to_ya(self, upload_list):
    """Upload the photos in upload_list[1] to a user-chosen Yandex.Disk folder.

    :param upload_list: sequence whose element [1] is a list of photo dicts
                        with 'file_name' and 'url' keys — assumed from usage,
                        TODO confirm against the caller.

    Duplicate file names are disambiguated as "<stem>_<n>.jpg".
    """
    ya_obj = YaUpLoader(self.ya_token)
    ya_load_to = input('\nВведите путь до папки на ya_disk: ')
    print(ya_obj.check_folder(ya_load_to))
    # fix: was an f-string with no placeholders
    print('\nЗагружаем файлы на YaDisk')
    photos = upload_list[1]
    bar = ChargingBar('Countdown', max=len(photos))
    # bug fix: start() was called inside the loop, restarting the bar per photo
    bar.start()
    seen = {}  # file_name -> number of times this name has been used
    for photo in photos:
        file_name = photo['file_name']
        if file_name in seen:
            value = seen[file_name] + 1
            seen[file_name] = value
            # keep the original naming scheme: "<stem>_<n>.jpg"
            file_name = file_name.split('.')[0] + '_' + str(value) + '.jpg'
        else:
            seen[file_name] = 1
        ya_file_to = ya_load_to + '/' + file_name
        # NOTE(review): no HTTP status check — a failed GET would upload the
        # error body as the photo; consider raise_for_status() upstream.
        res = requests.get(photo['url']).content
        ya_obj.upload(ya_file_to, res)
        bar.next()
    bar.finish()
def discover_net(self, ip_range=24):
    """Ping-sweep the local network and log every host that answers ICMP.

    :param ip_range: CIDR prefix length for the scanned network (default 24);
                     the base is always the first three octets of self.my_ip.
    :return: True when the ICMP scan ran, False for an unsupported protocol.
    """
    protocol = self.protocol
    base_ip = self.my_ip
    if not protocol:
        protocol = "ICMP"
    elif protocol != "ICMP":
        logging.warning(
            f"Warning: {protocol} is not supported by discover_net function! Changed to ICMP"
        )
        # bug fix: the message above promised a fallback to ICMP, but the
        # original never reassigned `protocol`, so it fell into the
        # "invalid protocol" branch below instead of scanning.
        protocol = "ICMP"
    if protocol == "ICMP":
        logging.info("Starting - Discover Hosts Scan")
        octets = base_ip.split('.')
        network = f"{str(octets[0])}.{str(octets[1])}.{str(octets[2])}.0/{str(ip_range)}"
        hosts = list(ipaddress.ip_network(network))
        bar = ChargingBar("Scanning...", max=len(hosts))
        # Silence worker-thread prints while the bar renders.
        sys.stdout = None
        try:
            bar.start()
            results = [None] * len(hosts)
            threads = []
            for i, host in enumerate(hosts):
                t = Thread(target=self.send_icmp, args=(host, results, i))
                t.start()
                threads.append(t)
            for t in threads:
                t.join()
                bar.next()
            bar.finish()
        finally:
            # bug fix: restore stdout even if a thread or the bar raises
            sys.stdout = sys.__stdout__
        hosts_found = [h for h in results if h is not None]
        if not hosts_found:
            # bug fix: logging.warn is a deprecated alias of logging.warning
            logging.warning('[[red]-[/red]]Not found any host')
        else:
            print("")
            logging.info(f'{len(hosts_found)} hosts founded')
            for host in hosts_found:
                logging.info(f'Host found: {host}')
        return True
    # Defensive: unreachable now that unsupported protocols fall back to ICMP.
    logging.critical("[[red]-[/red]]Invalid protocol for this scan")
    return False
def process_default(out_file, algorithm):
    """Forecast every (store, barcode, period) combination from CONFIG and
    write one space-delimited row per combination to out_file.

    :param out_file: path of the CSV file to (over)write
    :param algorithm: forecast algorithm name forwarded to do_forecast()
    """
    global CONFIG
    engine = create_engine(CONFIG['mysql'])
    print(f'Processing with {algorithm} algorithm...')
    stores = CONFIG['stores']
    barcodes = CONFIG['barcodes']
    periods = CONFIG['periods']
    # One progress-bar step per (store, barcode, period) combination.
    iter_cnt = len(stores) * len(barcodes) * len(periods)
    bar = ChargingBar('Waiting...', max=iter_cnt)
    bar.start()
    # bug fix: context manager closes the file even if forecasting raises
    with open(out_file, 'w', newline='') as csv_file:
        csv_writer = csv.writer(csv_file, delimiter=' ', quotechar='|',
                                quoting=csv.QUOTE_MINIMAL)
        for store_id in stores:
            for barcode in barcodes:
                bar.message = f'[Store: {store_id}] {str(barcode).ljust(13, " ")}'
                bar.update()
                # Fetch daily sales once per (store, barcode), reused per period.
                df_barcode = get_barcode_daily_sales(engine, store_id, barcode)
                for period in periods:
                    forecast_from_date = arrow.get(period['date'], 'DD.MM.YYYY')
                    forecast_before_date = forecast_from_date.shift(
                        days=period['days'])
                    forecast = do_forecast(algorithm, df_barcode,
                                           forecast_from_date,
                                           forecast_before_date)
                    csv_writer.writerow([
                        store_id, barcode, period['date'], period['days'],
                        forecast
                    ])
                    bar.next()
        bar.finish()
    # bug fix: message referenced the global `args.output` instead of the
    # out_file parameter actually written to.
    print(f'\nDone. Result was written to {out_file}')
# Cargo datos data = pickle.load(open(this_file, 'rb')) progress_bar = ChargingBar("Evaluating file {} - {}/{}".format( this_filename, this_file_idx + 1, len(test_filepaths)), suffix="%(percent)d%%") if realistic_check: # Separo datos en pedazos sign = [ data['data'][i:i + slice_samples] for i in range(0, len(data['data']), slice_samples) ] progress_bar.max = len(sign) progress_bar.start() for this_slice_idx, this_slice in enumerate(sign): """ if this_slice_idx % (round(len(sign) / 20)) == 0: print("{}/{} slices evaluated.".format(this_slice_idx, len(sign))) """ progress_bar.next() # Si no pongo este if, el ultimo pedazo podria ser de distinto tamaño y fallar if len(this_slice) == slice_samples: # Analisis this_data = torch.Tensor(this_slice) this_data = this_data.view(1, 1, slice_samples) # Reshape
def process_short(out_file, in_file, algorithm):
    """Produce the "short" forecast output: for every (store, barcode) row of
    in_file, forecast a 5-day window for each of the 123 days from 2019-10-01
    to 2020-01-31 and write one rounded value per line to out_file.

    :param out_file: CSV to write (one forecast value per row)
    :param in_file: CSV with rows "store_id,barcode,..."
    :param algorithm: algorithm name forwarded to do_forecast()
    """
    global CONFIG
    beg_date = arrow.get('2019-10-01', 'YYYY-MM-DD')
    end_date = arrow.get('2020-01-31', 'YYYY-MM-DD')
    write_stdout(f'Forecast from {beg_date} to {end_date}\n')  # 123 days
    lines_count = 0
    # Count input rows (via wc/sed when available) to size the progress bar.
    if wc and sed:
        write_stdout(f'Counting {in_file} non-blank lines... ')
        lines_count = int(wc(sed(r'/^\s*$/d', in_file), '-l'))
        print(lines_count)
    ops_count = lines_count * 123
    engine = create_engine(CONFIG['mysql'])
    print(f'Processing short output with {algorithm} algorithm...')
    bar = None
    if ops_count > 0:
        bar = ChargingBar('Waiting...', max=ops_count)
        bar.start()
    # bug fix: context managers close both files even if forecasting raises
    with open(out_file, 'w', newline='') as out_csv_file, \
         open(in_file, 'r', newline='\n') as in_csv_file:
        csv_writer = csv.writer(out_csv_file, delimiter=' ', quotechar='|',
                                quoting=csv.QUOTE_MINIMAL)
        csv_reader = csv.reader(in_csv_file, delimiter=',')
        dfs = {}  # per (store, barcode) sales cache so repeats skip the DB
        i = 0
        for row in csv_reader:
            if row is not None and len(row):
                store_id = int(row[0])
                barcode = int(row[1])
                key = f'{store_id}-{barcode}'
                try:
                    df_barcode = dfs[key]
                except KeyError:
                    df_barcode = get_barcode_daily_sales(engine, store_id,
                                                         barcode)
                    dfs[key] = df_barcode
                for j, d in enumerate(
                        arrow.Arrow.range('day', beg_date, end_date)):
                    forecast_from_date = d
                    forecast_before_date = forecast_from_date.shift(days=5)
                    forecast = do_forecast(algorithm, df_barcode,
                                           forecast_from_date,
                                           forecast_before_date)
                    csv_writer.writerow([int(round(forecast))])
                    if bar:
                        curr_op = i * 123 + j
                        # Refresh the label only every 5 ops to limit redraws.
                        if curr_op % 5 == 0:
                            bar.message = f'{curr_op} of {ops_count}'
                            bar.update()
                        bar.next()
                i += 1
    # bug fix: the original set bar.message unconditionally, raising
    # AttributeError whenever ops_count == 0 left bar as None.
    if bar:
        bar.message = 'Done'
        bar.update()
# NOTE(review): Python 2 source — uses the `print` statement and opens CSV
# files in 'rb' mode; indentation reconstructed from a collapsed original.
def __init__(self, project_name, image_list=None, user_dir="/users.csv", user_list=None, ml_out_dir=None):
    """
    Inits the object: loads image data, user metadata and (when present)
    the module timeline for the given project into memory.

    :param project_name: project name (EG "gold")
    :param image_list: Optional, list of image Ids to load to memory (If None, uses all images)
    :param user_dir: Optional (preset), Directory of user metadata. This file contains data for each
                     user including name, grades, and etc...
    :param user_list: Optional, restrict loading to users whose 'CYTOMINE ID' is in this list
    :param ml_out_dir: Optional, output directory for machine-learning results
    """
    # init vars
    self.project_name = project_name
    images = os.listdir(config.WORKING_DIRECTORY + project_name + "/images/")
    self.image_list = []    # Image_data objects
    self.user_list = []     # User_data objects
    self.module_list = []   # Module_data objects (from timeline.csv, if any)
    self.nb_images = 0
    self.nb_users = 0
    self.ml_out_dir = ml_out_dir
    # load ImageData objects from files
    if image_list is None:
        bar = ChargingBar('Loading Image Data', max=len(images), stream=sys.stdout)
    else:
        bar = ChargingBar('Loading Image Data', max=len(image_list), stream=sys.stdout)
    bar.start()
    for image in images:
        # file names look like "image_<id>..." — token [1] is the image id
        id = image.split('_')[1]  # NOTE(review): shadows the `id` builtin
        if image.startswith("image") and (image_list is None or id in image_list):
            self.image_list.append(
                image_data.Image_data(project_name, image, self, user_list))
            self.nb_images += 1
            bar.next()
    bar.finish()
    #map_array = []
    #for image in images:
    #    if image.startswith("image"):
    #        map_array.append((project_name, image, self))
    #        self.nb_images += 1
    #self.image_list = parallelize_image_init(map_array)
    # load UserData objects from files
    print "Loading user data to memory..."
    f = open(config.WORKING_DIRECTORY + project_name + user_dir, 'rb')
    csv_in = csv.reader(f)
    data = list(csv_in)
    # Rows 0 and 1 are headers: row 0 tags each column 'M'/'Y'/'X',
    # row 1 carries the column name used as the dict key below.
    for i in range(2, len(data)):
        y_vars = {}  # columns tagged 'Y'
        m_vars = {}  # columns tagged 'M' (includes 'CYTOMINE ID')
        x_vars = {}  # columns tagged 'X'
        for j in range(len(data[i])):
            if data[0][j] == 'M':
                m_vars[data[1][j]] = data[i][j]
            elif data[0][j] == 'Y':
                y_vars[data[1][j]] = data[i][j]
            elif data[0][j] == 'X':
                x_vars[data[1][j]] = data[i][j]
        if user_list is None or m_vars['CYTOMINE ID'] in user_list:
            u = user_data.User_data(self.image_list, m_vars['CYTOMINE ID'],
                                    self, y_vars, m_vars, x_vars)
            self.user_list.append(u)
            self.nb_users += 1
    f.close()
    # give every image a back-reference to the loaded users
    for image in self.image_list:
        image.init_user_data_link(self.user_list)
    # load the module timeline when the project ships one
    dir_l = os.listdir(config.WORKING_DIRECTORY + project_name + "/")
    mod_name = config.WORKING_DIRECTORY + project_name + "/timeline.csv"
    if "timeline.csv" in dir_l:
        f = open(mod_name, 'rb')
        csv_in = csv.reader(f)
        data = list(csv_in)
        for i in range(1, len(data)):  # row 0 is the header
            row = data[i]
            m = module_data.Module_data(row, self.image_list, self, self.user_list)
            self.module_list.append(m)
        f.close()
# NOTE(review): top-level script fragment; indentation reconstructed from a
# collapsed source, and the innermost loop body continues past this excerpt.
if args.output:
    csv_file = open(args.output, 'w', newline='')
    csv_writer = csv.writer(csv_file, delimiter=' ', quotechar='|',
                            quoting=csv.QUOTE_MINIMAL)
config = yaml.safe_load(read_file(args.config))
print(f'Config loaded from {args.config}')
engine = create_engine(config['mysql'])
print(f'Processing...')
stores = config['stores']
barcodes = config['barcodes']
periods = config['periods']
# one progress-bar step per (store, barcode, period) combination
iter_cnt = len(stores) * len(barcodes) * len(periods)
bar = ChargingBar('Waiting...', max=iter_cnt)
bar.start()
for s in range(len(stores)):
    store_id = stores[s]
    for b in range(len(barcodes)):
        barcode = barcodes[b]
        bar.message = f'[Store: {store_id}] {str(barcode).ljust(13, " ")}'
        bar.update()
        # daily sales fetched once per (store, barcode), reused per period
        df_barcode = get_barcode_daily_sales(engine, store_id, barcode)
        for plot in range(len(periods)):
            period = periods[plot]
            # period['date'] is the anchor date in DD.MM.YYYY format
            today = arrow.get(period['date'], 'DD.MM.YYYY')
            beg = today.shift(days=1)  # the day after the anchor date