def load_processed_data_gc(
        gc_path='.',
        cities=('london', 'moscow', 'nyc', 'paris', 'vancouver', 'beijing',
                'kyoto', 'seoul', 'singapore', 'tokyo'),
        input_shape=(H, W, C),
        show=None,
        verbose=0,
):
    """Load preprocessed city street data from per-city ``.npy`` files.

    Assumes one ``<city>.npy`` file per city sits under ``gc_path`` -- the
    layout used when the files are staged in a Google Colaboratory
    virtual env.  If a file is missing, the user is asked interactively
    whether to skip that city or abort.

    arguments
    - gc_path: directory containing the ``.npy`` files
    - cities: iterable of city names to load
    - input_shape: expected (H, W, C) of each image (kept for interface
      compatibility; not used when loading from ``.npy``)
    - show: None or int (how many images of each city to display)
    - verbose: print progress information when truthy

    return: list of (city_name, city_data) pairs
    - city_name: string
    - city_data: numpy array
    """
    loaded = []
    for city in cities:
        if verbose:
            print('loading {} street data ...'.format(city))
        npy_path = os.path.join(gc_path, city + '.npy')
        try:
            city_array = np.load(npy_path)
        except FileNotFoundError:
            # Ask interactively whether to drop this city or stop early.
            answer = input(
                'file {} is not found, continue loading the data without {}\'s data or not ? (y or n) >'
                .format(npy_path, city))
            if answer != 'y':
                print('stopping loading ...')
                return loaded
            print('continue loading without {}\'s data ...'.format(city))
            continue
        loaded.append((city, city_array))
    if show:
        for city, arr in loaded:
            show_data(city, arr, num_show=show)
    if verbose:
        print(
            'returned: {} length tuple whose element\'s shape is (city_name, city_data)'
            .format(len(loaded)))
        for city, arr in loaded:
            print('{}: {}'.format(city, arr.shape))
    return loaded
def handle_checkall(message):
    """Telegram handler for /checkall: re-check every email registered by
    the requesting user against the breach service and report new results."""
    _logger.info("/checkall")
    # db.get_user returns a sequence; the first element is the user record.
    user = db.get_user(message.from_user.id)[0]
    emails = [] if 'emails' not in user else user['emails']
    if not emails:
        # NOTE(review): no early return here; execution falls through, but
        # the loop below is a no-op on an empty list, so this is harmless.
        bot.reply_to(message, 'No has registrado emails todavía. Utiliza /newemail para ello.')
    flag_update_urls = False
    for email in emails:
        # URLs already reported for this email in previous checks.
        urls = [] if 'urls' not in email else email['urls']
        urls_data = [url['url'] for url in urls]
        json_data = check_email(email['email'])
        # show_data presumably formats the service response into result
        # lines; line[1] appears to be the breach URL -- TODO confirm.
        lines = show_data(json_data)
        if lines:
            bot.reply_to(message, "%s con resultados..." % email['email'])
            for line in lines:
                registered = False
                if line[1] in urls_data:
                    registered = True
                    # NOTE(review): this break abandons the remaining result
                    # lines as soon as one already-known URL is seen, so any
                    # new breaches listed after it are never reported.
                    # Confirm intent -- the `registered` flag is redundant
                    # given the break.
                    break
                if not registered:
                    urls += [{'url': line[1]}]
                    flag_update_urls = True
                    line_str = ' '.join(line)
                    bot.send_message(user['id'], line_str)
            email['urls'] = urls
        else:
            bot.send_message(user['id'], "%s Todo OK!" % email['email'])
    # Persist the updated URL lists only when something new was found.
    if flag_update_urls:
        db.register_emails(user['id'], emails)
def get(self, request, format=None):
    """Return the show_data payload for every Block belonging to the
    patient identified by ``patient_id`` in the request body."""
    patient_pk = request.data.get("patient_id")
    target_patient = user_models.Patient.objects.get(id=patient_pk)
    patient_blocks = user_models.Block.objects.filter(patient=target_patient)
    payload = [show_data(block.id) for block in patient_blocks]
    return Response(payload)
# Console menu: read the user's chosen option.
option = int(input("Escolha a opção desejada: "))
if option == 1:
    # Option 1: show the crawled data for a single state, caching results
    # in states_infos so repeated queries skip the crawl.
    state = str(input("Escolha um estado: "))
    crawler.start()
    if state not in states_infos:
        # Cache miss: scrape the data for this state once.
        crawler.search_by(state.capitalize())
        states_infos[state] = crawler.collect_data()
        crawler.finish()
    show_data(state, states_infos.get(state))
if option == 2:
    # Option 2: let the user pick an indicator and a list of states.
    for index, indicator in enumerate(INDICATORS_WISHED, start=1):
        print("{} - {}".format(index, indicator))
    indicator_index = int(input("Escolha o indicador: "))
    state_quant = int(input("Quantidade de estados:"))
    states = [str(input("Estado: ")) for _ in range(state_quant)]
    crawler.start()
    for state in states:
        if state not in states_infos:
            crawler.search_by(state)
            # NOTE(review): this branch appears truncated in this chunk --
            # the collect_data/finish/display steps for option 2 are not
            # visible here; confirm against the full file.
def load_processed_data(
        dir_path,
        cities=('london', 'moscow', 'nyc', 'paris', 'vancouver', 'beijing',
                'kyoto', 'seoul', 'singapore', 'tokyo'),
        input_shape=(H, W, C),
        show=None,
        verbose=0,
):
    '''
    Load processed street-image data from per-city directories of jpg
    files, and show a sample of it if `show` is given.

    Expects images laid out as:
        <dir_path>/data/processed/*/<city_name>/*/*.jpg

    argments
    - dir_path: root directory containing the `data/processed` tree
    - cities: iterable of city names to load
    - input_shape: (height, width, channels) of every processed image
    - show: None or int (which specifies the number of images of each
      city will be showed)
    - verbose: print progress (with a tqdm bar per directory) when truthy

    return: list whose element's shape is (city_name, city_data)
    - city_name: string
    - city_data: numpy array of shape (n_images, *input_shape), pixel
      values scaled to [0, 1]
    '''
    data = []
    for city_name in cities:
        if verbose:
            print('loading {} street data ...'.format(city_name))
        city_dirc_path = os.path.join(dir_path, 'data', 'processed', '*',
                                      city_name, '*')
        dirc_paths = glob.glob(city_dirc_path)
        if not dirc_paths:
            i = input(
                'city_dirc_path {} does not seem to exist, continue loading the data without {}\'s data or not ? (y or n) >'
                .format(city_dirc_path, city_name))
            if i == 'y':
                print('continue loading without {}\'s data ...'.format(
                    city_name))
                continue
            else:
                print('stopping loading ...')
                return data
        # Collect one array per directory and stack once at the end.
        # (The previous version called np.vstack inside the loop, which
        # re-copies the accumulated data every iteration -- quadratic in
        # the number of images -- and needed a throwaway np.empty((1, ...))
        # seed row that was sliced off afterwards.)
        chunks = []
        for dirc_path in dirc_paths:
            files = glob.glob(os.path.join(dirc_path, '*.jpg'))
            chunk = np.empty((len(files), *input_shape), dtype='float')
            # tqdm only adds a progress bar; iteration order is unchanged.
            file_iter = tqdm(enumerate(files)) if verbose else enumerate(files)
            for i, f in file_iter:
                # Normalize 8-bit pixel values to [0, 1].
                chunk[i] = np.asarray(Image.open(f)) / 255.
            chunks.append(chunk)
        # dirc_paths is non-empty here, so chunks has at least one element.
        data.append((city_name, np.vstack(chunks)))
    if show:
        for city_name, city_data in data:
            show_data(city_name, city_data, num_show=show)
    if verbose:
        print(
            'returned: {} length tuple whose element\'s shape is (city_name, city_data)'
            .format(len(data)))
        for city_name, city_data in data:
            print('{}: {}'.format(city_name, city_data.shape))
    return data
def show_data(self):
    """Display this dataset via the module-level show_data helper,
    passing the feature columns (all but the last) and the label
    column (the last) separately."""
    label_col = self.data.shape[1] - 1
    show_data(self.data[:, :label_col], self.data[:, label_col])
# resample the dataset using these new weights
# NOTE(review): this assignment references `self` at module level -- it
# looks like a fragment of a method body left behind by an edit; confirm
# intent before relying on it.
bootstrapped_X, bootstrapped_Y = self.resample(X, Y, weights)

# Demo: train our AdaBoost on synthetic classification data and compare
# against sklearn's implementation.
X, Y = get_classification_data(sd=10, m=50)
adaboost = AdaBoost(n_models=20)
adaboost.fit(X, Y)
# Compute the ensemble prediction once instead of re-running
# final_prediction for every print below (it was called five times on the
# same X; assumes final_prediction is deterministic -- it is a fitted
# model's predict, so repeated calls returned identical arrays anyway).
final_preds = adaboost.final_prediction(X)
print("This is the final prediction:", final_preds)
print("This is the original labels:", Y)
print(final_preds == Y)
print("Shape:", final_preds.shape)
print("type:", type(final_preds))
visualise_predictions(adaboost.final_prediction, X)
print(f'accuracy: {calc_accuracy(final_preds, Y)}')
show_data(X, Y)
print("Evaluate for a point: ", adaboost.final_prediction(np.array([[1, 1]])))

# %%
import sklearn.ensemble

adaBoost = sklearn.ensemble.AdaBoostClassifier()
adaBoost.fit(X, Y)
predictions = adaBoost.predict(X)
calc_accuracy(predictions, Y)
#visualise_predictions(adaBoost.predict, X,Y)
#show_data(X, Y)
print("Adaboosts sklearn predictions:", predictions)
print(predictions.shape)
print(type(predictions))
print(f'accuracy: {calc_accuracy(predictions, Y)}')
def check(bot=False, confile=False):
    """Cron entry point: check every registered email of every user and
    notify users of newly seen breach URLs.

    arguments
    - bot: an already-constructed telebot.TeleBot, or False to build one
      from `confile`
    - confile: path to an INI config file with [options] api_token /
      admin_id (used only when `bot` is falsy)

    Exits the process when neither `bot` nor `confile` is supplied.
    """
    if not bot and confile:
        # Build the bot from the config file.
        Config = ConfigParser.ConfigParser()
        Config.read(confile)
        HSHBOT_TOKEN = Config.get('options', 'api_token')
        bot = telebot.TeleBot(HSHBOT_TOKEN)
        HSHBOT_ADMIN_ID = int(Config.get('options', 'admin_id') or False)
    elif bot:
        # Bot supplied by the caller; admin id comes from the environment.
        HSHBOT_ADMIN_ID = int(os.environ['HSHBOT_ADMIN_ID'])
    else:
        _logger.error("Cron execution error, bot and confile not found!")
        sys.exit()
    _logger.info("cron:: check()")
    if HSHBOT_ADMIN_ID:
        bot.send_message(HSHBOT_ADMIN_ID, '[INFO] Iniciando cron.check()...')
    users = db.get_users()
    # Cache of email -> formatted result lines, so an address registered by
    # several users is only queried once per run.
    emails_checked = {}
    try:
        for user in users:
            user_emails = [] if 'emails' not in user else user['emails']
            if not user_emails:
                bot.send_message(
                    user['id'],
                    "Se ha ejecutado el checkeo de emails, y veo que no tienes ninguno registrado "
                    "aún...\nRecuerda que puedes hacerlo con /newemail"
                )
            flag_update_urls = False
            for user_email in user_emails:
                email_urls = [] if 'urls' not in user_email else user_email['urls']
                email_urls_data = [email_url['url'] for email_url in email_urls]
                flag_notification = False
                if user_email['email'] not in emails_checked:
                    try:
                        json_data = check_email(user_email['email'])
                    # Was `except Exception, e` (Python 2-only syntax);
                    # this form is valid on Python 2.6+ and Python 3.
                    except Exception:
                        if HSHBOT_ADMIN_ID:
                            bot.send_message(HSHBOT_ADMIN_ID, '[ERROR] HeSidoHackeadoBot is down!')
                        # Bare raise preserves the original traceback
                        # (unlike the previous `raise e`).
                        raise
                    emails_checked[user_email['email']] = show_data(json_data)
                if emails_checked[user_email['email']]:
                    for line in emails_checked[user_email['email']]:
                        registered = False
                        if line[1] in email_urls_data:
                            registered = True
                            # NOTE(review): this break abandons the remaining
                            # result lines once one already-known URL is seen,
                            # so newer breaches after it are never reported --
                            # confirm intent (same pattern as handle_checkall).
                            break
                        if not registered:
                            line_str = ' '.join(line)
                            if not flag_notification:
                                flag_notification = True
                                bot.send_message(user['id'], "%s con resultados..." % user_email['email'])
                            bot.send_message(user['id'], line_str)
                            email_urls += [{'url': line[1]}]
                            flag_update_urls = True
                    user_email['urls'] = email_urls
                # if not flag_notification:
                #     bot.send_message(user['id'], "%s Todo OK!" % user_email['email'])
            if flag_update_urls:
                db.register_emails(user['id'], user_emails)
    except Exception:
        # Best-effort cron run: keep the old "never crash" semantics, but
        # log the failure instead of silently swallowing it
        # (was `except Exception, e: pass`).
        _logger.exception("cron:: check() aborted by unexpected error")