def handle_single_images(input_handler: InputHandler, number_of_cameras: int):
    """Enroll subjects from camera 0, then report identification accuracy per remaining camera."""
    Database.initialize()

    # Enrollment pass: store every frame of camera 0 under its image index.
    handler = copy.deepcopy(input_handler)
    while True:
        try:
            frame_id, frame = handler.get_frame(0)
            SubjectIdentifier(frame_id).identify(frame)
        except IndexError:
            # Camera 0 exhausted.
            break

    # Evaluation passes: identify frames from each remaining camera and
    # print the percentage whose match equals the expected subject name.
    for camera in range(1, number_of_cameras):
        handler = copy.deepcopy(input_handler)
        hits = 0
        misses = 0
        while True:
            try:
                frame_id, frame = handler.get_frame(camera)
                match = SubjectIdentifier().identify(frame)
                if match.name == frame_id:
                    hits += 1
                else:
                    misses += 1
                print(frame_id, "identified as", match.name)
            except IndexError:
                print(hits * 100 / (hits + misses))
                break
def install(request):
    """Persist a questionnaire-survey document posted as JSON; respond with its id."""
    doc = json.loads(request.POST.get('data'))
    # Stamp server-side arrival time (epoch seconds).
    doc['timestamp_added'] = int(time.time())
    probe = 'dk_dtu_compute_questionnaire_survey'
    doc_id = Database().insert(doc, collection=probe)
    return HttpResponse(json.dumps({'ok': str(doc_id)}), status=200)
def __init__(self, name_to_save_in_db: Optional[str]=None, enable_feature_matching=False):
    """Set up identification state; if a name is given, observations are also enrolled under it."""
    # Enrollment target name (None means identify-only) and matching mode.
    self.__add_to_db = name_to_save_in_db
    self.__feature_matching_enabled = enable_feature_matching
    # All known subjects, plus the existing record (if any) for the enrollment name.
    self.__subjects = Database.get_subjects()
    self.__subject = Database.get_subject(name_to_save_in_db)
    # Per-instance scoring state accumulated across identify() calls.
    self.__first_run = True
    self.__scores = {}
    self.__best_match = None
    self.__new_histograms = None
    # SIFT extractor used only when feature matching is enabled.
    self.__sift = cv2.xfeatures2d.SIFT_create()
def upload(request):
    """Store a questionnaire document on behalf of an authenticated user."""
    auth = authorization_manager.authenticate_token(request)
    if 'error' in auth:
        return HttpResponse(json.dumps(auth), status=401)
    user = auth['user']
    # The payload arrives URL-quoted under 'doc'.
    raw = urllib.unquote(request.REQUEST.get('doc'))
    doc = json.loads(raw)
    doc['user'] = user.username
    doc_id = Database().insert(doc, collection='questionnaire')
    return HttpResponse(json.dumps({'ok': str(doc_id)}), status=200)
def upload(request):
    """Store a questionnaire document with role metadata, keeping a backup copy.

    Authenticates the token, attaches the user's roles when a UserRole
    record exists, backs the document up, then inserts it.
    """
    auth = authorization_manager.authenticate_token(request, 'connector_questionnaire.input_form_data')
    if 'error' in auth:
        return HttpResponse(json.dumps(auth), status=401)
    user = auth['user']
    # Best effort: users without a UserRole record simply get roles=None.
    # BUG FIX: was a bare `except:` which also swallowed SystemExit/KeyboardInterrupt.
    roles = None
    try:
        roles = [x.role for x in UserRole.objects.get(user=user).roles.all()]
    except Exception:
        pass
    doc = urllib.unquote(request.REQUEST.get('doc'))
    doc = json.loads(doc)
    doc['user'] = user.username
    probe = 'dk_dtu_compute_questionnaire'
    # Keep an out-of-band backup before the primary insert.
    backup.backupValue(data=doc, probe=probe, user=user.username)
    database = Database()
    doc_id = database.insert(doc, collection=probe, roles=roles)
    return HttpResponse(json.dumps({'ok': str(doc_id)}), status=200)
async def info(self, ctx):
    """Send the bot info embed in the server's configured language, then a gif."""
    try:
        db = Database(ctx.guild.id)  # Create a new instance
        info = await db.find_info()
        config_lang = info['logs_language'] if info is not None else 'en'
    except Exception:
        # Any lookup failure falls back to English.
        config_lang = 'en'
    owner = self.client.get_user(254515724804947969)
    # BUG FIX: the Thai branch previously sent the English embed too.
    # NOTE(review): assumes misc_msg.info_th exists, matching the other *_th helpers.
    await ctx.send(
        embed=misc_msg.info_en(ctx, owner, self.client)
        if config_lang == 'en' else misc_msg.info_th(ctx, owner, self.client))
    await ctx.send(file=discord.File('assets/chaewon_muah.gif'))
async def change_prefix(self, ctx, new_prefix: str):
    """Change the command prefix for this server and confirm in the configured language."""
    db = Database(ctx.guild.id)  # Create a new instance
    if not await db.info_exists():  # Check if that server has Logker setup?
        await ctx.send(embed=setup_not_found())
        return
    info = await db.find_info()
    prefix = await db.find_prefix()
    await db.update_prefix(new_prefix)
    # BUG FIX: the English message had a stray extra backtick after the old prefix.
    await ctx.send(
        f'Prefix changed from `{prefix}` to `{new_prefix}`'
        if info['logs_language'] == 'en' else
        f'ได้ทำการเปลี่ยนเครื่องหมายนำหน้าจาก `{prefix}` เป็น `{new_prefix}` เรียบร้อยแล้ว')
def parse_and_validate_symbols(user_symbols, exchange):
    """ Parse symbols from <from_symbol>/<to_symbol> format to
        list of dicts like:
            [{ symbol      : <from_symbol><to_symbol>
               from_symbol : <from_symbol>
               to_symbol   : <to_symbol>
               exchange    : <exchange> }]
        Verify that symbols are valid by comparing to full set of
        exchange symbols.

        Parameters:
        -------------------
        user_symbols: list of strings
            Market symbols provided by user. Example: [BTC/USDT, LTC/BTC]
    """
    # NOTE(review): `exchange` is interpolated directly into SQL. Acceptable for
    # trusted config values, but switch to a parameterized query if it can ever
    # come from untrusted input.
    query = f"SELECT symbol FROM all_symbols WHERE exchange = '{exchange}';"
    ex_symbols = list(Database().execute(query).symbol)
    # Set for O(1) membership tests instead of scanning the list per symbol.
    known = set(ex_symbols)

    ins = []
    for symbol in user_symbols:
        # Fail hard if any symbol is invalid.
        if symbol.replace('/', '') not in known:
            raise ImplementationError(f'''
                {symbol} is not traded on the {exchange} exchange.
                Check user_symbols in config/data_collection.py.
                Possible symbols are: {ex_symbols}
                ''')
        # Format for DB insert.
        from_symbol, _, to_symbol = symbol.partition('/')
        ins.append({
            'symbol': symbol.replace('/', ''),
            'from_symbol': from_symbol,
            'to_symbol': to_symbol,
            'exchange': exchange
        })
    return ins
async def member_voice_chat_leave(self, member: discord.Member,
                                  before: discord.VoiceState,
                                  after: discord.VoiceState):
    """Log when a member leaves a voice or stage channel.

    Mute/deafen toggles also fire voice-state updates; those are ignored,
    as are moves between channels (only a genuine leave is logged).
    """
    # Self-mute / self-deaf toggles are not leaves.
    if before.self_mute != after.self_mute:
        return
    if before.self_deaf != after.self_deaf:
        return
    # Only proceed when the member had a channel before and none after.
    if before.channel != after.channel:
        if before.channel is not None and after.channel is None:
            pass
        else:
            return
    db = Database(member.guild.id)  # Create a new instance
    if not await db.info_exists(
    ):  # Check if that server has Logker setup?
        return
    info = await db.find_info()
    if info['guild_id'] == member.guild.id:
        logs_channel = self.client.get_guild(info['guild_id']).get_channel(
            info['channel_id'])
        config_lang = 'en' if info is None else info['logs_language']
        if before.channel != after.channel:
            channel = self.client.get_channel(before.channel.id)
            embed = None
            # Voice and stage channels get different embed builders.
            if isinstance(channel, discord.VoiceChannel):
                embed = (member_events_msg.member_leave_voice_chat_en(
                    member, channel) if config_lang == 'en' else
                         member_events_msg.member_leave_voice_chat_th(
                             member, channel))
            elif isinstance(channel, discord.StageChannel):
                embed = (member_events_msg.member_leave_stage_chat_en(
                    member, channel) if config_lang == 'en' else
                         member_events_msg.member_leave_stage_chat_th(
                             member, channel))
            await logs_channel.send(embed=embed)
    return
async def on_guild_role_delete(self, role):
    """Log a role deletion to the guild's configured logs channel."""
    db = Database(role.guild.id)
    # Bail out unless this guild completed Logker setup.
    if not await db.info_exists():
        return
    info = await db.find_info()
    if info['guild_id'] != role.guild.id:
        return
    guild = self.client.get_guild(info['guild_id'])
    logs_channel = guild.get_channel(info['channel_id'])
    config_lang = 'en' if info is None else info['logs_language']
    # Pick the embed matching the configured language.
    if config_lang == 'en':
        embed = role_events_msg.guild_role_delete_en(role)
    else:
        embed = role_events_msg.guild_role_delete_th(role)
    await logs_channel.send(embed=embed)
async def config(self, ctx):
    """Show the current Logker configuration for this server."""
    db = Database(ctx.guild.id)
    # The server must have completed setup first.
    if not await db.info_exists():
        await ctx.send(embed=setup_not_found())
        return
    info = await db.find_info()
    prefix = await db.find_prefix()
    guild = self.client.get_guild(info['guild_id'])
    channel = guild.get_channel(info['channel_id'])
    config_lang = 'en' if info is None else info['logs_language']
    # Language-specific config embed.
    if config_lang == 'en':
        embed = config_info_en(ctx, channel, prefix)
    else:
        embed = config_info_th(ctx, channel, prefix)
    await ctx.send(embed=embed)
def commit_failure(self, database, diff_id):
    """Record a failed dissemination in the database, clean up, and return the failed status."""
    failed_diffusion = Diffusion(diff_externalid=diff_id,
                                 fullrequestId=self.req_id,
                                 requestStatus=REQ_STATUS.failed,
                                 Date=self._to_datetime(self.date_reception),
                                 rxnotif=True)
    # Session operations require the Flask application context.
    with Database.get_app().app_context():
        database.session.add(failed_diffusion)
        database.session.commit()
    LOGGER.info("Committed %s dissemination status into database.",
                REQ_STATUS.failed)
    # The JSON request file is no longer needed once the failure is recorded.
    if os.path.isfile(self.request_file):
        Tools.remove_file(self.request_file, "JSON request", LOGGER)
    return REQ_STATUS.failed
async def on_guild_channel_delete(self, channel):
    """Log a channel deletion to the guild's configured logs channel."""
    db = Database(channel.guild.id)
    # Ignore guilds that never ran setup.
    if not await db.info_exists():
        return
    info = await db.find_info()
    if info['guild_id'] != channel.guild.id:
        return
    guild = self.client.get_guild(info['guild_id'])
    logs_channel = guild.get_channel(info['channel_id'])
    config_lang = 'en' if info is None else info['logs_language']
    # Language-specific embed.
    if config_lang == 'en':
        embed = channel_events_msg.guild_channel_remove_en(channel)
    else:
        embed = channel_events_msg.guild_channel_remove_th(channel)
    await logs_channel.send(embed=embed)
def insert_custom_data(verbose=False):
    """Collect data from every CustomData subclass and insert it into the database.

    Each subclass supplies a table name and a get_data() DataFrame; empty
    results are skipped. A failure on one datasource does not stop the others.
    """
    for datasource in CustomData.__subclasses__():
        d = datasource()
        if verbose:
            print(f'Acquiring data for {datasource.__name__}')
        to_insert = d.get_data()
        if to_insert.empty:
            continue
        if verbose:
            print(f'Found {len(to_insert)} rows to insert.')
        try:
            CreateTable(d.table_name, to_insert, verbose=verbose)
            Database().insert(d.table_name, to_insert, verbose=verbose)
            if verbose:
                print('Insert successful')
        # BUG FIX: was a bare `except:` that hid the failure cause and also
        # caught SystemExit/KeyboardInterrupt.
        except Exception as exc:
            print(f'Insert for {datasource.__name__} failed.')
            print(exc)
def try_coffee_management(ctx: Context, database: Database):
    """Handle opt-in / opt-out requests for the weekly random coffee."""
    # Only guests and above may toggle participation.
    if not database.is_at_least_guest(user_object=ctx.user_object):
        return ctx
    coffee_score = get_coffee_score(ctx.text)
    opting_in = ctx.text == TAKE_PART or coffee_score == 1
    opting_out = ctx.text == NOT_TAKE_PART or coffee_score == -1
    if opting_in:
        # A Telegram username is required so participants can be matched.
        if ctx.user_object.get('username') is None:
            ctx.intent = 'COFFEE_NO_USERNAME'
            ctx.response = (
                'Чтобы участвовать в random coffee, нужно иметь имя пользователя в Телеграме.'
                '\nПожалуйста, создайте себе юзернейм (ТГ > настройки > изменить профиль > '
                'имя пользователя) и попробуйте снова.\nВ случае ошибки напишите @cointegrated.'
                '\nЕсли вы есть, будьте первыми!'
            )
            return ctx
        ctx.the_update = {"$set": {'wants_next_coffee': True}}
        ctx.response = 'Окей, на следующей неделе вы будете участвовать в random coffee!'
        ctx.intent = 'TAKE_PART'
    elif opting_out:
        ctx.the_update = {"$set": {'wants_next_coffee': False}}
        ctx.response = 'Окей, на следующей неделе вы не будете участвовать в random coffee!'
        ctx.intent = 'NOT_TAKE_PART'
    return ctx
async def on_member_remove(self, member):
    """Log a member leaving the guild."""
    db = Database(member.guild.id)
    # Ignore guilds that never ran setup.
    if not await db.info_exists():
        return
    info = await db.find_info()
    if info['guild_id'] != member.guild.id:
        return
    # Seek the guild's logs channel.
    guild = self.client.get_guild(info['guild_id'])
    logs_channel = guild.get_channel(info['channel_id'])
    config_lang = 'en' if info is None else info['logs_language']
    # Language-specific embed.
    if config_lang == 'en':
        embed = member_events_msg.member_remove_en(member)
    else:
        embed = member_events_msg.member_remove_th(member)
    await logs_channel.send(embed=embed)
async def channel(self, ctx, channel: discord.TextChannel):
    """Change the logs channel for this server and confirm in the new channel."""
    db = Database(ctx.guild.id)  # Create a new instance
    if not await db.info_exists():  # Check if that server has Logker setup?
        await ctx.send(embed=setup_not_found())
        return
    info = await db.find_info()
    old_channel = self.client.get_guild(info['guild_id']).get_channel(
        info['channel_id'])
    await db.update_channel(channel.id)
    # BUG FIX: the Thai message had the old and new channel mentions swapped.
    await channel.send(
        f'**Updated**: Logker changed logs channel from {old_channel.mention} to {channel.mention}.'
        if info['logs_language'] == 'en' else
        f'**อัพเดท**: Logker ได้เปลี่ยนช่องเก็บ logs จาก {old_channel.mention} ไปที่ '
        f'{channel.mention} เรียบร้อยแล้ว')
def process_fips_chunk(chunk, stats_db_file):
    """Copy per-FIPS statistics rows from the main stats DB into a chunk-local DB."""
    source_db = Database(stats_db_file)
    # Chunk DB is named after the first and last FIPS codes it covers.
    chunk_path = os.path.join(DISTRIBUTED_TWEET_STATS_PATH,
                              'fips_{}_{}.db'.format(chunk[0], chunk[-1]))
    chunk_db = Database(chunk_path)
    stats_tb = chunk_db.create_table('statistics', TWEET_STATS_COLUMNS)
    # One explicit transaction for the whole chunk keeps the inserts fast.
    chunk_db.cursor.execute('BEGIN')
    for fips in chunk:
        rows = source_db.select(
            'SELECT * FROM statistics WHERE fips={}'.format(fips))
        chunk_db.insert(
            'INSERT INTO {} VALUES (?, ?, ?, ?, ?)'.format(stats_tb),
            rows,
            many=True)
    chunk_db.connection.commit()
    chunk_db.connection.close()
    source_db.connection.close()
async def guild_channel_role_update(self, before: discord.abc.GuildChannel,
                                    after: discord.abc.GuildChannel):
    """Log when a single role override is added to or removed from a channel."""
    db = Database(before.guild.id)
    if not await db.info_exists(
    ):  # Check if that server has Logker setup?
        return
    info = await db.find_info()
    if info['guild_id'] == after.guild.id:
        logs_channel = self.client.get_guild(info['guild_id']).get_channel(
            info['channel_id'])
        # BUG FIX: was `info[2]`; every sibling handler reads the language by key.
        config_lang = 'en' if info is None else info['logs_language']
        if before.changed_roles != after.changed_roles:
            # Roles present before but not after -> removed overrides.
            before_total_roles = [
                role for role in before.changed_roles
                if role not in after.changed_roles
            ]
            # Roles present after but not before -> added overrides.
            after_total_roles = [
                role for role in after.changed_roles
                if role not in before.changed_roles
            ]
            embed = None
            if len(before_total_roles) == 1:
                # Set language version of embed message
                embed = (channel_events_msg.guild_channel_role_remove_en(
                    after, before_total_roles) if config_lang == 'en' else
                         channel_events_msg.guild_channel_role_remove_th(
                             after, before_total_roles))
            elif len(after_total_roles) == 1:
                # Set language version of embed message
                embed = (channel_events_msg.guild_channel_role_append_en(
                    after, after_total_roles) if config_lang == 'en' else
                         channel_events_msg.guild_channel_role_append_th(
                             after, after_total_roles))
            await logs_channel.send(embed=embed)
async def language(self, ctx):
    """Toggle Logker's language for this server between English and Thai."""
    db = Database(ctx.guild.id)  # Create a new instance
    if not await db.info_exists():  # Check if that server has Logker setup?
        await ctx.send(embed=setup_not_found())
        return
    info = await db.find_info()
    # Flip the stored language.
    new_lang = 'en' if info['logs_language'] == 'th' else 'th'
    await db.update_language(new_lang)
    # BUG FIX: adjacent f-strings concatenated to "changed from**Thai**";
    # a trailing space restores "changed from **Thai**".
    await ctx.send(
        f'**Updated**: Logker language changed from '
        f'**Thai** :flag_th: to **English** :flag_gb:.'
        if new_lang == 'en' else
        f'**อัพเดท**: ภาษาของ Logker ได้เปลี่ยนจาก **อังกฤษ** :flag_gb: เป็น **ไทย** :flag_th: แล้ว'
    )
def monitorDissemination(ctx, requestId):
    """Return the dissemination status for a request, keyed by requestId + client host."""
    # Prefer the proxy-forwarded client address; fall back to the socket peer.
    try:
        client_ip = ctx.transport.req["HTTP_X_REAL_IP"]
    except KeyError:
        client_ip = ctx.transport.req.get("REMOTE_ADDR")
    LOGGER.info(
        "Received monitorDissemination request for requestId %s from ip %s",
        requestId, client_ip)
    # Modify the namespace to comply with the openwis client service.
    ctx.descriptor.out_message._type_info[
        'disseminationStatus'].Attributes.sub_ns = ""
    host = Notification.get_hostname(client_ip)
    status, message = Database.get_diss_status(requestId + host)
    # BUG FIX: log message read "Status for for requestId".
    LOGGER.info("Status for requestId %s is %s with message %s",
                requestId + host, status, message)
    # There is a character limit for the message, so we truncate.
    message = message[:MSG_MAX_LENGTH]
    diss_resp = DisseminationStatus(requestId, status, message)
    return diss_resp
def test_register_user(self):
    """ Basic Registration Test """
    # Register via the endpoint, then simulate a confirmed account.
    self.app.post("/register", data=self.getUser(), follow_redirects=True)
    user = User.getByName("mdrahali")
    user.email_confirmation = True
    user.change_configuration = {}
    user.update_record()
    # NOTE(review): the username literal appears scrubbed ("******") --
    # verify the fixture value matches self.getUser().
    assert Database.count_record('users', {'username': "******"}) == 1
    """ Invalid Username """
    # A username with disallowed characters must be rejected with this message.
    invalid_user = self.getUser()
    invalid_user['username'] = "******"
    response = self.app.post("/register", data=invalid_user, follow_redirects=True)
    assert "Username must contain only letters numbers or underscore" in str(
        response.data)
async def guild_afk_channel_update(self, before: discord.Guild, after: discord.Guild):
    """Log a change of the guild's AFK channel."""
    db = Database(before.id)
    # Ignore guilds that never ran setup.
    if not await db.info_exists():
        return
    info = await db.find_info()
    if info['guild_id'] != after.id:
        return
    if before.afk_channel == after.afk_channel:
        return
    # Seek the guild's logs channel.
    guild = self.client.get_guild(info['guild_id'])
    logs_channel = guild.get_channel(info['channel_id'])
    config_lang = 'en' if info is None else info['logs_language']
    # Language-specific embed.
    if config_lang == 'en':
        embed = guild_events_msg.guild_afk_channel_update_en(before, after)
    else:
        embed = guild_events_msg.guild_afk_channel_update_th(before, after)
    await logs_channel.send(embed=embed)
def get_ticker(from_symbol, to_symbol, insert=False):
    '''Get most recent price for currency pair from Binance API.'''
    # Fetch the single-symbol ticker and stamp it with the current UTC time.
    ticker = Binance().ticker(symbol=from_symbol + to_symbol)
    date = tb.DateConvert(datetime.utcnow()).date
    df = pd.DataFrame(ticker, columns=['symbol', 'price'], index=[0])
    df['date'] = date
    df = df[['date', 'symbol', 'price']]
    df.price = pd.to_numeric(df.price)
    if not insert:
        return df
    # Insert into MySQL server: quote string values, leave numerics untouched.
    record = OrderedDict(pd.Series(df.T[0]).to_dict())
    for key, value in record.items():
        if isinstance(value, str):
            record[key] = f"'{value}'"
    Database().insert('ticker', record)
def get_all_tickers(insert=False):
    '''Get current ticker price for all currency pairs in DB.
       Return as DataFrame or insert into DB.'''
    # Known pairs from the DB, full ticker list from the exchange.
    pairs = get_pairs()
    tickers = Binance().all_tickers()
    symbols = [
        row[1].from_symbol + row[1].to_symbol for row in pairs.iterrows()
    ]
    date = tb.DateConvert(datetime.utcnow()).date
    # Keep only tickers whose symbol matches a known pair.
    tickers = [ticker for ticker in tickers if ticker['symbol'] in symbols]
    df = pd.DataFrame(tickers, index=range(len(tickers)))
    df['date'] = date
    df = df[['date', 'symbol', 'price']]
    if not insert:
        return df
    df.date = df.date.astype(str)
    df.symbol = df.symbol.astype(str)
    df.price = pd.to_numeric(df.price)
    records = df.to_dict(orient='records')
    # Quote string values so they can be interpolated into the SQL statement.
    for record in records:
        for key, value in record.items():
            if isinstance(value, str):
                record[key] = f"'{value}'"
    Database().insert('ticker', records)
def main(self):
    """Top-level interactive loop: ensure a city is configured, then search/lookup repeatedly."""
    # First run: with no stored city, force the city-selection view.
    with Database() as db:
        if not len(db.select_city()):
            CityView().main()
    # `choice` acts as a one-slot buffer holding the current selection.
    choice = []
    while True:
        # Refill the buffer when it is empty or was consumed (set to None).
        if len(choice) < 1 or not choice[0]:
            select = self.search()
            try:
                choice[0] = select
            except IndexError:
                # Very first iteration: the list has no slot yet.
                choice.append(select)
        if not choice[0]:
            # A falsy selection routes the user to the settings view.
            SettingView().main()
            continue
        LookupView().main(choice)
        # Consume the selection so the next pass prompts again.
        choice[0] = None
def __init__(self):
    """Build the main window: UI, database handle, records model, camera, and frame timer."""
    super().__init__()
    self.ui = Ui_Form()
    self.ui.setupUi(self)
    # Connection to the records table (credentials scrubbed in source).
    database = Database(host='localhost',
                        database='eardoor',
                        user='******',
                        password='******',
                        table='records')
    records_model = QStringListModel()
    self.ui.records.setModel(records_model)
    self.updater = Updater(self.ui, database, records_model)
    # Capture device sized to the preview widget.
    self.camera = Camera(0, self.ui.camera.width(), self.ui.camera.height())
    self.recognizer = Recognizer()
    # Drive self.update at roughly `fps` frames per second.
    self.fps = 50
    self.timer = QTimer(self)
    self.timer.timeout.connect(self.update)
    self.timer.start(1000 // self.fps)
async def reaction_remove(self, reaction: discord.Reaction, user: discord.Member):
    """Log a reaction removal by a non-bot member."""
    if user.bot:
        return
    db = Database(user.guild.id)
    # Ignore guilds that never ran setup.
    if not await db.info_exists():
        return
    info = await db.find_info()
    if info['guild_id'] != user.guild.id:
        return
    guild = self.client.get_guild(info['guild_id'])
    logs_channel = guild.get_channel(info['channel_id'])
    config_lang = 'en' if info is None else info['logs_language']
    # Language-specific embed.
    if config_lang == 'en':
        embed = reaction_events_msg.reaction_remove_en(reaction, user)
    else:
        embed = reaction_events_msg.reaction_remove_th(reaction, user)
    await logs_channel.send(embed=embed)
async def guild_channel_name_update(self, before: discord.abc.GuildChannel,
                                    after: discord.abc.GuildChannel):
    """Log a channel rename to the guild's configured logs channel."""
    db = Database(before.guild.id)
    # Ignore guilds that never ran setup.
    if not await db.info_exists():
        return
    info = await db.find_info()
    if info['guild_id'] != after.guild.id:
        return
    if before.name == after.name:
        return
    guild = self.client.get_guild(info['guild_id'])
    logs_channel = guild.get_channel(info['channel_id'])
    config_lang = 'en' if info is None else info['logs_language']
    # Language-specific embed.
    if config_lang == 'en':
        embed = channel_events_msg.guild_channel_name_update_en(before, after)
    else:
        embed = channel_events_msg.guild_channel_name_update_th(before, after)
    await logs_channel.send(embed=embed)
class Boss(threading.Thread):
    """Dispatcher thread: starts workers and feeds them build jobs from the database."""

    def __init__(self):
        threading.Thread.__init__(self)
        self.log = logging.getLogger(__name__)
        self.config = Config()
        self.database = Database(self.config)
        # Bounded queue of size 1: the boss blocks until a worker takes the job.
        self.build_queue = Queue(1)

    def run(self):
        # Spin up one worker per configured location.
        workers = []
        for location in self.config.get("workers"):
            worker = Worker(location, "image", self.build_queue)
            worker.start()
            workers.append(worker)
        self.log.info("Active workers are %s", workers)
        # Poll the database forever, handing jobs to workers as they appear.
        while True:
            build_job = self.database.get_build_job()
            if not build_job:
                time.sleep(10)
                continue
            self.log.info("Found build job %s", build_job)
            self.build_queue.put(build_job)
def contest_request(self, id_request, *args, **kwargs):
    """Mark a request as contested and record the status transition in history."""
    db = Database()
    request = Request(id=id_request,
                      status=Request.REQUEST_STATUS.get('contested'))
    db.update(request)
    # Audit-trail entry for the waiting_approval -> contested transition.
    history = History(id_request=request.id,
                      old_status=Request.REQUEST_STATUS.get('waiting_approval'),
                      new_status=Request.REQUEST_STATUS.get('contested'),
                      timestamp=str(datetime.datetime.now().timestamp()))
    db.add(history)
    return {
        'request': object_to_dict(request),
        'msg': 'Request contested successfully'
    }
def start_request(self, address_id, *args, **kwargs):
    """Create a new request for this user at the given address and record its history."""
    db = Database()
    request = Request(id_user=self.id,
                      id_address=address_id,
                      status=Request.REQUEST_STATUS.get('new'))
    db.add(request)
    # Audit-trail entry for the creation (no previous status).
    history = History(id_request=request.id,
                      new_status=Request.REQUEST_STATUS.get('new'),
                      timestamp=str(datetime.datetime.now().timestamp()))
    db.add(history)
    return {
        'request': object_to_dict(request),
        'msg': 'Request started successfully'
    }
def abandon_request(self, id_request, *args, **kwargs):
    """Release a request back to 'new' (dropping its driver) and record the transition."""
    db = Database()
    request = Request(id=id_request,
                      id_driver=None,
                      status=Request.REQUEST_STATUS.get('new'))
    db.update(request)
    # Audit-trail entry for the ongoing -> new transition.
    history = History(id_request=request.id,
                      old_status=Request.REQUEST_STATUS.get('ongoing'),
                      new_status=Request.REQUEST_STATUS.get('new'),
                      timestamp=str(datetime.datetime.now().timestamp()))
    db.add(history)
    return {
        'request': object_to_dict(request),
        'msg': 'Request abandoned successfully'
    }
def identify(self, bounding_box_content: numpy.ndarray, mask: numpy.ndarray=None) -> Subject:
    """Identify the person inside a bounding box and return the best-matching Subject.

    Builds per-region histograms over several color spaces, optionally enrolls
    the observation under the configured name, then scores it against every
    known subject. With feature matching enabled, the top histogram candidates
    are re-ranked by SIFT descriptor matches.
    """
    # Isolate the person from the background (also refines the mask).
    bounding_box_content, mask = self.__extract_person(bounding_box_content, mask)
    # Hue / saturation channels (HSV), split into sub-regions.
    image_hsv = cv2.cvtColor(bounding_box_content, cv2.COLOR_BGR2HSV)
    hue_channel, saturation_channel, _ = cv2.split(image_hsv)
    hue_images = self.__split_image(hue_channel)
    sat_images = self.__split_image(saturation_channel)
    # Raw blue / green / red channels (BGR).
    image_bgr = bounding_box_content
    blue_channel, green_channel, red_channel = cv2.split(image_bgr)
    blue_images = self.__split_image(blue_channel)
    green_images = self.__split_image(green_channel)
    red_images = self.__split_image(red_channel)
    # Chroma channels (YCrCb); luma is discarded.
    image_ycrcb = cv2.cvtColor(bounding_box_content, cv2.COLOR_BGR2YCrCb)
    _, cr_channel, cb_channel = cv2.split(image_ycrcb)
    cr_images = self.__split_image(cr_channel)
    cb_images = self.__split_image(cb_channel)
    masks = self.__split_image(mask)
    # Hue uses 19 bins over [0, 180); all other channels 16 bins over [0, 256).
    hue_histograms = self.__calculate_histograms(masks, 19, (0, 180), hue_images)
    sat_histograms = self.__calculate_histograms(masks, 16, (0, 256), sat_images)
    blue_histograms = self.__calculate_histograms(masks, 16, (0, 256), blue_images)
    green_histograms = self.__calculate_histograms(masks, 16, (0, 256), green_images)
    red_histograms = self.__calculate_histograms(masks, 16, (0, 256), red_images)
    cr_histograms = self.__calculate_histograms(masks, 16, (0, 256), cr_images)
    cb_histograms = self.__calculate_histograms(masks, 16, (0, 256), cb_images)
    new_histograms = SubjectHistograms(hue_histograms, sat_histograms,
                                       blue_histograms, green_histograms,
                                       red_histograms, cr_histograms,
                                       cb_histograms)
    self.__new_histograms = new_histograms
    if self.__feature_matching_enabled:
        new_descriptors = self.__get_descriptors((image_bgr, mask))
    else:
        new_descriptors = None
    # Enrollment: persist the observation under the configured name.
    if self.__add_to_db:
        if self.__subject is None:
            self.__subject = Subject(self.__add_to_db, new_histograms, new_descriptors)
            Database.add_subject(self.__subject)
        else:
            self.__subject.add_histograms(new_histograms)
            if self.__feature_matching_enabled:
                self.__subject.add_descriptors(new_descriptors)
    # Score the observation against every known subject; per-subject scores
    # accumulate across successive identify() calls on this instance.
    for subject in self.__subjects:
        overall_score = subject.check_similarities(new_histograms)
        if self.__feature_matching_enabled:
            # NOTE(review): return value is discarded here and the re-ranking
            # loop below recomputes it -- confirm whether this call is needed.
            self.__do_feature_match(subject.descriptors[0], new_descriptors)
        try:
            self.__scores[subject.name][1] += overall_score
        except KeyError:
            # First observation of this subject: store (subject, score).
            self.__scores[subject.name] = {}
            self.__scores[subject.name][0], self.__scores[subject.name][1] = subject, overall_score
        if self.__best_match is None or self.__scores[subject.name][1] > self.__scores[self.__best_match.name][1]:
            self.__best_match = subject
    if self.__feature_matching_enabled:
        # Re-rank the top-scoring candidates (up to ~9) by SIFT feature matches.
        ordered_scores = OrderedDict(sorted(self.__scores.items(), key=lambda t: t[1][1], reverse=True))
        counter = 0
        highest_feature_match = 0
        for subject_with_score in ordered_scores.values():
            feature_match = self.__do_feature_match(subject_with_score[0].descriptors[0], new_descriptors)
            print(subject_with_score[0].name, feature_match)
            if feature_match > highest_feature_match:
                self.__best_match = subject_with_score[0]
                highest_feature_match = feature_match
            if counter > 8:
                break
            counter += 1
    return self.__best_match
class OpenHumansETL:
    """ETL pipeline: download Open Humans files, parse them, and upsert into the database."""

    def __init__(self, logger, db_connection, master_token):
        """
        Class to initialise downloading of files from OH, convert files into lists of dictionaries,
        and upload to db
        :param logger: logging object passed from parent script
        :param db_connection: database connection in the form of psycopg2.connect(...)
        """
        self.logger = logger
        try:
            self.db = Database(db_connection)
            self.ingester = UpsertIngester(db_connection)
            self.oh = OHWrapper(logger=logger,
                                files_directory=BULK_FILES_DIRECTORY,
                                master_token=master_token)
        except Psycopg2Error:
            # Cannot continue without working DB helpers.
            logger.error(
                f'Error occurred while initialising classes. Breaking script.: {traceback.format_exc()}'
            )
            sys.exit(1)
        os.makedirs(BULK_FILES_DIRECTORY, exist_ok=True)

    def upload_to_db(self, directory=BULK_FILES_DIRECTORY):
        """
        Finds all user folders in a given directory, finds files in each folder,
        passes to processing function
        :param directory: parent directory containing user folders
        """
        # Each immediate subdirectory is one user's folder, named by OH user id.
        user_folders = [x for x in next(os.walk(directory))[1]]
        for user_id in user_folders:
            try:
                user = self.db.get_user(user_id)
                user_files = self.oh.get_files_by_extension(
                    f'{directory}/{user_id}', '.json')
                user_sharing = self.oh.get_user_sharing_flag(user_id)
                # Sharing flag 3 means the user opted out entirely; skip them.
                if user_sharing == 3:
                    continue
                for filename in user_files:
                    # Map the file to its table entity by substring match.
                    entity_name = [
                        k for k in ENTITY_MAPPER.keys() if k in filename
                    ][0]
                    # Resume point: last line already ingested for this entity.
                    last_index = user[entity_name + '_last_index']
                    try:
                        self.process_file_load(user_id, filename, entity_name,
                                               last_index, user_sharing)
                    except (JSONDecodeError, TypeError):
                        self.logger.error(
                            f'Incorrect json format found for user with ID {user_id} and file with name (unknown). {traceback.format_exc()}'
                        )
                    except IndexError:
                        self.logger.error(
                            f'Index out of sync for user with ID {user_id} and file with name (unknown). {traceback.format_exc()}'
                        )
                    except Psycopg2Error:
                        self.logger.error(
                            f'Insert error while working with ID {user_id} and file with name (unknown). {traceback.format_exc()}'
                        )
                    except MemoryError:
                        self.logger.error(
                            f'Memory maxed while working with ID {user_id} and file with name (unknown). {traceback.format_exc()}'
                        )
            except IndexError:
                # No ENTITY_MAPPER key matched the filename; skip this user.
                continue
            except Exception:
                self.logger.error(
                    f'Error while working with user {user_id}: {traceback.format_exc()}'
                )
                continue

    def process_file_load(self, user_id, file, entity, slice_index, sharing_flag):
        """
        Navigates to slice point in json file, extracts records, passes to ingest function,
        updates user indexes
        :param user_id: OH ID of user, same as folder name
        :param file: local file to extract records from
        :param entity: table entity, either treatments, entries, devicestatus or profile
        :param slice_index: The last line records were downloaded from in the json file
        :param sharing_flag: Integer representing which data commons users would like to share files with
        """
        lines = []
        with open(file) as infile:
            # if slice_index != 0:
            #     for i in range(slice_index - 1):
            #         infile.readline()
            # Cap each run at 250 records per file to bound memory use.
            break_count = 0
            for json_line in infile:
                if break_count >= 250:
                    break
                try:
                    line = json.loads(json_line)
                    # Strip NUL characters, which the DB rejects in text columns.
                    for k in line.keys():
                        if '\u0000' in str(line[k]):
                            line[k] = line[k].replace('\u0000', '')
                    lines.append({
                        **{
                            'user_id': user_id,
                            'source_entity': sharing_flag
                        },
                        **line
                    })
                    break_count = break_count + 1
                except JSONDecodeError:
                    self.logger.error(
                        f'JSONDecodeError while reading file {file}, user {user_id} and the following line: {json_line}'
                    )
                    continue
        self.ingest(lines, ENTITY_MAPPER[entity])
        # Advance the resume index by however many records were ingested.
        self.db.update_user_index(user_id, entity, slice_index + len(lines))
        if entity == 'devicestatus':
            # Device status rows embed per-loop metrics under 'openaps';
            # flatten those into their own table keyed by the status id.
            status_metrics = [{
                **{
                    'device_status_id': device['_id']
                },
                **device['openaps']
            } for device in lines if 'openaps' in device]
            self.ingest(status_metrics, ENTITY_MAPPER['status_metrics'])

    def ingest(self, lod, lod_params):
        """
        Uses upsert_ingester.py to upload a list of dictionaries to a given table
        :param lod: List of dictionaries to be inserted
        :param lod_params: Parameters used for inserting to db, including
        mapped model object and table name
        """
        temp_list = []
        for item in lod:  # for each record
            with lod_params['object'](item) as t:  # convert record to model
                temp_list.append(
                    vars(t)
                )  # extract defined variables from model and append to upload list
        if temp_list:
            self.ingester.add_target(target_data=temp_list,
                                     output_schema='openaps',
                                     table_name=lod_params['table'],
                                     primary_keys=lod_params['primary_keys'],
                                     date_format='YYYY-MM-DD HH24:MI:SS')
import json
import time

import requests
from flask import Flask, jsonify, request
from flask_cors import CORS
from flask_redis import FlaskRedis
import bcrypt

from utils.database import Database

# Flask app with instance-relative config: defaults from the config module,
# then overrides from instance/config.py.
app = Flask(__name__, instance_relative_config=True)
app.config.from_object('config')
app.config.from_pyfile('config.py')

# Shared service handles: MySQL wrapper (autocommit on) and Redis client.
mysql = Database(app=app, autocommit=True)
redis = FlaskRedis(app=app)

import utils  # imported after app setup so utils sees the initialized app

CORS(app)


@app.route('/')
def hello_world():
    # Landing page pointing developers at the maintainer.
    return 'Hello World! This is the backend API of Reseed (https://reseed.tongyifan.me). ' \
           'If you want to use it for development, please contact with me by telegram - ' \
           '<a href="https://t.me/tongyifan">@tongyifan</a>'


# The handler for this route continues beyond this chunk.
@app.route('/upload_json', methods=['POST'])
def run(input_handler: InputHandler, camera_name: str, add_to_db: Optional[str]):
    """Track a user-selected region with CMT and identify the subject in each frame.

    Prompts the user to draw a rectangle on the first frame, tracks it through
    subsequent frames, builds a motion mask, and feeds the cropped region to
    SubjectIdentifier. Saves the database and exits when input runs out.
    """
    Database.initialize()
    # Bootstrap: grab the first frame and let the user draw the tracking rectangle.
    im0 = input_handler.get_frame(camera_name).image
    im_gray0 = cv2.cvtColor(im0, cv2.COLOR_BGR2GRAY)
    im_draw = numpy.copy(im0)
    tl, br = util.get_rect(im_draw)
    cmt = CMT(im_gray0, tl, br, estimate_rotation=False)
    identifier = SubjectIdentifier(add_to_db)
    # Small ellipse kernel for noise cleanup on the motion mask.
    structuring_element = cv2.getStructuringElement(cv2.MORPH_ELLIPSE, (3, 3))
    while True:
        try:
            # Read image
            im = input_handler.get_frame(camera_name).image
            im_gray = cv2.cvtColor(im, cv2.COLOR_BGR2GRAY)
            im_draw = numpy.copy(im)
            cmt.process_frame(im_gray)
            # Display results
            # Draw updated estimate
            if cmt.has_result:
                # Crop the tracked region from the current frame.
                cropped_image = im[cmt.tl[1] : cmt.bl[1], cmt.tl[0] : cmt.tr[0]]
                # Motion mask: frame difference vs. the previous gray frame,
                # denoised (median blur), thresholded (> 6), then opened.
                difference = cv2.absdiff(im_gray0, im_gray)
                blurred = cv2.medianBlur(difference, 3)
                display = cv2.compare(blurred, 6, cv2.CMP_GT)
                eroded = cv2.erode(display, structuring_element)
                dilated = cv2.dilate(eroded, structuring_element)
                cropped_mask = dilated[cmt.tl[1] : cmt.bl[1], cmt.tl[0] : cmt.tr[0]]
                # Normalize foreground to 1; stamp an ellipse of value 3 over
                # the region center (value meaning decided by the identifier).
                cropped_mask[cropped_mask == 255] = 1
                horizontal_center = cropped_mask.shape[1] // 2
                vertical_center = cropped_mask.shape[0] // 2
                cv2.ellipse(
                    cropped_mask,
                    (horizontal_center, vertical_center),
                    (horizontal_center, vertical_center),
                    0,
                    0,
                    360,
                    3,
                    -1,
                )
                cv2.rectangle(im_draw, cmt.tl, cmt.br, (255, 0, 0), 4)
                # Identify the cropped region and label it on the display frame.
                subject = identifier.identify(cropped_image, cropped_mask)
                cv2.putText(im_draw, subject.name, cmt.tl, cv2.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 255), 2, cv2.LINE_AA)
            util.draw_keypoints(cmt.tracked_keypoints, im_draw, (255, 255, 255))
            util.draw_keypoints(cmt.votes[:, :2], im_draw)
            util.draw_keypoints(cmt.outliers[:, :2], im_draw, (0, 0, 255))
            cv2.imshow("main", im_draw)
            cv2.waitKey(1)
            # Current frame becomes the reference for the next difference.
            im_gray0 = im_gray
        except IndexError:
            # Input exhausted: persist the subject database before exiting.
            Database.save_db()
            exit(0)