def create_sample(source_dir, fname_1, fname_2, target_dir):
    """Combine two raw images into a valid sample for our CNN."""
    complete_fname_1 = os.path.join(source_dir, fname_1)
    complete_fname_2 = os.path.join(source_dir, fname_2)
    if not (is_image_file(complete_fname_1) and is_image_file(complete_fname_2)):
        return 'Not an image file: ', complete_fname_1, complete_fname_2
    if not if_match(fname_1, fname_2):
        return 'The two images do not match: ', fname_1, fname_2
    try:
        img_1 = Image.open(complete_fname_1)
        img_2 = Image.open(complete_fname_2)
        img_1 = cut_pics(img_1)
        img_2 = cut_pics(img_2)
    except Exception as e:
        logger.writeerrorlog(e)
        return None
    im = merge_pics(img_1, img_2)
    save_file(im, fname_2, target_dir)
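# Hedged usage sketch, not part of the original code: walk a source directory and
# build a sample from each consecutive pair of files. The pairing rule (adjacent
# files in sorted order) is an assumption for illustration only; in the real
# project, if_match decides whether two filenames belong together.
def create_all_samples(source_dir, target_dir):
    fnames = sorted(os.listdir(source_dir))
    for fname_1, fname_2 in zip(fnames[::2], fnames[1::2]):
        err = create_sample(source_dir, fname_1, fname_2, target_dir)
        if err is not None:
            # create_sample returns a message tuple when validation fails.
            logger.writeerrorlog(err)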
def __init__(self, uri):
    # Open a connection to the database described by the SQLAlchemy URI.
    self.uri = uri
    engine = create_engine(uri)
    try:
        self.conn = engine.connect()
    except Exception as e:
        self.conn = None
        logger.writeerrorlog(e)
def executesql():
    try:
        uri = dict(request.json).get('uri')
        sql = dict(request.json).get('sql')
        sqlquery = sql_query.SQLQuery(uri)
        res = sqlquery.executesql(sql)
        return json_success(json.dumps(res))
    except Exception as e:
        logger.writeerrorlog(e)
        return json_result(code=500, result=str(e), msg='failure')
def adddbs():
    try:
        session = db.session
        dict_rep = dict(request.json)
        core.Database.import_from_dict(session=session, dict_rep=dict_rep)
        session.commit()
        session.close()
        return json_result(result=dict_rep)
    except Exception as e:
        logger.writeerrorlog(e)
        return json_result(code=500, result=str(e))
def deletedbs():
    try:
        id = dict(request.json).get('id')
        session = db.session
        o = session.query(core.Database).filter_by(id=id).first()
        session.delete(o)
        session.commit()
        session.close()
        return json_result(result=None)
    except Exception as e:
        logger.writeerrorlog(e)
        return json_result(code=500, result=str(e), msg='failure')
def prediction():
    try:
        if request.method == 'POST':
            # Kick off the prediction task asynchronously and return its id.
            path = request.args.get('savepath')
            task_id = prediction_net.execute.delay(path)
            return json_result(200, msg={'task_id': str(task_id)})
        else:
            # Poll the status of a previously started task.
            task_id = request.args.get('task_id')
            task = prediction_net.execute.AsyncResult(task_id)
            return task_progress(task)
    except Exception as e:
        logger.writeerrorlog(e)
        return json_result(500, msg='prediction failure!')
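# Hedged sketch, an assumption rather than the project's actual helper:
# task_progress is referenced by the prediction and download_agora views but not
# shown here. A typical Celery polling helper maps AsyncResult.state to a JSON
# payload, assuming tasks report progress via
# update_state(meta={'current': ..., 'total': ...}).
def task_progress(task):
    if task.state == 'PENDING':
        body = {'state': task.state, 'current': 0, 'total': 1}
    elif task.state == 'FAILURE':
        body = {'state': task.state, 'error': str(task.info)}
    else:
        info = task.info if isinstance(task.info, dict) else {}
        body = {'state': task.state,
                'current': info.get('current', 0),
                'total': info.get('total', 1)}
    return json_result(200, msg=body)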
def download_agora():
    try:
        if request.method == 'POST':
            # Kick off the download task asynchronously and return its id.
            path = request.args.get('savepath')
            task_id = download.delay(path)
            return json_result(200, msg={'task_id': str(task_id)})
        else:
            # Poll the status of a previously started task.
            task_id = request.args.get('task_id')
            task = download.AsyncResult(task_id)
            return task_progress(task)
    except Exception as e:
        logger.writeerrorlog(e)
        return json_result(500, msg='download failure!')
def executesql(self, sql):
    try:
        starttime = datetime.now()
        res = self.conn.execute(sql)
        endtime = datetime.now()
        tmp = {
            'execute_time': (endtime - starttime).total_seconds(),
            'query_result': [dict(r) for r in res.fetchall()]
            # 'query_result': [list(resdata) for resdata in res.fetchall()]
        }
        return tmp
    except Exception as e:
        logger.writeerrorlog(e)
        return None
def get_dbs():
    try:
        session = db.session
        databases = session.query(core.Database).all()
        res = []
        for database in databases:
            temp = dict()
            temp['name'] = database.database_name
            temp['id'] = database.id
            res.append(temp)
        session.close()
        return json_result(result=res)
    except Exception as e:
        logger.writeerrorlog(e)
        return json_result(code=500, msg=str(e))
def import_from_dict(cls, session, dict_rep):
    """Import obj from a dictionary"""
    unique_constrains = cls._unique_constrains()
    filters = []  # Using these filters to check if obj already exists

    # Add filter for unique constraints
    ucs = [
        and_(*[
            getattr(cls, k) == dict_rep.get(k)
            for k in cs if dict_rep.get(k) is not None
        ])
        for cs in unique_constrains
    ]
    filters.append(or_(*ucs))

    # Check if object already exists in DB, break if more than one is found
    try:
        obj_query = session.query(cls).filter(and_(*filters))
        obj = obj_query.one_or_none()
    except MultipleResultsFound as e:
        logger.writeerrorlog('Error importing {0} \n {1} \n {2}'.format(
            cls.__name__, str(obj_query), dict_rep))
        raise e

    if not obj:
        is_new_obj = True
        # Create new DB object
        obj = cls(**dict_rep)
        logger.writeinfolog('Importing new {0} {1}'.format(
            obj.__tablename__, str(obj)))
        session.add(obj)
    else:
        is_new_obj = False
        logger.writeinfolog('Updating {0} {1}'.format(
            obj.__tablename__, str(obj)))

    # Update columns
    for k, v in dict_rep.items():
        setattr(obj, k, v)