import json
from datetime import datetime
from multiprocessing import Pool

from flask import abort, jsonify, make_response, request
from sqlalchemy.exc import IntegrityError, SQLAlchemyError


def add_person():
    """Validate an individual report payload and persist it."""
    if not request.json:
        raise InvalidUsage("No data supplied")
    # Validate the required keys at every level of the payload.
    check_param(request.json, ['id', 'data'])
    check_param(request.json['data'],
                ['diagnostic', 'locator', 'sessionId', 'timestamp'])
    check_param(request.json['data']['timestamp'],
                ['_seconds', '_nanoseconds'])
    try:
        report = IndividualReportFactory.build(request.json)
    except TypeError:
        raise InvalidUsage(
            "Some parameter was wrongly typed (string, int, array).")
    except Exception:
        abort(400, "Could not create Individual Report. "
                   "Probably malformed JSON: %s" % request.json)
    try:
        session.add(report)
        session.commit()
    except SQLAlchemyError:
        session.rollback()
        abort(500, "Could not insert into database")
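

# check_param and InvalidUsage are defined elsewhere in the project. The
# definitions below are a minimal sketch of the assumed behaviour, included so
# this module reads self-contained: check_param raises InvalidUsage for any
# missing key, and InvalidUsage follows the usual Flask custom-exception
# pattern. The real implementations may differ.
class InvalidUsage(Exception):
    """Client error carrying a message and an HTTP status code (sketch)."""

    status_code = 400

    def __init__(self, message, status_code=None):
        super().__init__(message)
        self.message = message
        if status_code is not None:
            self.status_code = status_code


def check_param(payload, required_keys):
    """Raise InvalidUsage if any required key is missing (sketch)."""
    missing = [key for key in required_keys if key not in payload]
    if missing:
        raise InvalidUsage("Missing parameters: " + ", ".join(missing))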


def init():
    """Initialize database"""
    init_db()
    download_geocoding_file()
    try:
        upload_geo_data()
    except IntegrityError:
        session.rollback()
        abort(400, "Database already initialized!")
    return "Initialized DB and uploaded location data"
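

# generate_token is implemented elsewhere in the project. The report handler
# below validates tokens as exactly seven characters, so a minimal sketch of
# the assumed behaviour could look like this (the alphabet and the lack of
# collision handling are guesses, not the project's actual scheme):
import secrets
import string


def generate_token():
    """Return a random 7-character token (sketch; real scheme may differ)."""
    alphabet = string.ascii_uppercase + string.digits
    return ''.join(secrets.choice(alphabet) for _ in range(7))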


def _store_report(data):
    """Build a report (and its optional comorbidity record) and persist it.

    Shared by both branches of report() so the insert logic is not duplicated.
    """
    try:
        report = ReportFactory.build(data)
    except TypeError:
        raise InvalidUsage(
            "Some parameter was wrongly typed (string, int, array).")
    try:
        session.add(report)
        session.commit()
    except SQLAlchemyError:
        session.rollback()
        abort(500, "Could not insert into database")
    if report.has_comorbid:
        data['report']['comorbid']['parent_id'] = report.document_id
        try:
            comorbid = ComorbidFactory.build(data['report']['comorbid'])
        except TypeError:
            raise InvalidUsage(
                "Some parameter was wrongly typed (string, int, array).")
        try:
            session.add(comorbid)
            session.commit()
        except SQLAlchemyError:
            session.rollback()
            abort(500, "Could not insert into database")


def report():
    """Accept an anonymous report.

    If a valid token is submitted, the report is stored under it. If only a
    report is submitted, a new token is generated, stored, and returned.
    """
    data = request.json
    if 'token' in data:
        submitted_token = str(data['token'])
        if len(submitted_token) != 7:
            raise InvalidUsage("Invalid token")
        existing = session.query(Token).filter_by(token=submitted_token).first()
        if not existing:
            raise InvalidUsage("Provided token doesn't exist")
        _store_report(data)
        return make_response("", 201)
    elif 'report' in data:
        generated_token = generate_token()
        data['token'] = generated_token
        token = TokenFactory.build(generated_token)
        try:
            session.add(token)
            session.commit()
        except SQLAlchemyError:
            session.rollback()
            abort(500, "Could not insert a new token into database")
        _store_report(data)
        return make_response(jsonify({"token": generated_token}), 201)
    else:
        raise InvalidUsage("Required parameters are missing")
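

# Route registration is not part of this module; the sketch below shows one
# standard way to wire the handlers into a Flask app. The URL rules and the
# handle_invalid_usage handler are assumptions for illustration (they rely on
# the InvalidUsage shape sketched above), not the project's actual setup.
from flask import Flask

app = Flask(__name__)
app.add_url_rule('/init', view_func=init)
app.add_url_rule('/individualreport', view_func=add_person, methods=['POST'])
app.add_url_rule('/report', view_func=report, methods=['POST'])


@app.errorhandler(InvalidUsage)
def handle_invalid_usage(error):
    """Turn InvalidUsage exceptions into JSON error responses."""
    response = jsonify({'message': error.message})
    response.status_code = error.status_code
    return response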


def download_hours_frame(_from_hour: str, _to_hour: str,
                         worker_frame_minute: int = 10):
    """Download reports hour by hour between two timestamps.

    Each hour is split into worker_frame_minute-sized windows that are
    fetched in parallel, backed up to a JSON file, and committed to the
    database.
    """
    if 60 % worker_frame_minute != 0:
        raise ValueError("worker_frame_minute must evenly divide 60")
    _from = int(datetime.strptime(_from_hour, DATE_HOUR_PARAM_FORMAT).timestamp())
    _to = int(datetime.strptime(_to_hour, DATE_HOUR_PARAM_FORMAT).timestamp())

    # Each run writes its backup files into a timestamped directory.
    now_str = datetime.now().strftime("%Y-%m-%d-%H-%M-%S")
    backup_dir = BACKUP_DOCUMENTS_PATH / ('download-time-' + now_str)
    backup_dir.mkdir(parents=True, exist_ok=True)

    # Iterate over the range one hour at a time.
    _previous = _from
    _next = _previous + 60 * 60
    while _next <= _to:
        _prev_str = datetime.fromtimestamp(_previous).strftime(DATE_HOUR_PARAM_FORMAT)
        _next_str = datetime.fromtimestamp(_next).strftime(DATE_HOUR_PARAM_FORMAT)
        print("Download from " + _prev_str + " to " + _next_str)

        # Split the hour into (start, end) minute windows for the workers.
        minutes = []
        _previous_minute = _previous
        _next_minute = _previous_minute + 60 * worker_frame_minute
        while _next_minute <= _next:
            minutes.append((_previous_minute, _next_minute))
            _previous_minute = _next_minute
            _next_minute = _previous_minute + worker_frame_minute * 60

        # Fetch all windows of this hour in parallel.
        hour_data = []
        process_count = 60 // worker_frame_minute
        with Pool(processes=process_count) as pool:
            for worker_results in pool.map(download_worker, minutes):
                hour_data.extend(worker_results)

        if hour_data:
            # Back up the raw hour to a JSON file before touching the database.
            file_path = backup_dir / ('data-' + _prev_str + '_' + _next_str + '.json')
            with open(file_path, 'w') as outfile:
                json.dump(hour_data, outfile)
            try:
                # Fast path: commit the whole hour as a single batch.
                for report_json in hour_data:
                    session.add(IndividualReportFactory.build(report_json))
                session.commit()
            except IntegrityError:
                # A duplicate poisoned the batch; retry one report at a time
                # so only the duplicates are skipped.
                session.rollback()
                print('Warn: Duplicate in batch commit')
                for report_json in hour_data:
                    try:
                        session.add(IndividualReportFactory.build(report_json))
                        session.commit()
                    except IntegrityError:
                        session.rollback()
                        print('Warn: Duplicate doc id: ' + report_json['id'])

        _previous = _next
        _next = _previous + 60 * 60
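

# Example invocation, e.g. to re-import a few hours of reports.
# DATE_HOUR_PARAM_FORMAT is defined elsewhere in the project; the argument
# strings below assume it resembles '%Y-%m-%d-%H' and are illustrative only.
if __name__ == '__main__':
    # Download three hours of reports, fetched in 10-minute windows.
    download_hours_frame('2020-04-01-00', '2020-04-01-03')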