def post(self, state_id):
    logger.debug(f"POST VALUES: {self.request.body}")
    nels_id = int(self.get_body_argument("nelsId", default=None))
    location = self.get_body_argument("selectedFiles", default=None)

    state = states.get(state_id)
    if state is None:
        return self.send_response_404()

    logger.debug(f"State info for import: {state}")

    try:
        user = state['user']
        tracking_id = self._register_import(user, nels_id, location)
        tracking_id = utils.encrypt_value(tracking_id)
        submit_mq_job(tracking_id, "import")
        self.redirect(galaxy_url)
    except Exception as e:
        logger.error(f"Error during import registration: {e}")
        logger.debug(f"State info for import: {state}")
        logger.debug(f"nels_id: {nels_id}")
        logger.debug(f"location: {location}")
        self.send_response_400()
def post(self, tracking_id):
    logger.debug("requeue import tracking")
    self.check_token()
    values = self.post_values()
    self.require_arguments(values, ['state'])
    state = values['state']

    try:
        tracking_id = utils.decrypt_value(tracking_id)
        tracking = db.get_import_tracking(tracking_id)
        logger.debug(tracking)
        tracking['state'] = state
        for k in ['id', 'create_time', 'update_time']:
            del tracking[k]
        tracking['log'] = f"requeue import tracker {tracking_id} and changed state to {state}"
        tracking_id = db.add_import_tracking(tracking)
        tracking_id = utils.encrypt_value(tracking_id)
        submit_mq_job(tracking_id, "import")
        self.send_response_200()
    except Exception as e:
        # logger.exception logs the message together with the full stack trace
        logger.exception(f"Requeue import tracking error: {e}")
        self.send_response_404()
def get_tos(self):
    session_key = self.get_session_key()
    user_tos = db.get_user_tos(session_key)

    if user_tos is None:
        logger.error(f"cannot find user from session-key {session_key}")
        return self.send_response_403()

    return user_tos
def submit_mq_job(tracker_id: int, type: str) -> None:
    payload = {'tracker_id': tracker_id, 'type': type}

    if mq is None:
        logger.error('MQ not configured, cannot send message')
        return

    mq.publish(body=json.dumps(payload))
def submit_mq_job(tracking_id: int, type: str) -> None:
    # Utility method for submitting jobs to the RabbitMQ
    payload = {'tracker_id': tracking_id, 'type': type}

    if mq is None:
        logger.error('MQ not configured, cannot send message')
        return

    mq.publish(body=json.dumps(payload))
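# The `mq` object used above and in the status handler below is not shown in
# this section. A minimal sketch of such a helper, assuming it wraps pika's
# BlockingConnection and publishes to a single named queue (the class name,
# queue name and host below are illustrative assumptions, not the project's
# actual configuration):
import json
import pika


class MQ:
    def __init__(self, host: str = 'localhost', queue: str = 'nga_jobs'):
        # open a blocking connection and declare the work queue
        self.connection = pika.BlockingConnection(
            pika.ConnectionParameters(host=host))
        self.channel = self.connection.channel()
        self.queue = queue
        self.channel.queue_declare(queue=queue, durable=True)

    def publish(self, body: str) -> None:
        # publish on the default exchange, routed directly to the queue
        self.channel.basic_publish(exchange='', routing_key=self.queue, body=body)


# Hypothetical wiring:
# mq = MQ(host='localhost', queue='nga_jobs')
# submit_mq_job(42, 'export')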
async def get(self):
    logger.debug("get rabbitmq connection status")
    self.check_token()

    try:
        if mq.connection.is_open:
            mq.connection.process_data_events()
            alive = True
            message = ""
        else:
            alive = False
            logger.error("Connection is NOT open")
            message = "Connection is NOT open"
    except pika.exceptions.ConnectionClosed as e:
        logger.error(e)
        alive = False
        message = "Connection closed: " + str(e)
    except pika.exceptions.StreamLostError as e:
        logger.error(e)
        alive = False
        message = "Connection lost: " + str(e)
    except Exception as e:
        logger.error(e)
        alive = False
        message = "Unspecific error: " + str(e)

    if alive:
        return self.send_response(data={'status': alive})
    else:
        return self.send_response_503(data={'status': alive, 'message': message})
async def get(self, export_id=None):
    logger.debug("get history download")
    self.check_token()

    # chunk size to read
    chunk_size = 1024 * 1024 * 1  # 1 MiB

    export_id = utils.decrypt_value(export_id)
    export = db.get_export(export_id)[0]

    try:
        dataset = db.get_dataset(export['dataset_id'])
        filename = utils.construct_file_path(obj_id=dataset['id'],
                                             file_dir=galaxy_file_path)
        logger.debug("start the download")
        with open(filename, 'rb') as f:
            while True:
                chunk = f.read(chunk_size)
                if not chunk:
                    break
                try:
                    self.write(chunk)  # write the chunk to the response
                    await self.flush()  # flush the current chunk to the socket
                except iostream.StreamClosedError:
                    # the client has closed the connection, so stop streaming
                    break
                finally:
                    # deleting the chunk is very important: if many clients are
                    # downloading files at the same time, the chunks kept in
                    # memory would keep increasing and eat up the RAM
                    del chunk
                    time.sleep(0)
    except Exception as e:
        logger.error(e)
        self.send_response_400(data={'error': str(e)})

    logger.debug("download completed")
def post(self, instance, state_id):
    # logger.debug(f"POST VALUES: {self.request.body}")
    nels_id = int(self.get_body_argument("nelsId", default=None))
    location = self.get_body_argument("selectedFiles", default=None)

    if instance == instance_id:
        logger.debug("Direct access to state")
        state = states.get(state_id)
    else:
        logger.debug("Callback access to state")
        state = instances[instance]['api'].get_state(state_id)

    if state is None:
        return self.send_response_404()

    logger.debug(f"State info for export: {state}")

    try:
        instance_name = instances[instance]['name']
        user = state['user']
        history_id = state['history_id']
        tracking_id = self._register_export(instance_name, user, history_id,
                                            nels_id, location)
        tracking_id = utils.encrypt_value(tracking_id)
        submit_mq_job(tracking_id, "export")
        logger.info(f"Redirecting to {instances[instance]['url']}")
        self.redirect(instances[instance]['url'])
    except Exception as e:
        logger.error(f"Error during export registration: {e}")
        logger.debug(f"State info for export: {state}")
        logger.debug(f"nels_id: {nels_id}")
        logger.debug(f"location: {location}")
        self.send_response_400()
def run_fetch_export(tracker):
    logger.info(f'{tracker["id"]}: fetch export start')

    export_id = tracker['export_id']
    tracker_id = tracker['id']
    instance = tracker['instance']
    outfile = "{}/{}.tgz".format(tempfile.mkdtemp(dir=tmp_dir), export_id)

    master_api.update_export(tracker_id, {
        'tmpfile': outfile,
        'state': 'fetch-running'
    })

    try:
        cmd = f"curl -H 'Authorization: bearer {instances[instance]['nga_key']}' -Lo {outfile} {instances[instance]['nga_url']}/history/download/{export_id}/"
        logger.debug(f'{tracker["id"]}: fetch-cmd: {cmd}')
        run_cmd(cmd)
        logger.debug(f'{tracker["id"]}: fetch cmd done')
        master_api.update_export(tracker_id, {
            'tmpfile': outfile,
            'state': 'fetch-ok'
        })
        submit_mq_job(tracker_id, "export")
    except Exception as e:
        master_api.update_export(tracker_id, {
            'tmpfile': outfile,
            'state': 'fetch-error',
            'log': str(e)
        })
        logger.error(f"{tracker['id']} fetch error: {e}")

    logger.info(f'{tracker["id"]}: fetch export done')
    return
def do_work(ch, method, properties, body):
    logger.debug("Callback call::: Method %s Properties: %s Message body: %s\n" %
                 (method, properties, body))
    ch.basic_ack(delivery_tag=method.delivery_tag)

    try:
        payload = json.loads(body)
    except Exception as e:
        logger.error(e)
        return

    if "tracker_id" not in payload or "type" not in payload:
        logger.error(f"Invalid message {payload}")
        raise Exception(f"Invalid message {payload}")

    tracker_id = payload['tracker_id']
    type = payload['type']

    if type == 'export':
        tracker = master_api.get_export(tracker_id)
    elif type == 'import':
        tracker = master_api.get_import(tracker_id)
    else:
        logger.error(f"Invalid type '{type}'")
        raise Exception(f"Invalid type '{type}'")

    logger.debug(f"tracker {tracker}")
    state = tracker['state']
    logger.debug(f"do_work tracker_id: {tracker_id} state: '{state}'")

    if type == 'export' and state == 'pre-queueing':
        run_history_export(tracker)
    elif type == 'export' and state == 'ok':
        run_fetch_export(tracker)
    elif type == 'export' and state == 'fetch-ok':
        run_push_export(tracker)
    elif state == 'finished':
        pass
    elif type == 'import' and state == 'pre-fetch':
        get_history_from_nels(tracker)
    elif type == 'import' and state == 'nels-transfer-ok':
        import_history(tracker)
    else:
        logger.error(f"Unknown state {state} for tracker_id: {tracker_id}")
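# do_work has the standard pika on-message callback signature
# (channel, method, properties, body). A minimal consumer loop that would feed
# it, assuming a blocking connection and a queue named 'nga_jobs' (both host
# and queue name are illustrative assumptions, not the project's settings):
import pika


def run_worker(host: str = 'localhost', queue: str = 'nga_jobs') -> None:
    connection = pika.BlockingConnection(pika.ConnectionParameters(host=host))
    channel = connection.channel()
    channel.queue_declare(queue=queue, durable=True)
    # hand each delivery to do_work; do_work acks the message itself
    channel.basic_consume(queue=queue, on_message_callback=do_work)
    channel.start_consuming()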
def run_history_export(tracker):
    logger.info(f'{tracker["id"]}: history export start')
    instance = tracker['instance']
    # print(instance)

    try:
        info = instances[instance]['api'].get_info()
        if info['free_gb'] < 30:
            # Not enough free disk space to do this, alert sysadmin
            logger.error("Not enough free space for export, email admin.")
            master_api.update_export(tracker['id'], {'state': 'disk-space-error'})
            return
    except Exception as e:
        # traceback.print_tb(e.__traceback__)
        logger.error(f"{tracker['id']}: Fetch info error {e}")

    try:
        galaxy_instance = GalaxyInstance(instances[instance]['url'],
                                         key=instances[instance]['api_key'],
                                         verify=certifi.where())
    except Exception as e:
        logger.error(f"{tracker['id']}: Trigger export through bioblend: {e}")
        master_api.update_export(tracker['id'], {'state': 'bioblend-error'})
        return

    try:
        export_id = galaxy_instance.histories.export_history(
            tracker['history_id'], maxwait=1, gzip=True)
    except Exception as e:
        logger.error(
            f"{tracker['id']}/{tracker['instance']}: bioblend trigger export {e}")
        master_api.update_export(tracker['id'], {
            'state': 'bioblend-error',
            'log': str(e)
        })
        return

    while True:
        if export_id is None or export_id == '':
            history = instances[instance]['api'].get_history_export(
                history_id=tracker['history_id'])
            logger.debug(f"history id not found !{history}")
            if history is not None and history != '':
                master_api.update_export(tracker['id'], {
                    "export_id": history['export_id'],
                    'state': 'new'
                })
                export_id = history['export_id']
            else:
                logger.error(
                    f"{tracker['id']}: No history id associated with {export_id}")
                raise RuntimeError(
                    f"{tracker['id']}: No history id associated with {export_id}")
        else:
            # print(f" API :: {instance['api']}")
            export = instances[instance]['api'].get_history_export(
                export_id=export_id)
            logger.debug(export)
            master_api.update_export(tracker['id'], {
                "export_id": export_id,
                'state': export['state']
            })
            if export['state'] in ['ok', 'error']:
                submit_mq_job(tracker['id'], 'export')
                logger.info(f'{tracker["id"]}: history export done')
                return

        logger.debug("entering sleep cycle")
        time.sleep(sleep_time)