def scan_launch(scan_id):
    """Build the scan request for *scan_id* and launch one async celery
    task per file of the scan.

    :param scan_id: external id of the scan to launch
    """
    with session_transaction() as session:
        scan = None
        try:
            log.debug("scan: %s launching", scan_id)
            # Part for common action for whole scan
            scan = Scan.load_from_ext_id(scan_id, session)
            scan_request = scan_ctrl._create_scan_request(
                scan.files_ext,
                scan.get_probelist(),
                scan.mimetype_filtering)
            scan_request = scan_ctrl._add_empty_results(
                scan.files_ext, scan_request, scan, session)
            # Nothing to do: no file/probe pair left in the request
            if scan_request.nb_files == 0:
                scan.set_status(IrmaScanStatus.finished)
                log.warning("scan %s: finished nothing to do", scan_id)
                return
            # Part for action file_ext by file_ext.
            # Fix: loop variable renamed so it no longer shadows the
            # `file` builtin.
            file_ext_id_list = [fe.external_id for fe in scan.files_ext]
            celery.group(scan_launch_file_ext.si(file_ext_id)
                         for file_ext_id in file_ext_id_list)()
            scan.set_status(IrmaScanStatus.launched)
            session.commit()
            log.info("scan %s: launched", scan_id)
            return
        except Exception as e:
            # Best effort: log and flag the scan as errored, do not
            # re-raise (the transaction context handles rollback).
            log.exception(e)
            if scan is not None:
                scan.set_status(IrmaScanStatus.error)
def test_transaction_error(self, m_db_session):
    """session_transaction must rollback and close, and never commit,
    when the wrapped block raises."""
    # Raising the exception *class* (not an instance) is valid `raise`
    # usage; assertRaises matches it by type.
    exception = IrmaDatabaseError
    with self.assertRaises(exception):
        with module.session_transaction():
            raise exception
    m_db_session.commit.assert_not_called()
    m_db_session.rollback.assert_called()
    m_db_session.close.assert_called()
def set_result(file_ext_id, probe, result):
    """Record *probe*'s result for one file of a scan, then check whether
    the whole scan is now finished.

    :param file_ext_id: external id of the FileExt the result belongs to
    :param probe: name of the probe that produced the result
    :param result: raw result as returned by the probe
    """
    with session_transaction() as session:
        file_ext = FileExt.load_from_ext_id(file_ext_id, session=session)
        # normalize the raw probe output before storing it
        sanitized_res = _sanitize_res(result)
        file_ext.set_result(probe, sanitized_res)
        scan_id = file_ext.scan.external_id
        log.info("scan %s: file %s result from %s",
                 scan_id, file_ext_id, probe)
    # NOTE(review): placed after the transaction block on the assumption
    # that is_finished opens its own session once this one is committed
    # -- original indentation was lost, confirm against upstream.
    is_finished(scan_id)
def handle_output_files(file_ext_id, result, error_case=False):
    """Process files produced/extracted by a probe: either flush them on
    the brain or resubmit them as new files of the same scan.

    :param file_ext_id: external id of the FileExt the result belongs to
    :param result: probe result dict; only its 'uploaded_files' and
        'name' keys are read here
    :param error_case: when True, always flush instead of resubmitting
    """
    log.info("Handling output for file %s", file_ext_id)
    with session_transaction() as session:
        file_ext = FileExt.load_from_ext_id(file_ext_id, session)
        scan = file_ext.scan
        uploaded_files = result.get('uploaded_files', None)
        log.debug("scan %s file %s depth %s", scan.external_id,
                  file_ext_id, file_ext.depth)
        # nothing produced by the probe: nothing to do
        if uploaded_files is None:
            return
        resubmit = scan.resubmit_files
        max_resubmit_level = get_max_resubmit_level()
        # a max level of 0 means "no depth limit"
        if max_resubmit_level != 0 and file_ext.depth > \
           max_resubmit_level:
            log.warning("scan %s file %s resubmit level %s exceeded max "
                        "level (%s)",
                        scan.external_id, file_ext_id, file_ext.depth,
                        max_resubmit_level
                        )
            resubmit = False
        if not resubmit or error_case:
            # not resubmitting: ask the brain to drop the uploaded files
            reason = "Error case" if error_case else "Resubmit disabled"
            log.debug("scan %s: %s flushing files",
                      scan.external_id, reason)
            celery_brain.files_flush(list(uploaded_files.values()),
                                     scan.external_id)
            return
        log.debug("scan %s: found files %s", scan.external_id,
                  uploaded_files)
        # Retrieve the DB probe_result to link it with
        # a new FileProbeResult in _append_new_files
        probe_result = file_ext.fetch_probe_result(result['name'])
        # children are one level deeper than the file that produced them
        new_fws = _append_new_files_to_scan(scan, uploaded_files,
                                            probe_result,
                                            file_ext.depth+1)
        parent_file = file_ext.file
        for new_fw in new_fws:
            parent_file.children.append(new_fw)
        session.commit()
        log.debug("scan %s: %d new files to resubmit",
                  scan.external_id, len(new_fws))
        scan_request = _create_scan_request(new_fws,
                                            scan.get_probelist(),
                                            scan.mimetype_filtering)
        scan_request = _add_empty_results(new_fws, scan_request,
                                          scan, session)
        # mimetype filtering may leave nothing to scan: flush instead
        if scan_request.nb_files == 0:
            scan.set_status(IrmaScanStatus.finished)
            log.info("scan %s: nothing to do flushing files",
                     scan.external_id)
            celery_brain.files_flush(list(uploaded_files.values()),
                                     scan.external_id)
            return
        for new_fw in new_fws:
            celery_brain.scan_launch(new_fw.external_id, new_fw.probes,
                                     scan.external_id)
        return
def remove_files_size(max_size):
    """Purge files from the filesystem once *max_size* is exceeded and
    nullify their path column in the database.

    :param max_size: space limit in bytes attributed to the file system
    :return: how many files were deleted
    """
    with session_transaction() as session:
        deleted_count = File.remove_files_max_size(max_size, session)
        log.debug("Max_size: %s Nb_deleted: %s", max_size, deleted_count)
        return deleted_count
def remove_files(max_age_sec):
    """Purge files older than *max_age_sec* from the filesystem and
    nullify their path column in the database.

    :param max_age_sec: maximum allowed file age, in seconds
    :return: how many files were deleted
    """
    with session_transaction() as session:
        deleted_count = File.remove_old_files(max_age_sec, session)
        log.debug("Max_age_sec: %s Nb_deleted: %s",
                  max_age_sec, deleted_count)
        return deleted_count
def handle_output_files(file_ext_id, result, error_case=False):
    """Process files produced/extracted by a probe: either flush them on
    the brain or resubmit them as new files of the same scan.

    :param file_ext_id: external id of the FileExt the result belongs to
    :param result: probe result dict; only its 'uploaded_files' and
        'name' keys are read here
    :param error_case: when True, always flush instead of resubmitting
    """
    log.info("Handling output for file %s", file_ext_id)
    with session_transaction() as session:
        file_ext = FileExt.load_from_ext_id(file_ext_id, session)
        scan = file_ext.scan
        uploaded_files = result.get('uploaded_files', None)
        log.debug("scan %s file %s depth %s", scan.external_id,
                  file_ext_id, file_ext.depth)
        # nothing produced by the probe: nothing to do
        if uploaded_files is None:
            return
        resubmit = scan.resubmit_files
        max_resubmit_level = get_max_resubmit_level()
        # a max level of 0 means "no depth limit"
        if max_resubmit_level != 0 and file_ext.depth > \
           max_resubmit_level:
            log.warning(
                "scan %s file %s resubmit level %s exceeded max "
                "level (%s)",
                scan.external_id, file_ext_id, file_ext.depth,
                max_resubmit_level)
            resubmit = False
        if not resubmit or error_case:
            # not resubmitting: ask the brain to drop the uploaded files
            reason = "Error case" if error_case else "Resubmit disabled"
            log.debug("scan %s: %s flushing files",
                      scan.external_id, reason)
            celery_brain.files_flush(list(uploaded_files.values()),
                                     scan.external_id)
            return
        log.debug("scan %s: found files %s", scan.external_id,
                  uploaded_files)
        # Retrieve the DB probe_result to link it with
        # a new FileProbeResult in _append_new_files
        probe_result = file_ext.fetch_probe_result(result['name'])
        # children are one level deeper than the file that produced them
        new_fws = _append_new_files_to_scan(scan, uploaded_files,
                                            probe_result,
                                            file_ext.depth + 1)
        parent_file = file_ext.file
        for new_fw in new_fws:
            parent_file.children.append(new_fw)
        session.commit()
        log.debug("scan %s: %d new files to resubmit",
                  scan.external_id, len(new_fws))
        scan_request = _create_scan_request(new_fws,
                                            scan.get_probelist(),
                                            scan.mimetype_filtering)
        scan_request = _add_empty_results(new_fws, scan_request,
                                          scan, session)
        # mimetype filtering may leave nothing to scan: flush instead
        if scan_request.nb_files == 0:
            scan.set_status(IrmaScanStatus.finished)
            log.info("scan %s: nothing to do flushing files",
                     scan.external_id)
            celery_brain.files_flush(list(uploaded_files.values()),
                                     scan.external_id)
            return
        for new_fw in new_fws:
            celery_brain.scan_launch(new_fw.external_id, new_fw.probes,
                                     scan.external_id)
        return
def is_finished(scan_id):
    """Mark the scan finished and trigger the brain-side flush once all
    probe results are in.

    :param scan_id: external id of the scan to check
    """
    with session_transaction() as session:
        scan = Scan.load_from_ext_id(scan_id, session)
        log.debug("scan %s: is_finished %d/%d", scan_id,
                  scan.probes_finished, scan.probes_total)
        # guard against running the completion sequence twice
        if scan.finished() and scan.status != IrmaScanStatus.finished:
            # call finished hook for each file
            for file_ext in scan.files_ext:
                file_ext.hook_finished()
            scan.set_status(IrmaScanStatus.finished)
            # commit before notifying the brain so the finished status
            # is persisted even if the celery call fails
            session.commit()
            # launch flush celery task on brain
            log.debug("scan %s: calling scan_flush", scan.external_id)
            celery_brain.scan_flush(scan.external_id)
def scan_launch_file_ext(file_ext_id):
    """Upload one file to the brain over FTP and launch its scan task.

    :param file_ext_id: external id of the FileExt to upload and scan
    """
    file_ext = None
    with session_transaction() as session:
        try:
            file_ext = FileExt.load_from_ext_id(file_ext_id, session)
            scan_id = file_ext.scan.external_id
            log.debug("scan %s: launch scan for file_ext: %s",
                      scan_id, file_ext_id)
            ftp_ctrl.upload_file(file_ext_id, file_ext.file.path)
            # launch new celery scan task on brain
            celery_brain.scan_launch(file_ext_id, file_ext.probes,
                                     scan_id)
        except IrmaFtpError as e:
            # upload failed: flag the whole scan, do not re-raise
            log.error("file_ext %s: ftp upload error %s", file_ext_id,
                      str(e))
            if file_ext is not None:
                file_ext.scan.set_status(IrmaScanStatus.error_ftp_upload)
        except Exception as e:
            # best effort: swallow and log any other error
            log.exception(e)
def set_status(scan_id, status):
    """Set *status* on the scan identified by *scan_id*.

    :param scan_id: external id of the scan to update
    :param status: new IrmaScanStatus value to store
    """
    log.debug("scan %s: set status %s", scan_id, status)
    with session_transaction() as session:
        target_scan = Scan.load_from_ext_id(scan_id, session=session)
        target_scan.set_status(status)
def test_transaction(self, m_db_session):
    """session_transaction must commit and close, and never rollback,
    when the wrapped block completes without raising."""
    with module.session_transaction():
        pass
    m_db_session.commit.assert_called()
    m_db_session.rollback.assert_not_called()
    m_db_session.close.assert_called()
# Command-line helper: create Tag rows for a comma separated list of
# tag names, skipping names that already exist in the database.
# Python 2 script (uses `print` statements and unicode literals).
# NOTE(review): relies on `sys`, presumably imported earlier in the
# file (not visible in this chunk) -- confirm.
from frontend.models.sqlobjects import Tag
from api.common.sessions import session_transaction
from irma.common.utils import decode_utf8

if len(sys.argv) != 2:
    print("usage: {0} <tag_list> (comma separated)".format(sys.argv[0]))
    sys.exit(1)

# get tag list as argument
tag_list = sys.argv[1]
# split comma separated list
tags = tag_list.split(",")
# force all tags to lowercase
tags = map(lambda x: decode_utf8(x.lower()), tags)
with session_transaction() as session:
    # get all existing tags
    existing_tags = Tag.query_find_all(session)
    existing_text = [t.text for t in existing_tags]
    # filter only the one needed to be created
    to_create_tags = filter(lambda x: x not in existing_text, tags)
    print u"[+] Tags already existing: {0}".format(",".join(existing_text))
    for tag in to_create_tags:
        t = Tag(tag)
        print u"[+] creating Tag: {0}".format(tag)
        # added to the session; committed by session_transaction on exit
        session.add(t)
# Command-line helper: create Tag rows for a comma separated list of
# tag names, skipping names that already exist in the database.
# Python 2 script (uses `print` statements and unicode literals).
# NOTE(review): relies on `sys`, presumably imported earlier in the
# file (not visible in this chunk) -- confirm.
from frontend.models.sqlobjects import Tag
from api.common.sessions import session_transaction
from lib.common.utils import decode_utf8

if len(sys.argv) != 2:
    print("usage: {0} <tag_list> (comma separated)".format(sys.argv[0]))
    sys.exit(1)

# get tag list as argument
tag_list = sys.argv[1]
# split comma separated list
tags = tag_list.split(",")
# force all tags to lowercase
tags = map(lambda x: decode_utf8(x.lower()), tags)
with session_transaction() as session:
    # get all existing tags
    existing_tags = Tag.query_find_all(session)
    existing_text = [t.text for t in existing_tags]
    # filter only the one needed to be created
    to_create_tags = filter(lambda x: x not in existing_text, tags)
    print u"[+] Tags already existing: {0}".format(",".join(existing_text))
    for tag in to_create_tags:
        t = Tag(tag)
        print u"[+] creating Tag: {0}".format(tag)
        # added to the session; committed by session_transaction on exit
        session.add(t)