def set_result(scanid, file_hash, probe, result):
    """Store a probe result for every fileweb of a scan matching a sha256.

    :param scanid: external id of the scan (as returned by scan_new)
    :param file_hash: sha256 of the analyzed file
    :param probe: name of the probe that produced the result
    :param result: raw probe result dict (sanitized before storage)
    """
    with session_transaction() as session:
        scan = Scan.load_from_ext_id(scanid, session=session)
        fws = scan.get_filewebs_by_sha256(file_hash)
        if len(fws) == 0:
            # hash not part of this scan: nothing to update
            log.error("file %s not found in scan", file_hash)
            return
        # refresh the last-scan timestamp on the File record itself
        fws_file = File.load_from_sha256(file_hash, session)
        fws_file.timestamp_last_scan = compat.timestamp()
        fws_file.update(['timestamp_last_scan'], session=session)
        sanitized_res = _sanitize_res(result)
        # update results for all files with same sha256
        for fw in fws:
            # Update main reference results with fresh results
            pr = _fetch_probe_result(fw, probe)
            _update_ref_results(fw, fw.file, pr)
            fw.file.update(session=session)
            # fill ProbeResult with probe raw results
            pr.doc = sanitized_res
            pr.status = sanitized_res.get('status', None)
            s_type = sanitized_res.get('type', None)
            pr.type = IrmaProbeType.normalize(s_type)
            pr.update(session=session)
            # collect the names of probes that already delivered a result
            probedone = []
            for fw_pr in fw.probe_results:
                if fw_pr.doc is not None:
                    probedone.append(fw_pr.name)
            log.info("scanid: %s result from %s probedone %s",
                     scanid, probe, probedone)
    # NOTE(review): called after the session transaction has committed,
    # so is_finished sees the freshly stored result — confirm placement
    # against upstream, as the flattened source does not show indentation.
    is_finished(scanid)
def launch_asynchronous(scanid):
    """Upload scan files to the brain over FTP and launch the scan task.

    Requires the scan to be in ``ready`` status. Transitions it to
    ``finished`` (nothing to do), ``error_ftp_upload`` (upload failure)
    or ``uploaded`` (success).

    :param scanid: external id of the scan
    """
    log.debug("scanid: %s", scanid)
    with session_transaction() as session:
        scan = Scan.load_from_ext_id(scanid, session=session)
        # raises if the scan is not exactly in 'ready' status
        IrmaScanStatus.filter_status(scan.status,
                                     IrmaScanStatus.ready,
                                     IrmaScanStatus.ready)
        scan_request = _create_scan_request(scan.files_web,
                                            scan.get_probelist(),
                                            scan.mimetype_filtering)
        scan_request = _add_empty_results(scan.files_web, scan_request,
                                          scan, session)
        # Nothing to do
        if scan_request.nb_files == 0:
            scan.set_status(IrmaScanStatus.finished)
            session.commit()
            log.warning("scanid: %s finished nothing to do", scanid)
            return
        try:
            # upload every file of the scan to the brain's FTP storage
            upload_list = list()
            for file in scan.files:
                upload_list.append(file.path)
            ftp_ctrl.upload_scan(scanid, upload_list)
        except IrmaFtpError as e:
            log.error("scanid: %s ftp upload error %s", scanid, str(e))
            scan.set_status(IrmaScanStatus.error_ftp_upload)
            session.commit()
            return
        # launch new celery scan task on brain
        celery_brain.scan_launch(scanid, scan_request.to_dict())
        scan.set_status(IrmaScanStatus.uploaded)
        session.commit()
        log.info("scanid: %s uploaded", scanid)
        return
def test002_transaction_error(self, m_db_session):
    """An error raised inside the transaction triggers rollback + close,
    and commit is never reached."""
    with self.assertRaises(IrmaDatabaseError):
        with module.session_transaction():
            raise IrmaDatabaseError
    m_db_session.commit.assert_not_called()
    m_db_session.rollback.assert_called()
    m_db_session.close.assert_called()
def is_finished(scanid):
    """Mark a scan as finished once every result arrived, then ask the
    brain to flush its storage for this scan.

    :param scanid: external id of the scan
    """
    with session_transaction() as session:
        scan = Scan.load_from_ext_id(scanid, session=session)
        # guard: skip scans still in progress or already marked finished
        if not scan.finished() or scan.status == IrmaScanStatus.finished:
            return
        scan.set_status(IrmaScanStatus.finished)
        session.commit()
        # launch flush celery task on brain
        log.debug("scanid: %s calling scan_flush", scan.external_id)
        celery_brain.scan_flush(scan.external_id)
def set_launched(scanid, scan_report_dict):
    """ set status launched for scan
    :param scanid: id returned by scan_new
    :param scan_report_dict: scan details output by brain (currently unused)
    :return: None
    :raise: IrmaDatabaseError
    """
    with session_transaction() as session:
        # fix: dropped the redundant format(scanid) wrapper — the lazy
        # %s logging argument stringifies the value already
        log.info("scanid: %s is now launched", scanid)
        scan = Scan.load_from_ext_id(scanid, session=session)
        # only the uploaded -> launched transition is valid; any other
        # status is left untouched
        if scan.status == IrmaScanStatus.uploaded:
            scan.set_status(IrmaScanStatus.launched)
            session.commit()
def handle_output_files(scanid, parent_file_hash, probe, result):
    """Append files uploaded by a probe to the scan and resubmit them.

    :param scanid: external id of the scan
    :param parent_file_hash: sha256 of the file that produced the outputs
    :param probe: name of the probe (unused here, kept for API symmetry)
    :param result: probe result dict, may carry an 'uploaded_files' mapping
    """
    with session_transaction() as session:
        scan = Scan.load_from_ext_id(scanid, session=session)
        uploaded_files = result.get('uploaded_files', None)
        # nothing to do when the probe produced no files or the scan
        # was configured without resubmission
        if uploaded_files is None or not scan.resubmit_files:
            log.debug("scanid: %s Nothing to resubmit or resubmit disabled",
                      scanid)
            return
        log.info("scanid: %s appending new uploaded files %s",
                 scanid, uploaded_files.keys())
        parent_file = File.load_from_sha256(parent_file_hash, session)
        # filter already present file in current scan
        # (snapshot taken BEFORE appending the new files below)
        hash_uploaded = [f.sha256 for f in scan.files]
        new_fws = _append_new_files_to_scan(scan, uploaded_files, session)
        # record the parent/child relationship for each new fileweb
        for fw in new_fws:
            parent_file.children.append(fw)
        _resubmit_files(scan, parent_file, new_fws, hash_uploaded, session)
def test001_transaction(self, m_db_session):
    """A transaction that completes cleanly commits and closes the
    session, without ever rolling back."""
    with module.session_transaction():
        pass
    m_db_session.rollback.assert_not_called()
    m_db_session.commit.assert_called()
    m_db_session.close.assert_called()
def remove_files(max_age_sec):
    """Delete files older than ``max_age_sec`` seconds.

    :param max_age_sec: minimal age in seconds for a file to be removed
    :return: number of files deleted
    """
    with session_transaction() as session:
        deleted_count = File.remove_old_files(max_age_sec, session)
        return deleted_count
def remove_files(max_age_sec):
    """Delete files older than ``max_age_sec`` seconds and log the count.

    :param max_age_sec: minimal age in seconds for a file to be removed
    :return: number of files deleted
    """
    with session_transaction() as session:
        removed = File.remove_old_files(max_age_sec, session)
        log.debug("Max_age_sec: %s Nb_deleted: %s", max_age_sec, removed)
        return removed
# Command-line helper: create Tag rows for a comma-separated list of tag
# names, skipping the ones that already exist in the database.
# NOTE(review): Python 2 script — print statements and list-returning
# map/filter are relied upon below.
import sys
from frontend.models.sqlobjects import Tag
from frontend.helpers.sessions import session_transaction
from lib.common.utils import decode_utf8

if len(sys.argv) != 2:
    print("usage: {0} <tag_list> (comma separated)".format(sys.argv[0]))
    sys.exit(1)

# get tag list as argument
tag_list = sys.argv[1]
# split comma separated list
tags = tag_list.split(",")
# force all tags to lowercase (and decode to unicode)
tags = map(lambda x: decode_utf8(x.lower()), tags)

with session_transaction() as session:
    # get all existing tags
    existing_tags = Tag.query_find_all(session)
    existing_text = [t.text for t in existing_tags]
    # filter only the one needed to be created
    to_create_tags = filter(lambda x: x not in existing_text, tags)
    print u"[+] Tags already existing: {0}".format(",".join(existing_text))
    for tag in to_create_tags:
        t = Tag(tag)
        print u"[+] creating Tag: {0}".format(tag)
        session.add(t)
def remove_files(max_age_sec):
    """Delete files older than ``max_age_sec`` seconds and log the count.

    :param max_age_sec: minimal age in seconds for a file to be removed
    :return: number of files deleted
    """
    with session_transaction() as session:
        nb_deleted = File.remove_old_files(max_age_sec, session)
        # fix: the format string has two %s placeholders but only
        # max_age_sec was passed, raising a formatting error when the
        # log record is emitted — supply nb_deleted as the second arg
        log.debug("Max_age_sec: %s Nb_deleted: %s",
                  max_age_sec, nb_deleted)
        return nb_deleted