def delete(self, using=None, keep_parents=False):
    """
    Kill the job process and remove its support files, then delete the row.

    :param using: database alias forwarded to the parent delete()
    :param keep_parents: forwarded to the parent delete()
    :return: False when a support file could not be removed, otherwise the
             result of the parent class delete()
    """
    self.kill(signal.SIGKILL)
    # Both on-disk artifacts must go away before the DB row does
    for support_file in (self.json_file, self.pid_file):
        if not storage.delete(support_file):
            return False
    return super(AbstractJobModel, self).delete(using, keep_parents)
def delete(self, using=None, keep_parents=False):
    """
    Kill the sniffing job (if still running) and remove its support files,
    then delete the row.

    The kill is skipped when the recorded status already marks the process
    as killed or aborted.

    :param using: database alias forwarded to the parent delete()
    :param keep_parents: forwarded to the parent delete()
    :return: False when a support file could not be removed, otherwise the
             result of the parent class delete()
    """
    if self.status not in (signal.SIGKILL, signal.SIGABRT):
        self.kill(signal.SIGKILL)
    # Both on-disk artifacts must go away before the DB row does
    for support_file in (self.json_file, self.pid_file):
        if not storage.delete(support_file):
            return False
    return super(SniffingJobModel, self).delete(using, keep_parents)
def _check_log_file():
    """
    Ensure the log folder exists and rotate-by-deletion: once the app log
    file reaches 5 MB it is removed so logging restarts from scratch.
    """
    storage.check_folder(os.path.dirname(APP_LOGFILE))
    if not os.path.isfile(APP_LOGFILE):
        return
    try:
        # 5 MB threshold, measured in decimal megabytes
        if os.stat(APP_LOGFILE).st_size / 1000000.0 >= 5:
            storage.delete(APP_LOGFILE)
    except FileNotFoundError:
        # Another thread or process already removed the file
        return
def _launch_browser(bind_host: str):
    """
    Open the web browser pointing at the app in a separate single process.

    :param bind_host: the host the web app is bound to (read by the masked
                      code below — TODO confirm, see NOTE)
    """
    user = whoami()

    def browser_target():
        """
        The function that launches the browser.
        """
        # Drop back to the invoking (non-root) user before opening the browser
        set_owner_process(user)
        # NOTE(review): the span between the two string literals below was
        # masked by a secret scrubber ("******") — the original statements
        # that actually opened the browser are lost. This line does not
        # compile as-is; the original must be restored from VCS history.
        Log.info('Launching browser with User: '******'Web browser opened')

    # Run the browser launcher detached; its pid file is discarded because
    # nobody needs to manage this process afterwards
    pidfile = MultiTask.multiprocess(browser_target, asynchronous=True, cpu=1)
    delete(pidfile)  # The pidfile is not required
def upload_file(self, tmp_file) -> str:
    """
    Persist an in-memory uploaded file under this object's tmp folder.

    :param tmp_file: the in-memory uploaded file
    :type tmp_file: django.core.files.uploadedfile.InMemoryUploadedFile
    :rtype: str
    """
    folder = join(APP_TMP, self.name)
    storage.check_folder(folder)
    # Timestamp prefix keeps successive uploads of the same name apart
    destination_path = join(folder, timestamp() + '_' + tmp_file.name)
    # Make sure no stale file sits at the target path
    storage.delete(destination_path)
    with open(destination_path, 'wb+') as out:
        for chunk in tmp_file.chunks():
            out.write(chunk)
    return str(destination_path)
def start(self, target, args, asynchronous, cpu):
    """
    Fan `target` out over `cpu` tasks (processes or threads, depending on
    self.tasks_type), splitting every listable argument into per-task
    slices. The first task that produces a non-None result writes it to
    self.resfile and SIGKILLs its siblings (and itself, for processes).

    :param target: the callable each task runs (may be None)
    :param args: tuple of arguments; listable ones are partitioned per task
    :param asynchronous: when False, join all tasks and return the result
                         read back from self.resfile; when True return None
    :param cpu: number of parallel tasks to spawn
    :return: the result string read from self.resfile, or None
    """
    self.tasks = []

    def task_target(*arguments):
        # Wrapper executed by every task: runs `target`, publishes the
        # first non-None result and tears down the other tasks.
        result = None
        if self.tasks_type == MultiTask.MULTI_PROCESSING:
            curr_task = multiprocessing.process.current_process()
            Log.info(self.tag + 'started (PID=' + str(curr_task.pid) + ')')
        else:
            curr_task = threading.current_thread()
            Log.info(self.tag + 'started')
        if target is not None:
            result = target(*arguments)
        if result is not None:
            Log.success("Result: " + str(result))
            # Write the result to the file
            Log.info('Writing result in ' + str(self.resfile))
            storage.overwrite_file(str(result), self.resfile)
            # TODO: dump result as object with "pickle"
            # Terminate all the other threads/processes
            if self.tasks_type == MultiTask.MULTI_PROCESSING:
                Log.info('Killing other processes')
                running_pids = MultiTask.get_pids_from_file(self.pidfile)
                for pid in running_pids:
                    pid = int(pid)
                    if pid == curr_task.pid:
                        continue
                    try:
                        os.kill(pid, signal.SIGKILL)
                        Log.info('Process ' + str(pid) + ' killed!')
                    except Exception as e:
                        # Best-effort: the sibling may already have exited
                        Log.error(str(e))
                Log.info(self.tag + 'end')
            else:
                # Threads cannot be killed from outside in CPython
                Log.info('Ignoring other threads')
            # Kill itself
            # NOTE(review): this also runs in threading mode and kills the
            # whole current process — presumably intentional; confirm.
            pid = multiprocessing.process.current_process().pid
            Log.info(self.tag + 'end')
            os.kill(pid, signal.SIGKILL)

    for i in range(0, cpu):
        task_args = ()
        for arg in args:
            Log.info('Argument type: ' + str(type(arg)))
            if is_listable(arg):
                # Split the elements into 1/cpu parts
                p_list_len = (len(arg) / cpu) + (len(arg) % cpu)
                if type(arg) == dict:
                    # Slice the dict by item position for this task's share
                    iterator = iter(arg.items())
                    task_args += (
                        dict(itertools.islice(iterator, int((i * p_list_len)), int((i + 1) * p_list_len))),
                    )
                else:
                    task_args += (arg[int((i * p_list_len)):int(((i + 1) * p_list_len))],)
            else:
                # Non-listable arguments are passed whole to every task
                task_args += (arg,)
        task = self.Multitask(target=task_target, args=task_args)
        self.tasks.append(task)
    if self.tasks_type == MultiTask.MULTI_PROCESSING:
        pids = []
        signal.signal(signal.SIGCHLD, signal.SIG_IGN)  # Ignore child exit status
        for task in self.tasks:
            task.start()
            # noinspection PyUnresolvedReferences
            pids.append(task.pid)
        # Persist the children pids so task_target can kill the siblings
        storage.overwrite_file(str(pids).strip('[]'), self.pidfile)
    else:
        for task in self.tasks:
            task.start()
    if not asynchronous:
        # Wait for all the tasks to finish
        for task in self.tasks:
            task.join()
            Log.info('Task ' + str(task.name) + ' joined')
        Log.info('Reading result in ' + str(self.resfile))
        # Read the result back from the file
        res = storage.read_file(self.resfile)
        # TODO: load result as object with "pickle"
        # Remove the pid file, if any
        storage.delete(self.pidfile)
        # Remove the result file
        storage.delete(self.resfile)
        Log.success('MultiTask -> result: ' + str(res))
        return res
    return None
def delete(self, using=None, keep_parents=False):
    """
    Remove the capture file, then delete the model row.

    :param using: database alias forwarded to the parent delete()
    :param keep_parents: forwarded to the parent delete()
    :return: False when the pcap file could not be removed, otherwise the
             result of the parent class delete()
    """
    pcap_removed = storage.delete(self.pcap_file)
    if not pcap_removed:
        return False
    return super(SniffingJobModel, self).delete(using, keep_parents)