def run(self):
    """Check the configured URLs for liveness, persist results, and notify.

    Flow: prune the archive dir, move the previous serialized snapshot into
    the archive, then (if the internet is reachable) spawn one AliveChecker
    thread per configured URL inside a virtual display, merge old and new
    data, save both snapshots, evaluate the combined state and optionally
    send e-mail notifications.  Exits the process with status 2 when the
    internet connection itself is down.
    """
    print("Running...")
    # NOTE(review): this shell pipeline is built by string concatenation
    # from config -- command-injection risk if 'dir_archive' can contain
    # shell metacharacters; consider os.listdir()/os.remove() instead.
    # Also 'tail -n+1' selects EVERY line, so this deletes ALL archived
    # files -- confirm intent ('tail -n+2' would keep the newest one).
    os.system('ls -i -t ' + self._cfg['dir_archive'] +
              '/* | cut -d\' \' -f2 | tail -n+1 | xargs rm -f')

    # Archive the previous snapshot (if any), prefixing it with its own
    # timestamp (element 0 of the loaded data) so runs do not collide.
    data_old = JSONAdapter.load(self._cfg['dir_in'], self._cfg['serial_file'])
    if data_old is not None:
        data_file_name_new = data_old[0] + '_' + self._cfg['serial_file']
        Utils.rename_file(self._cfg['dir_in'], self._cfg['dir_archive'],
                          self._cfg['serial_file'], data_file_name_new)
    else:
        data_old = []

    if Utils.is_internet_up():  # fixed: 'is True' identity test removed
        urls = self._cfg['urls_to_check']
        # Element 0 of the new snapshot is this run's timestamp.
        data_new = [Utils.timestamp_to_string(time.time())]
        thread_id = 0
        threads = []

        # Headless X display shared by all checker threads.
        display = Display(visible=0, size=(1024, 768))
        display.start()
        for url in urls:
            thread_id += 1
            alivechecker_thread = AliveChecker(thread_id, self._cfg, url)
            threads.append(alivechecker_thread)
            alivechecker_thread.start()
        # Wait for all threads to complete before tearing down the display.
        for thread in threads:
            thread.join()
        display.stop()

        # Harvest results and release each thread's browser.
        for thread in threads:
            data_new.append(thread.data)
            logging.debug('%s\n', thread.log)  # fixed: lazy log formatting
            thread.browser.quit()

        # data_new always holds at least the timestamp, so this branch is
        # always taken; kept for parity with the original control flow.
        if data_new:
            JSONAdapter.save(data_new, self._cfg['dir_in'],
                             self._cfg['serial_file'])
            data_all = []
            if data_old:
                data_all.append(data_old)
            data_all.append(data_new)
            JSONAdapter.save(data_all, self._cfg['dir_out'],
                             self._cfg['serial_file'])
            state = self._evaluator.run(data_all)
            logging.debug('Final state: %s', state)  # fixed: lazy log args
            if self._emailnotifiers and state != '':
                EmailNotifiers.notify(state)
        else:
            logging.debug('Empty data')
    else:
        logging.error('Internet is definitely down!')
        sys.exit(2)
    print("Done...")
def _walk_dir(self, dir, path_out):
    """Recursively process *dir*: extract archives, keep only .apk files.

    Zip/rar/tar archives are extracted in place and then deleted; .apk
    files are moved to *path_out*; every other file is removed.

    Raises:
        OSError: re-raised if walking the directory tree fails.
    """
    try:
        for root, dirs, files in os.walk(dir):
            for fname in files:
                # Bug fixed: the original called fname.replace('$', '\$')
                # and discarded the result (strings are immutable), making
                # it a no-op.  The dead statement is removed rather than
                # assigned, because the Utils calls below need the real
                # on-disk file name, not an escaped variant.
                if Utils.is_zip(root, fname) or \
                   Utils.is_rar(root, fname) or \
                   Utils.is_tar(root, fname):
                    self._extract_file(root, fname)
                    Utils.remove_file(root, fname)
                elif Utils.is_apk(root, fname):
                    Utils.rename_file(root, path_out, fname)
                else:
                    Utils.remove_file(root, fname)
    except OSError as e:  # fixed: Py3-compatible syntax (valid from Py2.6)
        logging.error("Error walking dir '%s': %s" % (dir, e))
        # Fixed: re-raise the caught exception (preserving message and
        # traceback) instead of raising a fresh, bare OSError.
        raise