def get(self, job_id):
    """List unresolved conflict nodes for a sync job.

    :param job_id: identifier of the job to inspect
    :returns: list of conflicting nodes, or a ("...", 404) tuple when
        the job id is not configured
    """
    # `x not in y` is the idiomatic membership test (PEP 8), not `not x in y`.
    if job_id not in JobsLoader.Instance().get_jobs():
        return "Can't find any job config with this ID.", 404
    db_handler = LocalDbHandler(
        JobsLoader.Instance().build_job_data_path(job_id))
    return db_handler.list_conflict_nodes()
def get(self, job_id='', relative_path=''):
    """
    retrieves the stat info for a given file / list the active job details
    :param job_id: (optional) Job Id of the file/ folder
    :param relative_path: (optional) relative path of the file/folder
        with respect to the corresponding repository(job_id)
    :returns a json response
    """
    if request.path == '/stat':
        # No specific node requested: return a summary of every configured job.
        jobs = JobsLoader.Instance().get_jobs()
        json_jobs = {}
        for job in jobs:
            json_jobs.update({jobs[job].id: [jobs[job].directory,
                                             jobs[job].server,
                                             jobs[job].label,
                                             jobs[job].workspace]})
        return json_jobs
    else:
        directory_path = JobsLoader.Instance().get_job(job_id).directory
        base_path = JobsLoader.Instance().build_job_data_path(job_id)
        path = os.path.join(directory_path, relative_path)
        # Get the status of the file idle/busy... by join of ajxp_index
        # and ajxp_node_status tables
        db_handler = LocalDbHandler(base_path, directory_path)
        # FIX: the previous Path(str(path.encode("utf-8"))).is_dir() produced
        # a bogus "b'...'" path on Python 3, so directories were never
        # detected; os.path.isdir is equivalent and portable.
        if os.path.isdir(path):
            node_status = db_handler.get_directory_node_status("/" + relative_path)
        else:
            node_status = db_handler.get_node_status("/" + relative_path)
        return {"node_status": node_status}
def post(self):
    """Apply a conflict resolution and resume the job when it becomes clean.

    Reads a JSON payload carrying ``job_id``, ``node_path`` and ``status``,
    persists the new node status, and — once no conflicts remain on an
    active job — kicks the scheduler thread back into action.  Returns the
    payload, or a ("...", 404) tuple for an unknown job id.
    """
    payload = request.get_json()
    job_identifier = payload['job_id']
    loader = JobsLoader.Instance()
    try:
        config = loader.get_job(job_identifier)
    except Exception:
        return "Can't find any job config with this ID.", 404
    handler = LocalDbHandler(loader.build_job_data_path(job_identifier))
    handler.update_node_status(payload['node_path'], payload['status'])
    all_resolved = not handler.count_conflicts()
    if all_resolved and config.active:
        worker = PydioScheduler.Instance().get_thread(job_identifier)
        if worker:
            worker.start_now()
    return payload
def __init__(self, job_config, job_data_path):
    """
    Initialize thread internals
    :param job_config: JobConfig instance
    :param job_data_path: Filesystem path where the job data are stored
    :return:
    """
    threading.Thread.__init__(self)
    self.last_run = 0
    self.configs_path = job_data_path
    self.job_config = job_config
    self.init_global_progress()

    self.basepath = job_config.directory
    self.ws_id = job_config.workspace
    self.sdk = PydioSdk(
        job_config.server,
        ws_id=self.ws_id,
        remote_folder=job_config.remote_folder,
        user_id=job_config.user_id,
        device_id=ConfigManager.Instance().get_device_id(),
        skip_ssl_verify=job_config.trust_ssl,
        proxies=ConfigManager.Instance().get_defined_proxies())
    self.system = SystemSdk(job_config.directory)
    self.remote_seq = 0
    self.local_seq = 0
    self.local_target_seq = 0
    self.remote_target_seq = 0
    self.local_seqs = []
    self.remote_seqs = []
    self.db_handler = LocalDbHandler(self.configs_path, job_config.directory)
    self.interrupt = False
    self.event_timer = 2
    self.online_timer = 10
    self.offline_timer = 60
    self.online_status = True
    self.job_status_running = True
    self.direction = job_config.direction
    self.event_logger = EventLogger(self.configs_path)
    self.processing_signals = {}
    self.current_tasks = []
    self.event_handler = None
    self.watcher = None
    self.watcher_first_run = True
    # TODO: TO BE LOADED FROM CONFIG
    self.storage_watcher = job_config.label.startswith('LSYNC')
    self.marked_for_snapshot_pathes = []

    dispatcher.send(signal=PUBLISH_SIGNAL, sender=self, channel='status',
                    message='START')
    # Download-only jobs never watch the local filesystem.
    if job_config.direction != 'down':
        self.event_handler = SqlEventHandler(
            includes=job_config.filters['includes'],
            excludes=job_config.filters['excludes'],
            basepath=job_config.directory,
            job_data_path=self.configs_path)
        self.watcher = LocalWatcher(job_config.directory,
                                    self.configs_path,
                                    event_handler=self.event_handler)
        self.db_handler.check_lock_on_event_handler(self.event_handler)

    # os.path.join instead of string concatenation, for portability.
    sequences_file = os.path.join(self.configs_path, "sequences")
    if os.path.exists(sequences_file):
        try:
            # Context manager guarantees the handle is closed — the original
            # pickle.load(open(...)) leaked the file descriptor.
            with open(sequences_file, "rb") as f:
                sequences = pickle.load(f)
            self.remote_seq = sequences['remote']
            self.local_seq = sequences['local']
            if self.event_handler:
                self.event_handler.last_seq_id = self.local_seq
        except Exception:
            # Wrong content, remove sequences file.
            os.unlink(sequences_file)

    dispatcher.connect(self.handle_transfer_rate_event,
                       signal=TRANSFER_RATE_SIGNAL, sender=self.sdk)
    dispatcher.connect(self.handle_transfer_callback_event,
                       signal=TRANSFER_CALLBACK_SIGNAL, sender=self.sdk)

    if self.job_config.frequency == 'manual':
        self.job_status_running = False
def __init__(self, job_config, job_data_path):
    """
    Initialize thread internals
    :param job_config: JobConfig instance
    :param job_data_path: Filesystem path where the job data are stored
    :return:
    """
    threading.Thread.__init__(self)
    self.last_run = 0
    self.configs_path = job_data_path
    self.job_config = job_config

    # Verify every job-local SQLite file; a corrupted one is deleted and the
    # sync sequences are reset so the job re-indexes from scratch.
    sqlite_files = [
        file for file in os.listdir(self.configs_path)
        if file.endswith(".sqlite")
    ]
    for sqlite_file in sqlite_files:
        db_file = os.path.join(self.configs_path, sqlite_file)
        try:
            exists_and_correct = check_sqlite_file(db_file)
            if exists_and_correct:
                logging.info(
                    "Structure and Integrity of SQLite file %s is intact "
                    % str(db_file))
        except DBCorruptedException as e:
            # FIX: use str(e) — BaseException.message was removed in
            # Python 3 (PEP 352) and e.message raised AttributeError here.
            logging.debug(
                "SQLite file %s is corrupted (Reason: %s), Deleting file "
                "and Reinitialising sync" % (str(db_file), str(e)))
            os.unlink(db_file)
            self.update_sequences_file(0, 0)

    self.init_global_progress()
    self.basepath = job_config.directory
    self.ws_id = job_config.workspace
    self.sdk = PydioSdk(
        job_config.server,
        ws_id=self.ws_id,
        remote_folder=job_config.remote_folder,
        user_id=job_config.user_id,
        device_id=ConfigManager.Instance().get_device_id(),
        skip_ssl_verify=job_config.trust_ssl,
        proxies=ConfigManager.Instance().get_defined_proxies(),
        timeout=job_config.timeout)
    self.system = SystemSdk(job_config.directory)
    self.remote_seq = 0
    self.local_seq = 0
    self.local_target_seq = 0
    self.remote_target_seq = 0
    self.local_seqs = []
    self.remote_seqs = []
    self.db_handler = LocalDbHandler(self.configs_path, job_config.directory)
    self.interrupt = False
    self.event_timer = 2
    self.online_timer = job_config.online_timer
    self.offline_timer = 60
    self.online_status = True
    self.job_status_running = True
    self.direction = job_config.direction
    self.event_logger = EventLogger(self.configs_path)
    self.processing_signals = {}
    self.current_tasks = []
    self.event_handler = None
    self.watcher = None
    self.watcher_first_run = True
    # TODO: TO BE LOADED FROM CONFIG
    self.storage_watcher = job_config.label.startswith('LSYNC')
    # True when no changes detected in last cycle, can be used to disable websockets
    self.wait_for_changes = False
    self.marked_for_snapshot_pathes = []
    self.processing = False  # indicates whether changes are being processed

    dispatcher.send(signal=PUBLISH_SIGNAL, sender=self, channel='status',
                    message='START')
    # Watch locally unless this is a download-only job that always resolves
    # conflicts in favour of the remote copy.
    if job_config.direction != 'down' or (
            self.job_config.direction == 'down'
            and self.job_config.solve != 'remote'):
        self.event_handler = SqlEventHandler(
            includes=job_config.filters['includes'],
            excludes=job_config.filters['excludes'],
            basepath=job_config.directory,
            job_data_path=self.configs_path)
        self.watcher = LocalWatcher(job_config.directory,
                                    self.configs_path,
                                    event_handler=self.event_handler)
        self.db_handler.check_lock_on_event_handler(self.event_handler)

    sequences_file = os.path.join(self.configs_path, "sequences")
    if os.path.exists(sequences_file):
        try:
            with open(sequences_file, "rb") as f:
                sequences = pickle.load(f)
            self.remote_seq = sequences['remote']
            self.local_seq = sequences['local']
            if self.event_handler:
                self.event_handler.last_seq_id = self.local_seq
        except Exception as e:
            logging.exception(e)
            # Wrong content, remove sequences file.
            os.unlink(sequences_file)

    dispatcher.connect(self.handle_transfer_rate_event,
                       signal=TRANSFER_RATE_SIGNAL, sender=self.sdk)
    dispatcher.connect(self.handle_transfer_callback_event,
                       signal=TRANSFER_CALLBACK_SIGNAL, sender=self.sdk)

    if self.job_config.frequency == 'manual':
        self.job_status_running = False
    # NOTE(review): duplicates self.event_logger above — presumably kept for
    # callers that reference .logger; confirm before consolidating.
    self.logger = EventLogger(self.configs_path)