def get(self, job_id=None):
    """Return every configured job, or one job when *job_id* is given.

    A request to the bare root path is redirected to the bundled web UI
    instead of answering with JSON.
    """
    if request.path == '/':
        # Serve the static UI at the root.
        return redirect("/res/index.html", code=302)
    configs = self.loader.get_jobs()
    if job_id:
        # Single-job lookup by identifier.
        return JobConfig.encoder(configs[job_id])
    # No id supplied: encode the whole collection.
    return [JobConfig.encoder(cfg) for cfg in configs.values()]
def get(self, job_id=None):
    """Return job configurations enriched with runtime status.

    Redirects '/' to the static UI. With *job_id*, returns that single
    enriched job; otherwise returns the enriched list of all jobs.
    """
    if request.path == '/':
        return redirect("/res/index.html", code=302)
    all_jobs = JobsLoader.Instance().get_jobs()
    if job_id:
        payload = JobConfig.encoder(all_jobs[job_id])
        self.enrich_job(payload, job_id)
        return payload
    results = []
    for key, cfg in all_jobs.items():
        payload = JobConfig.encoder(cfg)
        # Attach live status information before returning.
        self.enrich_job(payload, key)
        results.append(payload)
    return results
def get(self, job_id=None):
    """Serve enriched job configurations (all jobs, or one by id).

    The root path is a redirect to the packaged index page.
    """
    if request.path == '/':
        return redirect("/res/index.html", code=302)
    jobs = JobsLoader.Instance().get_jobs()
    if not job_id:
        encoded = []
        for identifier in jobs:
            entry = JobConfig.encoder(jobs[identifier])
            self.enrich_job(entry, identifier)
            encoded.append(entry)
        return encoded
    entry = JobConfig.encoder(jobs[job_id])
    self.enrich_job(entry, job_id)
    return entry
def save_jobs(self, jobs):
    """Persist *jobs* (mapping of id -> JobConfig) to the config file as JSON."""
    # Invalidate the in-memory cache so the next read reloads from disk.
    self.jobs = None
    serializable = [JobConfig.encoder(cfg) for cfg in jobs.values()]
    with open(self.config_file, "w") as fp:
        json.dump(serializable, fp, indent=2)
def post(self):
    """Create/update a job, or answer one of two probe requests.

    The JSON payload may carry either of two probe flags instead of a
    plain job definition:

      * ``test_path``     -- resolve and return the local directory the
        job would sync into (no job is saved).
      * ``compute_sizes`` -- estimate total transfer volume (remote +
        local bytes) and an ETA, returned inside the echoed payload.

    Otherwise the payload is decoded into a ``JobConfig``, stored via the
    loader, and the scheduler is bounced for that job id.
    """
    JobsLoader.Instance().get_jobs()
    json_req = request.get_json()
    new_job = JobConfig.object_decoder(json_req)
    if 'test_path' in json_req:
        # Probe only: compute the destination directory from the data
        # path and the workspace label, and echo the payload back.
        json_req['directory'] = os.path.join(
            ConfigManager.Instance().get_data_path(),
            json_req['repoObject']['label'])
        return json_req
    elif 'compute_sizes' in json_req:
        # Assumed link speeds (bytes/second) used for the ETA estimate.
        dl_rate = 2 * 1024 * 1024
        up_rate = 0.1 * 1024 * 1024
        # COMPUTE REMOTE SIZE
        from pydio.sdk.remote import PydioSdk
        trust_ssl = False
        if 'trust_ssl' in json_req:
            trust_ssl = json_req['trust_ssl']
        sdk = PydioSdk(
            json_req['server'], json_req['workspace'],
            json_req['remote_folder'], '',
            auth=(json_req['user'], json_req['password']),
            device_id=ConfigManager.Instance().get_device_id(),
            skip_ssl_verify=trust_ssl,
            proxies=ConfigManager.Instance().get_defined_proxies())
        # Mutable cell so the closure below can accumulate a total.
        up = [0.0]

        def callback(location, change, info):
            # Sum sizes of remote files; directory entries are skipped.
            if change and "bytesize" in change and change[
                    "md5"] != "directory":
                up[0] += float(change["bytesize"])

        sdk.changes_stream(0, callback)
        # COMPUTE LOCAL SIZE
        down = 0.0
        if os.path.exists(json_req['directory']):
            for dirpath, dirnames, filenames in os.walk(
                    json_req['directory']):
                for f in filenames:
                    fp = os.path.join(dirpath, f)
                    try:
                        down += os.path.getsize(fp)
                    except OSError:
                        # File vanished or is unreadable: best effort.
                        pass
        json_req['byte_size'] = up[0] + down
        # ETA in seconds: bits to transfer divided by the assumed rates.
        json_req['eta'] = up[0] * 8 / dl_rate + down * 8 / up_rate
        return json_req
    JobsLoader.Instance().update_job(new_job)
    scheduler = PydioScheduler.Instance()
    scheduler.reload_configs()
    scheduler.disable_job(new_job.id)
    # Idiom fix: 'not in' instead of 'not ... in ...'.
    if 'toggle_status' not in json_req:
        # A full edit (not a mere enable/disable toggle) resets job data.
        JobsLoader.Instance().clear_job_data(new_job.id)
    scheduler.enable_job(new_job.id)
    return JobConfig.encoder(new_job)
def post(self):
    """Decode a job from the request body, persist it, and return it encoded."""
    current = self.loader.get_jobs()
    payload = request.get_json()
    job = JobConfig.object_decoder(payload)
    current[job.id] = job
    self.loader.save_jobs(current)
    # Re-read after saving so the loader's state reflects what is on disk.
    self.loader.get_jobs()
    return JobConfig.encoder(job)
def get(self, job_id=None):
    """Return enriched job configs; '/jobs-status' also reports connectivity.

    '/' redirects to the static UI. With *job_id*, a single enriched job
    is returned; otherwise all jobs, wrapped with an internet-connectivity
    flag when the request path is '/jobs-status'.
    """
    if request.path == '/':
        return redirect("/res/index.html", code=302)
    jobs = JobsLoader.Instance().get_jobs()
    if job_id:
        payload = JobConfig.encoder(jobs[job_id])
        self.enrich_job(payload, job_id)
        return payload
    # Hoisted flag: the same path check applies to every job.
    status_request = request.path == '/jobs-status'
    encoded = []
    for key in jobs:
        payload = JobConfig.encoder(jobs[key])
        self.enrich_job(payload, key, status_request)
        encoded.append(payload)
    if status_request:
        return {
            'is_connected_to_internet': connection_helper.internet_ok,
            'jobs': encoded,
        }
    return encoded
def get(self, job_id=None):
    """Return enriched job configs; '/jobs-status' adds a connectivity flag.

    '/' redirects to the static UI. Without *job_id*, returns every job
    (wrapped with ``is_connected_to_internet`` for '/jobs-status');
    otherwise returns the single enriched job.
    """
    if request.path == '/':
        return redirect("/res/index.html", code=302)
    jobs = JobsLoader.Instance().get_jobs()
    if not job_id:
        json_jobs = []
        # Hoisted: the path comparison is loop-invariant.
        is_status = request.path == '/jobs-status'
        for k in jobs:
            data = JobConfig.encoder(jobs[k])
            self.enrich_job(data, k, is_status)
            json_jobs.append(data)
        if is_status:
            return {'is_connected_to_internet': connection_helper.internet_ok,
                    'jobs': json_jobs}
        return json_jobs
    # Fix: lazy %-args — the message is only formatted if the record is
    # actually emitted (was eager "..." % job_id).
    logging.info("Requiring job %s", job_id)
    data = JobConfig.encoder(jobs[job_id])
    self.enrich_job(data, job_id)
    return data
def post(self):
    """Create/update a job, or answer a 'test_path' / 'compute_sizes' probe.

    Probe payloads:
      * ``test_path``     -- resolve and return the local sync directory.
      * ``compute_sizes`` -- estimate remote + local transfer volume and
        an ETA, echoed back inside the payload.

    Any other payload is decoded into a ``JobConfig``, stored, and the
    scheduler is bounced for that job id.
    """
    JobsLoader.Instance().get_jobs()
    json_req = request.get_json()
    new_job = JobConfig.object_decoder(json_req)
    if 'test_path' in json_req:
        # Probe only: compute the destination directory and echo back.
        json_req['directory'] = os.path.join(
            ConfigManager.Instance().get_data_path(),
            json_req['repoObject']['label'])
        return json_req
    elif 'compute_sizes' in json_req:
        # Assumed link speeds (bytes/second) for the ETA estimate.
        dl_rate = 2 * 1024 * 1024
        up_rate = 0.1 * 1024 * 1024
        # COMPUTE REMOTE SIZE
        from pydio.sdkremote.remote import PydioSdk
        trust_ssl = False
        if 'trust_ssl' in json_req:
            trust_ssl = json_req['trust_ssl']
        try:
            _timeout = int(json_req["timeout"])
        except (KeyError, ValueError, TypeError):
            # Fix: a missing 'timeout' key raised an uncaught KeyError
            # (only ValueError was handled). Default to 20s in all cases.
            _timeout = 20
        sdk = PydioSdk(json_req['server'], json_req['workspace'],
                       json_req['remote_folder'], '',
                       auth=(json_req['user'], json_req['password']),
                       device_id=ConfigManager.Instance().get_device_id(),
                       skip_ssl_verify=trust_ssl,
                       proxies=ConfigManager.Instance().get_defined_proxies(),
                       timeout=_timeout)
        # Mutable cell so the closure can accumulate a total.
        up = [0.0]

        def callback(location, change, info):
            # Sum sizes of remote files; directory entries are skipped.
            if change and "bytesize" in change and change["md5"] != "directory":
                try:
                    up[0] += float(change["bytesize"])
                except ValueError:
                    # Non-numeric size from the server: skip the entry.
                    pass

        sdk.changes_stream(0, callback)
        # COMPUTE LOCAL SIZE
        down = 0.0
        if os.path.exists(json_req['directory']):
            for dirpath, dirnames, filenames in os.walk(json_req['directory']):
                for f in filenames:
                    fp = os.path.join(dirpath, f)
                    try:
                        down += os.path.getsize(fp)
                    except OSError:
                        # File vanished or is unreadable: best effort.
                        pass
        json_req['byte_size'] = up[0] + down
        # ETA in seconds: bits to transfer divided by the assumed rates.
        json_req['eta'] = up[0] * 8 / dl_rate + down * 8 / up_rate
        return json_req
    JobsLoader.Instance().update_job(new_job)
    scheduler = PydioScheduler.Instance()
    scheduler.reload_configs()
    scheduler.disable_job(new_job.id)
    # Idiom fix: 'not in' instead of 'not ... in ...'.
    if 'toggle_status' not in json_req:
        # A full edit (not a mere enable/disable toggle) resets job data.
        JobsLoader.Instance().clear_job_data(new_job.id)
    scheduler.enable_job(new_job.id)
    return JobConfig.encoder(new_job)