def post(self):
    """Create or update a sync job from the JSON request body.

    Special request flags short-circuit the normal flow:
      - 'test_path': only resolve and return the default local directory.
      - 'compute_sizes': estimate total bytes and an ETA for a first sync
        (remote size via the changes stream, local size via os.walk).
    Otherwise the decoded job is stored and the scheduler is restarted on it.

    Returns the (possibly augmented) request dict for the two test modes,
    or the encoded saved job config.
    """
    JobsLoader.Instance().get_jobs()
    json_req = request.get_json()
    new_job = JobConfig.object_decoder(json_req)

    if 'test_path' in json_req:
        # Build the default local folder from the workspace label and echo it back.
        json_req['directory'] = os.path.join(
            ConfigManager.Instance().get_data_path(),
            json_req['repoObject']['label'])
        return json_req

    elif 'compute_sizes' in json_req:
        # Assumed transfer rates (bytes/s) used only for the ETA estimate.
        dl_rate = 2 * 1024 * 1024
        up_rate = 0.1 * 1024 * 1024

        # COMPUTE REMOTE SIZE
        from pydio.sdk.remote import PydioSdk
        trust_ssl = False
        if 'trust_ssl' in json_req:
            trust_ssl = json_req['trust_ssl']
        sdk = PydioSdk(
            json_req['server'],
            json_req['workspace'],
            json_req['remote_folder'],
            '',
            auth=(json_req['user'], json_req['password']),
            device_id=ConfigManager.Instance().get_device_id(),
            skip_ssl_verify=trust_ssl,
            proxies=ConfigManager.Instance().get_defined_proxies())

        # Mutable cell so the nested callback can accumulate the total.
        up = [0.0]

        def callback(location, change, info):
            if change and "bytesize" in change and change["md5"] != "directory":
                try:
                    up[0] += float(change["bytesize"])
                except ValueError:
                    # Ignore malformed byte sizes instead of aborting the scan.
                    pass

        sdk.changes_stream(0, callback)

        # COMPUTE LOCAL SIZE
        down = 0.0
        if os.path.exists(json_req['directory']):
            for dirpath, dirnames, filenames in os.walk(json_req['directory']):
                for f in filenames:
                    fp = os.path.join(dirpath, f)
                    try:
                        down += os.path.getsize(fp)
                    except OSError:
                        # File may have vanished or be unreadable; skip it.
                        pass

        json_req['byte_size'] = up[0] + down
        # Remote bytes come down at dl_rate, local bytes go up at up_rate.
        json_req['eta'] = up[0] * 8 / dl_rate + down * 8 / up_rate
        return json_req

    # Normal path: persist the job and bounce it through the scheduler.
    JobsLoader.Instance().update_job(new_job)
    scheduler = PydioScheduler.Instance()
    scheduler.reload_configs()
    scheduler.disable_job(new_job.id)
    if 'toggle_status' not in json_req:
        # A full (re)configuration resets the job's local state.
        JobsLoader.Instance().clear_job_data(new_job.id)
    scheduler.enable_job(new_job.id)
    return JobConfig.encoder(new_job)
def post(self):
    """Decode a job config from the request body, persist it, and echo it back.

    The updated job map is written through self.loader, then the loader is
    asked to re-read so its cache matches what is on disk.
    """
    current_jobs = self.loader.get_jobs()
    payload = request.get_json()
    job = JobConfig.object_decoder(payload)
    current_jobs[job.id] = job
    self.loader.save_jobs(current_jobs)
    self.loader.get_jobs()  # refresh loader state from the saved file
    return JobConfig.encoder(job)
def post(self):
    """Create or update a sync job from the JSON request body.

    Special request flags short-circuit the normal flow:
      - 'test_path': only resolve and return the default local directory.
      - 'compute_sizes': estimate total bytes and an ETA for a first sync
        (remote size via the changes stream, local size via os.walk).
    Otherwise the decoded job is stored and the scheduler is restarted on it.

    Returns the (possibly augmented) request dict for the two test modes,
    or the encoded saved job config.
    """
    JobsLoader.Instance().get_jobs()
    json_req = request.get_json()
    new_job = JobConfig.object_decoder(json_req)

    if 'test_path' in json_req:
        # Build the default local folder from the workspace label and echo it back.
        json_req['directory'] = os.path.join(
            ConfigManager.Instance().get_data_path(),
            json_req['repoObject']['label'])
        return json_req

    elif 'compute_sizes' in json_req:
        # Assumed transfer rates (bytes/s) used only for the ETA estimate.
        dl_rate = 2 * 1024 * 1024
        up_rate = 0.1 * 1024 * 1024

        # COMPUTE REMOTE SIZE
        from pydio.sdkremote.remote import PydioSdk
        trust_ssl = False
        if 'trust_ssl' in json_req:
            trust_ssl = json_req['trust_ssl']
        try:
            # BUGFIX: the key may be absent (KeyError) or non-numeric/None
            # (TypeError); the original only caught ValueError and crashed.
            _timeout = int(json_req["timeout"])
        except (KeyError, TypeError, ValueError):
            _timeout = 20  # default to 20
        sdk = PydioSdk(json_req['server'],
                       json_req['workspace'],
                       json_req['remote_folder'],
                       '',
                       auth=(json_req['user'], json_req['password']),
                       device_id=ConfigManager.Instance().get_device_id(),
                       skip_ssl_verify=trust_ssl,
                       proxies=ConfigManager.Instance().get_defined_proxies(),
                       timeout=_timeout)

        # Mutable cell so the nested callback can accumulate the total.
        up = [0.0]

        def callback(location, change, info):
            if change and "bytesize" in change and change["md5"] != "directory":
                try:
                    up[0] += float(change["bytesize"])
                except ValueError:
                    # Ignore malformed byte sizes instead of aborting the scan.
                    pass

        sdk.changes_stream(0, callback)

        # COMPUTE LOCAL SIZE
        down = 0.0
        if os.path.exists(json_req['directory']):
            for dirpath, dirnames, filenames in os.walk(json_req['directory']):
                for f in filenames:
                    fp = os.path.join(dirpath, f)
                    try:
                        down += os.path.getsize(fp)
                    except OSError:
                        # File may have vanished or be unreadable; skip it.
                        pass

        json_req['byte_size'] = up[0] + down
        # Remote bytes come down at dl_rate, local bytes go up at up_rate.
        json_req['eta'] = up[0] * 8 / dl_rate + down * 8 / up_rate
        return json_req

    # Normal path: persist the job and bounce it through the scheduler.
    JobsLoader.Instance().update_job(new_job)
    scheduler = PydioScheduler.Instance()
    scheduler.reload_configs()
    scheduler.disable_job(new_job.id)
    if 'toggle_status' not in json_req:
        # A full (re)configuration resets the job's local state.
        JobsLoader.Instance().clear_job_data(new_job.id)
    scheduler.enable_job(new_job.id)
    return JobConfig.encoder(new_job)
def get(self, job_id=None):
    """Return one encoded job config (by id) or the list of all of them.

    Requests to the bare root path are redirected to the bundled UI.
    """
    if request.path == '/':
        return redirect("/res/index.html", code=302)
    jobs = self.loader.get_jobs()
    if job_id:
        return JobConfig.encoder(jobs[job_id])
    return [JobConfig.encoder(cfg) for cfg in jobs.values()]
def get(self, job_id=None):
    """Return enriched, encoded job config(s): one by id, or all of them.

    Requests to the bare root path are redirected to the bundled UI.
    """
    if request.path == '/':
        return redirect("/res/index.html", code=302)
    jobs = JobsLoader.Instance().get_jobs()

    def encode(key):
        # Encode the config, then let enrich_job add live data in place.
        payload = JobConfig.encoder(jobs[key])
        self.enrich_job(payload, key)
        return payload

    if job_id:
        return encode(job_id)
    return [encode(key) for key in jobs]
def get(self, job_id=None):
    """Return encoded job configs enriched with status data.

    With a job_id, returns that single job; otherwise returns all jobs.
    Requests to the bare root path are redirected to the bundled UI.
    """
    if request.path == '/':
        return redirect("/res/index.html", code=302)
    all_jobs = JobsLoader.Instance().get_jobs()
    if job_id:
        encoded = JobConfig.encoder(all_jobs[job_id])
        self.enrich_job(encoded, job_id)
        return encoded
    result = []
    for key, cfg in all_jobs.items():
        encoded = JobConfig.encoder(cfg)
        self.enrich_job(encoded, key)
        result.append(encoded)
    return result
def save_jobs(self, jobs):
    """Serialize every job configuration to the JSON config file.

    Drops the cached job list first so the next get_jobs() re-reads
    from disk.
    """
    self.jobs = None
    serialized = [JobConfig.encoder(job) for job in jobs.values()]
    with open(self.config_file, "w") as handle:
        json.dump(serialized, handle, indent=2)
def get(self, job_id=None):
    """Return encoded job config(s), enriched with status data.

    With a job_id, returns that single job. Without one, returns the full
    list; if the request came in on /jobs-status the list is wrapped in an
    envelope that also carries the internet-connectivity flag.
    Requests to the bare root path are redirected to the bundled UI.
    """
    if request.path == '/':
        return redirect("/res/index.html", code=302)
    jobs = JobsLoader.Instance().get_jobs()
    if job_id:
        payload = JobConfig.encoder(jobs[job_id])
        self.enrich_job(payload, job_id)
        return payload
    # The path cannot change mid-request; evaluate the flag once.
    status_request = (request.path == '/jobs-status')
    encoded_jobs = []
    for key in jobs:
        payload = JobConfig.encoder(jobs[key])
        self.enrich_job(payload, key, status_request)
        encoded_jobs.append(payload)
    if status_request:
        return {'is_connected_to_internet': connection_helper.internet_ok,
                'jobs': encoded_jobs}
    return encoded_jobs
def get(self, job_id=None):
    """Return encoded job config(s), enriched with status data.

    With a job_id, returns that single job. Without one, returns the full
    list; if the request came in on /jobs-status the list is wrapped in an
    envelope that also carries the internet-connectivity flag.
    Requests to the bare root path are redirected to the bundled UI.
    """
    if request.path == '/':
        return redirect("/res/index.html", code=302)
    jobs = JobsLoader.Instance().get_jobs()
    if not job_id:
        # The path cannot change mid-request; evaluate the flag once.
        status_request = (request.path == '/jobs-status')
        json_jobs = []
        for k in jobs:
            data = JobConfig.encoder(jobs[k])
            self.enrich_job(data, k, status_request)
            json_jobs.append(data)
        if status_request:
            return {'is_connected_to_internet': connection_helper.internet_ok,
                    'jobs': json_jobs}
        return json_jobs
    # Lazy %-style args instead of eager string formatting in the log call.
    logging.info("Requiring job %s", job_id)
    data = JobConfig.encoder(jobs[job_id])
    self.enrich_job(data, job_id)
    return data
def main(argv=sys.argv[1:]):
    """Entry point: parse CLI options, load or build job configurations, run
    optional one-shot utilities (proxy setup, diagnostics, i18n extraction),
    then start the HTTP API server and the sync scheduler.

    Returns 0 for the one-shot paths; otherwise runs until interrupted.
    """
    parser = argparse.ArgumentParser('Pydio Synchronization Tool')
    # Pass a server configuration via arguments
    parser.add_argument('-s', '--server', help='Server URL, with http(s) and path to pydio',
                        type=unicode, default='http://localhost')
    parser.add_argument('-d', '--directory', help='Local directory', type=unicode, default=None)
    parser.add_argument('-w', '--workspace', help='Id or Alias of workspace to synchronize',
                        type=unicode, default=None)
    parser.add_argument('-r', '--remote_folder',
                        help='Path to an existing folder of the workspace to synchronize',
                        type=unicode, default=None)
    parser.add_argument('-u', '--user', help='User name', type=unicode, default=None)
    parser.add_argument('-p', '--password', help='Password', type=unicode, default=None)
    parser.add_argument('-px', '--proxy',
                        help='Enter like http::username::password::proxyIP::proxyPort::...::check_proxy_flag '
                             'By default proxy connection test happens, to avoid mention 0 or False',
                        type=unicode, default=None)
    parser.add_argument('-mp', '--memory_profile',
                        help="To Generate the memory profile :: use <<-mp True >> as argument",
                        type=unicode, default=False)
    parser.add_argument('-dir', '--direction', help='Synchro Direction', type=str, default='bi')
    # Pass a configuration file
    parser.add_argument('-f', '--file', type=unicode, help='Json file containing jobs configurations')
    # Pass a path to rdiff binary
    parser.add_argument('-i', '--rdiff', type=unicode, help='Path to rdiff executable', default=None)
    # Configure API access
    parser.add_argument('--api_user', help='Set the agent API username (instead of random)',
                        type=unicode, default=None)
    parser.add_argument('--api_password', help='Set the agent API password (instead of random)',
                        type=unicode, default=None)
    parser.add_argument('--api_address',
                        help='Set the agent IP address. By default, no address means that local '
                             'access only is allowed.',
                        type=str, default=None)
    parser.add_argument('--api_port',
                        help='Set the agent port. By default, will try to use 5556, and if not '
                             'available will switch to another port.',
                        type=int, default=5556)
    parser.add_argument('--diag', help='Run self diagnostic', action='store_true', default=False)
    parser.add_argument('--diag-http', help='Check server connection', action='store_true', default=False)
    parser.add_argument('--diag-imports', help='Check imports and exit', action='store_true', default=False)
    parser.add_argument('--save-cfg', action='store_true', default=True)
    parser.add_argument('--extract_html',
                        help='Utils for extracting HTML strings and compiling po files to json',
                        type=unicode, default=False)
    parser.add_argument('--auto-start', action='store_true')
    parser.add_argument('-v', '--verbose', action='count', default=1)
    args, _ = parser.parse_known_args(argv)

    # Prefer a 'data' folder next to the package; fall back to the default
    # per-user data path, creating it on first start.
    jobs_root_path = Path(__file__).parent / 'data'
    if not jobs_root_path.exists():
        jobs_root_path = Path(DEFAULT_DATA_PATH.encode(guess_filesystemencoding()))
        if not jobs_root_path.exists():
            jobs_root_path.mkdir(parents=True)
            # This is a first start
            user_dir = unicode(get_user_home(APP_NAME))
            if not os.path.exists(user_dir):
                try:
                    os.mkdir(user_dir)
                except Exception:
                    pass
            if os.path.exists(user_dir):
                from pydio.utils.favorites_manager import add_to_favorites
                add_to_favorites(user_dir, APP_NAME)

    setup_logging(args.verbose, jobs_root_path)

    if args.auto_start:
        import pydio.autostart
        pydio.autostart.setup(argv)
        return 0

    u_jobs_root_path = str(jobs_root_path).decode(guess_filesystemencoding())
    config_manager = ConfigManager.Instance(configs_path=u_jobs_root_path,
                                            data_path=DEFAULT_PARENT_PATH)
    jobs_loader = JobsLoader.Instance(data_path=u_jobs_root_path)
    config_manager.set_rdiff_path(args.rdiff)

    if args.proxy is not None:
        # Each proxy entry is 5 '::'-separated fields; an optional trailing
        # token (len % 5 == 1) carries the connectivity-check flag.
        parts = args.proxy.split('::')
        if len(parts) % 5 not in (0, 1):
            # BUGFIX: the original bound `data` to logging.error()'s return
            # value (None) and then indexed it, crashing with a TypeError.
            logging.error("Wrong number of parameters passed for proxy")
            return 1
        msg = {}
        for i in range(len(parts) // 5):
            msg[parts[i * 5]] = {"username": parts[i * 5 + 1],
                                 "password": parts[i * 5 + 2],
                                 "hostname": parts[i * 5 + 3],
                                 "port": parts[i * 5 + 4]}
        proxy_flag = parts[-1] if len(parts) % 5 == 1 else True  # default true
        config_manager.set_user_proxy(msg, check_proxy_flag=proxy_flag)
        return 0

    if args.server and args.directory and args.workspace:
        # A complete job was described on the command line.
        job_config = JobConfig()
        job_config.load_from_cliargs(args)
        data = {job_config.id: job_config}
        if args.save_cfg:
            logging.info("Storing config in %s", os.path.join(u_jobs_root_path, 'configs.json'))
            jobs_loader.save_jobs(data)
    else:
        # Otherwise load jobs from an explicit config file or the default one.
        fp = args.file
        if fp and fp != '.':
            logging.info("Loading config from %s", fp)
            jobs_loader.config_file = fp
            jobs_loader.load_config()
        data = jobs_loader.get_jobs()
    logging.debug("data: %s" % json.dumps(data, default=JobConfig.encoder, indent=2))

    if args.diag_imports:
        # nothing more to do
        return sys.exit(0)

    if args.memory_profile:
        from pydio.utils.pydio_profiler import LogFile
        sys.stdout = LogFile('stdout')

    if args.extract_html:
        from pydio.utils.i18n import PoProcessor
        proc = PoProcessor()
        if args.extract_html == 'extract':
            root = Path(__file__).parent
            count = proc.extract_all_html_strings(str(root / 'ui' / 'res'),
                                                  str(root / 'res' / 'i18n' / 'html_strings.py'))
            logging.info('Wrote %i strings to html_strings.py - Now update PO files using standard tools' % count)
            # nothing more to do
        elif args.extract_html == 'compile':
            root = Path(__file__).parent
            proc.po_to_json(str(root / 'res' / 'i18n' / '*.po'),
                            str(root / 'ui' / 'res' / 'i18n.js'))
        return sys.exit(0)

    if args.diag_http:
        # Py2: dict.keys() returns a list, so keys[0] is a valid first job id.
        keys = data.keys()
        if args.password:
            smoke_tests = PydioDiagnostics(
                data[keys[0]].server, data[keys[0]].workspace,
                data[keys[0]].remote_folder, data[keys[0]].user_id,
                args.password)
        else:
            smoke_tests = PydioDiagnostics(
                data[keys[0]].server, data[keys[0]].workspace,
                data[keys[0]].remote_folder, data[keys[0]].user_id)
        rc = smoke_tests.run()
        if rc != 0:
            logging.error("Diagnostics failed: %s %s" % (str(rc), smoke_tests.status_message))
        return sys.exit(rc)

    # Normal daemon path: bring up the HTTP API in a background thread, then
    # start all scheduled sync jobs.
    ports_detector = PortsDetector(store_file=str(jobs_root_path / 'ports_config'),
                                   username=args.api_user,
                                   password=args.api_password,
                                   default_port=args.api_port)
    ports_detector.create_config_file()
    scheduler = PydioScheduler.Instance(jobs_root_path=jobs_root_path, jobs_loader=jobs_loader)
    server = PydioApi(ports_detector.get_port(), ports_detector.get_username(),
                      ports_detector.get_password(), external_ip=args.api_address)
    from pydio.job import manager
    manager.api_server = server
    try:
        thread.start_new_thread(server.start_server, ())
        # Give the server thread a moment to bind before checking it.
        time.sleep(0.3)
        if not server.running:
            logging.error('Cannot start web server, exiting application')
            sys.exit(1)
        scheduler.start_all()
    except (KeyboardInterrupt, SystemExit):
        server.shutdown_server()
        sys.exit()
def main(argv=sys.argv[1:]):
    """Entry point: parse CLI options, load or build job configurations, run
    optional one-shot utilities (diagnostics, i18n extraction), then start
    the HTTP API server and the sync scheduler.

    NOTE(review): the default argv is captured once at definition time
    (mutable-default pitfall); presumably harmless here since sys.argv does
    not change, but worth confirming.
    """
    parser = argparse.ArgumentParser('Pydio Synchronization Tool')
    parser.add_argument('-s', '--server', help='Server URL, with http(s) and path to pydio',
                        type=unicode, default='http://localhost')
    parser.add_argument('-d', '--directory', help='Local directory', type=unicode, default=None)
    parser.add_argument('-w', '--workspace', help='Id or Alias of workspace to synchronize',
                        type=unicode, default=None)
    parser.add_argument(
        '-r', '--remote_folder',
        help='Path to an existing folder of the workspace to synchronize',
        type=unicode, default=None)
    parser.add_argument('-u', '--user', help='User name', type=unicode, default=None)
    parser.add_argument('-p', '--password', help='Password', type=unicode, default=None)
    parser.add_argument('-dir', '--direction', help='Synchro Direction', type=str, default='bi')
    parser.add_argument('-f', '--file', type=unicode, help='Json file containing jobs configurations')
    parser.add_argument(
        '-z', '--zmq_port', type=int,
        help='Available port for zmq, both this port and this port +1 will be used',
        default=5556)
    parser.add_argument('-i', '--rdiff', type=unicode, help='Path to rdiff executable', default=None)
    parser.add_argument('--diag', help='Run self diagnostic', action='store_true', default=False)
    parser.add_argument('--diag-http', help='Check server connection', action='store_true', default=False)
    parser.add_argument('--diag-imports', help='Check imports and exit', action='store_true', default=False)
    parser.add_argument('--save-cfg', action='store_true', default=True)
    parser.add_argument(
        '--extract_html',
        help='Utils for extracting HTML strings and compiling po files to json',
        type=unicode, default=False)
    parser.add_argument('--auto-start', action='store_true')
    parser.add_argument('--auto_detect_port', type=bool, help='Auto detect available ports', default=False)
    parser.add_argument('-v', '--verbose', action='count', default=1)
    # Unknown options are tolerated (parse_known_args) rather than fatal.
    args, _ = parser.parse_known_args(argv)

    # Prefer a 'data' folder next to the package; otherwise fall back to the
    # default data path, creating it if needed.
    jobs_root_path = Path(__file__).parent / 'data'
    if not jobs_root_path.exists():
        jobs_root_path = Path(DEFAULT_DATA_PATH)
        if not jobs_root_path.exists():
            jobs_root_path.mkdir()

    setup_logging(args.verbose, jobs_root_path)

    if args.auto_start:
        # Only register OS autostart and quit.
        import pydio.autostart
        pydio.autostart.setup(argv)
        return 0

    # Singletons must be initialized before any job/config access below.
    jobs_loader = JobsLoader.Instance(data_path=str(jobs_root_path))
    config_manager = ConfigManager.Instance(data_path=str(jobs_root_path))
    config_manager.set_rdiff_path(args.rdiff)

    if args.server and args.directory and args.workspace:
        # A complete job was described on the command line.
        job_config = JobConfig()
        job_config.load_from_cliargs(args)
        data = {job_config.id: job_config}
        if args.save_cfg:
            logging.info("Storing config in %s", str(jobs_root_path / 'configs.json'))
            jobs_loader.save_jobs(data)
    else:
        # Otherwise load jobs from an explicit config file or the default one.
        fp = args.file
        if fp and fp != '.':
            logging.info("Loading config from %s", fp)
            jobs_loader.config_file = fp
            jobs_loader.load_config()
        data = jobs_loader.get_jobs()
    logging.debug("data: %s" % json.dumps(data, default=JobConfig.encoder, indent=2))

    if args.diag_imports:
        # nothing more to do
        return sys.exit(0)

    if args.extract_html:
        # i18n tooling: either harvest HTML strings or compile .po to i18n.js.
        from pydio.utils.i18n import PoProcessor
        proc = PoProcessor()
        if args.extract_html == 'extract':
            root = Path(__file__).parent
            count = proc.extract_all_html_strings(
                str(root / 'ui' / 'res'),
                str(root / 'res' / 'i18n' / 'html_strings.py'))
            logging.info(
                'Wrote %i strings to html_strings.py - Now update PO files using standard tools' % count)
            # nothing more to do
        elif args.extract_html == 'compile':
            root = Path(__file__).parent
            proc.po_to_json(str(root / 'res' / 'i18n' / '*.po'),
                            str(root / 'ui' / 'res' / 'i18n.js'))
        return sys.exit(0)

    if args.diag_http:
        # Py2: dict.keys() returns a list, so keys[0] picks the first job id.
        keys = data.keys()
        if args.password:
            smoke_tests = PydioDiagnostics(data[keys[0]].server,
                                           data[keys[0]].workspace,
                                           data[keys[0]].remote_folder,
                                           data[keys[0]].user_id,
                                           args.password)
        else:
            smoke_tests = PydioDiagnostics(data[keys[0]].server,
                                           data[keys[0]].workspace,
                                           data[keys[0]].remote_folder,
                                           data[keys[0]].user_id)
        rc = smoke_tests.run()
        if rc != 0:
            logging.error("Diagnostics failed: %s %s" % (str(rc), smoke_tests.status_message))
        return sys.exit(rc)

    # Normal daemon path: bring up the HTTP API in a background thread, then
    # start all scheduled sync jobs.
    ports_detector = PortsDetector(args.zmq_port, args.auto_detect_port,
                                   store_file=str(jobs_root_path / 'ports_config'))
    ports_detector.create_config_file()
    scheduler = PydioScheduler.Instance(jobs_root_path=jobs_root_path, jobs_loader=jobs_loader)
    server = PydioApi(ports_detector.get_open_port('flask_api'))
    from pydio.job import manager
    manager.api_server = server
    try:
        thread.start_new_thread(server.start_server, ())
        # Give the server thread a moment to bind before checking it.
        time.sleep(0.3)
        if not server.running:
            logging.error('Cannot start web server, exiting application')
            sys.exit(1)
        scheduler.start_all()
    except (KeyboardInterrupt, SystemExit):
        server.shutdown_server()
        sys.exit()
def main(argv=sys.argv[1:]):
    """Entry point: parse CLI options, load job configurations (from file or
    CLI), run optional diagnostics, then start the HTTP API server and the
    sync scheduler.

    NOTE(review): the default argv is captured once at definition time
    (mutable-default pitfall); presumably harmless here since sys.argv does
    not change, but worth confirming.
    """
    parser = argparse.ArgumentParser('Pydio Synchronization Tool')
    parser.add_argument('-s', '--server', help='Server URL, with http(s) and path to pydio',
                        type=unicode, default='http://localhost')
    parser.add_argument('-d', '--directory', help='Local directory', type=unicode, default=None)
    parser.add_argument('-w', '--workspace', help='Id or Alias of workspace to synchronize',
                        type=unicode, default=None)
    parser.add_argument('-r', '--remote_folder',
                        help='Path to an existing folder of the workspace to synchronize',
                        type=unicode, default=None)
    parser.add_argument('-u', '--user', help='User name', type=unicode, default=None)
    parser.add_argument('-p', '--password', help='Password', type=unicode, default=None)
    parser.add_argument('-dir', '--direction', help='Synchro Direction', type=str, default='bi')
    parser.add_argument('-f', '--file', type=unicode, help='Json file containing jobs configurations')
    parser.add_argument('-z', '--zmq_port', type=int,
                        help='Available port for zmq, both this port and this port +1 will be used',
                        default=5556)
    parser.add_argument('--diag', help='Run self diagnostic', action='store_true', default=False)
    parser.add_argument('--diag-http', help='Check server connection', action='store_true', default=False)
    parser.add_argument('--diag-imports', help='Check imports and exit', action='store_true', default=False)
    parser.add_argument('--save-cfg', action='store_true', default=True)
    parser.add_argument('--auto-start', action='store_true')
    parser.add_argument('--auto_detect_port', type=bool, help='Auto detect available ports', default=False)
    parser.add_argument('-v', '--verbose', action='count', default=1)
    # Unknown options are tolerated (parse_known_args) rather than fatal.
    args, _ = parser.parse_known_args(argv)

    # Prefer a 'data' folder next to the package; otherwise fall back to the
    # default data path, creating it if needed.
    jobs_root_path = Path(__file__).parent / 'data'
    if not jobs_root_path.exists():
        jobs_root_path = Path(DEFAULT_DATA_PATH)
        if not jobs_root_path.exists():
            jobs_root_path.mkdir()

    setup_logging(args.verbose, jobs_root_path)

    if args.auto_start:
        # Only register OS autostart and quit.
        import pydio.autostart
        pydio.autostart.setup(argv)
        return 0

    # Singletons must be initialized before any job/config access below.
    jobs_loader = JobsLoader.Instance(data_path=str(jobs_root_path))
    ConfigManager.Instance(data_path=str(jobs_root_path))

    if args.file or not argv:
        # Config-file mode (also the default when no arguments were given).
        fp = args.file
        if fp and fp != '.':
            logging.info("Loading config from %s", fp)
            jobs_loader.config_file = fp
            jobs_loader.load_config()
        data = jobs_loader.get_jobs()
    else:
        # A job was described on the command line.
        job_config = JobConfig()
        job_config.load_from_cliargs(args)
        data = {job_config.id: job_config}
        if args.save_cfg:
            logging.info("Storing config in %s", str(jobs_root_path / 'configs.json'))
            jobs_loader.save_jobs(data)
    logging.debug("data: %s" % json.dumps(data, default=JobConfig.encoder, indent=2))

    if args.diag_imports:
        # nothing more to do
        return sys.exit(0)

    if args.diag_http:
        # Py2: dict.keys() returns a list, so keys[0] picks the first job id.
        keys = data.keys()
        if args.password:
            smoke_tests = PydioDiagnostics(
                data[keys[0]].server, data[keys[0]].workspace,
                data[keys[0]].remote_folder, data[keys[0]].user_id,
                args.password)
        else:
            smoke_tests = PydioDiagnostics(
                data[keys[0]].server, data[keys[0]].workspace,
                data[keys[0]].remote_folder, data[keys[0]].user_id)
        rc = smoke_tests.run()
        if rc != 0:
            logging.error("Diagnostics failed: %s %s" % (str(rc), smoke_tests.status_message))
        return sys.exit(rc)

    # Normal daemon path: bring up the HTTP API in a background thread, then
    # start all scheduled sync jobs.
    ports_detector = PortsDetector(args.zmq_port, args.auto_detect_port,
                                   store_file=str(jobs_root_path / 'ports_config'))
    ports_detector.create_config_file()
    scheduler = PydioScheduler.Instance(jobs_root_path=jobs_root_path, jobs_loader=jobs_loader)
    server = PydioApi(ports_detector.get_open_port('flask_api'))
    from pydio.job import manager
    manager.api_server = server
    try:
        thread.start_new_thread(server.start_server, ())
        # Give the server thread a moment to bind before checking it.
        time.sleep(0.3)
        if not server.running:
            logging.error('Cannot start web server, exiting application')
            sys.exit(1)
        scheduler.start_all()
    except (KeyboardInterrupt, SystemExit):
        server.shutdown_server()
        sys.exit()