Example #1
0
 def enrich_job(self, job_data, job_id):
     """Augment *job_data* in place with runtime status for *job_id*.

     Adds 'running', the most recent logged event (if any) as
     'last_event', and — while the job is running — its progress as
     'state'.
     """
     scheduler = PydioScheduler.Instance()
     is_running = scheduler.is_job_running(job_id)
     job_data['running'] = is_running
     event_logger = EventLogger(JobsLoader.Instance().build_job_data_path(job_id))
     recent = event_logger.get_all(1, 0)
     if recent:
         job_data['last_event'] = recent.pop()
     if is_running:
         job_data['state'] = scheduler.get_job_progress(job_id)
Example #2
0
 def get(self, cmd, job_id=None):
     """Dispatch the control signal *cmd*, optionally scoped to one job.

     Without a job id the signal is forwarded globally. For a specific
     job, 'enable'/'disable' first persist the new active flag and
     reload the scheduler configs before the signal is delivered.
     """
     if not job_id:
         return PydioScheduler.Instance().handle_generic_signal(self, cmd)
     if cmd in ('enable', 'disable'):
         loader = JobsLoader.Instance()
         job_config = loader.get_job(job_id)
         job_config.active = (cmd == 'enable')
         loader.update_job(job_config)
         PydioScheduler.Instance().reload_configs()
     PydioScheduler.Instance().handle_job_signal(self, cmd, job_id)
     return ('success', )
Example #3
0
 def enrich_job(self, job_data, job_id, get_notification=False):
     """Add runtime status for *job_id* to *job_data* in place.

     Sets 'running', optionally consumes a pending notification into
     'notification', records the latest event as 'last_event', and —
     while running — the progress snapshot as 'state'.
     """
     scheduler = PydioScheduler.Instance()
     is_running = scheduler.is_job_running(job_id)
     job_data['running'] = is_running
     event_logger = EventLogger(JobsLoader.Instance().build_job_data_path(job_id))
     if get_notification:
         pending = event_logger.consume_notification()
         if pending:
             job_data['notification'] = pending
     recent = event_logger.get_all(1, 0)
     if recent:
         job_data['last_event'] = recent.pop()
     if is_running:
         job_data['state'] = scheduler.get_job_progress(job_id)
Example #4
0
 def delete(self, job_id):
     """Delete job *job_id*: remove its config, stop it and wipe its data.

     Returns a confirmation message together with HTTP status 204.
     """
     JobsLoader.Instance().delete_job(job_id)
     scheduler = PydioScheduler.Instance()
     scheduler.reload_configs()
     scheduler.disable_job(job_id)
     JobsLoader.Instance().clear_job_data(job_id, parent=True)
     # Fix: add the missing space so the message reads "<id> deleted"
     # instead of "<id>deleted".
     return job_id + " deleted", 204
Example #5
0
 def get(self, job_id):
     """Return the concatenated change history for *job_id*.

     An optional ?status=SUCCESS|FAILED query parameter narrows the
     result to successful or failed changes; any other status yields an
     error dict, as does an unknown job id.
     """
     jobs = JobsLoader.Instance().get_jobs()
     if job_id not in jobs:
         return {"error": "Cannot find job"}
     # Import kept for its module side effects; path differs when run
     # inside or outside the pydio package.
     try:
         from pydio.job.change_history import ChangeHistory
     except ImportError:
         from job.change_history import ChangeHistory
     job = PydioScheduler.Instance().control_threads[job_id]
     history = job.current_store.change_history
     args = request.args
     if 'status' in args:
         status = args['status'].upper()
         if status == 'SUCCESS':
             entries = history.get_all_success()
         elif status == 'FAILED':
             entries = history.get_all_failed()
         else:
             return {
                 'error': "Unknown status: " + urllib2.quote(args['status'])
             }
     else:
         entries = history.get_all()
     res = ""
     for entry in entries:
         res += entry
     return res
Example #6
0
    def post(self):
        """Create or update a sync job from the posted JSON configuration.

        Two special keys short-circuit before anything is saved:
        - 'test_path': resolve and return the default local directory for
          the workspace.
        - 'compute_sizes': estimate the remote and local data volume and a
          transfer ETA, returned inside the request payload.
        Otherwise the decoded job is persisted, the scheduler reloaded and
        the job restarted (its local data cleared unless 'toggle_status'
        is present).
        """
        # NOTE(review): return value unused — presumably primes the
        # loader's job cache; confirm it is still needed.
        JobsLoader.Instance().get_jobs()
        json_req = request.get_json()
        new_job = JobConfig.object_decoder(json_req)

        if 'test_path' in json_req:
            json_req['directory'] = os.path.join(
                ConfigManager.Instance().get_data_path(),
                json_req['repoObject']['label'])
            return json_req
        elif 'compute_sizes' in json_req:
            # Assumed link rates used for the ETA below (values look like
            # 2 MiB/s down and 0.1 MiB/s up, multiplied by 8 to work in
            # bits) — TODO confirm the intended units.
            dl_rate = 2 * 1024 * 1024
            up_rate = 0.1 * 1024 * 1024
            # COMPUTE REMOTE SIZE
            from pydio.sdk.remote import PydioSdk
            trust_ssl = False
            if 'trust_ssl' in json_req:
                trust_ssl = json_req['trust_ssl']
            sdk = PydioSdk(
                json_req['server'],
                json_req['workspace'],
                json_req['remote_folder'],
                '',
                auth=(json_req['user'], json_req['password']),
                device_id=ConfigManager.Instance().get_device_id(),
                skip_ssl_verify=trust_ssl,
                proxies=ConfigManager.Instance().get_defined_proxies())
            # Single-element list so the closure below can accumulate the
            # remote byte count (no 'nonlocal' in Python 2).
            up = [0.0]

            def callback(location, change, info):
                # Sum the size of every remote change that is a real file
                # (directories carry md5 == "directory").
                if change and "bytesize" in change and change[
                        "md5"] != "directory":
                    up[0] += float(change["bytesize"])

            sdk.changes_stream(0, callback)
            # COMPUTE LOCAL SIZE
            down = 0.0
            if os.path.exists(json_req['directory']):
                for dirpath, dirnames, filenames in os.walk(
                        json_req['directory']):
                    for f in filenames:
                        fp = os.path.join(dirpath, f)
                        try:
                            down += os.path.getsize(fp)
                        except OSError:
                            # File may vanish between walk() and getsize();
                            # skip it.
                            pass

            json_req['byte_size'] = up[0] + down
            json_req['eta'] = up[0] * 8 / dl_rate + down * 8 / up_rate
            return json_req

        JobsLoader.Instance().update_job(new_job)
        scheduler = PydioScheduler.Instance()
        scheduler.reload_configs()
        scheduler.disable_job(new_job.id)
        if not 'toggle_status' in json_req:
            JobsLoader.Instance().clear_job_data(new_job.id)
        scheduler.enable_job(new_job.id)
        return JobConfig.encoder(new_job)
Example #7
0
    def get(self, job_id):
        """Return recent log events and current progress for *job_id*.

        Without query parameters the 20 most recent events are returned;
        otherwise the first query-string key/value pair is used as a
        logger filter. Returns (message, 404) when the job id is unknown.
        """
        if not job_id in JobsLoader.Instance().get_jobs():
            return "Can't find any job config with this ID.", 404

        logger = EventLogger(JobsLoader.Instance().build_job_data_path(job_id))
        if not request.args:
            logs = logger.get_all(20, 0)
        else:
            # Use the first query parameter as the filter criterion.
            # next(iter(...)) instead of .keys()[0]: avoids shadowing the
            # `filter` builtin and also works on Python 3 dict views.
            filter_name = next(iter(request.args))
            filter_value = request.args.get(filter_name)
            logs = logger.filter(filter_name, filter_value)

        tasks = PydioScheduler.Instance().get_job_progress(job_id)
        return {"logs": logs, "running": tasks}
Example #8
0
    def post(self):
        """Record one conflict resolution and resume the job when clean.

        The posted JSON carries 'job_id', 'node_path' and 'status'. Once
        no conflicts remain and the job is active, its worker thread is
        kicked to run immediately. Returns (message, 404) for an unknown
        job id, otherwise echoes the payload.
        """
        json_conflict = request.get_json()
        job_id = json_conflict['job_id']
        try:
            job_config = JobsLoader.Instance().get_job(job_id)
        except Exception:
            return "Can't find any job config with this ID.", 404

        data_path = JobsLoader.Instance().build_job_data_path(job_id)
        db_handler = LocalDbHandler(data_path)
        db_handler.update_node_status(json_conflict['node_path'],
                                      json_conflict['status'])
        if not db_handler.count_conflicts() and job_config.active:
            sync_thread = PydioScheduler.Instance().get_thread(job_id)
            if sync_thread:
                sync_thread.start_now()
        return json_conflict
Example #9
0
File: main.py  Project: sanand0/pydio-sync
def main(argv=sys.argv[1:]):
    """Entry point of the sync agent (Python 2 codebase: uses `unicode`,
    `thread`, `str.decode`).

    Parses CLI options, prepares the jobs data directory and logging,
    then either handles a one-shot mode (autostart registration, proxy
    configuration, diagnostics, HTML string extraction) or stores/loads
    job configs, starts the HTTP API server in a background thread and
    runs the scheduler.
    """
    parser = argparse.ArgumentParser('Pydio Synchronization Tool')
    # Pass a server configuration via arguments
    parser.add_argument('-s', '--server', help='Server URL, with http(s) and path to pydio', type=unicode,
                        default='http://localhost')
    parser.add_argument('-d', '--directory', help='Local directory', type=unicode, default=None)
    parser.add_argument('-w', '--workspace', help='Id or Alias of workspace to synchronize', type=unicode, default=None)
    parser.add_argument('-r', '--remote_folder', help='Path to an existing folder of the workspace to synchronize',
                        type=unicode, default=None)
    parser.add_argument('-u', '--user', help='User name', type=unicode, default=None)
    parser.add_argument('-p', '--password', help='Password', type=unicode, default=None)
    parser.add_argument('-px', '--proxy', help='Enter like http::username::password::proxyIP::proxyPort::...::check_proxy_flag '
                        'By default proxy connection test happens, to avoid mention 0 or False', type=unicode, default=None)
    parser.add_argument('-mp', '--memory_profile', help="To Generate the memory profile :: use <<-mp True >> as argument",
                        type=unicode, default=False)
    parser.add_argument('-dir', '--direction', help='Synchro Direction', type=str, default='bi')
    # Pass a configuration file
    parser.add_argument('-f', '--file', type=unicode, help='Json file containing jobs configurations')
    # Pass a path to rdiff binary
    parser.add_argument('-i', '--rdiff', type=unicode, help='Path to rdiff executable', default=None)
    # Configure API access
    parser.add_argument('--api_user', help='Set the agent API username (instead of random)', type=unicode, default=None)
    parser.add_argument('--api_password', help='Set the agent API password (instead of random)', type=unicode, default=None)
    parser.add_argument('--api_address', help='Set the agent IP address. By default, no address means that local '
                                              'access only is allowed.', type=str, default=None)
    parser.add_argument('--api_port', help='Set the agent port. By default, will try to use 5556, and if not '
                                           'available will switch to another port.', type=int, default=5556)
    parser.add_argument('--diag', help='Run self diagnostic', action='store_true', default=False)
    parser.add_argument('--diag-http', help='Check server connection', action='store_true', default=False)
    parser.add_argument('--diag-imports', help='Check imports and exit', action='store_true', default=False)
    parser.add_argument('--save-cfg', action='store_true', default=True)
    parser.add_argument('--extract_html', help='Utils for extracting HTML strings and compiling po files to json',
                        type=unicode, default=False)
    parser.add_argument('--auto-start', action='store_true')
    parser.add_argument('-v', '--verbose', action='count', default=1)
    args, _ = parser.parse_known_args(argv)

    # Prefer a 'data' directory next to this file; fall back to the
    # platform default path, creating it on first start.
    jobs_root_path = Path(__file__).parent / 'data'
    if not jobs_root_path.exists():
        jobs_root_path = Path(DEFAULT_DATA_PATH.encode(guess_filesystemencoding()))
        if not jobs_root_path.exists():
            jobs_root_path.mkdir(parents=True)
            # This is a first start
            user_dir = unicode(get_user_home(APP_NAME))
            if not os.path.exists(user_dir):
                try:
                    os.mkdir(user_dir)
                except Exception:
                    # Best effort: favorites registration below is optional.
                    pass
            if os.path.exists(user_dir):
                from pydio.utils.favorites_manager import add_to_favorites
                add_to_favorites(user_dir, APP_NAME)

    setup_logging(args.verbose, jobs_root_path)

    if args.auto_start:
        import pydio.autostart

        pydio.autostart.setup(argv)
        return 0

    # NOTE(review): str().decode() is Python 2 only.
    u_jobs_root_path = str(jobs_root_path).decode(guess_filesystemencoding())
    config_manager = ConfigManager.Instance(configs_path=u_jobs_root_path, data_path=DEFAULT_PARENT_PATH)

    jobs_loader = JobsLoader.Instance(data_path=u_jobs_root_path)
    config_manager.set_rdiff_path(args.rdiff)

    if args.proxy is not None:
        # Proxy string format: proto::user::password::host::port, repeated,
        # with an optional trailing check flag (hence the %5 arithmetic).
        # NOTE(review): when the field count is wrong, `data` is bound to
        # logging.error()'s return value (None) and the loop below will
        # raise — confirm whether an early return was intended.
        data = args.proxy.split('::') if len(args.proxy.split('::'))%5 in range(0, 2) else logging.error("Wrong number of parameters pased for proxy")
        msg = {}
        for i in range(len(args.proxy.split('::'))/5):
            msg[data[i*5]] = {"username": data[i*5+1], "password": data[i*5+2], "hostname": data[i*5+3], "port": data[i*5+4]}
        proxy_flag = data[-1] if len(args.proxy.split('::'))%5 == 1 else True  # default true
        config_manager.set_user_proxy(msg, check_proxy_flag=proxy_flag)
        return 0

    if args.server and args.directory and args.workspace:
        job_config = JobConfig()
        job_config.load_from_cliargs(args)
        data = {job_config.id: job_config}
        if args.save_cfg:
            logging.info("Storing config in %s", os.path.join(u_jobs_root_path, 'configs.json'))
            jobs_loader.save_jobs(data)
    else:
        fp = args.file
        if fp and fp != '.':
            logging.info("Loading config from %s", fp)
            jobs_loader.config_file = fp
            jobs_loader.load_config()
        data = jobs_loader.get_jobs()

    logging.debug("data: %s" % json.dumps(data, default=JobConfig.encoder, indent=2))

    if args.diag_imports:
        # nothing more to do
        return sys.exit(0)

    if args.memory_profile:
        from pydio.utils.pydio_profiler import LogFile
        sys.stdout = LogFile('stdout')

    if args.extract_html:
        from pydio.utils.i18n import PoProcessor
        proc = PoProcessor()
        if args.extract_html == 'extract':
            root = Path(__file__).parent
            count = proc.extract_all_html_strings(str(root / 'ui' / 'res' ), str(root / 'res' / 'i18n' / 'html_strings.py' ))
            logging.info('Wrote %i strings to html_strings.py - Now update PO files using standard tools' % count)
            # nothing more to do
        elif args.extract_html == 'compile':
            root = Path(__file__).parent
            proc.po_to_json(str(root / 'res' / 'i18n' / '*.po'), str(root / 'ui' / 'res' / 'i18n.js'))
        return sys.exit(0)

    if args.diag_http:
        # Smoke-test connectivity against the first configured job.
        # NOTE(review): data.keys()[0] is Python 2 only.
        keys = data.keys()
        if args.password:
            smoke_tests = PydioDiagnostics(
                data[keys[0]].server, data[keys[0]].workspace, data[keys[0]].remote_folder, data[keys[0]].user_id,
                args.password)
        else:
            smoke_tests = PydioDiagnostics(
                data[keys[0]].server, data[keys[0]].workspace, data[keys[0]].remote_folder, data[keys[0]].user_id)
        rc = smoke_tests.run()
        if rc != 0:
            logging.error("Diagnostics failed: %s %s" % (str(rc), smoke_tests.status_message))
        return sys.exit(rc)

    ports_detector = PortsDetector(store_file=str(jobs_root_path / 'ports_config'), username=args.api_user,
                                   password=args.api_password, default_port=args.api_port)
    ports_detector.create_config_file()

    scheduler = PydioScheduler.Instance(jobs_root_path=jobs_root_path, jobs_loader=jobs_loader)
    server = PydioApi(ports_detector.get_port(), ports_detector.get_username(),
        ports_detector.get_password(), external_ip=args.api_address)
    from pydio.job import manager
    manager.api_server = server

    try:

        # Run the HTTP API in a background thread; give it a moment to
        # bind before checking that it actually came up.
        thread.start_new_thread(server.start_server, ())
        time.sleep(0.3)
        if not server.running:
            logging.error('Cannot start web server, exiting application')
            sys.exit(1)
        scheduler.start_all()

    except (KeyboardInterrupt, SystemExit):
        server.shutdown_server()
        sys.exit()
Example #10
0
def main(argv=sys.argv[1:]):
    """Entry point of the sync agent (older variant using zmq port
    options; Python 2 codebase — `unicode`, `thread`).

    Parses CLI options, prepares the jobs data directory and logging,
    handles one-shot modes (autostart registration, diagnostics, HTML
    string extraction), then stores/loads job configs, starts the HTTP
    API server in a background thread and runs the scheduler.
    """
    parser = argparse.ArgumentParser('Pydio Synchronization Tool')
    parser.add_argument('-s',
                        '--server',
                        help='Server URL, with http(s) and path to pydio',
                        type=unicode,
                        default='http://localhost')
    parser.add_argument('-d',
                        '--directory',
                        help='Local directory',
                        type=unicode,
                        default=None)
    parser.add_argument('-w',
                        '--workspace',
                        help='Id or Alias of workspace to synchronize',
                        type=unicode,
                        default=None)
    parser.add_argument(
        '-r',
        '--remote_folder',
        help='Path to an existing folder of the workspace to synchronize',
        type=unicode,
        default=None)
    parser.add_argument('-u',
                        '--user',
                        help='User name',
                        type=unicode,
                        default=None)
    parser.add_argument('-p',
                        '--password',
                        help='Password',
                        type=unicode,
                        default=None)
    parser.add_argument('-dir',
                        '--direction',
                        help='Synchro Direction',
                        type=str,
                        default='bi')
    parser.add_argument('-f',
                        '--file',
                        type=unicode,
                        help='Json file containing jobs configurations')
    parser.add_argument(
        '-z',
        '--zmq_port',
        type=int,
        help=
        'Available port for zmq, both this port and this port +1 will be used',
        default=5556)
    parser.add_argument('-i',
                        '--rdiff',
                        type=unicode,
                        help='Path to rdiff executable',
                        default=None)
    parser.add_argument('--diag',
                        help='Run self diagnostic',
                        action='store_true',
                        default=False)
    parser.add_argument('--diag-http',
                        help='Check server connection',
                        action='store_true',
                        default=False)
    parser.add_argument('--diag-imports',
                        help='Check imports and exit',
                        action='store_true',
                        default=False)
    parser.add_argument('--save-cfg', action='store_true', default=True)
    parser.add_argument(
        '--extract_html',
        help='Utils for extracting HTML strings and compiling po files to json',
        type=unicode,
        default=False)
    parser.add_argument('--auto-start', action='store_true')
    parser.add_argument('--auto_detect_port',
                        type=bool,
                        help='Auto detect available ports',
                        default=False)
    parser.add_argument('-v', '--verbose', action='count', default=1)
    args, _ = parser.parse_known_args(argv)

    # Prefer a 'data' directory next to this file; fall back to the
    # platform default path, creating it when missing.
    jobs_root_path = Path(__file__).parent / 'data'
    if not jobs_root_path.exists():
        jobs_root_path = Path(DEFAULT_DATA_PATH)
        if not jobs_root_path.exists():
            jobs_root_path.mkdir()

    setup_logging(args.verbose, jobs_root_path)

    if args.auto_start:
        import pydio.autostart

        pydio.autostart.setup(argv)
        return 0

    jobs_loader = JobsLoader.Instance(data_path=str(jobs_root_path))
    config_manager = ConfigManager.Instance(data_path=str(jobs_root_path))
    config_manager.set_rdiff_path(args.rdiff)

    if args.server and args.directory and args.workspace:
        # A full job description was passed on the command line.
        job_config = JobConfig()
        job_config.load_from_cliargs(args)
        data = {job_config.id: job_config}
        if args.save_cfg:
            logging.info("Storing config in %s",
                         str(jobs_root_path / 'configs.json'))
            jobs_loader.save_jobs(data)
    else:
        fp = args.file
        if fp and fp != '.':
            logging.info("Loading config from %s", fp)
            jobs_loader.config_file = fp
            jobs_loader.load_config()
        data = jobs_loader.get_jobs()

    logging.debug("data: %s" %
                  json.dumps(data, default=JobConfig.encoder, indent=2))

    if args.diag_imports:
        # nothing more to do
        return sys.exit(0)

    if args.extract_html:
        from pydio.utils.i18n import PoProcessor
        proc = PoProcessor()
        if args.extract_html == 'extract':
            root = Path(__file__).parent
            count = proc.extract_all_html_strings(
                str(root / 'ui' / 'res'),
                str(root / 'res' / 'i18n' / 'html_strings.py'))
            logging.info(
                'Wrote %i strings to html_strings.py - Now update PO files using standard tools'
                % count)
            # nothing more to do
        elif args.extract_html == 'compile':
            root = Path(__file__).parent
            proc.po_to_json(str(root / 'res' / 'i18n' / '*.po'),
                            str(root / 'ui' / 'res' / 'i18n.js'))
        return sys.exit(0)

    if args.diag_http:
        # Smoke-test connectivity against the first configured job.
        # NOTE(review): data.keys()[0] is Python 2 only.
        keys = data.keys()
        if args.password:
            smoke_tests = PydioDiagnostics(data[keys[0]].server,
                                           data[keys[0]].workspace,
                                           data[keys[0]].remote_folder,
                                           data[keys[0]].user_id,
                                           args.password)
        else:
            smoke_tests = PydioDiagnostics(data[keys[0]].server,
                                           data[keys[0]].workspace,
                                           data[keys[0]].remote_folder,
                                           data[keys[0]].user_id)
        rc = smoke_tests.run()
        if rc != 0:
            logging.error("Diagnostics failed: %s %s" %
                          (str(rc), smoke_tests.status_message))
        return sys.exit(rc)

    ports_detector = PortsDetector(args.zmq_port,
                                   args.auto_detect_port,
                                   store_file=str(jobs_root_path /
                                                  'ports_config'))
    ports_detector.create_config_file()

    scheduler = PydioScheduler.Instance(jobs_root_path=jobs_root_path,
                                        jobs_loader=jobs_loader)
    server = PydioApi(ports_detector.get_open_port('flask_api'))
    from pydio.job import manager
    manager.api_server = server

    try:

        # Run the HTTP API in a background thread; give it a moment to
        # bind before checking that it actually came up.
        thread.start_new_thread(server.start_server, ())
        time.sleep(0.3)
        if not server.running:
            logging.error('Cannot start web server, exiting application')
            sys.exit(1)
        scheduler.start_all()

    except (KeyboardInterrupt, SystemExit):
        server.shutdown_server()
        sys.exit()