Example #1
 def delete(self, job_id):
     JobsLoader.Instance().delete_job(job_id)
     scheduler = PydioScheduler.Instance()
     scheduler.reload_configs()
     scheduler.disable_job(job_id)
     JobsLoader.Instance().clear_job_data(job_id, parent=True)
     return job_id + " deleted", 204
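A minimal client-side sketch of invoking this handler over HTTP. The agent address, credentials, and job id are placeholder assumptions; the /jobs/<job_id> route itself is registered in Example #15.

import requests

AGENT = "http://localhost:5556"        # assumed agent address
AUTH = ("api_user", "api_password")    # assumed Basic-auth credentials

# DELETE /jobs/<job_id> unregisters the job, reloads the scheduler and
# clears the job's cached data; the handler answers with HTTP 204.
resp = requests.delete(AGENT + "/jobs/my-job-id", auth=AUTH)
print(resp.status_code)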
Example #2
    def get(self, job_id='', relative_path=''):
        """
        retrieves the stat info for a given file / list the active job details
        :param job_id: (optional) Job Id of the file/ folder
        :param relative_path: (optional) relative path of the file/folder with respect
               to the corresponding repository(job_id)
        :returns a json response
        """
        if request.path == '/stat':
            jobs = JobsLoader.Instance().get_jobs()
            json_jobs = {}
            for job in jobs:
                json_jobs.update({jobs[job].id: [jobs[job].directory, jobs[job].server, jobs[job].label, jobs[job].workspace]})
            return json_jobs
        else:

            directory_path = JobsLoader.Instance().get_job(job_id).directory
            base_path = JobsLoader.Instance().build_job_data_path(job_id)
            path = os.path.join(directory_path, relative_path)

            #r = os.stat(path)

            # Get the status of the file idle/busy... by join of ajxp_index and ajxp_node_status tables
            db_handler = LocalDbHandler(base_path, directory_path)
            if Path(str(path.encode("utf-8"))).is_dir():
                node_status = db_handler.get_directory_node_status("/" + relative_path)
            else:
                node_status = db_handler.get_node_status("/" + relative_path)

            return {"node_status": node_status}
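A hedged sketch of querying the /stat branch above; the agent address and credentials are assumptions, as before.

import requests

AGENT = "http://localhost:5556"        # assumed agent address
AUTH = ("api_user", "api_password")    # assumed Basic-auth credentials

# GET /stat maps every job id to [directory, server, label, workspace].
print(requests.get(AGENT + "/stat", auth=AUTH).json())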
Example #3
    def get(self, job_id):
        if job_id not in JobsLoader.Instance().get_jobs():
            return "Can't find any job config with this ID.", 404

        dbHandler = LocalDbHandler(
            JobsLoader.Instance().build_job_data_path(job_id))
        return dbHandler.list_conflict_nodes()
Example #4
    def post(self):
        JobsLoader.Instance().get_jobs()
        json_req = request.get_json()
        new_job = JobConfig.object_decoder(json_req)

        if 'test_path' in json_req:
            json_req['directory'] = os.path.join(
                ConfigManager.Instance().get_data_path(),
                json_req['repoObject']['label'])
            return json_req
        elif 'compute_sizes' in json_req:
            dl_rate = 2 * 1024 * 1024
            up_rate = 0.1 * 1024 * 1024
            # COMPUTE REMOTE SIZE
            from pydio.sdk.remote import PydioSdk
            trust_ssl = False
            if 'trust_ssl' in json_req:
                trust_ssl = json_req['trust_ssl']
            sdk = PydioSdk(
                json_req['server'],
                json_req['workspace'],
                json_req['remote_folder'],
                '',
                auth=(json_req['user'], json_req['password']),
                device_id=ConfigManager.Instance().get_device_id(),
                skip_ssl_verify=trust_ssl,
                proxies=ConfigManager.Instance().get_defined_proxies())
            up = [0.0]

            def callback(location, change, info):
                if change and "bytesize" in change and change["md5"] != "directory":
                    up[0] += float(change["bytesize"])

            sdk.changes_stream(0, callback)
            # COMPUTE LOCAL SIZE
            down = 0.0
            if os.path.exists(json_req['directory']):
                for dirpath, dirnames, filenames in os.walk(
                        json_req['directory']):
                    for f in filenames:
                        fp = os.path.join(dirpath, f)
                        try:
                            down += os.path.getsize(fp)
                        except OSError:
                            pass

            json_req['byte_size'] = up[0] + down
            json_req['eta'] = up[0] * 8 / dl_rate + down * 8 / up_rate
            return json_req

        JobsLoader.Instance().update_job(new_job)
        scheduler = PydioScheduler.Instance()
        scheduler.reload_configs()
        scheduler.disable_job(new_job.id)
        if 'toggle_status' not in json_req:
            JobsLoader.Instance().clear_job_data(new_job.id)
        scheduler.enable_job(new_job.id)
        return JobConfig.encoder(new_job)
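The compute_sizes branch converts byte counts to bits (the * 8) and divides by the hard-coded link rates to estimate a transfer time. A worked sketch of that arithmetic with hypothetical sizes:

dl_rate = 2 * 1024 * 1024        # download rate, as hard-coded above
up_rate = 0.1 * 1024 * 1024      # upload rate, as hard-coded above

remote_bytes = 50 * 1024 * 1024  # hypothetical remote payload (to download)
local_bytes = 10 * 1024 * 1024   # hypothetical local payload (to upload)

eta = remote_bytes * 8 / dl_rate + local_bytes * 8 / up_rate
print(eta)                       # 200 + 800 -> 1000.0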
Example #5
 def get(self, cmd, job_id=None):
     if job_id:
         if cmd in ('enable', 'disable'):
             job_config = JobsLoader.Instance().get_job(job_id)
             job_config.active = (cmd == 'enable')
             JobsLoader.Instance().update_job(job_config)
             PydioScheduler.Instance().reload_configs()
         PydioScheduler.Instance().handle_job_signal(self, cmd, job_id)
     else:
         return PydioScheduler.Instance().handle_generic_signal(self, cmd)
     return ('success', )
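Example #15 registers this resource at /cmd/<cmd>/<job_id>, so a job can be paused and resumed with plain GETs. A sketch, with the same placeholder address and credentials as above:

import requests

AGENT = "http://localhost:5556"        # assumed
AUTH = ("api_user", "api_password")    # assumed

requests.get(AGENT + "/cmd/disable/my-job-id", auth=AUTH)  # persist active=False, pause
requests.get(AGENT + "/cmd/enable/my-job-id", auth=AUTH)   # persist active=True, resume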
Example #6
    def get(self, job_id):
        if job_id not in JobsLoader.Instance().get_jobs():
            return "Can't find any job config with this ID.", 404

        logger = EventLogger(JobsLoader.Instance().build_job_data_path(job_id))
        if not request.args:
            logs = logger.get_all(20, 0)
        else:
            filter_name = request.args.keys()[0]  # avoid shadowing the filter builtin
            filter_parameter = request.args.get(filter_name)
            logs = logger.filter(filter_name, filter_parameter)

        tasks = PydioScheduler.Instance().get_job_progress(job_id)
        return {"logs": logs, "running": tasks}
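Example #15 maps this resource to /jobs/<job_id>/logs. Without query parameters the handler returns the 20 most recent entries; with a single query parameter it forwards the name/value pair to EventLogger.filter(). A sketch with placeholder values:

import requests

AGENT = "http://localhost:5556"        # assumed
AUTH = ("api_user", "api_password")    # assumed

data = requests.get(AGENT + "/jobs/my-job-id/logs", auth=AUTH).json()
print(data["logs"], data["running"])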
Example #7
    def get(self, job_id):
        if job_id != 'request':
            jobs = JobsLoader.Instance().get_jobs()
            if job_id not in jobs:
                return {"error": "Cannot find job"}
            job = jobs[job_id]
            url = job.server + '/api/' + job.workspace + '/ls/?options=d&recursive=true'
            auth = (job.user_id, keyring.get_password(job.server, job.user_id))
            verify = not job.trust_ssl
        else:
            args = request.args
            base = args['url'].rstrip('/')
            verify = args['trust_ssl'] != 'true'
            url = base + '/api/' + args['ws'] + '/ls/?options=d&recursive=true&max_depth=2'
            if 'password' in args:
                auth = (args['user'], args['password'])
            else:
                auth = (args['user'], keyring.get_password(base, args['user']))

        if verify and "REQUESTS_CA_BUNDLE" in os.environ:
            verify = os.environ["REQUESTS_CA_BUNDLE"]
        resp = requests.get(url, stream=True, auth=auth, verify=verify)
        o = xmltodict.parse(resp.content)
        if 'tree' not in o or 'message' in o['tree']:
            return [{'error': 'Cannot load workspace'}]
        if 'tree' not in o['tree']:
            return []
        if isinstance(o['tree']['tree'], types.DictType):
            return [o['tree']['tree']]
        return o['tree']['tree']
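Note that types.DictType is the Python 2 alias of dict and is gone in Python 3. A small, version-neutral sketch of the same normalization the handler performs on xmltodict output:

import xmltodict

# xmltodict yields a dict for a single child node and a list for several,
# so a lone node is wrapped into a one-element list.
doc = xmltodict.parse("<tree><tree name='a'/></tree>")
nodes = doc['tree']['tree']
if isinstance(nodes, dict):
    nodes = [nodes]
print(nodes)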
Example #8
 def get(self, job_id):
     jobs = JobsLoader.Instance().get_jobs()
     if job_id not in jobs:
         return {"error": "Cannot find job"}
     try:
         from pydio.job.change_history import ChangeHistory
     except ImportError:
         from job.change_history import ChangeHistory
     scheduler = PydioScheduler.Instance()
     job = scheduler.control_threads[job_id]
     args = request.args
     res = ""
     if 'status' in args:
         if args['status'].upper() == 'SUCCESS':
             for entry in job.current_store.change_history.get_all_success():
                 res += entry
         elif args['status'].upper() == 'FAILED':
             for entry in job.current_store.change_history.get_all_failed():
                 res += entry
         else:
             return {
                 'error': "Unknown status: " + urllib2.quote(args['status'])
             }
     else:
         for entry in job.current_store.change_history.get_all():
             res += entry
     return res
Example #9
    def start_watcher(self):
        if self.watcher:
            if self.watcher_first_run:

                def status_callback(status):
                    self.logger.log_state(status, 'sync')

                self.init_global_progress()

                try:
                    self.global_progress['status_indexing'] = 1
                    self.logger.log_state(
                        _('Checking changes since last launch...'), "sync")
                    very_first = True
                    self.db_handler.update_bulk_node_status_as_idle()
                    self.watcher.check_from_snapshot(
                        state_callback=status_callback)
                except DBCorruptedException as e:
                    self.stop()
                    JobsLoader.Instance().clear_job_data(self.job_config.id)
                    logging.error(e)
                    return
                except Exception as e:
                    logging.exception(e)
                    self.interrupt = True
                    self.logger.log_state(
                        _('Oops, error while indexing the local folder. Pausing the task.'
                          ), 'error')
                    logging.error(e)

                self.watcher_first_run = False
            self.watcher.start()
Example #10
    def get(self, job_id):
        if job_id != 'request':
            jobs = JobsLoader.Instance().get_jobs()
            if job_id not in jobs:
                return {"error": "Cannot find job"}
            job = jobs[job_id]

            url = job.server + '/api/pydio/state/user/repositories?format=json'
            auth = (job.user_id, keyring.get_password(job.server, job.user_id))
            verify = not job.trust_ssl
        else:
            args = request.args
            base = args['url'].rstrip('/')
            verify = args['trust_ssl'] != 'true'
            url = base + '/api/pydio/state/user/repositories?format=json'
            if 'password' in args:
                auth = (args['user'], args['password'])
            else:
                auth = (args['user'], keyring.get_password(base, args['user']))

        if verify and "REQUESTS_CA_BUNDLE" in os.environ:
            verify = os.environ["REQUESTS_CA_BUNDLE"]
        resp = requests.get(url, stream=True, auth=auth, verify=verify)
        data = json.loads(resp.content)
        if 'repositories' in data and 'repo' in data['repositories']:
            if isinstance(data['repositories']['repo'], types.DictType):
                data['repositories']['repo'] = [data['repositories']['repo']]
            data['repositories']['repo'] = filter(
                lambda x: not x['@access_type'].startswith('ajxp_'),
                data['repositories']['repo'])

        return data
Example #11
    def post(self):
        json_conflict = request.get_json()
        job_id = json_conflict['job_id']
        try:
            job_config = JobsLoader.Instance().get_job(job_id)
        except Exception:
            return "Can't find any job config with this ID.", 404

        dbHandler = LocalDbHandler(
            JobsLoader.Instance().build_job_data_path(job_id))
        dbHandler.update_node_status(json_conflict['node_path'],
                                     json_conflict['status'])
        if not dbHandler.count_conflicts() and job_config.active:
            t = PydioScheduler.Instance().get_thread(job_id)
            if t:
                t.start_now()
        return json_conflict
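Example #15 also registers this resource at /jobs/conflicts. A client-side sketch of resolving a conflict; all values are placeholders, and the status string in particular is hypothetical, since the handler forwards whatever the caller sends to update_node_status():

import requests

AGENT = "http://localhost:5556"        # assumed
AUTH = ("api_user", "api_password")    # assumed

payload = {
    "job_id": "my-job-id",             # placeholder
    "node_path": "/folder/file.txt",   # placeholder
    "status": "SOLVED",                # hypothetical status value
}
print(requests.post(AGENT + "/jobs/conflicts", json=payload, auth=AUTH).json())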
Example #12
 def enrich_job(self, job_data, job_id, get_notification=False):
     running = PydioScheduler.Instance().is_job_running(job_id)
     job_data['running'] = running
     logger = EventLogger(JobsLoader.Instance().build_job_data_path(job_id))
     if get_notification:
         notification = logger.consume_notification()
         if notification:
             job_data['notification'] = notification
     last_events = logger.get_all(1, 0)
     if last_events:
         job_data['last_event'] = last_events.pop()
     if running:
         job_data['state'] = PydioScheduler.Instance().get_job_progress(job_id)
     try:
         job_data['last_action'] = logger.get_last_action()[-1][-1]
     except IndexError:
         pass
Example #13
 def enrich_job(self, job_data, job_id):
     running = PydioScheduler.Instance().is_job_running(job_id)
     job_data['running'] = running
     logger = EventLogger(JobsLoader.Instance().build_job_data_path(job_id))
     last_events = logger.get_all(1, 0)
     if last_events:
         job_data['last_event'] = last_events.pop()
     if running:
         job_data['state'] = PydioScheduler.Instance().get_job_progress(job_id)
Example #14
    def get(self, job_id):
            """
            retrieves the stat info for a given file / list the active job details
            :param job_id: (optional) Job Id of the file/ folder that needs to be shared
            :returns a json response
                        on success: returns a shared link
            """
            args = request.args
            jobs = JobsLoader.Instance().get_jobs()
            if job_id not in jobs:
                return {"error": "Cannot find job"}
            job = jobs[job_id]

            from pydio.sdk.remote import PydioSdk
            remote_instance = PydioSdk(job.server, job.workspace, job.remote_folder, job.user_id,
                           auth="",
                           device_id=ConfigManager.Instance().get_device_id(),
                           skip_ssl_verify=job.trust_ssl,
                           proxies=ConfigManager.Instance().get_defined_proxies(),
                           timeout=job.timeout)

            if args['action'] == 'share':
                relative_path = os.path.normpath(job.remote_folder + "/" + args["relative_path"]).replace('\\', '/')
                # Check if the shared link is already present
                check_res = remote_instance.check_share_link(
                    relative_path
                )

                if len(check_res) > 2:  # when no share link exists, the content length is 0 for a file and 2 for a folder
                    res = json.loads(check_res)
                    if "minisite" in res and res["minisite"]["public"]:
                        return {"link": res["minisite"]["public_link"], "existingLinkFlag": "true"}
                    elif "repositoryId" in res:
                        return {"link": _("The folder is already shared as a workspace!"), "existingLinkFlag": "true"}

                elif args["checkExistingLinkFlag"]:
                    return {"existingLinkFlag": "false"}

                res = remote_instance.share(
                    args["ws_label"],
                    args["ws_description"] if "ws_description" in args else "",
                    args["password"] if "password" in args else "",
                    args["expiration"] if "expiration" in args else 0,
                    args["downloads"] if "downloads" in args else 0,
                    args["can_read"] if "can_read" in args else "true",
                    args["can_download"] if "can_download" in args else "true",
                    relative_path,
                    args["link_handler"] if "link_handler" in args else "",
                    args["can_write"] if "can_write" in args else "false"
                )
                return {"link": res}
            else:
                res = remote_instance.unshare(job.remote_folder + "/" + args["path"])
                return {"response": res, "existingLinkFlag": "false"}
Example #15
    def __init__(self, server_port, user, password, external_ip=None):
        logging.info('-----------------------------------------------')
        if external_ip:
            logging.info('Starting agent on http://' + external_ip + ':' +
                         str(server_port) + '/')
            logging.info('Warning, this agent UI is world accessible!')
        else:
            logging.info('Starting agent locally on http://localhost:' +
                         str(server_port) + '/')
        logging.info('------------------------------------------------')

        self.user_data_path = JobsLoader.Instance().data_path
        self.port = server_port
        self.external_ip = external_ip
        authDB.add_user(user, password)
        self.running = False
        if getattr(sys, 'frozen', False):
            self.real_static_folder = Path(sys._MEIPASS) / 'ui' / 'res'
            static_folder = str(self.real_static_folder)
        else:
            self.real_static_folder = Path(__file__).parent / 'res'
            static_folder = 'res'

        logging.debug('Starting Flask server with the following static folder: ' +
                      static_folder)
        self.app = Flask(__name__,
                         static_folder=static_folder,
                         static_url_path='/res')
        self.app.logger.setLevel(logging.ERROR)
        l = logging.getLogger("werkzeug")
        if l:
            l.setLevel(logging.ERROR)
        super(PydioApi, self).__init__(self.app)
        self.add_resource(JobManager, '/', '/jobs', '/jobs/<string:job_id>',
                          '/jobs-status')
        self.add_resource(WorkspacesManager, '/ws/<string:job_id>')
        self.add_resource(FoldersManager, '/folders/<string:job_id>')
        self.add_resource(LogManager, '/jobs/<string:job_id>/logs')
        self.add_resource(ConflictsManager, '/jobs/<string:job_id>/conflicts',
                          '/jobs/conflicts')
        self.add_resource(CmdManager, '/cmd/<string:cmd>/<string:job_id>',
                          '/cmd/<string:cmd>')
        self.add_resource(ProxyManager, '/proxy')
        self.app.add_url_rule('/res/i18n.js', 'i18n', self.serve_i18n_file)
        self.app.add_url_rule('/res/config.js', 'config',
                              self.server_js_config)
        self.app.add_url_rule('/res/dynamic.css', 'dynamic_css',
                              self.serve_dynamic_css)
        self.app.add_url_rule('/res/about.html', 'dynamic_about',
                              self.serve_about_content)
        if EndpointResolver:
            self.add_resource(ResolverManager, '/resolve/<string:client_id>')
            self.app.add_url_rule('/res/dynamic.png', 'dynamic_png',
                                  self.serve_dynamic_image)
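A minimal start-up sketch built on this constructor. Port and credentials are placeholders, JobsLoader must already be initialized (the constructor reads JobsLoader.Instance().data_path), and start_server() is assumed from its use in Example #19:

api = PydioApi(5556, 'api_user', 'api_password')  # placeholder port/credentials
api.start_server()  # Example #19 runs this in a background thread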
Example #16
 def get(self, job_id=None):
     if request.path == '/':
         return redirect("/res/index.html", code=302)
     jobs = JobsLoader.Instance().get_jobs()
     if not job_id:
         std_obj = []
         for k in jobs:
             data = JobConfig.encoder(jobs[k])
             self.enrich_job(data, k)
             std_obj.append(data)
         return std_obj
     data = JobConfig.encoder(jobs[job_id])
     self.enrich_job(data, job_id)
     return data
Example #17
 def get(self, job_id=None):
     if request.path == '/':
         return redirect("/res/index.html", code=302)
     jobs = JobsLoader.Instance().get_jobs()
     if not job_id:
         json_jobs = []
         for k in jobs:
             data = JobConfig.encoder(jobs[k])
             self.enrich_job(data, k, (request.path == '/jobs-status'))
             json_jobs.append(data)
         if request.path == '/jobs-status':
             response = {'is_connected_to_internet': connection_helper.internet_ok, 'jobs': json_jobs}
             return response
         return json_jobs
     logging.info("Requiring job %s" % job_id)
     data = JobConfig.encoder(jobs[job_id])
     self.enrich_job(data, job_id)
     return data
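The /jobs-status branch wraps the enriched job list together with a connectivity flag. A client-side sketch under the usual placeholder assumptions:

import requests

AGENT = "http://localhost:5556"        # assumed
AUTH = ("api_user", "api_password")    # assumed

status = requests.get(AGENT + "/jobs-status", auth=AUTH).json()
print(status['is_connected_to_internet'], len(status['jobs']))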
Example #18
 def check_sync(self, job_id=None):
     # load conf
     conf = JobsLoader.Instance()
     jobs = conf.jobs
     if job_id is None:
         return Response(str(jobs.keys()), status=200, mimetype="text")
     if job_id not in jobs:
         return Response("Unknown job", status=400, mimetype="text")
     # check job exists
     job = jobs[job_id]
     sdk = PydioSdk(job.server,
                    ws_id=job.workspace,
                    remote_folder=job.remote_folder,
                    user_id=job.user_id,
                    device_id=ConfigManager.Instance().get_device_id(),
                    skip_ssl_verify=job.trust_ssl,
                    proxies=ConfigManager.Instance().get_defined_proxies(),
                    timeout=380)
     checker = SyncChecker(job_id, jobs, sdk)
     resp = checker.dofullcheck()
     return Response(json.dumps(resp), status=200, mimetype="text/json")
Example #19
def main(argv=sys.argv[1:]):
    parser = argparse.ArgumentParser('Pydio Synchronization Tool')
    parser.add_argument('-s',
                        '--server',
                        help='Server URL, with http(s) and path to pydio',
                        type=unicode,
                        default='http://localhost')
    parser.add_argument('-d',
                        '--directory',
                        help='Local directory',
                        type=unicode,
                        default=None)
    parser.add_argument('-w',
                        '--workspace',
                        help='Id or Alias of workspace to synchronize',
                        type=unicode,
                        default=None)
    parser.add_argument(
        '-r',
        '--remote_folder',
        help='Path to an existing folder of the workspace to synchronize',
        type=unicode,
        default=None)
    parser.add_argument('-u',
                        '--user',
                        help='User name',
                        type=unicode,
                        default=None)
    parser.add_argument('-p',
                        '--password',
                        help='Password',
                        type=unicode,
                        default=None)
    parser.add_argument('-dir',
                        '--direction',
                        help='Synchro Direction',
                        type=str,
                        default='bi')
    parser.add_argument('-f',
                        '--file',
                        type=unicode,
                        help='Json file containing jobs configurations')
    parser.add_argument(
        '-z',
        '--zmq_port',
        type=int,
        help='Available port for zmq; both this port and this port + 1 will be used',
        default=5556)
    parser.add_argument('-i',
                        '--rdiff',
                        type=unicode,
                        help='Path to rdiff executable',
                        default=None)
    parser.add_argument('--diag',
                        help='Run self diagnostic',
                        action='store_true',
                        default=False)
    parser.add_argument('--diag-http',
                        help='Check server connection',
                        action='store_true',
                        default=False)
    parser.add_argument('--diag-imports',
                        help='Check imports and exit',
                        action='store_true',
                        default=False)
    parser.add_argument('--save-cfg', action='store_true', default=True)
    parser.add_argument(
        '--extract_html',
        help='Utils for extracting HTML strings and compiling po files to json',
        type=unicode,
        default=False)
    parser.add_argument('--auto-start', action='store_true')
    parser.add_argument('--auto_detect_port',
                        type=bool,
                        help='Auto detect available ports',
                        default=False)
    parser.add_argument('-v', '--verbose', action='count', default=1)
    args, _ = parser.parse_known_args(argv)

    jobs_root_path = Path(__file__).parent / 'data'
    if not jobs_root_path.exists():
        jobs_root_path = Path(DEFAULT_DATA_PATH)
        if not jobs_root_path.exists():
            jobs_root_path.mkdir()

    setup_logging(args.verbose, jobs_root_path)

    if args.auto_start:
        import pydio.autostart

        pydio.autostart.setup(argv)
        return 0

    jobs_loader = JobsLoader.Instance(data_path=str(jobs_root_path))
    config_manager = ConfigManager.Instance(data_path=str(jobs_root_path))
    config_manager.set_rdiff_path(args.rdiff)

    if args.server and args.directory and args.workspace:
        job_config = JobConfig()
        job_config.load_from_cliargs(args)
        data = {job_config.id: job_config}
        if args.save_cfg:
            logging.info("Storing config in %s",
                         str(jobs_root_path / 'configs.json'))
            jobs_loader.save_jobs(data)
    else:
        fp = args.file
        if fp and fp != '.':
            logging.info("Loading config from %s", fp)
            jobs_loader.config_file = fp
            jobs_loader.load_config()
        data = jobs_loader.get_jobs()

    logging.debug("data: %s" %
                  json.dumps(data, default=JobConfig.encoder, indent=2))

    if args.diag_imports:
        # nothing more to do
        return sys.exit(0)

    if args.extract_html:
        from pydio.utils.i18n import PoProcessor
        proc = PoProcessor()
        if args.extract_html == 'extract':
            root = Path(__file__).parent
            count = proc.extract_all_html_strings(
                str(root / 'ui' / 'res'),
                str(root / 'res' / 'i18n' / 'html_strings.py'))
            logging.info(
                'Wrote %i strings to html_strings.py - Now update PO files using standard tools'
                % count)
            # nothing more to do
        elif args.extract_html == 'compile':
            root = Path(__file__).parent
            proc.po_to_json(str(root / 'res' / 'i18n' / '*.po'),
                            str(root / 'ui' / 'res' / 'i18n.js'))
        return sys.exit(0)

    if args.diag_http:
        keys = data.keys()
        if args.password:
            smoke_tests = PydioDiagnostics(data[keys[0]].server,
                                           data[keys[0]].workspace,
                                           data[keys[0]].remote_folder,
                                           data[keys[0]].user_id,
                                           args.password)
        else:
            smoke_tests = PydioDiagnostics(data[keys[0]].server,
                                           data[keys[0]].workspace,
                                           data[keys[0]].remote_folder,
                                           data[keys[0]].user_id)
        rc = smoke_tests.run()
        if rc != 0:
            logging.error("Diagnostics failed: %s %s" %
                          (str(rc), smoke_tests.status_message))
        return sys.exit(rc)

    ports_detector = PortsDetector(args.zmq_port,
                                   args.auto_detect_port,
                                   store_file=str(jobs_root_path /
                                                  'ports_config'))
    ports_detector.create_config_file()

    scheduler = PydioScheduler.Instance(jobs_root_path=jobs_root_path,
                                        jobs_loader=jobs_loader)
    server = PydioApi(ports_detector.get_open_port('flask_api'))
    from pydio.job import manager
    manager.api_server = server

    try:

        thread.start_new_thread(server.start_server, ())
        time.sleep(0.3)
        if not server.running:
            logging.error('Cannot start web server, exiting application')
            sys.exit(1)
        scheduler.start_all()

    except (KeyboardInterrupt, SystemExit):
        server.shutdown_server()
        sys.exit()
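Since main() takes its argv as a parameter, a sync job can also be configured programmatically. Server, workspace, directory and credentials below are placeholders:

main(['-s', 'https://demo.example.com/pydio',  # placeholder server URL
      '-w', 'my-workspace',                    # placeholder workspace id or alias
      '-d', '/home/me/Pydio',                  # placeholder local directory
      '-u', 'alice', '-p', 'secret'])          # placeholder credentials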
Example #20
    def get(self, job_id):
        # Defaults so the app_name_url / display_name_url / app_name lookups
        # below are safe when job_id refers to a configured job (only the
        # 'request' branch assigns them).
        app_name_url = display_name_url = None
        app_name = ''
        user_display_name = ''
        if job_id != 'request':
            jobs = JobsLoader.Instance().get_jobs()
            if job_id not in jobs:
                return {"error": "Cannot find job"}
            job = jobs[job_id]

            url = job.server + '/api/pydio/state/user/repositories?format=json'
            auth = (job.user_id, keyring.get_password(job.server, job.user_id))
            verify = not job.trust_ssl
        else:
            args = request.args
            base = args['url'].rstrip('/')
            verify = args['trust_ssl'] != 'true'
            url = base + '/api/pydio/state/user/repositories?format=json'
            if 'password' in args:
                auth = (args['user'], args['password'])
            else:
                auth = (args['user'], keyring.get_password(base, args['user']))
            app_name_url = base + '/api/pydio/state/plugins?format=json'
            display_name_url = base + '/api/pydio/state/user/preferences?format=json'

        if verify and "REQUESTS_CA_BUNDLE" in os.environ:
            verify = os.environ["REQUESTS_CA_BUNDLE"]
        try:
            # TRY TO GET APPLICATION TITLE
            if app_name_url:
                resp = requests.get(
                    app_name_url,
                    stream=False,
                    auth=auth,
                    verify=verify,
                    proxies=ConfigManager.Instance().get_defined_proxies())
                resp.raise_for_status()
                try:
                    app_data = json.loads(resp.content)
                    app_name = ''
                    ajxpcores = app_data['plugins']['ajxpcore']
                    for core in ajxpcores:
                        if core['@id'] == 'core.ajaxplorer':
                            for prop in core['plugin_configs']['property']:
                                if prop['@name'] == 'APPLICATION_TITLE':
                                    app_name = json.loads(prop['$'])
                                    break
                            break
                except KeyError as k:
                    pass
                except ValueError:
                    pass
            # TRY TO GET USER DISPLAY NAME
            if display_name_url:
                resp = requests.get(
                    display_name_url,
                    stream=False,
                    auth=auth,
                    verify=verify,
                    proxies=ConfigManager.Instance().get_defined_proxies())
                resp.raise_for_status()
                try:
                    user_data = json.loads(resp.content)
                    user_display_name = ''
                    prefs = user_data['preferences']['pref']
                    for pref in prefs:
                        if pref['@name'] == 'USER_DISPLAY_NAME':
                            if pref['@value']:
                                user_display_name = pref['@value']
                            break
                except KeyError as k:
                    pass
                except ValueError:
                    pass

            resp = requests.get(
                url,
                stream=True,
                auth=auth,
                verify=verify,
                proxies=ConfigManager.Instance().get_defined_proxies())
            resp.raise_for_status()
            data = json.loads(resp.content)
            if 'repositories' in data and 'repo' in data['repositories']:
                if isinstance(data['repositories']['repo'], types.DictType):
                    data['repositories']['repo'] = [
                        data['repositories']['repo']
                    ]
                data['repositories']['repo'] = filter(
                    lambda x: not x['@access_type'].startswith('ajxp_'),
                    data['repositories']['repo'])
            if app_name:
                data['application_title'] = app_name
            if user_display_name:
                data['user_display_name'] = user_display_name
            return data
        except requests.HTTPError:
            r = resp.status_code
            message = _("Couldn't load your workspaces, check your server !")
            if r == 404:
                message = _(
                    "Server not found (404), is it up and has it Pydio installed ?"
                )
            elif r == 401:
                message = _(
                    "Authentication failed: please verify your login and password"
                )
            elif r == 403:
                message = _("Access to the server is forbidden")
            elif r == 500 or r == 408:
                message = _("Server seems to be encountering problems (500)")
            logging.debug("Error while loading workspaces : " + message)
            return {'error': message}, resp.status_code
        except SSLError as rt:
            logging.error(rt.message)
            return {
                'error':
                _("An SSL error happened! Is your server using a self-signed certificate? In that case please check 'Trust SSL certificate'"
                  )
            }, 400
        except ProxyError as rt:
            logging.error(rt.message)
            return {
                'error': _('A proxy error happened, please check the logs')
            }, 400
        except TooManyRedirects as rt:
            logging.error(rt.message)
            return {'error': _('Connection error: too many redirects')}, 400
        except ChunkedEncodingError as rt:
            logging.error(rt.message)
            return {
                'error': _('Chunked encoding error, please check the logs')
            }, 400
        except ContentDecodingError as rt:
            logging.error(rt.message)
            return {
                'error': _('Content Decoding error, please check the logs')
            }, 400
        except InvalidSchema as rt:
            logging.error(rt.message)
            return {'error': _('Http connection error: invalid schema.')}, 400
        except InvalidURL as rt:
            logging.error(rt.message)
            return {'error': _('Http connection error: invalid URL.')}, 400
        except ValueError:
            message = "Error while parsing request result:" + resp.content
            logging.debug(message)
            return {'error': message}, 400
        except Timeout as to:
            logging.error(to)
            return {'error': _('Connection timeout!')}, 400
        except RequestException as ree:
            logging.error(ree.message)
            return {'error': _('Cannot resolve domain!')}, 400
Example #21
    def run(self):
        """
        Start the thread
        """
        logger = EventLogger(self.configs_path)
        very_first = False

        if self.watcher:
            if self.watcher_first_run:

                def status_callback(status):
                    logger.log_state(status, 'sync')

                self.init_global_progress()

                try:
                    self.global_progress['status_indexing'] = 1
                    logger.log_state(
                        _('Checking changes since last launch...'), "sync")
                    very_first = True
                    self.watcher.check_from_snapshot(
                        state_callback=status_callback)
                except DBCorruptedException as e:
                    self.stop()
                    JobsLoader.Instance().clear_job_data(self.job_config.id)
                    logging.error(e)
                    return
                except Exception as e:
                    self.interrupt = True
                    logger.log_state(
                        _('Oops, error while indexing the local folder. Pausing the task.'
                          ), 'error')
                    logging.error(e)

                self.watcher_first_run = False
            self.watcher.start()

        while not self.interrupt:

            try:
                # logging.info('Starting cycle with cycles local %i and remote %is' % (self.local_seq, self.remote_seq))
                self.processing_signals = {}
                self.init_global_progress()
                if very_first:
                    self.global_progress['status_indexing'] = 1

                interval = int(time.time() - self.last_run)
                if (self.online_status and interval < self.online_timer) or (
                        not self.online_status
                        and interval < self.offline_timer):
                    time.sleep(self.event_timer)
                    continue

                if not self.job_status_running:
                    logging.debug("self.online_timer: %s" % self.online_timer)
                    logger.log_state(_('Status: Paused'), "sync")
                    self.sleep_offline()
                    continue

                if self.job_config.frequency == 'time':
                    # Build a one-minute window [hh:mm:00, hh:mm:59] around the
                    # configured start time; the sync only triggers inside it.
                    start_time = datetime.time(
                        int(self.job_config.start_time['h']),
                        int(self.job_config.start_time['m']))
                    end_time = datetime.time(
                        int(self.job_config.start_time['h']),
                        int(self.job_config.start_time['m']), 59)
                    now = datetime.datetime.now().time()
                    if not start_time < now < end_time:
                        logger.log_state(
                            _('Status: scheduled for %s') % str(start_time),
                            "sync")
                        self.sleep_offline()
                        continue
                    else:
                        logging.info(
                            "Now triggering synchro as expected at time " +
                            str(start_time))

                if not self.system.check_basepath():
                    log = _(
                        'Cannot find local folder! Did you disconnect a volume? Waiting %s seconds before retry'
                    ) % self.offline_timer
                    logging.error(log)
                    logger.log_state(
                        _('Cannot find local folder, did you disconnect a volume?'
                          ), "error")
                    self.sleep_offline()
                    continue

                if self.watcher:
                    for snap_path in self.marked_for_snapshot_pathes:
                        logging.info(
                            'LOCAL SNAPSHOT : loading snapshot for directory %s'
                            % snap_path)
                        if self.interrupt or not self.job_status_running:
                            raise InterruptException()
                        self.watcher.check_from_snapshot(snap_path)

                # Load local and/or remote changes, depending on the direction
                from pydio.job.change_stores import SqliteChangeStore
                self.current_store = SqliteChangeStore(
                    self.configs_path + '/changes.sqlite',
                    self.job_config.filters['includes'],
                    self.job_config.filters['excludes'])
                self.current_store.open()
                try:
                    if self.job_config.direction != 'up':
                        logging.info(
                            'Loading remote changes with sequence %s' %
                            str(self.remote_seq))
                        if self.remote_seq == 0:
                            logger.log_state(
                                _('Gathering data from remote workspace, this can take a while...'
                                  ), 'sync')
                            very_first = True
                        self.remote_target_seq = self.load_remote_changes_in_store(
                            self.remote_seq, self.current_store)
                        self.current_store.sync()
                    else:
                        self.remote_target_seq = 1
                        self.ping_remote()
                except RequestException as ce:
                    if not connection_helper.is_connected_to_internet():
                        error = _(
                            'No Internet connection detected! Waiting for %s seconds to retry'
                        ) % self.offline_timer
                    else:
                        error = _(
                            'Connection to server failed, server is probably down. Waiting %s seconds to retry'
                        ) % self.offline_timer
                    self.marked_for_snapshot_pathes = []
                    logging.error(error)
                    logger.log_state(error, "wait")
                    self.sleep_offline()
                    continue
                except Exception as e:
                    error = 'Error while connecting to remote server (%s), waiting %i seconds before retrying' % (
                        e.message, self.offline_timer)
                    logging.error(error)
                    logger.log_state(
                        _('Error while connecting to remote server (%s)') %
                        e.message, "error")
                    self.marked_for_snapshot_pathes = []
                    self.sleep_offline()
                    continue
                self.online_status = True
                if not self.job_config.server_configs:
                    self.job_config.server_configs = self.sdk.load_server_configs(
                    )
                self.sdk.set_server_configs(self.job_config.server_configs)

                if self.job_config.direction != 'down':
                    logging.info('Loading local changes with sequence ' +
                                 str(self.local_seq))
                    self.local_target_seq = self.db_handler.get_local_changes_as_stream(
                        self.local_seq, self.current_store.flatten_and_store)
                    self.current_store.sync()
                else:
                    self.local_target_seq = 1
                if not connection_helper.internet_ok:
                    connection_helper.is_connected_to_internet()

                changes_length = len(self.current_store)
                if not changes_length:
                    logging.info('No changes detected')
                    self.update_min_seqs_from_store()
                    self.exit_loop_clean(logger)
                    very_first = False
                    continue

                self.global_progress['status_indexing'] = 1
                logging.info('Reducing changes')
                logger.log_state(
                    _('Merging changes between remote and local, please wait...'
                      ), 'sync')

                logging.debug('Delete Copies')
                self.current_store.delete_copies()
                self.update_min_seqs_from_store()
                logging.debug('Dedup changes')
                self.current_store.dedup_changes()
                self.update_min_seqs_from_store()
                if not self.storage_watcher or very_first:
                    logging.debug('Detect unnecessary changes')
                    self.current_store.detect_unnecessary_changes(
                        local_sdk=self.system, remote_sdk=self.sdk)
                self.update_min_seqs_from_store()
                logging.debug('Clearing op and pruning folders moves')
                self.current_store.clear_operations_buffer()
                self.current_store.prune_folders_moves()
                self.update_min_seqs_from_store()

                logging.debug('Store conflicts')
                store_conflicts = self.current_store.clean_and_detect_conflicts(
                    self.db_handler)
                if store_conflicts:
                    logging.info('Conflicts detected, cannot continue!')
                    logger.log_state(_('Conflicts detected, cannot continue!'),
                                     'error')
                    self.current_store.close()
                    self.sleep_offline()
                    continue

                changes_length = len(self.current_store)
                if not changes_length:
                    logging.info('No changes detected')
                    self.exit_loop_clean(logger)
                    very_first = False
                    continue

                self.global_progress['status_indexing'] = 0
                import change_processor
                self.global_progress['queue_length'] = changes_length
                logging.info('Processing %i changes' % changes_length)
                logger.log_state(
                    _('Processing %i changes') % changes_length, "start")
                counter = [1]

                def processor_callback(change):
                    try:
                        if self.interrupt or not self.job_status_running:
                            raise InterruptException()
                        self.update_current_tasks()
                        self.update_global_progress()
                        Processor = StorageChangeProcessor if self.storage_watcher else ChangeProcessor
                        proc = Processor(change, self.current_store,
                                         self.job_config, self.system,
                                         self.sdk, self.db_handler,
                                         self.event_logger)
                        proc.process_change()
                        self.update_min_seqs_from_store(success=True)
                        self.global_progress['queue_done'] = float(counter[0])
                        counter[0] += 1
                        self.update_current_tasks()
                        self.update_global_progress()
                        time.sleep(0.1)
                        if self.interrupt or not self.job_status_running:
                            raise InterruptException()

                    except ProcessException as pe:
                        logging.error(pe.message)
                        return False
                    except InterruptException as i:
                        raise i
                    except PydioSdkDefaultException as p:
                        raise p
                    except Exception as ex:
                        logging.exception(ex.message)
                        return False
                    return True

                try:
                    if sys.platform.startswith('win'):
                        self.marked_for_snapshot_pathes = list(
                            set(self.current_store.find_modified_parents()) -
                            set(self.marked_for_snapshot_pathes))
                    self.current_store.process_changes_with_callback(
                        processor_callback)
                except InterruptException as iexc:
                    pass
                logger.log_state(
                    _('%i files modified') %
                    self.global_progress['queue_done'], 'success')
                if self.global_progress['queue_done']:
                    logger.log_notif(
                        _('%i files modified') %
                        self.global_progress['queue_done'], 'success')

            except PydioSdkDefaultException as re:
                logging.error(re.message)
                logger.log_state(re.message, 'error')
            except SSLError as rt:
                logging.error(rt.message)
                logger.log_state(
                    _('An SSL error happened, please check the logs'), 'error')
            except ProxyError as rt:
                logging.error(rt.message)
                logger.log_state(
                    _('A proxy error happened, please check the logs'),
                    'error')
            except TooManyRedirects as rt:
                logging.error(rt.message)
                logger.log_state(_('Connection error: too many redirects'),
                                 'error')
            except ChunkedEncodingError as rt:
                logging.error(rt.message)
                logger.log_state(
                    _('Chunked encoding error, please check the logs'),
                    'error')
            except ContentDecodingError as rt:
                logging.error(rt.message)
                logger.log_state(
                    _('Content Decoding error, please check the logs'),
                    'error')
            except InvalidSchema as rt:
                logging.error(rt.message)
                logger.log_state(_('Http connection error: invalid schema.'),
                                 'error')
            except InvalidURL as rt:
                logging.error(rt.message)
                logger.log_state(_('Http connection error: invalid URL.'),
                                 'error')
            except Timeout as to:
                logging.error(to)
                logger.log_state(_('Connection timeout, will retry later.'),
                                 'error')
            except RequestException as ree:
                logging.error(ree.message)
                logger.log_state(_('Cannot resolve domain!'), 'error')
            except Exception as e:
                if not (e.message.lower().count('[quota limit reached]')
                        or e.message.lower().count('[file permissions]')):
                    logging.exception('Unexpected Error: %s' % e.message)
                    logger.log_state(
                        _('Unexpected Error: %s') % e.message, 'error')

            logging.debug('Finished this cycle, waiting for %i seconds' %
                          self.online_timer)
            self.exit_loop_clean(logger)
            very_first = False
Example #22
def main(argv=sys.argv[1:]):
    parser = argparse.ArgumentParser('Pydio Synchronization Tool')
    # Pass a server configuration via arguments
    parser.add_argument('-s', '--server', help='Server URL, with http(s) and path to pydio', type=unicode,
                        default='http://localhost')
    parser.add_argument('-d', '--directory', help='Local directory', type=unicode, default=None)
    parser.add_argument('-w', '--workspace', help='Id or Alias of workspace to synchronize', type=unicode, default=None)
    parser.add_argument('-r', '--remote_folder', help='Path to an existing folder of the workspace to synchronize',
                        type=unicode, default=None)
    parser.add_argument('-u', '--user', help='User name', type=unicode, default=None)
    parser.add_argument('-p', '--password', help='Password', type=unicode, default=None)
    parser.add_argument('-px', '--proxy',
                        help='Proxy definition packed as http::username::password::proxyIP::proxyPort'
                             '[::...::check_proxy_flag]. The proxy connection test runs by default; '
                             'pass 0 or False as the last field to skip it.',
                        type=unicode, default=None)
    parser.add_argument('-mp', '--memory_profile',
                        help='Generate a memory profile (pass "-mp True" to enable)',
                        type=unicode, default=False)
    parser.add_argument('-dir', '--direction', help='Synchro Direction', type=str, default='bi')
    # Pass a configuration file
    parser.add_argument('-f', '--file', type=unicode, help='Json file containing jobs configurations')
    # Pass a path to rdiff binary
    parser.add_argument('-i', '--rdiff', type=unicode, help='Path to rdiff executable', default=None)
    # Configure API access
    parser.add_argument('--api_user', help='Set the agent API username (instead of random)', type=unicode, default=None)
    parser.add_argument('--api_password', help='Set the agent API password (instead of random)', type=unicode, default=None)
    parser.add_argument('--api_address', help='Set the agent IP address. By default, no address means that local '
                                              'access only is allowed.', type=str, default=None)
    parser.add_argument('--api_port', help='Set the agent port. By default, will try to use 5556, and if not '
                                           'available will switch to another port.', type=int, default=5556)
    parser.add_argument('--diag', help='Run self diagnostic', action='store_true', default=False)
    parser.add_argument('--diag-http', help='Check server connection', action='store_true', default=False)
    parser.add_argument('--diag-imports', help='Check imports and exit', action='store_true', default=False)
    parser.add_argument('--save-cfg', action='store_true', default=True)
    parser.add_argument('--extract_html', help='Utils for extracting HTML strings and compiling po files to json',
                        type=unicode, default=False)
    parser.add_argument('--auto-start', action='store_true')
    parser.add_argument('-v', '--verbose', action='count', default=1)
    args, _ = parser.parse_known_args(argv)

    jobs_root_path = Path(__file__).parent / 'data'
    if not jobs_root_path.exists():
        jobs_root_path = Path(DEFAULT_DATA_PATH.encode(guess_filesystemencoding()))
        if not jobs_root_path.exists():
            jobs_root_path.mkdir(parents=True)
            # This is a first start
            user_dir = unicode(get_user_home(APP_NAME))
            if not os.path.exists(user_dir):
                try:
                    os.mkdir(user_dir)
                except Exception:
                    pass
            if os.path.exists(user_dir):
                from pydio.utils.favorites_manager import add_to_favorites
                add_to_favorites(user_dir, APP_NAME)

    setup_logging(args.verbose, jobs_root_path)

    if args.auto_start:
        import pydio.autostart

        pydio.autostart.setup(argv)
        return 0

    u_jobs_root_path = str(jobs_root_path).decode(guess_filesystemencoding())
    config_manager = ConfigManager.Instance(configs_path=u_jobs_root_path, data_path=DEFAULT_PARENT_PATH)

    jobs_loader = JobsLoader.Instance(data_path=u_jobs_root_path)
    config_manager.set_rdiff_path(args.rdiff)

    if args.proxy is not None:
        parts = args.proxy.split('::')
        if len(parts) % 5 not in (0, 1):
            logging.error("Wrong number of parameters passed for proxy")
            return 1
        msg = {}
        for i in range(len(parts) // 5):
            msg[parts[i * 5]] = {"username": parts[i * 5 + 1], "password": parts[i * 5 + 2],
                                 "hostname": parts[i * 5 + 3], "port": parts[i * 5 + 4]}
        # the trailing field, when present, toggles the proxy connection test (on by default)
        proxy_flag = parts[-1] if len(parts) % 5 == 1 else True
        config_manager.set_user_proxy(msg, check_proxy_flag=proxy_flag)
        return 0

    if args.server and args.directory and args.workspace:
        job_config = JobConfig()
        job_config.load_from_cliargs(args)
        data = {job_config.id: job_config}
        if args.save_cfg:
            logging.info("Storing config in %s", os.path.join(u_jobs_root_path, 'configs.json'))
            jobs_loader.save_jobs(data)
    else:
        fp = args.file
        if fp and fp != '.':
            logging.info("Loading config from %s", fp)
            jobs_loader.config_file = fp
            jobs_loader.load_config()
        data = jobs_loader.get_jobs()

    logging.debug("data: %s" % json.dumps(data, default=JobConfig.encoder, indent=2))

    if args.diag_imports:
        # nothing more to do
        return sys.exit(0)

    if args.memory_profile:
        from pydio.utils.pydio_profiler import LogFile
        sys.stdout = LogFile('stdout')

    if args.extract_html:
        from pydio.utils.i18n import PoProcessor
        proc = PoProcessor()
        if args.extract_html == 'extract':
            root = Path(__file__).parent
            count = proc.extract_all_html_strings(str(root / 'ui' / 'res' ), str(root / 'res' / 'i18n' / 'html_strings.py' ))
            logging.info('Wrote %i strings to html_strings.py - Now update PO files using standard tools' % count)
            # nothing more to do
        elif args.extract_html == 'compile':
            root = Path(__file__).parent
            proc.po_to_json(str(root / 'res' / 'i18n' / '*.po'), str(root / 'ui' / 'res' / 'i18n.js'))
        return sys.exit(0)

    if args.diag_http:
        keys = data.keys()
        if args.password:
            smoke_tests = PydioDiagnostics(
                data[keys[0]].server, data[keys[0]].workspace, data[keys[0]].remote_folder, data[keys[0]].user_id,
                args.password)
        else:
            smoke_tests = PydioDiagnostics(
                data[keys[0]].server, data[keys[0]].workspace, data[keys[0]].remote_folder, data[keys[0]].user_id)
        rc = smoke_tests.run()
        if rc != 0:
            logging.error("Diagnostics failed: %s %s" % (str(rc), smoke_tests.status_message))
        return sys.exit(rc)

    ports_detector = PortsDetector(store_file=str(jobs_root_path / 'ports_config'), username=args.api_user,
                                   password=args.api_password, default_port=args.api_port)
    ports_detector.create_config_file()

    scheduler = PydioScheduler.Instance(jobs_root_path=jobs_root_path, jobs_loader=jobs_loader)
    server = PydioApi(ports_detector.get_port(), ports_detector.get_username(),
        ports_detector.get_password(), external_ip=args.api_address)
    from pydio.job import manager
    manager.api_server = server

    try:

        thread.start_new_thread(server.start_server, ())
        time.sleep(0.3)
        if not server.running:
            logging.error('Cannot start web server, exiting application')
            sys.exit(1)
        scheduler.start_all()

    except (KeyboardInterrupt, SystemExit):
        server.shutdown_server()
        sys.exit()
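The -px/--proxy flag packs each proxy definition into five '::'-separated fields (scheme, username, password, host, port), optionally followed by a single trailing check_proxy flag. A sketch with placeholder values:

# One HTTP proxy; the trailing 0 skips the proxy connection test.
main(['-px', 'http::alice::secret::192.168.1.10::3128::0'])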
Example #23
 def get(self):
     """
     :return: {} containing some basic usage information
     """
     jobs = JobsLoader.Instance().get_jobs()
     resp = {
         "errors": "zlib_blob",
         "nberrors": 0,
         "platform": platform.system()
     }
     for job_id in jobs:
         resp[job_id] = {"nbsyncedfiles": 0, "lastseq": 0, "serverInfo": {}}
     globalconfig = GlobalConfigManager.Instance(
         configs_path=ConfigManager.Instance().get_configs_path())
     resp["pydiosync_version"] = ConfigManager.Instance().get_version_data(
     )["version"]
     # parse logs for Errors, zip the errors
     logdir = globalconfig.configs_path
     files = os.listdir(logdir)
     logfiles = []
     for f in files:
         if f.startswith(globalconfig.default_settings['log_configuration']
                         ['log_file_name']):
             logfiles.append(f)
     compressor = zlib.compressobj()
     compressed_data = ""
     errors = "["
     for logfile in logfiles:
         try:
             with open(os.path.join(logdir, logfile), 'r') as f:
                 for l in f.readlines():
                     if l.find('ERROR') > -1:
                         resp['nberrors'] += 1
                         errors += '"' + l.replace('\n', '') + '",'
                 compressed_data += compressor.compress(str(errors))
                 errors = ""
         except Exception as e:
             logging.exception(e)
     compressed_data += compressor.compress("]")  # append the compressed closing bracket
     compressed_data += compressor.flush()
     # base64 encode the compressed extracted errors
     resp['errors'] = compressed_data
     resp["errors"] = base64.b64encode(resp["errors"])
     # Instantiate and get logs from pydio.sqlite
     for job_id in jobs:
         try:
             url = posixpath.join(jobs[job_id].server,
                                  'index.php?get_action=get_boot_conf')
             req = requests.get(url, verify=False)
             logging.info("URL " + url)
             logging.info(req.content)
             jsonresp = json.loads(req.content)
             if 'ajxpVersion' in jsonresp:
                 resp[job_id]['serverInfo']['ajxpVersion'] = jsonresp[
                     'ajxpVersion']
             if 'customWording' in jsonresp:
                 resp[job_id]['serverInfo']['customWording'] = jsonresp[
                     'customWording']
             if 'currentLanguage' in jsonresp:
                 resp[job_id]['serverInfo']['currentLanguage'] = jsonresp[
                     'currentLanguage']
             if 'theme' in jsonresp:
                 resp[job_id]['serverInfo']['theme'] = jsonresp['theme']
             if 'licence_features' in jsonresp:
                 resp[job_id]['serverInfo']['licence_features'] = jsonresp[
                     'licence_features']
         except Exception as e:
             logging.exception(e)
         pydiosqlite = SqlEventHandler(
             includes=jobs[job_id].filters['includes'],
             excludes=jobs[job_id].filters['excludes'],
             basepath=jobs[job_id].directory,
             job_data_path=os.path.join(globalconfig.configs_path, job_id))
         dbstats = pydiosqlite.db_stats()
         # extend the existing entry so the serverInfo gathered above is preserved
         resp[job_id]['nbsyncedfiles'] = dbstats['nbfiles']
         resp[job_id]['nbdirs'] = dbstats['nbdirs']
         #logging.info(dir(jobs[jobs.keys()[0]]))
         try:
             with open(
                     os.path.join(globalconfig.configs_path, job_id,
                                  "sequence"), "rb") as f:
                 sequences = pickle.load(f)
                 resp[job_id]['lastseq'] = sequences['local']
                 resp[job_id]['remotelastseq'] = sequences['remote']
         except Exception:
             logging.info('Problem loading sequences file')
             resp[job_id]['lastseq'] = -1
             resp[job_id]['remotelastseq'] = -1
     return resp
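The errors blob assembled above is zlib-compressed and then base64-encoded, so a client can recover the extracted ERROR lines with the reverse pipeline:

import base64
import zlib

def decode_errors(blob):
    # reverse of: zlib compress -> base64 encode
    return zlib.decompress(base64.b64decode(blob))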