Example #1
0
 def __init__(self, basepath, includes, excludes, job_data_path):
     """Set up the SQL event handler for one synchronization job.

     :param basepath: local root path watched by this handler
     :param includes: include patterns for filtering events
     :param excludes: exclude patterns for filtering events
     :param job_data_path: per-job data directory; also seeds unique_id
     """
     super(SqlEventHandler, self).__init__()
     self.base = basepath
     self.includes = includes
     self.excludes = excludes
     handler = LocalDbHandler(job_data_path, basepath)
     # Job identity: md5 of the filesystem-encoded data path (Python 2 str).
     encoded = job_data_path.encode(guess_filesystemencoding())
     self.unique_id = hashlib.md5(encoded).hexdigest()
     self.db = handler.db
     # Runtime state used to coordinate DB reads/writes.
     self.reading = False
     self.last_write_time = 0
     self.db_wait_duration = 1
     self.last_seq_id = 0
     self.prevent_atomic_commit = False
     self.con = None
Example #2
0
    def start_from_config(self, job_config):
        """Spawn a ContinuousDiffMerger thread for an active job config.

        Does nothing when the job is inactive. Creates the per-job data
        directory on first use.
        """
        if not job_config.active:
            return
        data_path = self.jobs_root_path / str(job_config.id)
        if not data_path.exists():
            data_path.mkdir(parents=True)
        # Python 2: normalize the path to a unicode string for the merger.
        unicode_path = str(data_path).decode(guess_filesystemencoding())

        merger = ContinuousDiffMerger(job_config, job_data_path=unicode_path)
        try:
            merger.start()
            self.control_threads[job_config.id] = merger
        except (KeyboardInterrupt, SystemExit):
            merger.stop()
Example #3
0
    def start_from_config(self, job_config):
        """Launch a sync thread for the given job if it is marked active."""
        if not job_config.active:
            return
        path = self.jobs_root_path / str(job_config.id)
        if not path.exists():
            path.mkdir(parents=True)
        # str -> unicode conversion using the guessed filesystem encoding.
        job_data_path = str(path).decode(guess_filesystemencoding())

        merger = ContinuousDiffMerger(job_config, job_data_path=job_data_path)
        try:
            merger.start()
            # Keep a handle on the thread so it can be controlled later.
            self.control_threads[job_config.id] = merger
        except (KeyboardInterrupt, SystemExit):
            merger.stop()
Example #4
0
 def __init__(self, basepath, includes, excludes, job_data_path):
     """Initialize the SQL event handler.

     :param basepath: watched local root
     :param includes: include patterns
     :param excludes: exclude patterns
     :param job_data_path: job data directory, hashed into unique_id
     """
     super(SqlEventHandler, self).__init__()
     self.base = basepath
     self.includes = includes
     self.excludes = excludes
     local_db = LocalDbHandler(job_data_path, basepath)
     fs_path = job_data_path.encode(guess_filesystemencoding())
     # md5 used only as a stable identifier, not for security.
     self.unique_id = hashlib.md5(fs_path).hexdigest()
     self.db = local_db.db
     # Bookkeeping flags and counters for database access.
     self.reading = False
     self.last_write_time = 0
     self.db_wait_duration = 1
     self.last_seq_id = 0
     self.prevent_atomic_commit = False
     self.con = None
Example #5
0
 def __init__(self, basepath, includes, excludes, job_data_path):
     """Build the SQL event handler, inheriting the DB timeout.

     :param basepath: watched local root
     :param includes: include patterns
     :param excludes: exclude patterns
     :param job_data_path: job data directory, hashed into unique_id
     """
     super(SqlEventHandler, self).__init__()
     self.base = basepath
     self.includes = includes
     self.excludes = excludes
     local_db = LocalDbHandler(job_data_path, basepath)
     fs_encoded = job_data_path.encode(guess_filesystemencoding())
     self.unique_id = hashlib.md5(fs_encoded).hexdigest()
     self.db = local_db.db
     # Increased timeout (sqlite default is 5 seconds) to avoid
     # "database is locked" errors under concurrent access.
     self.timeout = local_db.timeout
     # Runtime coordination state.
     self.reading = False
     self.last_write_time = 0
     self.db_wait_duration = .4
     self.last_seq_id = 0
     self.prevent_atomic_commit = False
     self.con = None
     self.locked = False
Example #6
0
 def __init__(self, basepath, includes, excludes, job_data_path):
     """Create the handler and wire it to the job's local database.

     :param basepath: watched local root
     :param includes: include patterns
     :param excludes: exclude patterns
     :param job_data_path: job data directory, hashed into unique_id
     """
     super(SqlEventHandler, self).__init__()
     self.base = basepath
     self.includes = includes
     self.excludes = excludes
     db_handler = LocalDbHandler(job_data_path, basepath)
     # Stable per-job identifier derived from the encoded data path.
     self.unique_id = hashlib.md5(
         job_data_path.encode(guess_filesystemencoding())
     ).hexdigest()
     self.db = db_handler.db
     # Raise the timeout above sqlite's 5-second default so concurrent
     # writers do not hit "database is locked".
     self.timeout = db_handler.timeout
     self.reading = False
     self.last_write_time = 0
     self.db_wait_duration = .4
     self.last_seq_id = 0
     self.prevent_atomic_commit = False
     self.con = None
     self.locked = False
Example #7
0
def main(argv=sys.argv[1:]):
    """Entry point of the Pydio synchronization agent (Python 2 code).

    Parses CLI arguments, prepares the jobs data directory, handles the
    one-shot utility modes (autostart, proxy configuration, diagnostics,
    HTML string extraction), then starts the web API server and the job
    scheduler. Returns / exits non-zero on failure.

    :param argv: argument list (defaults to sys.argv[1:], captured at
        import time — callers normally pass their own list)
    """
    parser = argparse.ArgumentParser('Pydio Synchronization Tool')
    # Pass a server configuration via arguments
    parser.add_argument('-s', '--server', help='Server URL, with http(s) and path to pydio', type=unicode,
                        default='http://localhost')
    parser.add_argument('-d', '--directory', help='Local directory', type=unicode, default=None)
    parser.add_argument('-w', '--workspace', help='Id or Alias of workspace to synchronize', type=unicode, default=None)
    parser.add_argument('-r', '--remote_folder', help='Path to an existing folder of the workspace to synchronize',
                        type=unicode, default=None)
    parser.add_argument('-u', '--user', help='User name', type=unicode, default=None)
    parser.add_argument('-p', '--password', help='Password', type=unicode, default=None)
    parser.add_argument('-px', '--proxy', help='Enter like http::username::password::proxyIP::proxyPort::...::check_proxy_flag '
                        'By default proxy connection test happens, to avoid mention 0 or False', type=unicode, default=None)
    parser.add_argument('-mp', '--memory_profile', help="To Generate the memory profile :: use <<-mp True >> as argument",
                        type=unicode, default=False)
    parser.add_argument('-dir', '--direction', help='Synchro Direction', type=str, default='bi')
    # Pass a configuration file
    parser.add_argument('-f', '--file', type=unicode, help='Json file containing jobs configurations')
    # Pass a path to rdiff binary
    parser.add_argument('-i', '--rdiff', type=unicode, help='Path to rdiff executable', default=None)
    # Configure API access
    parser.add_argument('--api_user', help='Set the agent API username (instead of random)', type=unicode, default=None)
    parser.add_argument('--api_password', help='Set the agent API password (instead of random)', type=unicode, default=None)
    parser.add_argument('--api_address', help='Set the agent IP address. By default, no address means that local '
                                              'access only is allowed.', type=str, default=None)
    parser.add_argument('--api_port', help='Set the agent port. By default, will try to use 5556, and if not '
                                           'available will switch to another port.', type=int, default=5556)
    parser.add_argument('--diag', help='Run self diagnostic', action='store_true', default=False)
    parser.add_argument('--diag-http', help='Check server connection', action='store_true', default=False)
    parser.add_argument('--diag-imports', help='Check imports and exit', action='store_true', default=False)
    parser.add_argument('--save-cfg', action='store_true', default=True)
    parser.add_argument('--extract_html', help='Utils for extracting HTML strings and compiling po files to json',
                        type=unicode, default=False)
    parser.add_argument('--auto-start', action='store_true')
    parser.add_argument('-v', '--verbose', action='count', default=1)
    args, _ = parser.parse_known_args(argv)

    # Prefer a 'data' directory next to this file; otherwise fall back to
    # the default data path, creating it on first start.
    jobs_root_path = Path(__file__).parent / 'data'
    if not jobs_root_path.exists():
        jobs_root_path = Path(DEFAULT_DATA_PATH.encode(guess_filesystemencoding()))
        if not jobs_root_path.exists():
            jobs_root_path.mkdir(parents=True)
            # This is a first start: create the user dir and bookmark it.
            user_dir = unicode(get_user_home(APP_NAME))
            if not os.path.exists(user_dir):
                try:
                    os.mkdir(user_dir)
                except Exception:
                    # Best effort only; favorites registration below is optional.
                    pass
            if os.path.exists(user_dir):
                from pydio.utils.favorites_manager import add_to_favorites
                add_to_favorites(user_dir, APP_NAME)

    setup_logging(args.verbose, jobs_root_path)

    if args.auto_start:
        import pydio.autostart

        pydio.autostart.setup(argv)
        return 0

    u_jobs_root_path = str(jobs_root_path).decode(guess_filesystemencoding())
    config_manager = ConfigManager.Instance(configs_path=u_jobs_root_path, data_path=DEFAULT_PARENT_PATH)

    jobs_loader = JobsLoader.Instance(data_path=u_jobs_root_path)
    config_manager.set_rdiff_path(args.rdiff)

    if args.proxy is not None:
        # Proxy spec: 5 fields per proxy (scheme::user::password::host::port),
        # optionally followed by one trailing check_proxy flag, hence len % 5
        # must be 0 or 1.
        data = args.proxy.split('::')
        if len(data) % 5 not in (0, 1):
            # BUGFIX: the old code assigned logging.error()'s None return to
            # `data` and then crashed on data[i*5]; fail fast instead.
            logging.error("Wrong number of parameters passed for proxy")
            return 1
        msg = {}
        for i in range(len(data) // 5):
            msg[data[i*5]] = {"username": data[i*5+1], "password": data[i*5+2], "hostname": data[i*5+3], "port": data[i*5+4]}
        proxy_flag = data[-1] if len(data) % 5 == 1 else True  # default true
        config_manager.set_user_proxy(msg, check_proxy_flag=proxy_flag)
        return 0

    if args.server and args.directory and args.workspace:
        job_config = JobConfig()
        job_config.load_from_cliargs(args)
        data = {job_config.id: job_config}
        if args.save_cfg:
            logging.info("Storing config in %s", os.path.join(u_jobs_root_path, 'configs.json'))
            jobs_loader.save_jobs(data)
    else:
        fp = args.file
        if fp and fp != '.':
            logging.info("Loading config from %s", fp)
            jobs_loader.config_file = fp
            jobs_loader.load_config()
        data = jobs_loader.get_jobs()

    logging.debug("data: %s" % json.dumps(data, default=JobConfig.encoder, indent=2))

    if args.diag_imports:
        # nothing more to do
        return sys.exit(0)

    if args.memory_profile:
        from pydio.utils.pydio_profiler import LogFile
        sys.stdout = LogFile('stdout')

    if args.extract_html:
        from pydio.utils.i18n import PoProcessor
        proc = PoProcessor()
        if args.extract_html == 'extract':
            root = Path(__file__).parent
            count = proc.extract_all_html_strings(str(root / 'ui' / 'res' ), str(root / 'res' / 'i18n' / 'html_strings.py' ))
            logging.info('Wrote %i strings to html_strings.py - Now update PO files using standard tools' % count)
            # nothing more to do
        elif args.extract_html == 'compile':
            root = Path(__file__).parent
            proc.po_to_json(str(root / 'res' / 'i18n' / '*.po'), str(root / 'ui' / 'res' / 'i18n.js'))
        return sys.exit(0)

    if args.diag_http:
        # Smoke-test server connectivity against the first configured job.
        keys = list(data.keys())
        if not keys:
            # BUGFIX: keys[0] used to raise IndexError when no job exists.
            logging.error("No job configured, cannot run HTTP diagnostics")
            return sys.exit(1)
        first = data[keys[0]]
        if args.password:
            smoke_tests = PydioDiagnostics(
                first.server, first.workspace, first.remote_folder, first.user_id,
                args.password)
        else:
            smoke_tests = PydioDiagnostics(
                first.server, first.workspace, first.remote_folder, first.user_id)
        rc = smoke_tests.run()
        if rc != 0:
            logging.error("Diagnostics failed: %s %s" % (str(rc), smoke_tests.status_message))
        return sys.exit(rc)

    ports_detector = PortsDetector(store_file=str(jobs_root_path / 'ports_config'), username=args.api_user,
                                   password=args.api_password, default_port=args.api_port)
    ports_detector.create_config_file()

    scheduler = PydioScheduler.Instance(jobs_root_path=jobs_root_path, jobs_loader=jobs_loader)
    server = PydioApi(ports_detector.get_port(), ports_detector.get_username(),
        ports_detector.get_password(), external_ip=args.api_address)
    from pydio.job import manager
    manager.api_server = server

    try:
        # Run the web API in a background thread, then block in the scheduler.
        thread.start_new_thread(server.start_server, ())
        time.sleep(0.3)
        if not server.running:
            logging.error('Cannot start web server, exiting application')
            sys.exit(1)
        scheduler.start_all()

    except (KeyboardInterrupt, SystemExit):
        server.shutdown_server()
        sys.exit()