def get(self):
    """Return the general configuration read from the general config file.

    :returns: a JSON-serializable dict produced by the GlobalConfigManager.
    """
    configs_path = ConfigManager.Instance().get_configs_path()
    manager = GlobalConfigManager.Instance(configs_path=configs_path)
    return manager.get_general_config()
def __init__(self, filename, includes, excludes, poolsize=4, local_sdk=None,
             remote_sdk=None, job_config=None, db_handler=None):
    """Initialize the sqlite-backed change store for one sync job.

    :param filename: path to the job's sqlite database file
    :param includes: include filters for this job (format defined by callers)
    :param excludes: exclude filters for this job
    :param poolsize: stored as the maximum pool size (maxpoolsize)
    :param local_sdk: local-side SDK, forwarded to ChangeHistory
    :param remote_sdk: remote-side SDK, forwarded to ChangeHistory
    :param job_config: job configuration, stored and forwarded to ChangeHistory
    :param db_handler: database handler, forwarded to ChangeHistory
    """
    self.db = filename
    self.includes = includes
    self.excludes = excludes
    # Flipped to True below when the db file does not exist yet.
    self.create = False
    # The configs root sits two directory levels above the db file.
    global_config_manager = GlobalConfigManager.Instance(
        configs_path=os.path.dirname(os.path.dirname(filename)))
    # Increasing the timeout (default 5 seconds), to avoid database is locked error
    self.timeout = global_config_manager.get_general_config(
    )['max_wait_time_for_local_db_access']
    if not os.path.exists(self.db):
        self.create = True
    self.last_commit = time.time()
    self.local_sdk = local_sdk
    self.remote_sdk = remote_sdk
    self.pendingoperations = []
    self.maxpoolsize = poolsize
    self.failingchanges = {}  # keep track of failing changes
    # History db is placed alongside the main db.
    # NOTE(review): splits on "/" — assumes POSIX-style separators in
    # `filename`; confirm behavior on Windows paths.
    self.change_history = ChangeHistory(
        self.db[:self.db.rfind("/")] + "/history.sqlite", self.local_sdk,
        self.remote_sdk, job_config, db_handler)
    self.job_config = job_config
def get(self, complete_url):
    """Query the update server for a new version, honoring the configured
    check frequency.

    Behavior driven by general_config['update_info']:
    - update checks disabled -> return the no-update message;
    - frequency > 0 -> only query when more than that many calendar days
      have elapsed since 'last_update_date' (epoch milliseconds), and stamp
      the new check time; otherwise return the no-update message;
    - frequency == 0 -> always query, stamping the check time;
    - negative frequency -> query without stamping (preserved fall-through).

    :param complete_url: full URL of the update endpoint
    :returns: parsed JSON response from the update server, or the
        no-update message dict
    """
    global_config_manager = GlobalConfigManager.Instance(
        configs_path=ConfigManager.Instance().get_configs_path())
    general_config = global_config_manager.get_general_config()
    noupdate_msg = {u'noupdate': u'No update available'}
    update_info = general_config['update_info']
    if not bool(update_info['enable_update_check']):
        return noupdate_msg
    import time
    from datetime import datetime
    frequency = update_info['update_check_frequency_days']
    if frequency > 0:
        # FIX: compare real calendar days. The previous code subtracted
        # integer YYYYMMDD stamps, which is wrong across month/year
        # boundaries (e.g. 20240101 - 20231231 = 8870, not 1), and mixed
        # local time with gmtime. Use UTC dates on both sides.
        last_check = datetime.utcfromtimestamp(
            update_info['last_update_date'] / 1000).date()
        elapsed_days = (datetime.utcnow().date() - last_check).days
        if elapsed_days > frequency:
            update_info['last_update_date'] = time.time() * 1000
            global_config_manager.update_general_config(general_config)
        else:
            return noupdate_msg
    elif frequency == 0:
        update_info['last_update_date'] = time.time() * 1000
        global_config_manager.update_general_config(general_config)
    resp = requests.get(
        complete_url,
        stream=False,
        proxies=ConfigManager.Instance().get_defined_proxies())
    return json.loads(resp.content)
def post(self):
    """Write the posted general config into the general config file.

    :returns: a json response from update_general_config, or None when
        the request carried no usable JSON body
    """
    data = request.get_json()
    # FIX: request.get_json() returns None when the body is missing or
    # not JSON, so the previous `len(data) > 0` raised TypeError.
    # Truthiness covers both None and an empty payload.
    if data:
        global_config_manager = GlobalConfigManager.Instance(
            configs_path=ConfigManager.Instance().get_configs_path())
        return global_config_manager.update_general_config(data=data)
def __init__(self, filename, includes, excludes):
    """Set up the sqlite state store for one job.

    :param filename: path to the job's sqlite database file
    :param includes: include filters for this job
    :param excludes: exclude filters for this job
    """
    from pydio.utils.global_config import GlobalConfigManager
    self.db = filename
    self.includes = includes
    self.excludes = excludes
    # The configs root is two directory levels above the db file.
    configs_root = os.path.dirname(os.path.dirname(filename))
    global_config_manager = GlobalConfigManager.Instance(
        configs_path=configs_root)
    # Raise the timeout above sqlite's 5-second default to avoid
    # "database is locked" errors.
    general_config = global_config_manager.get_general_config()
    self.timeout = general_config['max_wait_time_for_local_db_access']
    # Mark the db for creation when the file is not there yet.
    self.create = not os.path.exists(self.db)
def __init__(self, job_data_path=''):
    """Open (creating if needed) the job's pydio.sqlite database.

    :param job_data_path: directory holding this job's data files
    """
    self.db = job_data_path + '/pydio.sqlite'
    if not os.path.exists(job_data_path):
        os.mkdir(job_data_path)
    # Fetch the local db access timeout from the general configuration.
    config_manager = GlobalConfigManager.Instance(configs_path=job_data_path)
    general_config = config_manager.get_general_config()
    self.timeout = general_config['max_wait_time_for_local_db_access']
    # Create the schema on first use.
    if not os.path.exists(self.db):
        self.init_db()
def __init__(self, job_data_path='', base=''):
    """Bind this handler to the job's pydio.sqlite database.

    :param job_data_path: directory holding this job's data files
    :param base: base path stored for later use by the handler
    """
    self.base = base
    self.job_data_path = job_data_path
    self.db = job_data_path + '/pydio.sqlite'
    self.event_handler = None
    config_manager = GlobalConfigManager.Instance(configs_path=job_data_path)
    # Raise the timeout above sqlite's 5-second default to avoid
    # "database is locked" errors.
    general_config = config_manager.get_general_config()
    self.timeout = general_config['max_wait_time_for_local_db_access']
    # Create the schema on first use.
    if not os.path.exists(self.db):
        self.init_db()
def __init__(self, basepath, job_data_path, sub_folder=None):
    """Initialize snapshot state backed by the job's pydio.sqlite db.

    :param basepath: root path being watched/snapshotted
    :param job_data_path: directory holding this job's data files
    :param sub_folder: optional sub-folder restriction
    :raises DBCorruptedException: when loading from the db fails with a
        sqlite OperationalError
    """
    self.basepath = basepath
    self.sub_folder = sub_folder
    self.is_recursive = True
    self.db = job_data_path + '/pydio.sqlite'
    self._stat_snapshot = {}
    self._inode_to_path = {}
    config_manager = GlobalConfigManager.Instance(configs_path=job_data_path)
    # Raise the timeout above sqlite's 5-second default to avoid
    # "database is locked" errors.
    general_config = config_manager.get_general_config()
    self.timeout = general_config['max_wait_time_for_local_db_access']
    # Surface a corrupted/locked database as a domain-specific error.
    try:
        self.load_from_db()
    except OperationalError as oe:
        raise DBCorruptedException(oe)
def get_languages():
    """Return the list of UI language codes to use.

    Resolution order: the configured 'language' when set and non-empty;
    otherwise the platform default languages; 'en_US' when the config
    subsystem is not ready yet.

    :returns: list of language code strings
    """
    try:
        from pydio.utils.global_config import GlobalConfigManager
    except ImportError:
        from utils.global_config import GlobalConfigManager
    try:
        conf = GlobalConfigManager.Instance().get_general_config()
    except Exception:
        # languages not ready, default to English
        return ["en_US"]
    # FIX: flattened the original flow (a dead `languages = []`
    # assignment plus nested try/if) into guard clauses with
    # identical behavior: missing key or empty string -> defaults.
    try:
        language = conf["language"]
    except KeyError:
        return get_default_language()
    if language == "":
        return get_default_language()
    return [language]
# Remove double folder Pydio/Pydio on windows DEFAULT_DATA_PATH = DEFAULT_DATA_PATH.replace( os.path.join(APP_NAME, APP_NAME), APP_NAME) elif sys.platform == 'linux2': # According to XDG specification # http://standards.freedesktop.org/basedir-spec/basedir-spec-latest.html CONFIGDIR = os.getenv('XDG_DATA_HOME') if CONFIGDIR: logging.info('Linux CONFIG DIR XDG_DATA_HOME: ' + CONFIGDIR) if not CONFIGDIR: CONFIGDIR = os.path.expanduser('~/.local/share') logging.info('Linux CONFIG DIR EXPANDED: ' + CONFIGDIR) DEFAULT_DATA_PATH = os.path.join(CONFIGDIR, APP_NAME) logging.info('Linux DEFAULT_DATA_PATH: ' + DEFAULT_DATA_PATH) global_config_manager = GlobalConfigManager.Instance( configs_path=DEFAULT_DATA_PATH) global_config_manager.set_general_config( global_config_manager.default_settings) DEFAULT_PARENT_PATH = get_user_home(APP_NAME) def main(argv=sys.argv[1:]): parser = argparse.ArgumentParser('Pydio Synchronization Tool') # Pass a server configuration via arguments parser.add_argument('-s', '--server', help='Server URL, with http(s) and path to pydio', type=unicode, default='http://localhost') parser.add_argument('-d',
def get(self):
    """Collect basic usage and diagnostic information.

    :return: dict with, per job: synced-file/dir counts, last local and
        remote sequence numbers, and server boot info; plus the running
        platform, the sync client version, the number of ERROR log lines,
        and a zlib-compressed base64-encoded blob of those ERROR lines.
    """
    jobs = JobsLoader.Instance().get_jobs()
    resp = {
        "errors": "zlib_blob",
        "nberrors": 0,
        "platform": platform.system()
    }
    for job_id in jobs:
        resp[job_id] = {"nbsyncedfiles": 0, "lastseq": 0, "serverInfo": {}}
    globalconfig = GlobalConfigManager.Instance(
        configs_path=ConfigManager.Instance().get_configs_path())
    resp["pydiosync_version"] = ConfigManager.Instance().get_version_data(
    )["version"]

    # parse logs for Errors, zip the errors
    logdir = globalconfig.configs_path
    log_prefix = globalconfig.default_settings['log_configuration'][
        'log_file_name']
    logfiles = [f for f in os.listdir(logdir) if f.startswith(log_prefix)]
    compressor = zlib.compressobj()
    compressed_data = ""
    errors = "["
    for logfile in logfiles:
        try:
            with open(os.path.join(logdir, logfile), 'r') as f:
                for l in f.readlines():
                    if l.find('ERROR') > -1:
                        resp['nberrors'] += 1
                        errors += '"' + l.replace('\n', '') + '",'
            compressed_data += compressor.compress(str(errors))
            errors = ""
        except Exception as e:
            logging.exception(e)
    # FIX: the compressed closing bracket was previously discarded
    # (compressor.compress("]") return value ignored), truncating the blob.
    compressed_data += compressor.compress("]")
    compressed_data += compressor.flush()
    # base64 encode the compressed extracted errors
    resp['errors'] = base64.b64encode(compressed_data)

    # Query each job's server for boot info and read local sync stats.
    for job_id in jobs:
        try:
            url = posixpath.join(jobs[job_id].server,
                                 'index.php?get_action=get_boot_conf')
            req = requests.get(url, verify=False)
            logging.info("URL " + url)
            logging.info(req.content)
            jsonresp = json.loads(req.content)
            for key in ('ajxpVersion', 'customWording', 'currentLanguage',
                        'theme', 'licence_features'):
                if key in jsonresp:
                    resp[job_id]['serverInfo'][key] = jsonresp[key]
        except Exception as e:
            logging.exception(e)
        pydiosqlite = SqlEventHandler(
            includes=jobs[job_id].filters['includes'],
            excludes=jobs[job_id].filters['excludes'],
            basepath=jobs[job_id].directory,
            job_data_path=os.path.join(globalconfig.configs_path, job_id))
        dbstats = pydiosqlite.db_stats()
        # FIX: do not reset resp[job_id] to {} here — doing so discarded
        # the serverInfo gathered just above in the same iteration.
        resp[job_id]['nbsyncedfiles'] = dbstats['nbfiles']
        resp[job_id]['nbdirs'] = dbstats['nbdirs']
        try:
            with open(
                    os.path.join(globalconfig.configs_path, job_id,
                                 "sequence"), "rb") as f:
                sequences = pickle.load(f)
            resp[job_id]['lastseq'] = sequences['local']
            resp[job_id]['remotelastseq'] = sequences['remote']
        except Exception:
            logging.info('Problem loading sequences file')
            resp[job_id]['lastseq'] = -1
            resp[job_id]['remotelastseq'] = -1
    return resp