def init_backend(cls, path, fields, init=False, soft=False):
    """Set up the backend under ``path``: the metadata database (a
    non-bare git repository with a patchs archive), the static
    database, and the search catalog built from ``fields``.
    """
    # Metadata database
    database = '{0}/database'.format(path)
    init_repository(database, bare=False)
    lfs.make_folder(database + '/.git/patchs')
    cls.init_backend_static(path)
    # Make catalog
    make_catalog('{0}/catalog'.format(path), fields)
def load(self):
    """Load the user configuration from the INI files in ~/.usine/.

    On failure returns an error message string (the caller is expected
    to display it).  On success fills self.by_type (objects grouped by
    section type) and self.by_type_and_name (lookup by (type, name)).
    """
    path = expanduser('~/.usine')
    # ~/.usine must be a folder, not a file
    if lfs.is_file(path):
        return 'ERROR: %s is a file, remove it first' % path
    # Make the user configuration file if needed
    if not lfs.exists(path):
        print 'Making the configuration folder:', path
        lfs.make_folder(path)
        return 'Now add the INI files within the folder'
    # Read the user configuration file (every *.ini in the folder)
    ini = [ '%s/%s' % (path, x) for x in lfs.get_names(path)
            if x[-4:] == '.ini' ]
    if len(ini) == 0:
        return 'ERROR: zero INI files found in %s/' % path
    # Read the ini file
    cfg = RawConfigParser()
    cfg.read(ini)
    # Get the data
    # NOTE(review): this iterates the private RawConfigParser._sections
    # mapping instead of the public sections()/items() API — keep in
    # mind if the stdlib implementation changes.
    for section in cfg._sections:
        options = cfg._sections[section]
        # Section headers are expected to look like "type name"
        type, name = section.split()
        module = modules[type]
        obj = module(options)
        # Keep the data unit
        self.by_type.setdefault(type, []).append(obj)
        self.by_type_and_name[(type, name)] = obj
    # Sort each type's objects by name for stable listings
    for type in self.by_type:
        self.by_type[type].sort(key=lambda x: x.name)
def setUp(self):
    """Build a WebPage metadata fixture and a sandbox folder on disk."""
    # Sandbox
    lfs.make_folder('sandbox')
    # Metadata fixture carrying an English title property
    record = Metadata(cls=WebPage)
    record.set_property('title', MetadataProperty(u'Hello World', lang='en'))
    self.metadata = record
def test_remove_folder(self):
    """Removing a folder must delete it and its contents recursively."""
    # Create hierarchy: tests/folder/a/hello.txt
    for folder in ('tests/folder', 'tests/folder/a'):
        lfs.make_folder(folder)
    lfs.make_file('tests/folder/a/hello.txt')
    # Remove and test: nothing must be left behind
    lfs.remove('tests/folder')
    self.assertEqual(lfs.exists('tests/folder'), False)
def __init__(self, db_path, db_fs, read_only):
    """Bind the database paths and prepare the git patchs folder.

    When opened read-write, also launches patch rotation (done only on
    the RW database so it happens a single time).
    """
    self.db_path = db_path
    self.db_fs = db_fs
    # Folder where git patches are archived; create it on first use
    patchs = '{0}/database/.git/patchs'.format(db_path)
    self.patchs_path = patchs
    if not lfs.exists(patchs):
        lfs.make_folder(patchs)
    self.patchs_fs = lfs.open(patchs)
    # Rotation mutates the folder: only do it on a RW database
    if not read_only:
        self.launch_rotate()
def action_build(self): """Make a source distribution for every required Python package. """ path = expanduser('~/.usine/cache') if not lfs.exists(path): lfs.make_folder(path) print '**********************************************************' print ' BUILD' print '**********************************************************' for name, branch in self.get_packages(): config.options.branch = branch source = self.get_source(name) source.action_dist()
def action_build(self): """Make a source distribution for every required Python package. """ path = expanduser("~/.usine/cache") if not lfs.exists(path): lfs.make_folder(path) print "**********************************************************" print " BUILD" print "**********************************************************" for name, version in self.get_packages(): config.options.version = version source = self.get_source(name) source.action_dist()
def action_build(self):
    """Prepare every required Python package: sync and checkout when
    building locally, otherwise make a source distribution for remote.
    """
    # Get .cache folder
    cache = expanduser('~/.usine/cache')
    if not lfs.exists(cache):
        lfs.make_folder(cache)
    for name, version in self.get_packages():
        config.options.version = version
        source = self.get_source(name)
        if not self.is_local:
            # If we build for remote we want to build a dist
            source.action_dist()
        else:
            source.action_sync()
            source.action_checkout()
def test_remove_empty_folder(self):
    """Removing an empty folder must make it disappear."""
    lfs.make_folder('tests/folder')
    lfs.remove('tests/folder')
    self.assertEqual(lfs.exists('tests/folder'), False)
def test_make_folder(self):
    """make_folder must create a real folder; clean it up afterwards."""
    lfs.make_folder('tests/folder')
    self.assertEqual(lfs.is_folder('tests/folder'), True)
    # Clean up so later tests start from a pristine tree
    lfs.remove('tests/folder')
def setUp(self):
    # Working folder used by the tests in this case
    lfs.make_folder('tmp')
def __init__(self, target, read_only=False, cache_size=None,
             profile_space=False):
    """Initialize the server rooted at the ``target`` folder.

    Loads the configuration, opens the database, builds the fake
    context, sets up the mail spool and the loggers.

    target        -- path of the instance folder
    read_only     -- open the database read-only (also forced by the
                     'database-readonly' configuration variable)
    cache_size    -- database cache size, 'min:max' or a single value;
                     defaults to the 'database-size' config value
    profile_space -- when True, enable guppy memory profiling

    Raises ValueError when 'log-level' holds an unknown value.
    """
    target = lfs.get_absolute_path(target)
    self.target = target
    self.read_only = read_only
    # Set timestamp
    self.timestamp = str(int(time() / 2))
    # Load the config
    config = get_config(target)
    self.config = config
    load_modules(config)
    self.modules = config.get_value('modules')
    # Contact Email
    self.smtp_from = config.get_value('smtp-from')
    # Full-text indexing
    self.index_text = config.get_value('index-text', type=Boolean,
                                       default=True)
    # Accept cors
    self.accept_cors = config.get_value(
        'accept-cors', type=Boolean, default=False)
    # Profile Memory (importing guppy.heapy.RM activates it)
    if profile_space is True:
        import guppy.heapy.RM
    # The database: cache size may be 'min:max' or a single value
    if cache_size is None:
        cache_size = config.get_value('database-size')
    if ':' in cache_size:
        size_min, size_max = cache_size.split(':')
    else:
        size_min = size_max = cache_size
    size_min, size_max = int(size_min), int(size_max)
    read_only = read_only or config.get_value('database-readonly')
    database = get_database(target, size_min, size_max, read_only)
    self.database = database
    # Find out the root class
    root = get_root(database)
    # Load environment file (optional, next to the root class module)
    root_file_path = inspect.getfile(root.__class__)
    environement_path = str(
        get_reference(root_file_path).resolve('environment.json'))
    if vfs.exists(environement_path):
        with open(environement_path, 'r') as f:
            data = f.read()
        self.environment = json.loads(data)
    # Init fake context
    context = get_fake_context(database, root.context_cls)
    context.server = self
    # Initialize the parent server class
    access_log = '%s/log/access' % target
    super(Server, self).__init__(root, access_log=access_log)
    # Email service: make sure the spool/failed folder exists
    self.spool = lfs.resolve2(self.target, 'spool')
    spool_failed = '%s/failed' % self.spool
    if not lfs.exists(spool_failed):
        lfs.make_folder(spool_failed)
    # Configuration variables
    get_value = config.get_value
    self.smtp_host = get_value('smtp-host')
    self.smtp_login = get_value('smtp-login', default='').strip()
    self.smtp_password = get_value('smtp-password', default='').strip()
    # Email is sent asynchronously
    self.flush_spool()
    # Logging
    log_file = '%s/log/events' % target
    log_level = config.get_value('log-level')
    if log_level not in log_levels:
        # FIX: message typo ('configuraion') and Py2-only raise syntax
        msg = 'configuration error, unexpected "%s" value for log-level'
        raise ValueError(msg % log_level)
    log_level = log_levels[log_level]
    logger = Logger(log_file, log_level, rotate=timedelta(weeks=3))
    register_logger(logger, None)
    logger = WebLogger(log_file, log_level)
    register_logger(logger, 'itools.web')
    # Session timeout
    self.session_timeout = get_value('session-timeout')
    # Register routes
    self.register_dispatch_routes()
def init_backend_static(cls, path):
    """Create the static files database and its history folder."""
    # Static database
    for template in ('{0}/database_static', '{0}/database_static/.history'):
        lfs.make_folder(template.format(path))
def init_backend(cls, path, init=False, soft=False):
    """Set up the metadata database (a non-bare git repository with a
    patchs archive) and the static database under ``path``.
    """
    # Metadata database
    database = '{0}/database'.format(path)
    init_repository(database, bare=False)
    lfs.make_folder(database + '/.git/patchs')
    cls.init_backend_static(path)
def get_test_filenames(test_path, force_download):
    """Return the list of test files, downloading any that are missing.

    Each returned item is a tuple
    (real_path, filename, size_in_bytes, human_readable_size),
    sorted by size.  When force_download is True, previously downloaded
    files and the compressed cache are removed first.
    """
    # Source URIs mapped to (file name, compression extension) pairs
    uris = {'http://download.wikimedia.org/qualitywiki/latest':
                [('qualitywiki-latest-stub-articles.xml', '.gz'),  #~ 3.1 KB
                 ('qualitywiki-latest-stub-meta-current.xml', '.gz'),  #~ 11.0 KB
                 ('qualitywiki-latest-stub-meta-history.xml', '.gz')],  #~ 28.9 KB
            'http://download.wikimedia.org/tawiki/latest':
                [('tawiki-latest-stub-articles.xml', '.gz'),  #~ 1.2 MB
                 ('tawiki-latest-stub-meta-history.xml', '.gz')],  #~ 7.3 MB
            'http://www.w3.org/XML/Test/':
                [('xmlts20080205', '.tar.gz')]
            }
    compressed_dir_path = join(test_path, 'compressed_files')
    if force_download is True:
        # Drop the compressed cache and every extracted file
        if lfs.exists(compressed_dir_path):
            print 'Remove compressed directory ', compressed_dir_path
            lfs.remove(compressed_dir_path)
        for names in uris.itervalues():
            for (name, ext) in names:
                path = join(test_path, name)
                if lfs.exists(path):
                    print 'Remove %s file' % path
                    lfs.remove(path)
    # test directory
    if lfs.exists(test_path) is False:
        lfs.make_folder(test_path)
    # compressed directory
    if lfs.exists(compressed_dir_path) is False:
        lfs.make_folder(compressed_dir_path)
    else:
        lfs.open(compressed_dir_path)
    test_dir_filenames = lfs.get_names(test_path)
    for base_uri, names in uris.iteritems():
        for (name, ext) in names:
            # Already extracted: nothing to do for this entry
            if test_dir_filenames.count(name):
                continue
            compressed_dest = join(compressed_dir_path,
                                   '%s%s' % (name, ext))
            # check if tarball already exists
            if lfs.exists(compressed_dest) is False:
                src = join(base_uri, '%s%s' % (name, ext))
                print 'GET %s file' % src
                # NOTE(review): 'dest' is assigned but never used
                dest = join(test_path, name)
                if vfs.exists(src) is False:
                    print "%s uri does not exists" % src
                    continue
                src_file = vfs.open(src)
                # save Gzip file
                compressed_dest_file = lfs.make_file(compressed_dest)
                compressed_dest_file.write(src_file.read())
                compressed_dest_file.close()
                src_file.close()
            print 'Extract file %s' % compressed_dest
            # Uncompressed File Path
            if name == 'xmlts20080205':
                # uncompress only xmlconf.xml file
                tar = open_tar(compressed_dest)
                xmlconf_file = tar.extractfile('xmlconf/xmlconf.xml')
                ucf_path = join(test_path, name)
                ucf_file = lfs.make_file(ucf_path)
                ucf_file.write(xmlconf_file.read())
                ucf_file.close()
            else:
                # untar Gzip file
                compressed_dest_file = lfs.open(compressed_dest)
                gzip_file = GzipFile(compressed_dest)
                ucf_path = join(test_path, name)
                ucf_file = lfs.make_file(ucf_path)
                ucf_file.write(gzip_file.read())
                compressed_dest_file.close()
                gzip_file.close()
                ucf_file.close()
    tests = []
    # update test dir name
    test_dir_filenames = lfs.get_names(test_path)
    for filename in test_dir_filenames:
        real_path = join(test_path, filename)
        if lfs.is_file(real_path):
            bytes = lfs.get_size(real_path)
            tests.append((real_path, filename, bytes,
                          get_string_size(bytes)))
    # Smallest files first
    tests.sort(key=lambda x: x[2])
    return tests
def init_backend(cls, path, fields, init=False, soft=False):
    """Create the database folder under ``path``.

    Simplest backend variant: only the folder is made here; ``fields``,
    ``init`` and ``soft`` are accepted for interface compatibility but
    unused in this implementation.
    """
    lfs.make_folder('{0}/database'.format(path))