def build_lib(location, archive=None, dependencies=None, ignore_errors=False):
    """Build a project with a single lib (for testing)"""
    if archive is None:
        archive = Archive()
    if location.startswith('py:'):
        py = location.split(':', 1)[1]
        __import__(py)
        module = sys.modules[py]
        location = dirname(abspath(module.__file__))
    with fsopendir(location) as import_fs:
        lib = archive.load_library(import_fs)
    #lib.finalize()
    #require_libs = lib._cfg.get_list('tests', 'import', '') or []
    if dependencies:
        for require_lib in dependencies:
            if require_lib.startswith('py:'):
                py = require_lib.split(':', 1)[1]
                __import__(py)
                module = sys.modules[py]
                location = dirname(abspath(module.__file__))
            else:
                location = require_lib
            with fsopendir(location) as import_fs:
                _lib = archive.load_library(import_fs)
    archive.finalize(ignore_errors=ignore_errors)
    return archive, lib

def main():
    args = parse_args_or_exit()
    logging.basicConfig(level=logging.ERROR if args.quiet else logging.DEBUG)
    rpm_lib = rpm_adapter.SimpleRPM()
    templates = spec_template.templates_from_dir(
        fsopendir(args.config_dir), rpm_lib)
    if args.print_only:
        for template in templates:
            print template.main_source
        sys.exit(0)
    target_dir = fsopendir(args.target_dir)
    if args.dry_run:
        executor = executors.PrintExecutor(sys.stdout)
    else:
        executor = executors.RealExecutor()
    for template in templates:
        source = sources.GitHubSource(template.main_source)
        commands = source.clone_commands(target_dir)
        log.info(commands)
        result = executor.run(commands)
        if result.return_code != 0:
            log.warning("FAILED: %s", commands)
            if result.stdout:
                log.warning("STDOUT: %s", result.stdout)
            if result.stderr:
                log.warning("STDERR: %s", result.stderr)

def setup_bundle(self, name, source_url=None, build_url=None, library=None):
    """Configure a bundle from existing sources"""
    from test import bundles
    from os.path import dirname, join
    from fs.opener import fsopendir
    import yaml

    if not library:
        library = self.library()

    if not source_url:
        source_url = 'mem://source'
    if not build_url:
        build_url = 'mem://build'

    for fs_url in (source_url, build_url):
        d = parse_url_to_dict(fs_url)
        # For persistent fs types, make sure it is empty before the test.
        if d['scheme'] not in ('temp', 'mem'):
            assert fsopendir(fs_url).isdirempty('/')

    test_source_fs = fsopendir(join(dirname(bundles.__file__),
                                    'example.com', name))
    config = yaml.load(test_source_fs.getcontents('bundle.yaml'))

    b = library.new_from_bundle_config(config)
    b.set_file_system(source_url=source_url, build_url=build_url)
    self.copy_bundle_files(test_source_fs, b.source_fs)
    return b

def _refdocs_init():
    if REFDOC_ROOT.startswith("rpc://"):
        try:
            l_refdoc_fs = RPCFS("http://" + REFDOC_ROOT[len("rpc://"):])
        except RemoteConnectionError:
            lpath = os.path.join(INSTANCE_ROOT, "refdocs")
            logger.error("Could not connect to configured REFDOCS. "
                         "Using {0}.".format(lpath))
            l_refdoc_fs = fsopendir(lpath, create_dir=True)
    else:
        l_refdoc_fs = fsopendir(REFDOC_ROOT, create_dir=True)
    return l_refdoc_fs

def _docstore_init():
    if DOCSTORE_ROOT.startswith("rpc://"):
        try:
            l_docstore_fs = RPCFS("http://" + DOCSTORE_ROOT[len("rpc://"):])
        except RemoteConnectionError:
            lpath = os.path.join(INSTANCE_ROOT, "docstore")
            logger.error("Could not connect to configured DOCSTORE. "
                         "Using {0}.".format(lpath))
            l_docstore_fs = fsopendir(lpath, create_dir=True)
    else:
        l_docstore_fs = fsopendir(DOCSTORE_ROOT, create_dir=True)
    return l_docstore_fs

def _wallet_init():
    if DOCUMENT_WALLET_ROOT.startswith('rpc://'):
        try:
            l_wallet_fs = RPCFS('http://' + DOCUMENT_WALLET_ROOT[len('rpc://'):])
        except RemoteConnectionError:
            lpath = os.path.join(INSTANCE_ROOT, 'wallet')
            logger.error('Could not connect to configured DOCUMENT_WALLET. '
                         'Using {0}.'.format(lpath))
            l_wallet_fs = fsopendir(lpath, create_dir=True)
    else:
        l_wallet_fs = fsopendir(DOCUMENT_WALLET_ROOT, create_dir=True)
    return l_wallet_fs

def test_type_intuit(self):
    from ambry_sources.intuit import TypeIntuiter

    cache_fs = fsopendir(self.setup_temp_dir())
    spec = self.sources['simple_fixed']
    s = get_source(spec, cache_fs, callback=lambda x, y: (x, y))
    f = MPRowsFile(cache_fs, spec.name)

    with f.writer as w:
        w.load_rows(s)

    with f.reader as r:
        ti = TypeIntuiter().process_header(r.headers).run(r.rows, r.n_rows)

    with f.writer as w:
        w.set_types(ti)

    columns = []
    with f.reader as r:
        for col in r.columns:
            columns.append((col.pos, col.name, col.type))

    expected_columns = [
        (1, u'id', u'int'),
        (2, u'uuid', u'str'),
        (3, u'int', u'int'),
        (4, u'float', u'float')
    ]
    self.assertEqual(columns, expected_columns)

def get_rc(cls, rewrite=True):
    """Create a new config file for test and return the RunConfig.

    This method will start with the user's default Ambry configuration, but
    will replace library.filesystem_root with the value of filesystem.test.
    Then, depending on the value of the AMBRY_TEST_DB environment variable,
    it will set library.database to the DSN of either database.test-sqlite
    or database.test-postgres.
    """
    config = ambry.run.load()  # not cached; get_config is

    orig_root = config.library.filesystem_root
    root_dir = config.filesystem.test.format(root=orig_root)

    if config.library.database == cls.library_test_dsn:
        raise Exception('production database and test database cannot be the same.')

    config.library.filesystem_root = root_dir
    config.library.database = cls.library_test_dsn
    config.accounts = None

    test_root = fsopendir(root_dir, create_dir=True)

    if rewrite:
        with test_root.open('config.yaml', 'w', encoding='utf-8') as f:
            config.loaded = None
            config.dump(f)

    return ambry.run.load(test_root.getsyspath('/'))

def send_file(self, filename):
    """Send file to the client."""
    try:
        fs = fsopendir(self.uri)
        return send_file(fs.open(filename))
    except Exception as e:
        raise StorageError('Could not send file: {}'.format(e))

def test_row_intuit(self):
    """Check that the sources can be loaded and analyzed without exceptions,
    and that the guesses for headers and start are as expected."""
    from ambry_sources.intuit import RowIntuiter

    cache_fs = fsopendir('temp://')
    # cache_fs = fsopendir('/tmp/ritest/')

    sources = self.load_sources('sources-non-std-headers.csv')

    for source_name, spec in sources.items():
        s = get_source(spec, cache_fs, callback=lambda x, y: (x, y))

        rows = list(s)
        l = len(rows)

        # The files are short, so the head and tail overlap.
        ri = RowIntuiter(debug=False).run(rows[:int(l * .75)], rows[int(l * .25):], len(rows))

        print source_name, ri.start_line, ri.header_lines

        self.assertEqual(
            spec.expect_headers,
            ','.join(str(e) for e in ri.header_lines),
            'Header lines of {} source do not match the row intuiter.'.format(spec.name))

        self.assertEqual(
            spec.expect_start, ri.start_line,
            'Start line of {} source does not match the row intuiter start line.'.format(spec.name))

def upload_files():
    if 'file' not in request.files:
        return jsonify(success=False, message='No file part'), 400

    attachment = request.files['file']
    if not attachment:
        return

    filedir = current_app.config['RECORD_EDITOR_FILE_UPLOAD_FOLDER']
    fs = fsopendir(filedir, create_dir=True)
    filename = secure_filename(attachment.filename)
    base_filename = filename
    count = 1
    while fs.isfile(filename) and count < MAX_UNIQUE_KEY_COUNT:
        filename = '%s_%s' % (base_filename, count)
        count += 1
    with fs.open(filename, mode='wb') as remote_file:
        copy_file(attachment.stream, remote_file)
    full_url = fs.getpathurl(filename, allow_none=True)
    if not full_url:
        full_url = fs.getsyspath(filename)
    return jsonify({'path': full_url})

def make_ramfs():
    def getsyspath(fname):
        return 'SYSPATH:' + fname
    fs = fsopendir('ram:///')
    fs.getsyspath = mock.Mock(side_effect=getsyspath)
    return fs

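# A minimal usage sketch for make_ramfs() above (hypothetical test, not part
# of the original suite): the RAM filesystem stores data normally, while
# getsyspath() is replaced by a mock that returns a predictable fake path.
def test_make_ramfs_stubs_getsyspath():
    fs = make_ramfs()
    fs.setcontents('hello.txt', b'hi')
    assert fs.getcontents('hello.txt') == b'hi'  # real in-memory storage
    assert fs.getsyspath('hello.txt') == 'SYSPATH:hello.txt'  # mocked syspath
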
def run_upload(self):
    args = self.args

    path = os.path.abspath(os.path.join(args.location, 'lib.ini'))
    try:
        with io.open(path, 'rt') as f:
            lib_settings = settings.SettingsContainer.read_from_file(f)
    except IOError:
        self.console.error("unable to read library settings from '{}'".format(path))
        return -1

    lib_name = lib_settings.get("lib", "name")
    lib_version = args.version or lib_settings.get("lib", "version")

    if args.docs:
        return self.upload_docs(lib_name, lib_version)

    package_name = "{}-{}".format(lib_name, lib_version)
    package_filename = "{}.zip".format(package_name)

    upload_info = self.call('package.get-upload-info')
    upload_url = upload_info['url']

    lib_fs = fsopendir(args.location)
    package_destination_fs = lib_fs.makeopendir('__moyapackage__')

    if not package_destination_fs.exists(package_filename):
        raise CommandError("package '{}' does not exist, run 'moya-pm build'".format(package_filename))

    self.upload(upload_url,
                lib_name,
                lib_version,
                package_destination_fs,
                package_filename,
                overwrite=args.overwrite)

def __init__(self, name, namespace, fs=None, compress=False, compress_min=1024):
    super(FileCache, self).__init__(name,
                                    namespace,
                                    compress=compress,
                                    compress_min=compress_min,
                                    thread_safe=True)
    if fs is None:
        fs = fsopendir('filecache', create_dir=True)
    elif isinstance(fs, string_types):
        fs = fsopendir(fs, create_dir=True)
    if namespace:
        sub_dir = namespace.replace('/', '-').replace(' ', '-')
        fs = fs.makeopendir(sub_dir)
    self.fs = fs
    self.max_key_length = 80

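# Hedged usage sketch for the FileCache constructor above; only the
# constructor signature comes from the snippet, the argument values are
# made up. Passing an opener URL string ('temp://') makes FileCache open it
# with fsopendir, and the namespace becomes a '-'-separated subdirectory.
cache = FileCache('fragments', 'site/a', fs='temp://', compress=True)
# cache.fs is now a sub-filesystem opened at the 'site-a' namespace directory.
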
def test_generator(self):
    from ambry_sources.sources import GeneratorSource, SourceSpec
    from ambry_sources import head, tail

    cache_fs = fsopendir(self.setup_temp_dir())

    def gen():
        yield list('abcde')
        for i in range(10):
            yield [i, i + 1, i + 2, i + 3, i + 4]

    f = HDFPartition(cache_fs, 'foobar')

    s = GeneratorSource(SourceSpec('foobar'), gen())

    ri = RowIntuiter().run(head(s, 100), tail(s, 100))
    row_spec = self._row_intuiter_to_dict(ri)
    ti = TypeIntuiter().process_header(ri.headers).run(GeneratorSource(SourceSpec('foobar'), gen()))
    with f.writer as w:
        w.set_row_spec(row_spec, ri.headers)
        w.set_types(ti)
    f.load_rows(GeneratorSource(SourceSpec('foobar'), gen()))

    self.assertEqual(f.headers, list('abcde'))
    rows = []
    for row in f.select():
        rows.append(row.dict)
    self.assertEqual(len(rows), 10)
    self.assertEqual(rows[0], {'a': 0, 'b': 1, 'c': 2, 'd': 3, 'e': 4})
    self.assertEqual(rows[-1], {'a': 9, 'b': 10, 'c': 11, 'd': 12, 'e': 13})

def test_creates_virtual_tables_for_partition_with_segment_without_errors(self):
    fs = fsopendir('temp://')

    def gen():
        # generate header
        yield ['col1', 'col2']
        # generate rows
        yield [0, 0]
        yield [1, 1]

    mprows = MPRowsFile(fs, 'example.com/simple-0.1.3/1.mpr')
    mprows.load_rows(GeneratorSource(SourceSpec('foobar'), gen()))

    # create virtual tables. This should not raise an error.
    #
    connection = apsw.Connection(':memory:')
    try:
        add_partition(connection, mprows, 'vid1')
    except Exception as exc:
        raise AssertionError('partition adding unexpectedly failed with {} error.'.format(exc))

    # check selected rows
    #
    cursor = connection.cursor()
    result = cursor.execute('SELECT * FROM {}'.format('vid1')).fetchall()
    self.assertEqual(result, [(0, 0), (1, 1)])

def test_created_source_has_zip_filesystem(self):
    # FIXME: Optimize to use a local file instead of downloading it all the time.
    cache_fs = fsopendir(self.setup_temp_dir())
    sources = self.load_sources(file_name='geo_sources.csv')
    spec = sources['community_plan']

    source = get_source(spec, cache_fs, callback=lambda x, y: (x, y))
    self.assertIsInstance(source._fstor._fs, ZipFS)

def test_library(self):
    db_path = '/tmp/foo/bar/library.db'
    if os.path.exists(db_path):
        os.remove(db_path)

    config_root = fsopendir('temp://')
    config_root.createfile(CONFIG_FILE)
    config_file_syspath = config_root.getsyspath(CONFIG_FILE)

    with open(config_file_syspath, 'w') as f:
        f.write("""
library:
    category: development
    filesystem_root: /tmp/foo/bar
    remotes:
        census: s3://test.library.civicknowledge.com/census
        public: s3://test.library.civicknowledge.com/public
        restricted: s3://test.library.civicknowledge.com/restricted
        test: s3://test.library.civicknowledge.com/test""")

    config = load(config_root.getsyspath('/'))

    lf = LibraryFilesystem(config)

    self.assertEqual('/tmp/foo/bar', lf.root)

    l = Library(config)
    l.sync_config()
    self.assertEqual(
        sorted(['test', 'restricted', 'census', 'public']),
        sorted([x.short_name for x in l.remotes]))

def test_all(self):
    """Test all sources from geo_sources.csv."""
    cache_fs = fsopendir(self.setup_temp_dir())
    sources = self.load_sources(file_name='geo_sources.csv')
    for name, spec in sources.items():
        if name == 'highways':
            # it is already tested, skip it.
            continue
        source = get_source(spec, cache_fs, callback=lambda x, y: (x, y))

        # Now check that it loads into MPRows.
        mpr = MPRowsFile(cache_fs, spec.name).load_rows(source)
        first_row = next(iter(mpr.reader))

        # Are columns recognized properly?
        NAME_INDEX = 1  # which element of the column descriptor contains the name.

        # Collect all names from the column descriptors. Skip the first element of the
        # schema because it describes the column-descriptor elements themselves.
        columns = [x[NAME_INDEX] for x in mpr.meta['schema'][1:]]
        self.assertIn('id', columns)
        self.assertIn('geometry', columns)

        # Is the first row valid?
        self.assertEqual(len(columns), len(first_row))

def run(self):
    args = self.args
    fs = fsopendir(args.fs)

    from ...command.sub import project_serve
    location = os.path.dirname(project_serve.__file__)

    init_logging(pathjoin(location, 'logging.ini'))

    if args.serve_templates:
        ini = 'templatesettings.ini'
    else:
        ini = 'settings.ini'

    application = WSGIApplication(location, ini, 'main', disable_autoreload=True)
    application.archive.filesystems['static'] = fs

    server = make_server(args.host,
                         int(args.port),
                         application,
                         handler_class=RequestHandler)
    log.info("server started on http://{}:{}".format(args.host, args.port))

    def handle_error(request, client_address):
        _type, value, tb = sys.exc_info()
        if isinstance(value, KeyboardInterrupt):
            interrupt_main()

    server.handle_error = handle_error

    try:
        server.serve_forever()
    finally:
        application.close()

def templatize(self, path):
    from fs.opener import fsopendir
    from fs.path import splitext, split

    fs = fsopendir(path)

    text_ext = ['', '.py', '.ini', '.xml', '.html', '.txt', '.json']
    bin_ext = ['.png', '.jpg', '.ico', '.gif']

    def check_path(path):
        dirname, filename = split(path)
        return filename not in [".svn", ".hg"]

    for path in fs.walkfiles(wildcard=check_path):
        _, ext = splitext(path)
        ext = ext.lower()
        if ext in text_ext:
            print('@TEXT {}'.format(path))
            for line in fs.open(path, 'rt'):
                print(line.rstrip())
        elif ext in bin_ext:
            print('@BIN {}'.format(path))
            with fs.open(path, 'rb') as f:
                chunk = f.read(64)
                while chunk:
                    print(''.join('%02x' % ord(b) for b in chunk))
                    chunk = f.read(64)

def test_ctor(self):
    d = '/tmp/socrata'

    from os import makedirs
    from os.path import exists
    from shutil import rmtree

    if exists(d):
        print "Make", d
        rmtree(d)
    makedirs(d)

    cache_fs = fsopendir(d)  # fsopendir(self.setup_temp_dir())

    sources = self.load_sources(file_name='sources.csv')
    spec = sources['facilities']
    source = get_source(spec, cache_fs)

    def cb(*args):
        print args

    mpr = MPRowsFile(cache_fs, spec.name).load_rows(source, callback=cb, limit=10)

def test_writes_rows_if_cache_is_large(self, fake_write_rows):
    temp_fs = fsopendir("temp://")
    parent = MagicMock()
    writer = HDFWriter(parent, temp_fs.getsyspath("temp.h5"))
    writer.cache = [[] for i in range(10000)]
    writer.insert_row(["row1"])
    fake_write_rows.assert_called_once_with()

def test_file_does_not_exist(self):
    fs = fsopendir('ram:///')
    self.assertRaises(
        exceptions.NoSuchFile,
        lambda: spec_template.template_from_file(
            'xenops-cli.spec.in', fs, rpm_adapter.SimpleRPM()))

def test_non_spec_ins_are_excluded(self):
    fs = fsopendir('ram:///')
    fs.createfile('somefile')
    templates = spec_template.templates_from_dir(
        fs, rpm_adapter.SimpleRPM())
    self.assertEqual([], templates)

def test_executes_select_query_without_any_error(self, fake_shares):
    fake_shares.return_value = True

    def gen():
        # generate header
        yield ['col1', 'col2']
        # generate first row
        yield [0, 0]

    fs = fsopendir('temp://')
    datafile = MPRowsFile(fs, 'vid1')
    datafile.load_rows(GeneratorSource(SourceSpec('foobar'), gen()))

    connection = None
    try:
        PostgreSQLTestBase._create_postgres_test_db()
        connection = psycopg2.connect(**PostgreSQLTestBase.pg_test_db_data)

        # create a foreign table for the partition
        with connection.cursor() as cursor:
            # we have to close the opened transaction.
            cursor.execute('COMMIT;')
            add_partition(cursor, datafile, 'vid1')

        # query the just-created foreign table.
        with connection.cursor() as cursor:
            cursor.execute('SELECT * FROM partitions.vid1;')
    finally:
        if connection:
            connection.close()
        PostgreSQLTestBase._drop_postgres_test_db()

def test_returns_dict_with_description(self):
    temp_fs = fsopendir("temp://")
    reader = MagicMock()
    hdf_partition = HDFPartition(temp_fs, path="temp.h5")
    ret = hdf_partition._info(reader)
    self.assertIn("version", ret)
    self.assertIn("data_start_pos", ret)

def test_returns_self(self):
    temp_fs = fsopendir("temp://")
    parent = MagicMock()
    filename = temp_fs.getsyspath("temp.h5")
    _create_h5(filename)
    reader = HDFReader(parent, filename)
    self.assertEqual(reader, reader.__enter__())

def test_removes_files(self):
    temp_fs = fsopendir("temp://")
    temp_fs.createfile("temp.h5")
    hdf_partition = HDFPartition(temp_fs, path="temp.h5")
    self.assertTrue(temp_fs.exists("temp.h5"))
    hdf_partition.remove()
    self.assertFalse(temp_fs.exists("temp.h5"))

def test_returns_stat_from_meta(self):
    temp_fs = fsopendir("temp://")
    hdf_partition = HDFPartition(temp_fs, path="temp.h5")
    with patch.object(HDFPartition, "meta", new_callable=PropertyMock) as fake_meta:
        fake_meta.return_value = {"stats": 22}
        self.assertEqual(hdf_partition.stats, 22)

def run(self):
    log.setLevel(logging.ERROR)
    args = self.args
    conf_path = self.app.conf
    dataplicity_path = dirname(conf_path)

    if args.build:
        do_build(dataplicity_path)

    with fsopendir(dataplicity_path) as src_fs:
        version = firmware.get_version(src_fs)
        filename = "firmware-{}.zip".format(version)
        firmware_path = join('__firmware__', filename)
        try:
            firmware_contents = src_fs.getcontents(firmware_path, 'rb')
        except ResourceNotFoundError:
            print "{} is missing, you can build firmware with 'dataplicity build'".format(firmware_path)
            return -1

    firmware_b64 = b64encode(firmware_contents)

    client = self.app.make_client(log)
    conf = client.conf
    remote = client.remote

    device_class_name = conf.get('device', 'class')
    #serial = conf.get('device', 'serial')

    ui = firmware.get_ui(fsopendir(dataplicity_path))

    print "uploading firmware..."
    with remote.batch() as batch:
        batch.call_with_id('auth_result',
                           'device.check_auth',
                           device_class=device_class_name,
                           serial=client.serial,
                           auth_token=client.auth_token)
        batch.call_with_id("publish_result",
                           "device.publish",
                           device_class=device_class_name,
                           version=version,
                           firmware_b64=firmware_b64,
                           ui=ui,
                           replace=args.replace)
    batch.get_result('auth_result')
    try:
        publish_result = batch.get_result('publish_result')
    except JSONRPCError as e:
        if e.code == ErrorCodes.FIRMWARE_EXISTS:
            print "Firmware {:010} exists!\nBump the version number in firmware.conf or use --replace to overwrite".format(version)
            return -1
        raise

    print "visit {} to manage firmware".format(publish_result['url'])

    if args.bump:
        with fsopendir(dataplicity_path) as src_fs:
            firmware.bump(src_fs)

def get_url(fs_path):
    """Get a URL for the file."""
    (dirurl, filename) = opener.pathsplit(fs_path)
    fs = opener.fsopendir(dirurl)
    return fs.getpathurl(filename)

def get_syspath(fs_path):
    """Get a local system path to the file."""
    (dirurl, filename) = opener.pathsplit(fs_path)
    fs = opener.fsopendir(dirurl)
    return fs.getsyspath(filename)

if tempfile.tempdir is None:
    tempfile.tempdir = tempfile.mkdtemp()

#: The path to the temporary directory which all application code can import,
#: and create whatever temporary files it needs within it.
#:
#: This directory will be removed by Tendril at clean application exit or
#: by the Operating System as per its policies.
#:
#: Every execution of tendril in a separate process owns its own temporary
#: directory.
#:
#: .. seealso:: :func:`fsutils_cleanup`
#:
TEMPDIR = tempfile.gettempdir()

temp_fs = fsopendir(TEMPDIR)


def get_tempname():
    """
    Gets a random string for use as a temporary filename.

    :return: A filename that can be used.
    """
    return next(tempfile._get_candidate_names())


def zipdir(path, zfpath):
    """
    Creates a zip file at ``zfpath`` containing all the files in ``path``.
    This function is a simple wrapper around python's :mod:`zipfile` module.
    """

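# Illustrative use of the module-level temp_fs and get_tempname() above
# (assumed pattern, not from the original module): create a uniquely named
# scratch file inside the per-process temporary directory.
scratch_name = get_tempname()
temp_fs.setcontents(scratch_name, b'scratch data')
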
def logic(self, context):
    start = time()
    (name,
     _location,
     py,
     priority,
     template_priority) = self.get_parameters(context,
                                              'name',
                                              'location',
                                              'py',
                                              'priority',
                                              'templatepriority')
    archive = self.document.archive

    absolute = False
    if _location is not None:
        location = _location
    else:
        if py in sys.modules:
            reload(sys.modules[py])
        try:
            __import__(py)
        except ImportError as e:
            raise errors.ElementError(
                "unable to import Python module '{}'".format(py),
                element=self,
                diagnosis=text_type(e))
        module = sys.modules[py]
        location = dirname(abspath(module.__file__))
        absolute = True

    if '::/' in location:
        import_fs = fsopendir(location)
    else:
        if absolute:
            import_fs = fsopendir(location)
        else:
            project_fs = context['fs']
            if project_fs.hassyspath('/'):
                project_path = project_fs.getsyspath('/')
                import_path = pathjoin(project_path, location)
                import_fs = fsopendir(import_path)
            else:
                import_fs = context['fs'].opendir(location)

    lib = archive.load_library(import_fs,
                               priority=priority,
                               template_priority=template_priority,
                               long_name=name,
                               rebuild=context.root.get('_rebuild', False))
    if lib.failed_documents:
        if _location is not None:
            msg = "Failed to load library '{}' from location '{}'"
            raise errors.StartupFailedError(
                msg.format(name or lib.long_name, _location))
        elif py:
            msg = "Failed to load library '{}' from Python module '{}'"
            raise errors.StartupFailedError(
                msg.format(name or lib.long_name, py))
        else:
            raise errors.StartupFailedError(
                "Failed to load library '{}'".format(name or lib.long_name))

    startup_log.debug("%s imported %.1fms", lib, (time() - start) * 1000.0)

    if lib.priority:
        startup_log.debug("%s priority is %s", lib, lib.priority)
    if lib.template_priority:
        startup_log.debug("%s template priority is %s", lib, lib.template_priority)

def test_csv_time(self):
    """Time writing rows with a PartitionDataFile."""
    from ambry.etl.partition import new_partition_data_file

    fs = fsopendir('mem://test')

    data = []
    ncols = 30

    types = (int, float, str)
    schema = [(i, types[i % 3]) for i in range(ncols)]

    #
    # Two different mungers, one uses a loop, one unrolls the loop in a lambda.

    def munger1(schema):
        """Create a function to call casters on a row.

        Using an eval is about 11% faster than using a loop.
        """
        funcs = []
        for name, t in schema:
            funcs.append('cast_{}(row[{}])'.format(t.__name__, name))
        return eval('lambda row: [{}]'.format(','.join(funcs)))

    row_munger1 = munger1(schema)

    def row_munger2(row):
        out = []
        for name, t in schema:
            if t == str:
                out.append(cast_str(row[name]))
            elif t == int:
                out.append(cast_int(row[name]))
            else:
                out.append(cast_float(row[name]))
        return out

    data.append([str(j) for j in range(ncols)])

    def randdata(t):
        import random
        if t == str:
            return '%020x' % random.randrange(16 ** 20)
        elif t == int:
            return random.randint(0, 100000)
        else:
            return random.random()

    for i in range(100):
        data.append([str(randdata(schema[j][1])) for j in range(ncols)])

    cdf = new_partition_data_file(fs, 'foo.msg')

    import time

    n = 30000
    s = time.time()
    for i in range(n):
        row = data[i % 100]
        row = row_munger1(row)
        cdf.insert(row)

    print('Munger 1', round(float(n) / (time.time() - s), 3), 'rows/s')

    s = time.time()
    for i in range(n):
        row = data[i % 100]
        row = row_munger2(row)
        cdf.insert(row)

    print('Munger 2', round(float(n) / (time.time() - s), 3), 'rows/s')

def test_basic_config(self):
    config_root = fsopendir('temp://')
    config_root.createfile(CONFIG_FILE)
    config_file_syspath = config_root.getsyspath(CONFIG_FILE)

    with open(config_file_syspath, 'w') as f:
        f.write("""
library:
    category: development
    remotes:
        census: s3://test.library.civicknowledge.com/census
        public: s3://test.library.civicknowledge.com/public
        restricted: s3://test.library.civicknowledge.com/restricted
        test: s3://test.library.civicknowledge.com/test
""")

    with self.assertRaises(ConfigurationError):
        config = load(config_root.getsyspath('/'))

    if 'AMBRY_DB' in os.environ:
        del os.environ['AMBRY_DB']

    with open(config_file_syspath, 'w') as f:
        f.write("""
library:
    category: development
    filesystem_root: /tmp/foo/bar
    database: postgres://foo:bar@baz:5432/ambry
    remotes:
        census: s3://test.library.civicknowledge.com/census
        public: s3://test.library.civicknowledge.com/public
        restricted: s3://test.library.civicknowledge.com/restricted
        test: s3://test.library.civicknowledge.com/test
""")

    config = load(config_root.getsyspath('/'))
    config.account = None

    self.assertEqual('postgres://*****:*****@baz:5432/ambry', config.library.database)
    self.assertEqual('/tmp/foo/bar', config.library.filesystem_root)

    self.assertEqual(2, len(config.loaded))
    self.assertEqual(config_file_syspath, config.loaded[0])

    with open(config_file_syspath, 'w') as f:
        f.write("""
library:
    filesystem_root: /foo/root
""")

    os.environ['AMBRY_DB'] = 'sqlite:////library.db'

    with open(config_file_syspath, 'w') as f:
        f.write("""""")

    os.environ['AMBRY_DB'] = 'sqlite:////{root}/library.db'
    os.environ['AMBRY_ROOT'] = '/tmp/foo/bar'

    config = load(config_root.getsyspath('/'))

    lf = LibraryFilesystem(config)

    self.assertEqual('sqlite://///tmp/foo/bar/library.db', lf.database_dsn)
    self.assertEqual('/tmp/foo/bar/downloads/a/b', lf.downloads('a', 'b'))

def start_library(self):
    console = self.console

    from ...tools import get_moya_dir
    from os.path import join, abspath
    project_path = None
    if self.args.location is not None:
        library_path = self.args.location
    else:
        try:
            project_path = get_moya_dir(self.args.project_location)
        except:
            console.error("Please run 'moya start library' inside your project directory, or specify the -o switch")
            return False
        library_path = abspath(join(project_path, './local/'))

    cfg = None
    if not self.args.location and project_path:
        from ... import build
        cfg = build.read_config(project_path, self.get_settings())

    if not self.args.acceptdefaults:
        console.table([
            [Cell("Moya Library Wizard", bold=True, fg="green", center=True)],
            ["""This will ask you a few questions, then create a new library in your Moya project based on your answers.

Default values are shown in grey (simply hit return to accept defaults). Some defaults may be taken from your ".bashrc" file, if it exists.
"""]
        ])
    author = self.get_author_details()
    library = {}
    library["title"] = LibraryTitle.ask(console, default=self.args.title)
    longname = self.args.longname or make_name(author["organization"], library["title"])
    longname = library["longname"] = LibraryLongName.ask(console, default=longname)
    library["url"] = LibraryURL.ask(console, default="")
    library["namespace"] = LibraryNamespace.ask(console, default="")

    mount = None
    appname = None

    do_mount = DoMount.ask(console, default="yes")
    if do_mount:
        mount = Mount.ask(console, default=self.args.mount or "/{}/".format(make_name(library["title"])))
        appname = AppName.ask(console, default=self.args.name or make_name(library["title"]))

    data = dict(author=author,
                library=library,
                timezone=self.get_timezone())

    actions = []

    from ...command.sub import library_template
    from fs.memoryfs import MemoryFS
    from fs.opener import fsopendir
    memfs = MemoryFS()
    templatebuilder.compile_fs_template(memfs,
                                        library_template.template,
                                        data=data)

    dest_fs = fsopendir(join(library_path, library["longname"]),
                        create_dir=True,
                        writeable=True)

    continue_overwrite = 'overwrite'
    if not dest_fs.isdirempty('.'):
        if self.args.force:
            continue_overwrite = 'overwrite'
        elif self.args.new:
            continue_overwrite = 'new'
        else:
            continue_overwrite = DirNotEmpty.ask(console, default="cancel")

    if continue_overwrite != 'cancel':
        if continue_overwrite == 'overwrite':
            from fs.utils import copydir
            copydir(memfs, dest_fs)
            actions.append("Written library files to {}".format(dest_fs.getsyspath('.')))
        elif continue_overwrite == 'new':
            files_copied = copy_new(memfs, dest_fs)
            table = [[
                Cell("{} new file(s) written".format(len(files_copied)),
                     fg="green",
                     bold=True,
                     center=True),
            ]]
            for path in files_copied:
                table.append([Cell(dest_fs.desc(path), bold=True, fg="black")])
            console.table(table)
            return 0

        if cfg:
            project_cfg = cfg['project']
            location = project_cfg['location']

            server_name = "main"

            if location:
                with fsopendir(project_path) as project_fs:
                    with project_fs.opendir(location) as server_fs:
                        from lxml.etree import fromstring, ElementTree, parse
                        from lxml.etree import XML, Comment
                        server_xml_path = server_fs.getsyspath(project_cfg['startup'])
                        root = parse(server_xml_path)

                        import_tag = XML('<import location="./local/{longname}" />\n\n'.format(**library))
                        import_tag.tail = "\n"

                        install_tag = None
                        if mount:
                            tag = '<install name="{appname}" lib="{longname}" mount="{mount}" />'
                        else:
                            tag = '<install name="{appname}" lib="{longname}" />'
                        install_tag = XML(tag.format(appname=appname,
                                                     longname=longname,
                                                     mount=mount))
                        install_tag.tail = "\n\n"

                        def has_child(node, tag, **attribs):
                            for el in node.findall(tag):
                                #items = dict(el.items())
                                if all(el.get(k, None) == v for k, v in attribs.items()):
                                    return True
                            return False

                        for server in root.findall("{{http://moyaproject.com}}server[@docname='{}']".format(server_name)):
                            add_import_tag = not has_child(server,
                                                           "{http://moyaproject.com}import",
                                                           location="./local/{}".format(longname))
                            add_install_tag = (not has_child(server,
                                                             "{http://moyaproject.com}install",
                                                             lib=longname)
                                               and install_tag is not None)

                            if add_import_tag or add_install_tag:
                                comment = Comment("Added by 'moya start library'")
                                comment.tail = "\n"
                                server.append(comment)
                            if add_import_tag:
                                server.append(import_tag)
                                actions.append("Added <import> tag")
                            if add_install_tag:
                                server.append(install_tag)
                                actions.append("Added <install> tag")
                                if mount:
                                    actions.append("Mounted application on {}".format(mount))
                        root.write(server_xml_path)

        table = [[Cell("Library files written successfully!",
                       fg="green",
                       bold=True,
                       center=True)]]

        actions_text = "\n".join(" * " + action for action in actions)
        table.append([Cell(actions_text, fg="blue", bold=True)])
        table.append(["""A new library has been added to the project, containing some simple example functionality.
See http://moyaproject.com/docs/creatinglibraries/ for more information."""])
        console.table(table)

        return 0

    console.text("No project files written.", fg="red", bold=True).nl()
    return -1

def start_project(self):
    console = self.console
    if not self.args.acceptdefaults:
        console.table([
            [Cell("Moya Project Wizard", bold=True, fg="green", center=True)],
            ["""This will ask you a few questions, then create a new Moya project based on your answers.

Default values are shown in blue (hit return to accept defaults). Some defaults may be taken from your ".moyarc" file, if it exists."""]
        ])

    author = self.get_author_details()
    project = {}
    project["title"] = ProjectTitle.ask(console, default=self.args.title)
    longname = make_name(author["organization"], project["title"])
    project["database"] = Database.ask(console, default='y')
    if project["database"]:
        project["auth"] = Auth.ask(console, default='y')
        project['signup'] = Signup.ask(console, default='y')
        project["pages"] = Pages.ask(console, default='y')
        project["feedback"] = Feedback.ask(console, default='y')
        project["blog"] = Blog.ask(console, default='y')
    project["comments"] = project.get("feedback", False) or project.get("pages", False)
    project["wysihtml5"] = project.get("feedback", False) or project.get("pages", False)
    project['jsonrpc'] = JSONRPC.ask(console, default='y')

    dirname = longname.split('.', 1)[-1].replace('.', '_')
    dirname = ProjectDirName.ask(console, default="./" + dirname)

    data = dict(author=author,
                project=project,
                timezone=self.get_timezone(),
                secret=make_secret())

    from ...command.sub import project_template
    from fs.memoryfs import MemoryFS
    from fs.opener import fsopendir
    memfs = MemoryFS()
    templatebuilder.compile_fs_template(memfs,
                                        project_template.template,
                                        data=data)
    dest_fs = fsopendir(self.args.location or dirname,
                        create_dir=True,
                        writeable=True)

    continue_overwrite = 'overwrite'
    if not dest_fs.isdirempty('.'):
        if self.args.force:
            continue_overwrite = 'overwrite'
        elif self.args.new:
            continue_overwrite = 'new'
        else:
            continue_overwrite = DirNotEmpty.ask(console, default="cancel")

    if continue_overwrite == 'overwrite':
        from fs.utils import copydir
        copydir(memfs, dest_fs)
        console.table([
            [Cell("Project files written successfully!",
                  fg="green",
                  bold=True,
                  center=True)],
            ["""See readme.txt in the project directory for the next steps.

Browse to http://moyaproject.com/gettingstarted/ if you need further help."""]
        ])
        return 0
    elif continue_overwrite == 'new':
        files_copied = copy_new(memfs, dest_fs)
        table = [[
            Cell("{} new file(s) written".format(len(files_copied)),
                 fg="green",
                 bold=True,
                 center=True),
        ]]
        for path in files_copied:
            table.append([Cell(dest_fs.desc(path), bold=True, fg="black")])
        console.table(table)
        return 0

    console.text("No project files written.", fg="red", bold=True).nl()
    return -1

def copy_file(fn, fp, tmppath):
    path = join(tmppath, fp)
    fn_new = fn + '_copy'
    this_fs = fsopendir(path)
    this_fs.copy(fn, fn_new)
    return fn_new

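# Hypothetical call of copy_file() above: with tmppath '/tmp/work' and
# subdirectory 'data', this duplicates /tmp/work/data/report.csv to
# /tmp/work/data/report.csv_copy and returns 'report.csv_copy'.
# new_name = copy_file('report.csv', 'data', '/tmp/work')
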
def test_datafile_read_write(self):
    from ambry_sources.mpf import MPRowsFile
    from fs.opener import fsopendir
    import datetime
    from random import randint, random
    from uuid import uuid4

    fs = fsopendir('temp://')
    # fs = fsopendir('/tmp/hdf5/')
    # fs = fsopendir('/tmp/pmpf')

    N = 50000

    # Basic read/write tests.

    row = lambda: [
        None,
        1,
        random(),
        str(uuid4()),
        datetime.date(randint(2000, 2015), randint(1, 12), 10),
        datetime.date(randint(2000, 2015), randint(1, 12), 10)
    ]
    headers = list('abcdefghi')[:len(row())]

    rows = [row() for i in range(N)]

    with Timer() as t:
        df = MPRowsFile(fs, 'foobar')
        w = df.writer

        w.headers = headers

        w.meta['source']['url'] = 'blah blah'

        for i in range(N):
            w.insert_row(rows[i])

        w.close()

    print('MSGPack write ', float(N) / t.elapsed, w.n_rows)

    with Timer() as t:
        count = 0
        i = 0
        s = 0
        r = df.reader
        for i, row in enumerate(r):
            count += 1
        r.close()

    print('MSGPack read ', float(N) / t.elapsed, i, count, s)

    with Timer() as t:
        count = 0
        r = df.reader
        for row in r.rows:
            count += 1
        r.close()

    print('MSGPack rows ', float(N) / t.elapsed)

    with Timer() as t:
        count = 0
        r = df.reader
        for row in r.raw:
            count += 1
        r.close()

    print('MSGPack raw ', float(N) / t.elapsed)

def is_local(fs_path):
    """Determine if the file is a local file."""
    (dirurl, filename) = opener.pathsplit(fs_path)
    fs = opener.fsopendir(dirurl)
    return fs.hassyspath(filename)

def run(self):
    parser = self.get_argparse()
    args = parser.parse_args(sys.argv[1:])

    if args.version is None:
        major, minor = __version__.split('.')[:2]
        version = "{}.{}".format(major, minor)
    else:
        version = args.version

    try:
        with open(expanduser(args.settings), 'rt') as f_ini:
            cfg = SettingsContainer.read_from_file(f_ini)
            print("Read settings from {}".format(args.settings))
    except IOError:
        cfg = SettingsContainer()

    from ..docgen.extracter import Extracter
    from ..docgen.builder import Builder
    from ..command import doc_project
    location = dirname(doc_project.__file__)

    try:
        base_docs_fs = OSFS('text')
    except FSError:
        sys.stderr.write('run me from moya/docs directory\n')
        return -1
    extract_fs = OSFS(join('doccode', version), create=True)
    languages = [d for d in base_docs_fs.listdir(dirs_only=True) if len(d) == 2]

    def do_extract():
        print("Extracting docs v{}".format(version))
        utils.remove_all(extract_fs, '/')
        archive, context, doc = moya_build.build_server(location, 'settings.ini')

        extract_fs.makedir("site/docs", recursive=True)
        extract_fs.makedir("site/tags", recursive=True)
        #extract_fs.makedir("libs")

        with extract_fs.opendir('site/tags') as tags_fs:
            extracter = Extracter(archive, tags_fs)
            const_data = {}
            builtin_tags = []
            for namespace in self.builtin_namespaces:
                xmlns = getattr(namespaces, namespace, None)
                if xmlns is None:
                    raise ValueError("XML namespace '{}' is not in namespaces.py".format(namespace))
                namespace_tags = archive.registry.get_elements_in_xmlns(xmlns).values()
                builtin_tags.extend(namespace_tags)

            extracter.extract_tags(builtin_tags, const_data=const_data)

        for language in languages:
            with extract_fs.makeopendir("site/docs") as language_fs:
                doc_extracter = Extracter(None, language_fs)
                docs_fs = base_docs_fs.opendir(language)
                doc_extracter.extract_site_docs(docs_fs, dirname=language)

    if args.extract:
        do_extract()

    if args.build:
        theme_path = cfg.get('paths', 'theme', None)
        dst_path = join('html', version)
        if theme_path is None:
            theme_fs = OSFS('theme')
        else:
            theme_fs = fsopendir(theme_path)

        output_path = cfg.get('paths', 'output', None)

        if output_path is None:
            output_base_fs = OSFS(dst_path, create=True)
        else:
            output_root_base_fs = fsopendir(output_path)
            output_base_fs = output_root_base_fs.makeopendir(dst_path, recursive=True)

        #output_base_fs = OSFS(join('html', version), create=True)

        utils.remove_all(output_base_fs, '/')

        def do_build():
            print("Building docs v{}".format(version))
            lib_info = {}
            lib_paths = {}
            for long_name, lib in self.document_libs:
                lib_info[long_name] = moya_build.get_lib_info(lib)
                lib_paths[long_name] = output_base_fs.getsyspath(join('libs', long_name, 'index.html'))
            for language in languages:
                docs_fs = base_docs_fs.makeopendir(language)
                output_fs = output_base_fs.makeopendir(language)
                utils.remove_all(output_fs, '/')

                with extract_fs.opendir("site") as extract_site_fs:
                    builder = Builder(extract_site_fs, output_fs, theme_fs)
                    from ..tools import timer
                    with timer('render time'):
                        builder.build({"libs": lib_info,
                                       "lib_paths": lib_paths})

            # output_base_fs.makedir("libs", allow_recreate=True)
            # for long_name, lib in self.document_libs:
            #     source_path = extract_fs.getsyspath(join("libs", long_name))
            #     output_path = output_base_fs.getsyspath('libs')
            #     cmd_template = 'moya --debug doc build {} --theme libtheme --source "{}" --output "{}"'
            #     cmd = cmd_template.format(lib, source_path, output_path)
            #     os.system(cmd)

        def extract_build():
            do_extract()
            do_build()

        do_build()

        if not args.nobrowser:
            import webbrowser
            index_url = "file://" + output_base_fs.getsyspath('en/index.html')
            print(index_url)
            webbrowser.open(index_url)

        if args.watch:
            print("Watching for changes...")
            observer = Observer()
            path = base_docs_fs.getsyspath('/')

            reload_watcher = ReloadChangeWatcher(base_docs_fs, extract_build)
            observer.schedule(reload_watcher, path, recursive=True)
            observer.start()

            while 1:
                try:
                    time.sleep(0.1)
                except:
                    break

    return 0

def delete(fs_path):
    """Delete the file on storage."""
    (dirurl, filename) = opener.pathsplit(fs_path)
    fs = opener.fsopendir(dirurl)
    fs.remove(filename)

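# Sketch tying together the fs_path helpers defined above (get_url,
# get_syspath, is_local, delete); the concrete path and function name are
# hypothetical, not part of the original module.
def archive_and_remove(fs_path):
    if is_local(fs_path):
        print(get_syspath(fs_path))  # local path suitable for shelling out
    else:
        print(get_url(fs_path))      # URL for remote storage backends
    delete(fs_path)
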
def test_datafile_read_write(self):
    import datetime
    from random import randint, random
    from contexttimer import Timer
    from uuid import uuid4

    fs = fsopendir('temp://')

    N = 50000

    # Basic read/write tests.

    def rand_date_a():
        return datetime.date(randint(2000, 2015), randint(1, 12), 10)

    epoch = datetime.date(1970, 1, 1)

    def rand_date_b():
        return (datetime.date(randint(2000, 2015), randint(1, 12), 10) - epoch).total_seconds()

    row = lambda: (0, 1, random(), str(uuid4()), rand_date_b(), rand_date_b())
    headers = list('abcdefghi')[:len(row())]

    rows = [row() for i in range(N)]

    def write_large_blocks():
        df = HDFPartition(fs, path='foobar')

        if df.exists:
            df.remove()

        with Timer() as t, df.writer as w:
            w.headers = headers
            type_index = w.meta['schema'][0].index('type')
            pos_index = w.meta['schema'][0].index('pos')
            columns = w.meta['schema'][1:]
            for column in columns:
                column[type_index] = type(rows[0][column[pos_index] - 1]).__name__
            w.insert_rows(rows)

        print('HDF write large', float(N) / t.elapsed, w.n_rows)

    def write_small_blocks():
        df = HDFPartition(fs, path='foobar')

        if df.exists:
            df.remove()

        with Timer() as t, df.writer as w:
            w.headers = headers
            type_index = w.meta['schema'][0].index('type')
            pos_index = w.meta['schema'][0].index('pos')
            columns = w.meta['schema'][1:]
            for column in columns:
                column[type_index] = type(rows[0][column[pos_index] - 1]).__name__
            for i in range(N):
                w.insert_row(rows[i])

        print('HDF write small', float(N) / t.elapsed, w.n_rows)

    write_large_blocks()
    write_small_blocks()

    # timing the reader.

    df = HDFPartition(fs, 'foobar')

    with Timer() as t:
        count = 0
        i = 0
        s = 0
        r = df.reader
        for i, row in enumerate(r):
            count += 1
        r.close()

    print('HDFPartition iter ', float(N) / t.elapsed, i, count, s)

    with Timer() as t:
        count = 0
        r = df.reader
        for row in r.rows:
            count += 1
        r.close()

    print('HDFPartition rows ', float(N) / t.elapsed)

    with Timer() as t:
        count = 0
        r = df.reader
        for row in r.raw:
            count += 1
        r.close()

    print('HDFPartition raw ', float(N) / t.elapsed)

def test_fixed(self):
    cache_fs = fsopendir(self.setup_temp_dir())
    spec = self.sources['simple_fixed']
    s = get_source(spec, cache_fs, callback=lambda x, y: (x, y))
    f = MPRowsFile(cache_fs, spec.name).load_rows(s)
    self.assertEqual(f.headers, ['id', 'uuid', 'int', 'float'])

def build_server(fs,
                 settings_path,
                 server_element="main",
                 no_console=False,
                 rebuild=False,
                 validate_db=False,
                 breakpoint=False,
                 master_settings=None):
    """Build a server"""
    start = time()
    archive = Archive()
    console = archive.console

    try:
        archive, context, doc = build(fs,
                                      settings_path,
                                      rebuild=rebuild,
                                      master_settings=master_settings)
        console = archive.console
    except errors.ParseError as e:
        if not no_console:
            line, col = e.position
            console.document_error(text_type(e), e.path, e.code, line, col)
        return None
    except errors.ElementError as element_error:
        if not no_console:
            line = element_error.source_line
            col = 0
            console.document_error(text_type(element_error),
                                   element_error.element._location,
                                   element_error.element._code,
                                   line,
                                   col)
        raise errors.StartupFailedError('Failed to build project')

    if isinstance(fs, string_types):
        if '://' in fs:
            fs = fsopendir(fs)
        else:
            fs = OSFS(fs)
    archive.project_fs = fs

    try:
        app, server = doc.get_element(server_element)
    except errors.ElementNotFoundError:
        raise errors.StartupFailedError(
            "no <server> element called '{}' found in the project (check setting [project]/startup)".format(server_element))

    error_msg = None
    docs_location = archive.cfg.get('project', 'location')
    try:
        server.startup(archive,
                       context,
                       fs.opendir(docs_location),
                       breakpoint=breakpoint)
    except errors.StartupFailedError as error:
        error_msg = text_type(error)
        #raise
    except errors.ElementError as e:
        raise
    except Exception as e:
        failed = render_failed_documents(archive, console, no_console=no_console)
        if failed:
            raise errors.StartupFailedError("{} document(s) failed to build".format(failed))
        if hasattr(e, '__moyaconsole__'):
            e.__moyaconsole__(console)
        error_msg = text_type(e)
        raise errors.StartupFailedError(error_msg or 'Failed to build project')

    failed = render_failed_documents(archive, console, no_console=no_console)
    if failed:
        raise errors.StartupFailedError(error_msg or 'Failed to build project')

    # archive.finalize()
    archive.init_media()
    archive.init_data()

    if validate_db:
        from . import db
        if db.validate_all(archive, console) == 0:
            startup_log.debug('models validated successfully')
        else:
            msg = "Models failed to validate, see 'moya db validate' for more information"
            raise errors.StartupFailedError(msg)

    startup_log.info("%s built %.1fms", server, (time() - start) * 1000.0)
    return ServerBuildResult(archive=archive,
                             context=context,
                             server=server)

def test_datafile_read_write(self):
    from fs.opener import fsopendir
    import datetime
    from random import randint, random
    from contexttimer import Timer
    from uuid import uuid4

    fs = fsopendir('mem://')
    # fs = fsopendir('/tmp/pmpf')

    N = 50000

    # Basic read/write tests.

    def rand_date():
        return datetime.date(randint(2000, 2015), randint(1, 12), 10)

    def rand_datetime():
        return datetime.datetime(randint(2000, 2015), randint(1, 12), 10)

    def rand_time():
        return datetime.time(randint(0, 23), randint(0, 59), 10)

    row = lambda: (None, 1, random(), str(uuid4()), rand_date(), rand_datetime(), rand_time())
    headers = list('abcdefghi')[:len(row())]

    rows = [row() for i in range(N)]

    def write_large_blocks():
        df = MPRowsFile(fs, 'foobar')

        if df.exists:
            df.remove()

        with Timer() as t, df.writer as w:
            w.headers = headers
            w.insert_rows(rows)

        print('MSGPack write L', float(N) / t.elapsed, w.n_rows)

    def write_small_blocks():
        df = MPRowsFile(fs, 'foobar')

        if df.exists:
            df.remove()

        with Timer() as t, df.writer as w:
            w.headers = headers
            for i in range(N):
                w.insert_row(rows[i])

        print('MSGPack write S', float(N) / t.elapsed, w.n_rows)

    print()

    # Write the whole file with insert_rows(), which writes all of the rows at once.
    write_large_blocks()

    # Write the file in blocks, with insert_row() collecting rows into a cache,
    # then writing the cached blocks.
    write_small_blocks()

    df = MPRowsFile(fs, 'foobar')

    with Timer() as t:
        count = 0
        i = 0
        s = 0
        r = df.reader
        for i, row in enumerate(r):
            count += 1
        r.close()

    print('MSGPack read ', float(N) / t.elapsed, i, count, s)

    with Timer() as t:
        count = 0
        r = df.reader
        for row in r.rows:
            count += 1
        r.close()

    print('MSGPack rows ', float(N) / t.elapsed)

    with Timer() as t:
        count = 0
        r = df.reader
        for row in r.raw:
            count += 1
        r.close()

    print('MSGPack raw ', float(N) / t.elapsed)

def test_load_check_headers(self):
    """Just check that all of the sources can be loaded without exceptions."""
    cache_fs = fsopendir('temp://')

    headers = {
        'mz_with_zip_xl': [u('id'), u('gvid'), u('renter_cost_gt_30'), u('renter_cost_gt_30_cv'),
                           u('owner_cost_gt_30_pct'), u('owner_cost_gt_30_pct_cv')],
        'mz_no_zip': [u('id'), u('uuid'), u('int'), u('float')],
        'namesu8': [u('origin_english'), u('name_english'), u('origin_native'), u('name_native')],
        'sf_zip': [u('id'), u('uuid'), u('int'), u('float')],
        'simple': [u('id'), u('uuid'), u('int'), u('float')],
        'csv_no_csv': [u('id'), u('uuid'), u('int'), u('float')],
        'mz_with_zip': [u('id'), u('uuid'), u('int'), u('float')],
        'rpeople': [u('name'), u('size')],
        'rent07': [u('id'), u('gvid'), u('renter_cost_gt_30'), u('renter_cost_gt_30_cv'),
                   u('owner_cost_gt_30_pct'), u('owner_cost_gt_30_pct_cv')],
        'simple_fixed': [u('id'), u('uuid'), u('int'), u('float')],
        'altname': [u('id'), u('foo'), u('bar'), u('baz')],
        'rentcsv': [u('id'), u('gvid'), u('renter_cost_gt_30'), u('renter_cost_gt_30_cv'),
                    u('owner_cost_gt_30_pct'), u('owner_cost_gt_30_pct_cv')],
        'renttab': [u('id'), u('gvid'), u('renter_cost_gt_30'), u('renter_cost_gt_30_cv'),
                    u('owner_cost_gt_30_pct'), u('owner_cost_gt_30_pct_cv')],
        'multiexcel': [u('id'), u('gvid'), u('renter_cost_gt_30'), u('renter_cost_gt_30_cv'),
                       u('owner_cost_gt_30_pct'), u('owner_cost_gt_30_pct_cv')],
        'rent97': [u('id'), u('gvid'), u('renter_cost_gt_30'), u('renter_cost_gt_30_cv'),
                   u('owner_cost_gt_30_pct'), u('owner_cost_gt_30_pct_cv')]
    }

    for source_name, spec in self.sources.items():
        print(source_name)
        s = get_source(spec, cache_fs, callback=lambda x, y: (x, y))
        f = MPRowsFile(cache_fs, spec.name)

        if f.exists:
            f.remove()

        f.load_rows(s)

        with f.reader as r:
            if spec.name in headers:
                self.assertEqual(headers[spec.name], r.headers)

def get_fs(self, path):
    if path is None:
        path = join(get_moya_dir(), './documentation')
    fs = fsopendir(path, create_dir=True)
    fs.dir_mode = int('777', 8)
    return fs

def _refdocs_init():
    if REFDOC_ROOT.startswith('rpc://'):
        try:
            l_refdoc_fs = RPCFS('http://' + REFDOC_ROOT[len('rpc://'):])
        except RemoteConnectionError:
            lpath = os.path.join(INSTANCE_ROOT, 'refdocs')
            logger.error('Could not connect to configured REFDOCS. '
                         'Using {0}.'.format(lpath))
            l_refdoc_fs = fsopendir(lpath, create_dir=True)
    else:
        l_refdoc_fs = fsopendir(REFDOC_ROOT, create_dir=True)
    return l_refdoc_fs


refdoc_fs = _refdocs_init()
workspace_fs = fsopendir(os.path.join(INSTANCE_ROOT, 'scratch'), create_dir=True)
local_fs = fsopendir('/')


class ExposedDocument(object):
    def __init__(self, desc, fspath, fs, ts=None, efield=None):
        self.desc = desc
        self.path = fspath
        self.fs = fs
        self.ts = ts
        self.efield = efield
        self._get_fs_prefix()

    def _get_fs_prefix(self):
        if self.fs == refdoc_fs:
            self._prefix = os.path.join('/expose', REFDOC_PREFIX)

def run(self):
    args = self.args
    application = WSGIApplication(self.location, self.get_settings(), args.server)
    archive = application.archive

    filesystems = archive.filesystems

    fs = None
    if args.fs:
        try:
            fs = filesystems[args.fs]
        except KeyError:
            self.console.error("No filesystem called '%s'" % args.fs)
            return -1

    if args.tree is not None:
        if fs is None:
            self.console.error("Filesystem required")
            return -1
        with fs.opendir(args.tree) as tree_fs:
            tree_fs.tree()
        return

    if args.listdir:
        if fs is None:
            self.console.error("Filesystem required")
            return -1
        for path in fs.opendir(args.listdir).listdir():
            if fs.isdir(path):
                self.console(path, fg="cyan", bold=True).nl()
            else:
                self.console(path).nl()
    elif args.cat:
        if fs is None:
            self.console.error("Filesystem required")
            return -1
        contents = fs.getcontents(args.cat)
        self.console.cat(contents, args.cat)
    elif args.open:
        if fs is None:
            self.console.error("Filesystem required")
            return -1
        filepath = fs.getsyspath(args.open, allow_none=True)
        if filepath is None:
            self.console.error("No system path for '%s' in filesystem '%s'" % (args.open, args.fs))
            return -1
        import subprocess
        if os.name == 'mac':
            subprocess.call(('open', filepath))
        elif os.name == 'nt':
            subprocess.call(('start', filepath), shell=True)
        elif os.name == 'posix':
            subprocess.call(('xdg-open', filepath))
        else:
            self.console.error("Don't know how to open files on this platform (%s)" % os.name)
    elif args.syspath:
        if fs is None:
            self.console.error("Filesystem required (use -cat FILESYSTEM)")
            return -1
        if not fs.exists(args.syspath):
            self.console.error("No file called '%s' found in filesystem '%s'" % (args.syspath, args.fs))
            return -1
        syspath = fs.getsyspath(args.syspath, allow_none=True)
        if syspath is None:
            self.console.error("No system path for '%s' in filesystem '%s'" % (args.syspath, args.fs))
        else:
            self.console(syspath).nl()
    elif args.copy:
        if len(args.copy) == 1:
            src = '/'
            dst = args.copy[0]
        elif len(args.copy) == 2:
            src, dst = args.copy
        else:
            self.console.error("--copy requires 1 or 2 arguments")
            return -1

        if fs.isdir(src):
            src_fs = fs.opendir(src)
            dst_fs = fsopendir(dst, create_dir=True)

            if not args.force and not dst_fs.isdirempty('/'):
                response = raw_input("'%s' is not empty. Copying may overwrite directory contents. Continue? " % dst)
                if response.lower() not in ('y', 'yes'):
                    return 0

            from fs.utils import copydir
            copydir(src_fs, dst_fs)
        else:
            with fs.open(src, 'rb') as read_f:
                with open(dst, 'wb') as write_f:
                    while 1:
                        chunk = read_f.read(16384)
                        if not chunk:
                            break
                        write_f.write(chunk)
    else:
        table = [[Cell("Name", bold=True),
                  Cell("Type", bold=True),
                  Cell("Location", bold=True)]]
        if fs is None:
            list_filesystems = filesystems.items()
        else:
            list_filesystems = [(args.fs, fs)]
        for name, fs in sorted(list_filesystems):
            if isinstance(fs, MultiFS):
                location = '\n'.join(mount_fs.desc('/') for mount_fs in fs.fs_sequence)
                fg = "yellow"
            elif isinstance(fs, MountFS):
                mount_desc = []
                for path, dirmount in fs.mount_tree.items():
                    mount_desc.append('%s->%s' % (path, dirmount.fs.desc('/')))
                location = '\n'.join(mount_desc)
                fg = "magenta"
            else:
                syspath = fs.getsyspath('/', allow_none=True)
                if syspath is not None:
                    location = syspath
                    fg = "green"
                else:
                    try:
                        location = fs.desc('/')
                    except FSError as e:
                        location = text_type(e)
                        fg = "red"
                    else:
                        fg = "blue"
            table.append([Cell(name),
                          Cell(type(fs).__name__),
                          Cell(location, bold=True, fg=fg)])
        self.console.table(table, header=True)

def load(self, fs, settings_path=None):
    self.loaded = True
    self.load_fs = fs
    self.loaded_ini = fs.desc('lib.ini')

    try:
        self._cfg = cfg = SettingsContainer.read(fs, 'lib.ini')
    except FSError as e:
        raise errors.LibraryLoadError('failed to load lib.ini from "{path}" ({exc})',
                                      path=fs.desc("lib.ini"),
                                      exc=e,
                                      lib=self)

    def cfgget(section, key, bool=False, default=Ellipsis):
        try:
            if bool:
                value = cfg[section][key].strip().lower() in ('yes', 'true')
            else:
                value = cfg[section][key]
        except KeyError:
            if default is Ellipsis:
                raise errors.LibraryLoadError(
                    "required key [{}]/{} not found in lib.ini".format(section, key),
                    lib=self)
            return default
        else:
            return value

    self.long_name = cfgget('lib', 'name')
    if self.long_name in self.archive.libs:
        raise errors.LibraryLoadError(
            "already loaded this library from '{}'".format(self.loaded_ini),
            lib=self,
            diagnosis="Check for a previous <import> that loads this library")

    py_requires = cfgget('lib', 'pyrequires', default=None)
    if py_requires:
        try:
            version_ok = pyversion.check(py_requires)
        except ValueError as e:
            raise errors.LibraryLoadError(
                "bad Py version specification in [lib]/pyrequires ({})".format(text_type(e)),
                lib=self)
        if not version_ok:
            versions = ", ".join("Python {}.{}".format(*v)
                                 for v in pyversion.list_compatible(py_requires))
            raise errors.LibraryLoadError(
                "one of the following Python versions required: {versions}",
                lib=self.long_name,
                versions=versions)

    self.title = cfgget('lib', 'title', default=None)
    self.url = cfgget('lib', 'url', default=None)
    try:
        self.version = Version(cfgget('lib', 'version'))
    except ValueError as e:
        raise errors.LibraryLoadError(text_type(e), lib=self.long_name)
    self.namespace = cfgget('lib', 'namespace')
    self.docs_location = cfgget('lib', 'location', default=None)
    self.tests_location = cfgget('tests', 'location', default=None)

    self.system_settings = {'templates_directory': self.long_name or '',
                            'data_directory': self.long_name or ''}

    project_cfg = self.archive.cfg
    settings = SettingsSectionContainer()

    def update_settings(section):
        settings.update((k, SettingContainer(v)) for k, v in iteritems(cfg[section]))

    if 'author' in cfg:
        self.author.update(cfg['author'])

    if 'lib' in cfg:
        self.libinfo.update(cfg['lib'])

    if 'settings' in cfg:
        update_settings('settings')

    if 'templates' in cfg:
        self.templates_info = cfg['templates']

    if 'data' in cfg:
        self.data_info = cfg['data']
        location = cfgget('data', 'location')
        try:
            self.data_fs = fs.opendir(location)
        except FSError as e:
            raise errors.LibraryLoadError("Unable to read data from {path} ({exc})",
                                          path=location,
                                          exc=e,
                                          lib=self)

    if 'documentation' in cfg:
        self.documentation_location = cfg['documentation'].get('location', './docs')

    if 'translations' in cfg:
        i18n = cfg['translations']
        self.translations_location = i18n.get('location', './translations')
        self.default_language = i18n.get('default_language', 'en')
        self.languages = split_commas(i18n.get('languages', 'en'))
        self._localedir = self.load_fs.getsyspath(self.translations_location)
        if self.languages:
            startup_log.debug('%s reading translations %s',
                              self,
                              textual_list(self.languages, 'and'))
            self.translations.read('messages', self._localedir, self.languages)

    if project_cfg and ('lib:' + self.long_name) in project_cfg:
        update_settings("lib:" + self.long_name)

    self.settings = settings

    for section_name, section in iteritems(cfg):
        if ':' in section_name:
            what, name = section_name.split(':', 1)
        else:
            continue
        if what.startswith('py'):
            if self.no_py:
                continue

            try:
                version_ok = pyversion.check(what)
            except ValueError as e:
                raise errors.LibraryLoadError(
                    "Bad Py version specification ({})".format(text_type(e)),
                    lib=self)

            if version_ok:
                location = cfgget(section_name, 'location')
                py_fs = fs.opendir(location)
                try:
                    fs_import(self, py_fs, name or self.long_name)
                except errors.StartupFailedError as e:
                    raise errors.LibraryLoadError(text_type(e), exc=e, lib=self)
                except Exception as e:
                    raise
                    #console.exception(e, tb=True).div()
                    raise errors.LibraryLoadError("Error in Python extension",
                                                  py_exception=e,
                                                  lib=self)

        elif what == 'media':
            location = cfgget(section_name, 'location')
            try:
                media_fs = fs.opendir(location)
            except FSError as e:
                raise errors.LibraryLoadError("Unable to read media from {path} ({exc})",
                                              path=location,
                                              exc=e,
                                              lib=self)
            if media_fs.hassyspath('/'):
                self.media[name] = fsopendir(media_fs.getsyspath('/'))
            else:
                self.media[name] = media_fs

    if self.docs_location:
        with self.load_fs.opendir(self.docs_location) as docs_fs:
            self.import_documents(docs_fs, recurse=True)
