def test_definition_for_zoom(self):
    """A layer picks the SQL definition whose [minzoom, maxzoom] range covers the zoom."""
    fs = MemoryFS()
    fs.writetext('1.sql', 'select 1')
    fs.writetext('2.sql', 'select 2')
    layer = Layer("water",
                  {"fields": {},
                   "description": "Waterbody and ocean areas",
                   "sql": [{"minzoom": 0, "maxzoom": 4, "file": "1.sql"},
                           {"minzoom": 5, "maxzoom": 8, "file": "2.sql"}]
                   }, fs)
    # Zooms 0-4 resolve to the first definition, 5-8 to the second.
    for i in range(0, 5):
        self.assertEqual(layer.definition_for_zoom(i),
                         Definition("water", "select 1", 0, 4, None))
    for i in range(5, 9):
        self.assertEqual(layer.definition_for_zoom(i),
                         Definition("water", "select 2", 5, 8, None))
    # Zooms beyond every range have no definition. The original passed a
    # second ``None`` argument, which assertIsNone treats as the failure
    # *message*, not an expected value; it is dropped here.
    self.assertIsNone(layer.definition_for_zoom(9))
def test_export_import_round_trip(self, def_to_xml):
    """Exported split_test XML re-imports with the same fields and child count."""
    # The HtmlDescriptor definition_to_xml tries to write to the filesystem
    # before returning an xml object. Patch this to just return the xml.
    def_to_xml.return_value = lxml.etree.Element('html')

    # Mock out the process_xml
    # Expect it to return a child descriptor for the SplitTestDescriptor when called.
    self.module_system.process_xml = Mock()

    # Write out the xml.
    xml_obj = self.split_test_module.definition_to_xml(MemoryFS())
    # assertEquals is a deprecated alias (removed in Python 3.12); use assertEqual.
    self.assertEqual(xml_obj.get('user_partition_id'), '0')
    self.assertIsNotNone(xml_obj.get('group_id_to_child'))

    # Read the xml back in.
    fields, children = SplitTestDescriptor.definition_from_xml(xml_obj, self.module_system)
    self.assertEqual(fields.get('user_partition_id'), '0')
    self.assertIsNotNone(fields.get('group_id_to_child'))
    self.assertEqual(len(children), 2)
def setUp(self): super(BaseVerticalBlockTest, self).setUp() # construct module course = xml.CourseFactory.build() sequence = xml.SequenceFactory.build(parent=course) vertical = xml.VerticalFactory.build(parent=sequence) self.course = self.process_xml(course) xml.HtmlFactory(parent=vertical, url_name='test-html-1', text=self.test_html_1) xml.HtmlFactory(parent=vertical, url_name='test-html-2', text=self.test_html_2) self.course = self.process_xml(course) course_seq = self.course.get_children()[0] self.module_system = get_test_system() self.module_system.descriptor_runtime = self.course._runtime # pylint: disable=protected-access self.course.runtime.export_fs = MemoryFS() self.vertical = course_seq.get_children()[0] self.vertical.xmodule_runtime = self.module_system
def test_apply(self):
    """Broadcasting two files into every 'sub*' directory copies both files to each."""
    mem_fs = MemoryFS()
    sub_dirs = ['sub.{}'.format(n) for n in range(3)]
    for sub in sub_dirs:
        mem_fs.makedir(sub)
    root = Directory('.', mem_fs)
    mem_fs.touch('test1.txt')
    mem_fs.touch('test2.txt')
    # Two "add" operations queue the files; the broadcast op fans them out.
    ops = [
        ini.OpAddToBroadcastFile('test1.txt'),
        ini.OpAddToBroadcastFile('test2.txt'),
        ini.OpBroadcastFile(['sub*']),
    ]
    RoutineOnDirectory(root, ops).work()
    expected = [sub + '/' + name
                for sub in sub_dirs
                for name in ('test1.txt', 'test2.txt')]
    for idx, path in enumerate(expected):
        with self.subTest(idx):
            self.assertTrue(mem_fs.exists(path))
class DummySystem(ImportSystem):
    """ImportSystem backed by an in-memory filesystem for XML import tests."""

    # Replace the on-disk OSFS with a MemoryFS so no real directories are read.
    @patch('xmodule.modulestore.xml.OSFS', lambda directory: MemoryFS())
    def __init__(self, load_error_modules):
        xmlstore = XMLModuleStore("data_dir", course_dirs=[],
                                  load_error_modules=load_error_modules)

        super(DummySystem, self).__init__(
            xmlstore=xmlstore,
            course_id='/'.join([ORG, COURSE, 'test_run']),
            course_dir='test_dir',
            error_tracker=Mock(),
            parent_tracker=Mock(),
            load_error_modules=load_error_modules,
            policy={},
        )

    def render_template(self, template, context):
        # Tests never render templates; fail loudly if one is requested.
        raise Exception("Shouldn't be called")
def _my_fs(module):
    """Create a mock filesystem to be used in examples."""
    example_fs = MemoryFS()
    if module == "fs.base":
        example_fs.makedir("Desktop")
        example_fs.makedir("Videos")
        example_fs.touch("Videos/starwars.mov")
        example_fs.touch("file.txt")
    elif module == "fs.info":
        example_fs.touch("foo.tar.gz")
        example_fs.settext("foo.py", "print('Hello, world!')")
        example_fs.makedir("bar")
    elif module in {"fs.walk", "fs.glob"}:
        for directory in ("dir1", "dir2"):
            example_fs.makedir(directory)
        example_fs.settext("foo.py", "print('Hello, world!')")
        example_fs.touch("foo.pyc")
        example_fs.settext("bar.py", "print('ok')\n\n# this is a comment\n")
        example_fs.touch("bar.pyc")
    return example_fs
def _create(
    self,
    fs: Optional[FS],
    name: Optional[str],
    id_: Optional[str],
    metadata: Optional[dict],
    combinatorial: bool = False,
    sequential: bool = False,
    seed: Optional[int] = None,
    sum_intra_duplicates: bool = True,
    sum_inter_duplicates: bool = False,
) -> None:
    """Start a new data package.

    All metadata elements should follow the `datapackage specification
    <https://frictionlessdata.io/specs/data-package/>`__.

    Licenses are specified as a list in ``metadata``. The default license is the
    `Open Data Commons Public Domain Dedication and License v1.0
    <http://opendatacommons.org/licenses/pddl/>`__.
    """
    extra = metadata or {}
    name = clean_datapackage_name(name or uuid.uuid4().hex)
    check_name(name)
    self.fs = fs or MemoryFS()
    self.metadata = {
        "profile": "data-package",
        "name": name,
        "id": id_ or uuid.uuid4().hex,
        "licenses": extra.get("licenses", DEFAULT_LICENSES),
        "resources": [],
        "created": datetime.datetime.utcnow().isoformat("T") + "Z",
        "combinatorial": combinatorial,
        "sequential": sequential,
        "seed": seed,
        "sum_intra_duplicates": sum_intra_duplicates,
        "sum_inter_duplicates": sum_inter_duplicates,
    }
    # Copy through any caller-supplied metadata keys not already set above.
    for key, value in extra.items():
        self.metadata.setdefault(key, value)
    self.data = []
class DummySystem(ImportSystem):  # lint-amnesty, pylint: disable=abstract-method, missing-class-docstring
    # Swap the on-disk OSFS for an in-memory filesystem during import tests.
    @patch('xmodule.modulestore.xml.OSFS', lambda dir: MemoryFS())
    def __init__(self, load_error_modules, course_id=None):
        # course_id defaults to the standard ORG/COURSE/test_run key.
        xmlstore = XMLModuleStore("data_dir", source_dirs=[],
                                  load_error_modules=load_error_modules)
        if course_id is None:
            course_id = CourseKey.from_string('/'.join([ORG, COURSE, 'test_run']))
        course_dir = "test_dir"
        error_tracker = Mock()

        super(DummySystem, self).__init__(  # lint-amnesty, pylint: disable=super-with-arguments
            xmlstore=xmlstore,
            course_id=course_id,
            course_dir=course_dir,
            error_tracker=error_tracker,
            load_error_modules=load_error_modules,
            field_data=KvsFieldData(DictKeyValueStore()),
        )
class DummySystem(ImportSystem):
    """ImportSystem that swaps disk access for an in-memory filesystem."""

    @patch('xmodule.modulestore.xml.OSFS', lambda dir: MemoryFS())
    def __init__(self, load_error_modules):
        xmlstore = XMLModuleStore("data_dir", course_dirs=[],
                                  load_error_modules=load_error_modules)
        course_id = "/".join([ORG, COURSE, 'test_run'])
        course_dir = "test_dir"
        policy = {}
        error_tracker = Mock()
        parent_tracker = Mock()

        super(DummySystem, self).__init__(
            xmlstore,
            course_id,
            course_dir,
            policy,
            error_tracker,
            parent_tracker,
            load_error_modules=load_error_modules,
        )
class DummySystem(ImportSystem):
    """ImportSystem wired to an in-memory filesystem and dict-backed field data."""

    @patch('xmodule.modulestore.xml.OSFS', lambda dir: MemoryFS())
    def __init__(self, load_error_modules):
        xmlstore = XMLModuleStore("data_dir", course_dirs=[],
                                  load_error_modules=load_error_modules)
        course_id = SlashSeparatedCourseKey(ORG, COURSE, 'test_run')
        course_dir = "test_dir"
        error_tracker = Mock()
        parent_tracker = Mock()

        super(DummySystem, self).__init__(
            xmlstore=xmlstore,
            course_id=course_id,
            course_dir=course_dir,
            error_tracker=error_tracker,
            parent_tracker=parent_tracker,
            load_error_modules=load_error_modules,
            field_data=KvsFieldData(DictKeyValueStore()),
        )
def do_cairo():
    """Render a small gradient + stroked-path demo PNG onto an in-memory FS."""
    width, height = 32, 32
    surface = cairo.ImageSurface(cairo.FORMAT_ARGB32, width, height)
    ctx = cairo.Context(surface)

    ctx.scale(width, height)  # Normalizing the canvas

    gradient = cairo.LinearGradient(0.0, 0.0, 0.0, 1.0)
    gradient.add_color_stop_rgba(1, 0.7, 0, 0, 0.5)  # First stop, 50% opacity
    gradient.add_color_stop_rgba(0, 0.9, 0.7, 0.2, 1)  # Last stop, 100% opacity

    ctx.rectangle(0, 0, 1, 1)  # Rectangle(x0, y0, x1, y1)
    ctx.set_source(gradient)
    ctx.fill()

    ctx.translate(0.1, 0.1)  # Changing the current transformation matrix

    ctx.move_to(0, 0)
    # Arc(cx, cy, radius, start_angle, stop_angle)
    ctx.arc(0.2, 0.1, 0.1, -math.pi / 2, 0)
    ctx.line_to(0.5, 0.1)  # Line to (x,y)
    # Curve(x1, y1, x2, y2, x3, y3)
    ctx.curve_to(0.5, 0.2, 0.5, 0.4, 0.2, 0.8)
    ctx.close_path()

    ctx.set_source_rgb(0.3, 0.2, 0.5)  # Solid color
    ctx.set_line_width(0.02)
    ctx.stroke()

    # Prepare an _in memory_ file system and write the image to a file.
    mem_fs = MemoryFS()
    with mem_fs.open("translation.png", "wb") as png_file:
        surface.write_to_png(png_file)

    del ctx
    surface.finish()
    del surface
    return mem_fs
def test_copy_files(self):
    """Syncing files matching 'txt*' into directories matching 'sub*' creates all copies."""
    mfs = MemoryFS()
    mfs.touch('txt1.txt')
    mfs.touch('txt2.txt')
    mfs.makedir('sub1')
    mfs.makedir('sub2')
    new_files = [
        'sub1/txt1.txt', 'sub1/txt2.txt',
        'sub2/txt1.txt', 'sub2/txt2.txt'
    ]
    # Sanity check: none of the copies exist before the sync runs.
    for n in new_files:
        self.assertFalse(mfs.exists(n))
    d = Directory('.', mfs)
    targets = d.listdir_as_observable().filter(match_directory(['sub*']))
    sources = d.listdir_as_observable().filter(match_file(['txt*']))
    sources.subscribe(lambda f: print(f.path.s))
    sources_list = []
    sources.subscribe(sources_list.append)
    # Fan out: copy every source file into every target directory, then
    # block until the observable pipeline completes.
    results = (targets.flat_map(
        lambda d: d.sync(sources)).to_list().to_blocking().first())
    self.assertEqual(len(results), 4)
    for n in new_files:
        self.assertTrue(mfs.exists(n))
def test_listdir(self) -> None:
    """listdir on a COWFS shows unchanged base entries alongside COW-modified ones."""
    base_fs = MemoryFS()
    base_fs.makedirs("/b$")
    base_fs.makedirs("/b$/dir1")
    base_fs.makedirs("/b$/dir2")
    # Populate the root and both subdirectories with the same two files.
    base_fs.writetext("/b$/file1.txt", "file1")
    base_fs.writetext("/b$/file2.txt", "file2")
    for sub in ("dir1", "dir2"):
        base_fs.writetext("/b$/" + sub + "/file1.txt", "file1")
        base_fs.writetext("/b$/" + sub + "/file2.txt", "file2")
    cow = COWFS(base_fs)
    cow.writetext("/b$/dir1/file2.txt", "xxxx")
    # Now the COW version is different. But it should still have
    # the old unchanged files.
    self.assertTrue(cow.exists("/b$/dir1/file1.txt"))
    self.assertEqual({"dir1", "dir2", "file1.txt", "file2.txt"},
                     set(cow.listdir("/b$")))
def override_export_fs(block):
    """
    Hack required for some legacy XBlocks which inherit
    XModuleDescriptor.add_xml_to_node() instead of the usual
    XmlSerializationMixin.add_xml_to_node() method.

    This method temporarily replaces a block's runtime's 'export_fs' system
    with an in-memory filesystem.

    This method also abuses the XmlParserMixin.export_to_file() API to prevent
    the XModule export code from exporting each block as two files (one .olx
    pointing to one .xml file). The export_to_file was meant to be used only
    by the customtag XModule but it makes our lives here much easier.
    """
    fs = WrapFS(MemoryFS())
    fs.makedir('course')
    fs.makedir(
        'course/static'
    )  # Video XBlock requires this directory to exists, to put srt files etc.

    old_export_fs = block.runtime.export_fs
    block.runtime.export_fs = fs
    if hasattr(block, 'export_to_file'):
        old_export_to_file = block.export_to_file
        block.export_to_file = lambda: False
    old_global_export_to_file = XmlParserMixin.export_to_file
    XmlParserMixin.export_to_file = lambda _: False  # So this applies to child blocks that get loaded during export
    # The original wrapped the yield in "except: raise", which is a no-op;
    # try/finally alone guarantees the patched state is always restored.
    try:
        yield fs
    finally:
        block.runtime.export_fs = old_export_fs
        if hasattr(block, 'export_to_file'):
            block.export_to_file = old_export_to_file
        XmlParserMixin.export_to_file = old_global_export_to_file
class DummySystem(ImportSystem):
    """ImportSystem variant keeping all filesystem access in memory."""

    @patch('xmodule.modulestore.xml.OSFS', lambda dir: MemoryFS())
    def __init__(self, load_error_modules):
        xmlstore = XMLModuleStore("data_dir", course_dirs=[],
                                  load_error_modules=load_error_modules)
        course_id = SlashSeparatedCourseKey(ORG, COURSE, 'test_run')
        course_dir = "test_dir"
        error_tracker = Mock()

        super(DummySystem, self).__init__(
            xmlstore=xmlstore,
            course_id=course_id,
            course_dir=course_dir,
            error_tracker=error_tracker,
            load_error_modules=load_error_modules,
            mixins=(InheritanceMixin, XModuleMixin),
            field_data=KvsFieldData(DictKeyValueStore()),
        )

    def render_template(self, _template, _context):
        # Template rendering is unexpected in these tests; fail loudly.
        raise Exception("Shouldn't be called")
def setUp(self): super().setUp() # construct module: course/sequence/vertical - problems # \_ nested_vertical / problems course = xml.CourseFactory.build() sequence = xml.SequenceFactory.build(parent=course) vertical = xml.VerticalFactory.build(parent=sequence) self.course = self.process_xml(course) xml.HtmlFactory(parent=vertical, url_name='test-html', text=self.test_html) xml.ProblemFactory(parent=vertical, url_name='test-problem', text=self.test_problem) nested_vertical = xml.VerticalFactory.build(parent=vertical) xml.HtmlFactory(parent=nested_vertical, url_name='test_html_nested', text=self.test_html_nested) xml.ProblemFactory(parent=nested_vertical, url_name='test_problem_nested', text=self.test_problem_nested) self.course = self.process_xml(course) course_seq = self.course.get_children()[0] self.module_system = get_test_system() self.module_system.descriptor_runtime = self.course._runtime self.course.runtime.export_fs = MemoryFS() self.vertical = course_seq.get_children()[0] self.vertical.xmodule_runtime = self.module_system self.html_block = self.vertical.get_children()[0] self.problem_block = self.vertical.get_children()[1] self.problem_block.has_score = True self.problem_block.graded = True self.extra_vertical_block = self.vertical.get_children()[2] # VerticalBlockWithMixins self.nested_problem_block = self.extra_vertical_block.get_children()[1] self.nested_problem_block.has_score = True self.nested_problem_block.graded = True self.username = "******" self.default_context = {"bookmarked": False, "username": self.username}
def setUp(self):
    # Each test gets a fresh in-memory filesystem; nothing touches disk.
    self.fs = MemoryFS()
def test_metadata_import_export(self):
    """Two checks:
    - unknown metadata is preserved across import-export
    - inherited metadata doesn't leak to children.
    """
    system = self.get_system()
    v = 'March 20 17:00'
    url_name = 'test1'
    # Course XML with an unknown "unicorn" attribute and an inheritable "due".
    start_xml = '''
    <course org="{org}" course="{course}"
            due="{due}" url_name="{url_name}" unicorn="purple">
        <chapter url="hi" url_name="ch" display_name="CH">
            <html url_name="h" display_name="H">Two houses, ...</html>
        </chapter>
    </course>'''.format(due=v, org=ORG, course=COURSE, url_name=url_name)
    descriptor = system.process_xml(start_xml)
    compute_inherited_metadata(descriptor)

    print(descriptor, descriptor._model_data)
    self.assertEqual(descriptor.lms.due, Date().from_json(v))

    # Check that the child inherits due correctly
    child = descriptor.get_children()[0]
    self.assertEqual(child.lms.due, Date().from_json(v))
    self.assertEqual(child._inheritable_metadata, child._inherited_metadata)
    self.assertEqual(2, len(child._inherited_metadata))
    self.assertEqual('1970-01-01T00:00:00Z', child._inherited_metadata['start'])
    self.assertEqual(v, child._inherited_metadata['due'])

    # Now export and check things
    resource_fs = MemoryFS()
    exported_xml = descriptor.export_to_xml(resource_fs)

    # Check that the exported xml is just a pointer
    print("Exported xml:", exported_xml)
    pointer = etree.fromstring(exported_xml)
    self.assertTrue(is_pointer_tag(pointer))
    # but it's a special case course pointer
    self.assertEqual(pointer.attrib['course'], COURSE)
    self.assertEqual(pointer.attrib['org'], ORG)

    # Does the course still have unicorns?
    with resource_fs.open(
            'course/{url_name}.xml'.format(url_name=url_name)) as f:
        course_xml = etree.fromstring(f.read())

    self.assertEqual(course_xml.attrib['unicorn'], 'purple')

    # the course and org tags should be _only_ in the pointer
    self.assertTrue('course' not in course_xml.attrib)
    self.assertTrue('org' not in course_xml.attrib)

    # did we successfully strip the url_name from the definition contents?
    self.assertTrue('url_name' not in course_xml.attrib)

    # Does the chapter tag now have a due attribute?
    # hardcoded path to child
    with resource_fs.open('chapter/ch.xml') as f:
        chapter_xml = etree.fromstring(f.read())
    self.assertEqual(chapter_xml.tag, 'chapter')
    self.assertFalse('due' in chapter_xml.attrib)
def test_metadata_import_export(self):
    """Two checks:
    - unknown metadata is preserved across import-export
    - inherited metadata doesn't leak to children.
    """
    system = self.get_system()
    v = 'March 20 17:00'
    url_name = 'test1'
    # Course XML with an unknown "unicorn" attribute and an inheritable "due".
    start_xml = '''
    <course org="{org}" course="{course}"
            due="{due}" url_name="{url_name}" unicorn="purple">
        <chapter url="hi" url_name="ch" display_name="CH">
            <html url_name="h" display_name="H">Two houses, ...</html>
        </chapter>
    </course>'''.format(due=v, org=ORG, course=COURSE, url_name=url_name)
    descriptor = system.process_xml(start_xml)
    compute_inherited_metadata(descriptor)

    # pylint: disable=W0212
    print(descriptor, descriptor._field_data)
    self.assertEqual(descriptor.due, ImportTestCase.date.from_json(v))

    # Check that the child inherits due correctly
    child = descriptor.get_children()[0]
    self.assertEqual(child.due, ImportTestCase.date.from_json(v))
    # need to convert v to canonical json b4 comparing
    self.assertEqual(
        ImportTestCase.date.to_json(ImportTestCase.date.from_json(v)),
        child.xblock_kvs.inherited_settings['due'])

    # Now export and check things
    descriptor.runtime.export_fs = MemoryFS()
    node = etree.Element('unknown')
    descriptor.add_xml_to_node(node)

    # Check that the exported xml is just a pointer
    print("Exported xml:", etree.tostring(node))
    self.assertTrue(is_pointer_tag(node))
    # but it's a special case course pointer
    self.assertEqual(node.attrib['course'], COURSE)
    self.assertEqual(node.attrib['org'], ORG)

    # Does the course still have unicorns?
    with descriptor.runtime.export_fs.open(
            'course/{url_name}.xml'.format(url_name=url_name)) as f:
        course_xml = etree.fromstring(f.read())

    self.assertEqual(course_xml.attrib['unicorn'], 'purple')

    # the course and org tags should be _only_ in the pointer
    self.assertTrue('course' not in course_xml.attrib)
    self.assertTrue('org' not in course_xml.attrib)

    # did we successfully strip the url_name from the definition contents?
    self.assertTrue('url_name' not in course_xml.attrib)

    # Does the chapter tag now have a due attribute?
    # hardcoded path to child
    with descriptor.runtime.export_fs.open('chapter/ch.xml') as f:
        chapter_xml = etree.fromstring(f.read())
    self.assertEqual(chapter_xml.tag, 'chapter')
    self.assertFalse('due' in chapter_xml.attrib)
def test_update_from_single_version(self) -> None:
    """Writing one file through a COWFS bumps versions only along the changed path."""
    fs = MemoryFS()
    mv = Multiversioned(fs)
    # Template directory contents shared by the bundle, collections, and products.
    d = {
        "file1.txt": "file1",
        "file2.txt": "file2",
        "dir1": {
            "file1.txt": "file1",
            "file2.txt": "file2"
        },
        "dir2": {
            "file1.txt": "file1",
            "file2.txt": "file2"
        },
    }
    bundle_lidvid = LIDVID("urn:nasa:pds:b::1.0")
    bundle_lid = bundle_lidvid.lid()
    no_lidvids: Set[LIDVID] = set()

    def create_bundle() -> None:
        # Bundle b owns collections c1 and c2.
        lidvids = [create_collection(bundle_lid, c) for c in ["c1", "c2"]]
        contents = dictionary_to_contents(set(lidvids), d)
        mv.add_contents_if(is_new, bundle_lid, contents)

    def create_collection(bundle_lid: LID, c: str) -> LIDVID:
        # Each collection owns products p1 and p2.
        lid = bundle_lid.extend_lid(c)
        lidvids = [create_product(lid, p) for p in ["p1", "p2"]]
        contents = dictionary_to_contents(set(lidvids), d)
        return mv.add_contents_if(is_new, lid, contents)

    def create_product(coll_lid: LID, p: str) -> LIDVID:
        lid = coll_lid.extend_lid(p)
        contents = dictionary_to_contents(no_lidvids, d)
        return mv.add_contents_if(is_new, lid, contents)

    create_bundle()
    vv = VersionView(mv, bundle_lidvid)
    c = COWFS(vv, MemoryFS(), MemoryFS())
    path = "/b$/c2$/p1$/dir1/file2.txt"
    c.writetext(path, "xxxx")
    latest_lidvid = mv.latest_lidvid(LID("urn:nasa:pds:b"))

    # Update from the COWFS.
    mv.update_from_single_version(is_new, c)
    self.assertNotEqual(latest_lidvid, mv.latest_lidvid(LID("urn:nasa:pds:b")))
    latest_lidvid = mv.latest_lidvid(LID("urn:nasa:pds:b"))

    # changed files are changed
    self.assertEqual("file2", fs.readtext("b/c2/p1/v$1.0/dir1/file2.txt"))
    self.assertEqual("xxxx", fs.readtext("b/c2/p1/v$2.0/dir1/file2.txt"))

    # unchanged files are unchanged
    self.assertEqual("file1", fs.readtext("b/c2/p1/v$1.0/dir1/file1.txt"))
    self.assertEqual("file1", fs.readtext("b/c2/p1/v$2.0/dir1/file1.txt"))

    # Change started in b/c2/p1. Check which versions are affected.
    self.assertEqual(
        VID("2.0"),
        cast(LIDVID, mv.latest_lidvid(LID("urn:nasa:pds:b"))).vid())
    self.assertEqual(
        VID("1.0"),
        cast(LIDVID, mv.latest_lidvid(LID("urn:nasa:pds:b:c1"))).vid())
    self.assertEqual(
        VID("2.0"),
        cast(LIDVID, mv.latest_lidvid(LID("urn:nasa:pds:b:c2"))).vid())
    self.assertEqual(
        VID("2.0"),
        cast(LIDVID, mv.latest_lidvid(LID("urn:nasa:pds:b:c2:p1"))).vid(),
    )
    self.assertEqual(
        VID("1.0"),
        cast(LIDVID, mv.latest_lidvid(LID("urn:nasa:pds:b:c2:p2"))).vid(),
    )

    # Now try updating again. Nothing should change.
    mv.update_from_single_version(is_new, c)
    self.assertEqual(latest_lidvid, mv.latest_lidvid(LID("urn:nasa:pds:b")))
def test_priority(self):
    """Test priority order is working.

    The four original copy-pasted stanzas are collapsed into one helper that
    builds a MultiFS from m1..m3 with the given priorities (None = default).
    """

    def build_multi_fs(priorities):
        # Each member MemoryFS holds a "name" file containing its own name,
        # so readbytes("name") reveals which member won the lookup.
        multi_fs = MultiFS(auto_close=False)
        for index, priority in enumerate(priorities, start=1):
            member = MemoryFS()
            fs_name = "m{}".format(index)
            member.writebytes("name", fs_name.encode())
            if priority is None:
                multi_fs.add_fs(fs_name, member)
            else:
                multi_fs.add_fs(fs_name, member, priority=priority)
        return multi_fs

    # Equal (default) priorities: the last filesystem added wins.
    self.assertEqual(build_multi_fs([None, None, None]).readbytes("name"), b"m3")
    # A single higher priority beats later defaults.
    self.assertEqual(build_multi_fs([None, 10, None]).readbytes("name"), b"m2")
    # Ties at the highest priority: the most recently added wins.
    self.assertEqual(build_multi_fs([None, 10, 10]).readbytes("name"), b"m3")
    # A strictly higher priority wins regardless of insertion order.
    self.assertEqual(build_multi_fs([11, 10, 10]).readbytes("name"), b"m1")
def start_project(self):
    """Interactive wizard: ask questions, then write a new Moya project skeleton."""
    console = self.console
    if not self.args.acceptdefaults:
        console.table([[Cell("Moya Project Wizard", bold=True, fg="green", center=True)],
                      ["""This will ask you a few questions, then create a new Moya project based on your answers. Default values are shown in blue (hit return to accept defaults). Some defaults may be taken from your ".moyarc" file, if it exists."""]])
    author = self.get_author_details()
    project = {}
    project["title"] = ProjectTitle.ask(console, default=self.args.title)
    longname = make_name(author["organization"], project["title"])
    project["database"] = Database.ask(console, default='y')
    if project["database"]:
        project["auth"] = Auth.ask(console, default='y')
        project['signup'] = Signup.ask(console, default='y')
    project["pages"] = Pages.ask(console, default='y')
    project["feedback"] = Feedback.ask(console, default='y')
    project["blog"] = Blog.ask(console, default='y')
    # Comments / wysihtml5 support is pulled in by either feedback or pages.
    project["comments"] = project.get("feedback", False) or project.get("pages", False)
    project["wysihtml5"] = project.get("feedback", False) or project.get("pages", False)
    project['jsonrpc'] = JSONRPC.ask(console, default='y')

    dirname = longname.split('.', 1)[-1].replace('.', '_')
    dirname = ProjectDirName.ask(console, default="./" + dirname)
    data = dict(author=author,
                project=project,
                timezone=self.get_timezone())

    from ...command.sub import project_template
    from fs.memoryfs import MemoryFS
    from fs.opener import fsopendir
    # Render the full project template into memory first, then copy to disk.
    memfs = MemoryFS()
    templatebuilder.compile_fs_template(memfs,
                                        project_template.template,
                                        data=data)

    dest_fs = fsopendir(self.args.location or dirname, create_dir=True, writeable=True)

    continue_overwrite = 'overwrite'
    if not dest_fs.isdirempty('.'):
        if self.args.force:
            continue_overwrite = 'overwrite'
        elif self.args.new:
            continue_overwrite = 'new'
        else:
            continue_overwrite = DirNotEmpty.ask(console, default="cancel")

    if continue_overwrite == 'overwrite':
        from fs.utils import copydir
        copydir(memfs, dest_fs)
        console.table([[Cell("Project files written successfully!", fg="green", bold=True, center=True)],
                      ["""See readme.txt in the project directory for the next steps.\n\nBrowse to http://moyaproject.com/gettingstarted/ if you need further help."""]])
        return 0
    elif continue_overwrite == 'new':
        # Only copy files that do not already exist in the destination.
        files_copied = copy_new(memfs, dest_fs)
        table = [[
            Cell("{} new file(s) written".format(len(files_copied)), fg="green", bold=True, center=True),
        ]]
        for path in files_copied:
            table.append([Cell(dest_fs.desc(path), bold=True, fg="black")])
        console.table(table)
        return 0

    console.text("No project files written.", fg="red", bold=True).nl()
    return -1
This is a test file
{{%- if readme %}}
@readme.txt
Readme file
-----------

${{message}}
{{%- endif %}}
@templates/base.html
<h1>${title}</h1>
<ul>
    {% for fruit in fruits %}
    <li>${fruit}</li>
    {% endfor %}
</ul>
@settings/production.ini
@foo/bar/baz/
@author
Bob
"""

from fs.osfs import OSFS
from fs.memoryfs import MemoryFS

# NOTE(review): this OSFS is immediately shadowed by the MemoryFS below, yet
# still creates a real '__test__' directory on disk as a side effect --
# confirm whether that directory is needed before removing the line.
fs = OSFS('__test__', create=True)
fs = MemoryFS()

# Render the template above into the (in-memory) filesystem and show the tree.
td = dict(message="Hello, World!", readme=True)
compile_fs_template(fs, template, td)
fs.tree()
def setUp(self):
    """Set up an XML-backed course containing a split_test with two HTML children."""
    super(SplitTestModuleTest, self).setUp()
    self.course_id = 'test_org/test_course_number/test_run'
    # construct module
    course = xml.CourseFactory.build()
    sequence = xml.SequenceFactory.build(parent=course)
    split_test = SplitTestModuleFactory(
        parent=sequence,
        attribs={
            'user_partition_id': '0',
            'group_id_to_child': '{"0": "i4x://edX/xml_test_course/html/split_test_cond0", "1": "i4x://edX/xml_test_course/html/split_test_cond1"}'  # pylint: disable=line-too-long
        })
    xml.HtmlFactory(parent=split_test, url_name='split_test_cond0', text='HTML FOR GROUP 0')
    xml.HtmlFactory(parent=split_test, url_name='split_test_cond1', text='HTML FOR GROUP 1')

    self.course = self.process_xml(course)
    self.course_sequence = self.course.get_children()[0]
    self.module_system = get_test_system()

    self.module_system.descriptor_runtime = self.course._runtime  # pylint: disable=protected-access
    # Exports go to an in-memory filesystem; tests touch no disk.
    self.course.runtime.export_fs = MemoryFS()

    # Create mock partition service, as these tests are running with XML in-memory system.
    self.course.user_partitions = [
        self.user_partition,
        UserPartition(
            MINIMUM_STATIC_PARTITION_ID, 'second_partition', 'Second Partition',
            [
                Group(unicode(MINIMUM_STATIC_PARTITION_ID + 1), 'abel'),
                Group(unicode(MINIMUM_STATIC_PARTITION_ID + 2), 'baker'),
                Group("103", 'charlie')
            ],
            MockUserPartitionScheme())
    ]
    partitions_service = MockPartitionService(
        self.course,
        course_id=self.course.id,
        track_function=Mock(name='track_function'),
    )
    self.module_system._services['partitions'] = partitions_service  # pylint: disable=protected-access

    # Mock user_service user
    user_service = Mock()
    user = Mock(username='******', email='*****@*****.**', is_staff=False, is_active=True)
    user_service._django_user = user
    self.module_system._services['user'] = user_service  # pylint: disable=protected-access

    self.split_test_module = self.course_sequence.get_children()[0]
    self.split_test_module.bind_for_student(self.module_system, user.id)

    # Create mock modulestore for getting the course. Needed for rendering the HTML
    # view, since mock services exist and the rendering code will not short-circuit.
    mocked_modulestore = Mock()
    mocked_modulestore.get_course.return_value = self.course
    self.split_test_module.system.modulestore = mocked_modulestore
def get_fs(cls, registry, fs_name, fs_name_params, fs_path, writeable, create_dir):
    """Open a fresh in-memory filesystem, optionally rooted at fs_path."""
    from fs.memoryfs import MemoryFS
    memory_fs = MemoryFS()
    if not create_dir:
        return memory_fs, None
    # Create fs_path inside the new filesystem and return that subdirectory.
    return memory_fs.makeopendir(fs_path), None
def test_manage_fs_obj(self):
    """manage_fs yields an already-open FS object unchanged and leaves it open."""
    memory_fs = MemoryFS()
    with opener.manage_fs(memory_fs) as managed:
        self.assertIs(memory_fs, managed)
    # An FS passed in by the caller must not be closed on context exit.
    self.assertFalse(memory_fs.isclosed())
def test_tilejson(self):
    """Config.tilejson expands the URL template and copies layer metadata through."""
    # Minimal config: only the id is substituted into the tile URL template.
    with MemoryFS() as fs:
        tj = json.loads(
            Config('''{"metadata": {"id":"v1"}}''',
                   fs).tilejson("http://localhost/{id}/{z}/{x}/{y}.mvt"))
        self.assertEqual(tj["tilejson"], "2.2.0")
        self.assertEqual(tj["format"], "pbf")
        self.assertEqual(tj["scheme"], "xyz")
        self.assertEqual(tj["tiles"], ["http://localhost/v1/{z}/{x}/{y}.mvt"])

    # Full sample config: every declared layer needs its SQL template on disk.
    with MemoryFS() as fs:
        fs.writetext("water.sql.jinja2", "select 1")
        fs.writetext("ne-admin.sql.jinja2", "select 2")
        fs.writetext("admin.sql.jinja2", "select 3")
        fs.writetext("country.sql.jinja2", "select 4")
        tj = json.loads(
            Config(sample_config,
                   fs).tilejson("http://localhost/{id}/{z}/{x}/{y}.mvt"))
        self.assertEqual(tj["name"], "name for tilejson, optional")
        self.assertEqual(tj["description"], "description for tilejson, optional")
        self.assertEqual(tj["attribution"], "attribution for tilejson, optional")
        self.assertEqual(tj["version"], "version for tilejson, optional")
        self.assertEqual(tj["bounds"], [-180, -85.05112877980659, 180, 85.0511287798066])
        self.assertEqual(tj["center"], [0, 0])
        self.assertEqual(tj["minzoom"], 0)
        self.assertEqual(tj["maxzoom"], 14)

        # tilejsons have a list of layers you need to iterate through
        water = {}
        admin = {}
        country_names = {}
        self.assertEqual(len(tj["vector_layers"]), 3)
        # "vector_layer" replaces the original ambiguous single-letter name
        # "l" (PEP 8 E741).
        for vector_layer in tj["vector_layers"]:
            if vector_layer["id"] == "water":
                water = vector_layer
            elif vector_layer["id"] == "admin":
                admin = vector_layer
            elif vector_layer["id"] == "country_names":
                country_names = vector_layer

        self.assertEqual(water["id"], "water")
        self.assertEqual(water["description"], "Waterbody and ocean areas")
        self.assertEqual(water["minzoom"], 0)
        self.assertEqual(water["maxzoom"], 8)
        self.assertEqual(water["geometry"], "unknown")
        self.assertEqual(water["fields"], {"water": "Type of water"})

        self.assertEqual(admin["id"], "admin")
        self.assertEqual(admin["description"], "Administrative boundaries")
        self.assertEqual(admin["minzoom"], 1)
        self.assertEqual(admin["maxzoom"], 10)
        self.assertEqual(admin["geometry"], "polygon")
        self.assertEqual(admin["fields"], {"admin_level": "Level of admin boundary"})

        self.assertEqual(country_names["id"], "country_names")
        self.assertEqual(country_names["description"], "Points for country names")
        self.assertEqual(country_names["minzoom"], 3)
        self.assertEqual(country_names["maxzoom"], 14)
        self.assertEqual(country_names["geometry"], "unknown")
        self.assertEqual(country_names["fields"], {
            "area": "Area of country",
            "name": "Name of country"
        })
def setUp(self):
    """Compose a MultiFS whose single writable member is an in-memory filesystem."""
    memory_member = MemoryFS()
    composed = MultiFS()
    composed.add_fs("mem", memory_member, write=True)
    self.fs = composed
    self.mem_fs = memory_member
def start_library(self):
    """Interactive wizard: create a new library inside a Moya project and register it."""
    console = self.console

    from ...tools import get_moya_dir
    from os.path import join, abspath
    project_path = None
    # Resolve where the library should be written: explicit -o location, or
    # the project's ./local/ directory.
    if self.args.location is not None:
        library_path = self.args.location
    else:
        try:
            project_path = get_moya_dir(self.args.project_location)
        except:
            console.error("Please run 'moya start library' inside your project directory, or specifiy the -o switch")
            return False
        library_path = abspath(join(project_path, './local/'))

    cfg = None
    if not self.args.location and project_path:
        from ... import build
        cfg = build.read_config(project_path, self.get_settings())

    if not self.args.acceptdefaults:
        console.table([[Cell("Moya Library Wizard", bold=True, fg="green", center=True)],
                      ["""This will ask you a few questions, then create a new library in your Moya project based on your answers.

Default values are shown in grey (simply hit return to accept defaults). Some defaults may be taken from your ".bashrc" file, if it exists.
"""]])
    author = self.get_author_details()
    library = {}
    library["title"] = LibraryTitle.ask(console, default=self.args.title)
    longname = self.args.longname or make_name(author["organization"], library["title"])
    longname = library["longname"] = LibraryLongName.ask(console, default=longname)
    library["url"] = LibraryURL.ask(console, default="")
    library["namespace"] = LibraryNamespace.ask(console, default="")

    mount = None
    appname = None

    do_mount = DoMount.ask(console, default="yes")
    if do_mount:
        mount = Mount.ask(console, default=self.args.mount or "/{}/".format(make_name(library["title"])))
        appname = AppName.ask(console, default=self.args.name or make_name(library["title"]))

    data = dict(author=author,
                library=library,
                timezone=self.get_timezone())

    actions = []

    from ...command.sub import library_template
    from fs.memoryfs import MemoryFS
    from fs.opener import fsopendir
    # Render the library template in memory first, then copy to disk.
    memfs = MemoryFS()
    templatebuilder.compile_fs_template(memfs, library_template.template, data=data)

    dest_fs = fsopendir(join(library_path, library["longname"]), create_dir=True, writeable=True)

    continue_overwrite = 'overwrite'
    if not dest_fs.isdirempty('.'):
        if self.args.force:
            continue_overwrite = 'overwrite'
        elif self.args.new:
            continue_overwrite = 'new'
        else:
            continue_overwrite = DirNotEmpty.ask(console, default="cancel")

    if continue_overwrite != 'cancel':
        if continue_overwrite == 'overwrite':
            from fs.utils import copydir
            copydir(memfs, dest_fs)
            actions.append("Written library files to {}".format(dest_fs.getsyspath('.')))
        elif continue_overwrite == 'new':
            # Only copy files that do not already exist in the destination.
            files_copied = copy_new(memfs, dest_fs)
            table = [[
                Cell("{} new file(s) written".format(len(files_copied)), fg="green", bold=True, center=True),
            ]]
            for path in files_copied:
                table.append([Cell(dest_fs.desc(path), bold=True, fg="black")])
            console.table(table)
            return 0

        if cfg:
            # Register the new library in the project's server XML:
            # add <import> and, when mounted, <install> tags.
            project_cfg = cfg['project']
            location = project_cfg['location']
            server_name = "main"

            if location:
                with fsopendir(project_path) as project_fs:
                    with project_fs.opendir(location) as server_fs:
                        from lxml.etree import fromstring, ElementTree, parse
                        from lxml.etree import XML, Comment
                        server_xml_path = server_fs.getsyspath(project_cfg['startup'])
                        root = parse(server_xml_path)

                        import_tag = XML('<import location="./local/{longname}" />\n\n'.format(**library))
                        import_tag.tail = "\n"

                        install_tag = None

                        if mount:
                            tag = '<install name="{appname}" lib="{longname}" mount="{mount}" />'
                        else:
                            tag = '<install name="{appname}" lib="{longname}" />'
                        install_tag = XML(tag.format(appname=appname, longname=longname, mount=mount))
                        install_tag.tail = "\n\n"

                        def has_child(node, tag, **attribs):
                            # True if node has a child <tag> whose attributes
                            # match all of the given attribs.
                            for el in node.findall(tag):
                                #items = dict(el.items())
                                if all(el.get(k, None) == v for k, v in attribs.items()):
                                    return True
                            return False

                        for server in root.findall("{{http://moyaproject.com}}server[@docname='{}']".format(server_name)):
                            add_import_tag = not has_child(server, "{http://moyaproject.com}import", location="./local/{}".format(longname))
                            add_install_tag = not has_child(server, "{http://moyaproject.com}install", lib=longname) and install_tag is not None

                            if add_import_tag or add_install_tag:
                                comment = Comment("Added by 'moya start library'")
                                comment.tail = "\n"
                                server.append(comment)
                            if add_import_tag:
                                server.append(import_tag)
                                actions.append("Added <import> tag")
                            if add_install_tag:
                                server.append(install_tag)
                                actions.append("Added <install> tag")
                                if mount:
                                    actions.append("Mounted application on {}".format(mount))
                        root.write(server_xml_path)

        table = [[Cell("Library files written successfully!", fg="green", bold=True, center=True)]]
        actions_text = "\n".join(" * " + action for action in actions)
        table.append([Cell(actions_text, fg="blue", bold=True)])
        table.append(["""A new library has been added to the project, containing some simple example functionality.
See http://moyaproject.com/docs/creatinglibraries/ for more information."""])
        console.table(table)
        return 0

    console.text("No project files written.", fg="red", bold=True).nl()
    return -1
def make_fs(self):
    """Return a MultiFS whose single writable member is an in-memory filesystem."""
    composed = MultiFS()
    composed.add_fs("mem", MemoryFS(), write=True)
    return composed