Example #1
    def test_regenerate_inplace(self):
        # take a generated example where a base layer has changed
        # regenerate in place
        # make some assertions
        bu = build.Builder()
        bu.log_level = "WARNING"
        bu.output_dir = "out"
        bu.series = "trusty"
        bu.name = "foo"
        bu.charm = "trusty/b"
        bu.hide_metrics = True
        bu()
        base = path("out/trusty/foo")
        self.assertTrue(base.exists())

        # verify the 1st gen worked
        self.assertTrue((base / "a").exists())
        self.assertTrue((base / "README.md").exists())

        # now regenerate from the target
        with utils.cd("out/trusty/foo"):
            bu = build.Builder()
            bu.log_level = "WARNING"
            bu.output_dir = path(os.getcwd())
            bu.series = "trusty"
            # The generate target and source are now the same
            bu.name = "foo"
            bu.charm = "."
            bu.hide_metrics = True
            bu()
            base = bu.output_dir
            self.assertTrue(base.exists())

            # Check that the generated layer.yaml makes sense
            cy = base / "layer.yaml"
            config = yaml.load(cy.open())
            self.assertEquals(config["includes"], ["trusty/a", "interface:mysql"])
            self.assertEquals(config["is"], "foo")

            # We can even run it more than once
            bu()
            cy = base / "layer.yaml"
            config = yaml.load(cy.open())
            self.assertEquals(config["includes"], ["trusty/a", "interface:mysql"])
            self.assertEquals(config["is"], "foo")

            # We included an interface, we should be able to assert things about it
            # in its final form as well
            provides = base / "hooks/relations/mysql/provides.py"
            requires = base / "hooks/relations/mysql/requires.py"
            self.assertTrue(provides.exists())
            self.assertTrue(requires.exists())

            # and that we generated the hooks themselves
            for kind in ["joined", "changed", "broken", "departed"]:
                self.assertTrue((base / "hooks" / "mysql-relation-{}".format(kind)).exists())

            # and ensure we have an init file (the interface doesn't include one, so it's added)
            init = base / "hooks/relations/mysql/__init__.py"
            self.assertTrue(init.exists())
Example #2
    def test_merge(self):
        testdir = path(l2emod.__file__).parent / 'testtex'
        with make_temp_directory() as tmdir:
            fn = testdir / 'example1.tex'
            print "file %s" % fn
            nfn = '%s/%s' % (tmdir, fn.basename())
            os.system('cp %s/* %s' % (testdir, tmdir))
            os.chdir(tmdir)
            l2e = latex2edx(nfn, output_dir=tmdir)
            l2e.convert()

            fn = testdir / 'example2.tex'
            print "file %s" % fn
            nfn = '%s/%s' % (tmdir, fn.basename())
            l2e = latex2edx(nfn, output_dir=tmdir, do_merge=True)
            l2e.convert()

            cfn = path(tmdir) / 'course/2013_Fall.xml'
            self.assertTrue(os.path.exists(cfn))

            self.assertIn('<chapter url_name="Unit_1"', open(cfn).read())
            self.assertIn('<chapter url_name="Unit_2"', open(cfn).read())

            cfn = path(tmdir) / 'chapter/Unit_1.xml'
            self.assertTrue(os.path.exists(cfn))

            cfn = path(tmdir) / 'chapter/Unit_2.xml'
            self.assertTrue(os.path.exists(cfn))
Example #3
def get_problem(base_dir, problem_id, goproblems_url, save_html=False):
    """Get the given problem.

    :param str base_dir: the base dir where the problem is to be saved
    :param int problem_id: the id of the problem to be downloaded
    :param str goproblems_url: the url where the problems can be found
    :param boolean save_html: save the downloaded HTML
    """
    if save_html:
        html_dir = check_directory(path(base_dir) / 'html')
        html_file = html_dir / ('%d.html' % problem_id)
        if html_file.exists():
            print "already downloaded %d" % problem_id
            return False

    response = requests.get(goproblems_url % problem_id)

    if save_html:
        with open(html_file, 'w') as f:
            f.write(response.text)

    if not response.ok:
        raise requests.HTTPError(response.text)

    problem, difficulty, sgf, rating = parse_problem(response.text)

    problem_dir = check_directory(path(base_dir) / difficulty.replace(' ', '_'))

    problem_name = '%s_%s.sgf' % (difficulty, (problem or str(problem_id)))
    if rating:
        problem_name = '[%s]' % rating + problem_name

    with open(problem_dir / problem_name, 'w') as f:
        f.write(sgf)
    return True
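
# A minimal usage sketch, not from the original source: driving get_problem
# over a range of ids. The base directory and URL template here are
# hypothetical; parse_problem and check_directory are the helpers assumed by
# the function above.
if __name__ == '__main__':
    for pid in range(1, 11):
        get_problem('problems', pid, 'https://www.goproblems.com/%d')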
Example #4
    def __init__(self, *location_parts, **k):
        self.httpopen = k.pop('httpopen', asynchttp.httpopen)

        location_parts = list(location_parts)

        first = location_parts.pop(0)
        while len(location_parts) > 0:
            next = location_parts.pop(0)

            if next.startswith("http:"):
                first = next
            elif first.startswith("http:"):
                first = net.httpjoin(first, next)
            else:
                # path will handle relative vs. absolute
                first = path.path(first)
                if first.ext:
                    first = first.parent
                first = first / next
                first = first.normpath()

        self.web = first.startswith("http:")
        self.content_location = path.path(first)
        self.content = None
        self._load_callback = None
        self.meta = None
Example #5
    def test_example1(self):
        testdir = path(l2emod.__file__).parent / 'testtex'
        fn = testdir / 'example1.tex'
        print "file %s" % fn
        with make_temp_directory() as tmdir:
            nfn = '%s/%s' % (tmdir, fn.basename())
            os.system('cp %s/* %s' % (testdir, tmdir))
            os.chdir(tmdir)
            l2e = latex2edx(nfn, output_dir=tmdir)
            l2e.convert()
            xbfn = nfn[:-4] + '.xbundle'
            self.assertTrue(os.path.exists(xbfn))
            # xb = open(xbfn).read()

            # self.assertIn('<chapter display_name="Unit 1" start="2013-11-22" url_name="Unit_1">', xb)
            xml = etree.parse(xbfn).getroot()
            chapter = xml.find('.//chapter')
            self.assertTrue(chapter.get('display_name') == 'Unit 1')
            self.assertTrue(chapter.get('start') == '2013-11-22')
            self.assertTrue(chapter.get('url_name') == 'Unit_1')

            cfn = path(tmdir) / 'course/2013_Fall.xml'
            self.assertTrue(os.path.exists(cfn))

            cfn = path(tmdir) / 'chapter/Unit_1.xml'
            self.assertTrue(os.path.exists(cfn))

            # self.assertIn('<sequential display_name="Introduction" due="2013-11-22" url_name="Introduction"', open(cfn).read())
            xml = etree.parse(cfn).getroot()
            seq = xml.find('.//sequential')
            self.assertTrue(seq.get('display_name') == 'Introduction')
            self.assertTrue(seq.get('due') == '2013-11-22')
            self.assertTrue(seq.get('url_name') == 'Introduction')

            self.assertIn('<problem url_name="p1"/>', open(cfn).read())
Example #6
def get_course_syllabus_section(course, section_key):
    """
    This returns the snippet of html to be rendered on the syllabus page,
    given the key for the section.

    Valid keys:
    - syllabus
    - guest_syllabus
    """

    # Many of these are stored as html files instead of some semantic
    # markup. This can change without affecting this interface when we find a
    # good format for defining so many snippets of text/html.

    if section_key in ['syllabus', 'guest_syllabus']:
        try:
            filesys = course.system.resources_fs
            # first look for a run-specific version
            dirs = [path("syllabus") / course.url_name, path("syllabus")]
            filepath = find_file(filesys, dirs, section_key + ".html")
            with filesys.open(filepath) as html_file:
                return replace_static_urls(
                    html_file.read().decode('utf-8'),
                    getattr(course, 'data_dir', None),
                    course_id=course.id,
                    static_asset_path=course.static_asset_path,
                )
        except ResourceNotFoundError:
            log.exception(
                u"Missing syllabus section %s in course %s",
                section_key, course.location.to_deprecated_string()
            )
            return "! Syllabus missing !"

    raise KeyError("Invalid about key " + str(section_key))
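
# Hedged usage sketch (an assumption, not in the source): a caller that falls
# back to an empty fragment when the section key is unknown. 'course' is any
# course descriptor accepted by get_course_syllabus_section.
def syllabus_fragment(course, key='syllabus'):
    try:
        return get_course_syllabus_section(course, key)
    except KeyError:
        return ''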
Example #7
    def setUp(self):
        super(ImportTestCase, self).setUp()
        self.url = reverse_course_url('import_handler', self.course.id)
        self.content_dir = path(tempfile.mkdtemp())

        def touch(name):
            """ Equivalent to shell's 'touch'"""
            with file(name, 'a'):
                os.utime(name, None)

        # Create tar test files -----------------------------------------------
        # OK course:
        good_dir = tempfile.mkdtemp(dir=self.content_dir)
        os.makedirs(os.path.join(good_dir, "course"))
        with open(os.path.join(good_dir, "course.xml"), "w+") as f:
            f.write('<course url_name="2013_Spring" org="EDx" course="0.00x"/>')

        with open(os.path.join(good_dir, "course", "2013_Spring.xml"), "w+") as f:
            f.write('<course></course>')

        self.good_tar = os.path.join(self.content_dir, "good.tar.gz")
        with tarfile.open(self.good_tar, "w:gz") as gtar:
            gtar.add(good_dir)

        # Bad course (no 'course.xml' file):
        bad_dir = tempfile.mkdtemp(dir=self.content_dir)
        touch(os.path.join(bad_dir, "bad.xml"))
        self.bad_tar = os.path.join(self.content_dir, "bad.tar.gz")
        with tarfile.open(self.bad_tar, "w:gz") as btar:
            btar.add(bad_dir)

        self.unsafe_common_dir = path(tempfile.mkdtemp(dir=self.content_dir))
Example #8
    def fetch(self):
        try:
            fetcher = get_fetcher(self.url)
        except FetchError:
            # We might be passing a local dir path directly,
            # which fetchers don't currently support
            self.directory = path(self.url)
        else:
            if hasattr(fetcher, "path") and fetcher.path.exists():
                self.directory = path(fetcher.path)
            else:
                if not self.target_repo.exists():
                    self.target_repo.makedirs_p()
                self.directory = path(fetcher.fetch(self.target_repo))

        if not self.directory.exists():
            raise OSError(
                "Unable to locate {}. "
                "Do you need to set {}?".format(
                    self.url, self.ENVIRON))

        self.config_file = self.directory / self.CONFIG_FILE
        if not self.config_file.exists():
            if self.OLD_CONFIG and (self.directory / self.OLD_CONFIG).exists():
                self.config_file = (self.directory / self.OLD_CONFIG)
        self._name = self.config.name
        return self
Example #9
def render_webadmin_config(service_name,
                           source=path("/opt/admin-ui/config/default.yml"),
                           dest=path('/etc/cf-webadmin.yml')):
    secretctx = contexts.StoredContext(utils.secrets_file, {})
    orchctx = contexts.OrchestratorRelation()
    dbctx = contexts.MysqlRelation()
    ccdbctx = contexts.CloudControllerDBRelation()
    natsctx = contexts.NatsRelation()
    uaadbctx = None  # XXX
    template = yaml.safe_load(source.text())

    domain = orchctx[orchctx.name][0]['domain']
    ui_secret = secretctx['ui_secret']
    ccdb_uri = ccdbctx[ccdbctx.name][0]['dsn']  # ccdbctx.get('dsn', unit=0) would be nice
    db_uri = dbctx[dbctx.name][0]['dsn']
    nats_uri = 'nats://{user}:{password}@{address}:{port}'.format(**natsctx[natsctx.name][0])
    uaadb_uri = ''  # XXX

    template['uaa_client']['secret'] = ui_secret
    template['cloud_controller_uri'] = 'http://api.%s' % domain
    template['ccdb_uri'] = ccdb_uri
    template['db_uri'] = db_uri
    template['uaadb_uri'] = uaadb_uri
    template['mbus'] = nats_uri

    dest.write_text(yaml.safe_dump(template))
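
# The inline comment above wishes for ccdbctx.get('dsn', unit=0); a small
# helper sketch (an assumption, not part of the charm source) that reads a key
# from a given unit of any of the relation contexts used here:
def relation_get(ctx, key, unit=0):
    return ctx[ctx.name][unit][key]

# e.g. ccdb_uri = relation_get(ccdbctx, 'dsn')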
Example #10
    def _copy_manager_blueprint(self):
        inputs_path, mb_path = util.generate_unique_configurations(
            workdir=self.workdir,
            original_inputs_path=self.env.cloudify_config_path,
            original_manager_blueprint_path=self.env._manager_blueprint_path)
        self.test_manager_blueprint_path = path(mb_path)
        self.test_inputs_path = path(inputs_path)
Example #11
    def __init__(self, dir, cfg=CFG, mem=MEM, sym=SYM, skipweb=SKIPWEB,
                 registry=REGISTRY, tcp=TCP, ssl=SSL,
                 savevars=SAVEVARS, savevarsfile=SAVEVARSFILE):

        print "%s: Upgrading %s (%s)..." % (self.__class__.__name__, dir, sym)

        self.mem = mem
        self.sym = sym
        self.skipweb = skipweb
        self.registry = registry
        self.tcp = tcp
        self.ssl = ssl

        # setup_script_environment() may cause the creation of a default
        # config.xml, so we must check for it here
        noconfigure = self.has_config(dir)

        cli = self.setup_script_environment(dir)
        bin = self.setup_previous_omero_env(sym, savevarsfile)

        # Need lib/python set above
        import path
        self.cfg = path.path(cfg)
        self.dir = path.path(dir)

        self.stop(bin)

        self.configure(cli, noconfigure)
        self.directories(cli)

        self.save_env_vars(savevarsfile, savevars.split())
        self.start(cli)
Example #12
    def on_step_run(self):
        # `get_step_options` is provided by the `StepOptionsController` mixin.
        options = self.get_step_options()
        if options.run_pstrace and options.script:
            app_values = self.get_app_values()
            exe_path = path(app_values['pstrace_exe'])
            script = path(options.script)
            if not exe_path.isfile():
                logger.error('[PSTraceLauncher] invalid exe-path: %s' %
                             exe_path.abspath())
            elif not script.isfile():
                logger.error('[PSTraceLauncher] invalid script-path: %s' %
                             script.abspath())
            elif os.name != 'nt':
                logger.error('[PSTraceLauncher] This plugin is only supported '
                             'on Windows')
            else:
                pstrace_processes = [p for p in psutil.process_iter() if
                                     safe_psutil_attr(p, 'exe') ==
                                     exe_path.abspath()]
                if not pstrace_processes:
                    if options.delay_ms > 0:
                        logger.info('[PSTraceLauncher] delay: %s ms',
                                    options.delay_ms)
                        gtk.timeout_add(options.delay_ms, self._execute,
                                        exe_path, script)
                    else:
                        self._execute(exe_path, script)
                else:
                    logger.info('[PSTraceLauncher] skipping, since PSTrace is '
                                'already running as process %s',
                                [p.pid for p in pstrace_processes])
        self.complete_step()
Example #13
def person_detail(person_id):
    ctx = dal.get_person(person_id, missing=NotFound)
    ctx.update(dal.get_mandate2012_details(person_id))

    if 'picture_filename' in ctx:
        picture_rel_path = path('mandate-pictures') / ctx['picture_filename']
        if (path(flask.current_app.static_folder) / picture_rel_path).isfile():
            ctx['picture_url'] = flask.url_for(
                'static',
                filename=picture_rel_path,
            )

    for item in ctx['recent_activity']:
        if item['type'] == 'proposal':
            item['url'] = flask.url_for(
                '.policy_proposal',
                proposal_id=item['proposal_id'],
            )

        elif item['type'] == 'question':
            item['url'] = flask.url_for(
                '.person_question',
                question_id=item['question_id'],
            )

    return flask.render_template('person_detail.html', **ctx)
Example #14
    def from_upload(cls, upload, addon, platforms, send_signal=True):
        data = utils.parse_addon(upload, addon)
        try:
            license = addon.versions.latest().license_id
        except Version.DoesNotExist:
            license = None
        v = cls.objects.create(addon=addon, version=data['version'],
                               license_id=license)
        log.info('New version: %r (%s) from %r' % (v, v.id, upload))
        # appversions
        AV = ApplicationsVersions
        for app in data.get('apps', []):
            AV(version=v, min=app.min, max=app.max,
               application_id=app.id).save()
        if addon.type == amo.ADDON_SEARCH:
            # Search extensions are always for all platforms.
            platforms = [Platform.objects.get(id=amo.PLATFORM_ALL.id)]
        else:
            platforms = cls._make_safe_platform_files(platforms)

        for platform in platforms:
            File.from_upload(upload, v, platform, parse_data=data)

        v.disable_old_files()
        # After the upload has been copied to all
        # platforms, remove the upload.
        path.path(upload.path).unlink()
        if send_signal:
            version_uploaded.send(sender=v)
        return v
Example #15
    def __init__(self, filename):
        """
        Constructor for the File class. A File is an abstraction of the file itself along with all of the version
        control metadata that it uses.

        :param filename: The name of the file on disk in the directory that the command was executed.
        """
        # Set up the member variables
        self.filename = filename
        self.current_file = path(filename)
        self.datafile = path('.vcs/' + filename)
        self.branches = {'main': []}
        self.current_branch = 'main'  # Default branch name

        if not self.current_file.exists():
            # File doesn't exist.
            exit_with_message("File <%s> does not exist." % self.filename)

        # Some operations are allowed if not in VCS (add).
        self.in_vcs = self.datafile.exists()

        if self.in_vcs:
            # Load file history and branch information.
            data = self.datafile.bytes()
            parsed_data = json.loads(data)
            self.branches = parsed_data['branches']
            self.current_branch = parsed_data['current_branch']

            if self.branches.get(self.current_branch) is None:
                # "Current Branch" not in Branches list.
                exit_with_message("There was a problem with your datafile.")
Example #16
    def test_spool_no_time_no_user(self):
        """Ensure `spool` works when no `time` attribute is supplied, and no
        `user` attribute exists.
        """
        c = CallFile(self.call, self.action, spool_dir=self.spool_dir)
        c.spool()
        ok_((path(c.spool_dir) / path(c.filename)).abspath().exists())
Example #17
    def _getFileStats(self, pathString):
        """
        Private method to get stats for a file.

        :Parameters:
            pathString : string
                A string uniquely identifying a file on this Monitor.

        :return: stats
        :rtype: monitors.FileStats
        """
        stats = monitors.FileStats()

        target = pathModule.path(pathString)
        stats.baseName = target.name
        stats.owner = target.owner
        stats.size = target.size
        stats.mTime = target.mtime
        stats.cTime = target.ctime
        stats.aTime = target.atime
        if target.isfile():
            stats.type = monitors.FileType.File
        elif target.isdir():
            stats.type = monitors.FileType.Dir
        elif target.islink():
            stats.type = monitors.FileType.Link
        elif target.ismount():
            stats.type = monitors.FileType.Mount
        else:
            stats.type = monitors.FileType.Unknown

        return stats
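
# Minimal sketch (an assumption): what a caller might do with the FileStats
# returned above. 'monitor' stands for an instance of the class that defines
# _getFileStats, and 'monitors' is the same module it uses.
stats = monitor._getFileStats('/tmp')
if stats.type == monitors.FileType.Dir:
    print "%s is a directory owned by %s" % (stats.baseName, stats.owner)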
Example #18
    def test_crop_view(self):
        """avatar gets cropped"""
        with open('%s/%s' % (settings.MEDIA_ROOT, 'misago.png')) as avatar:
            handler_link = reverse('misago:usercp_upload_avatar_handler')
            ajax_header = {'HTTP_X_REQUESTED_WITH': 'XMLHttpRequest'}
            response = self.client.post(handler_link,
                                        data={'new-avatar': avatar},
                                        **ajax_header)
            self.assertEqual(response.status_code, 200)

            crop_link = reverse('misago:usercp_crop_new_avatar')
            response = self.client.post(crop_link, data={'crop': '1245'})
            self.assertEqual(response.status_code, 200)

            test_crop = '619,201,150,150,0,0,150,150'
            response = self.client.post(crop_link, data={'crop': test_crop})
            self.assertEqual(response.status_code, 302)

            avatar_dir = store.get_existing_avatars_dir(self.user)
            avatar = path('%s/%s_tmp.png' % (avatar_dir, self.user.pk))
            self.assertFalse(avatar.exists())

            avatar = path('%s/%s_org.png' % (avatar_dir, self.user.pk))
            self.assertTrue(avatar.exists())
            self.assertTrue(avatar.isfile())
Example #19
    def test_edXmath1(self):
        testdir = path(l2emod.__file__).parent / 'testtex'  
        fn = testdir / 'example6.tex'
        print "file %s" % fn
        with make_temp_directory() as tmdir:
            nfn = '%s/%s' % (tmdir, fn.basename())
            os.system('cp %s/* %s' % (testdir, tmdir))
            os.mkdir('%s/dnd' % tmdir)
            os.system('cp %s/quadratic.tex %s/dnd/'  % (testdir, tmdir))
            os.chdir(tmdir)
            l2e = latex2edx(nfn, output_dir=tmdir)
            l2e.convert()
            xbfn = nfn[:-4]+'.xbundle'
            self.assertTrue(os.path.exists(xbfn))
            xb = open(xbfn).read()

            cfn = path(tmdir) / 'problem/p1.xml'
            assert(os.path.exists(cfn))
            os.system('ls -sFC %s' % tmdir)
            os.system('ls -sFC %s/problem' % tmdir)
            self.assertTrue(os.path.exists(cfn))
            data = open(cfn).read()
            expect = r"""[mathjax]\begin{eqnarray}
S(\rho) &amp;=&amp;  -\lambda_{1} \log \lambda_{1} -\lambda_{2} \log \lambda_{2} \\
        &amp;=&amp;  H((1+r)/2)
\end{eqnarray}[/mathjax]"""
            assert(expect in data)
Example #20
    def test_theme_outside_repo(self):
        # Need to create a temporary theme, and defer decorating the function
        # until it is done, which leads to this strange nested-function style
        # of test.

        # Make a temp directory as a theme.
        themes_dir = path(mkdtemp_clean())
        tmp_theme = "temp_theme"
        template_dir = themes_dir / tmp_theme / "lms/templates"
        template_dir.makedirs()
        with open(template_dir / "footer.html", "w") as footer:
            footer.write("<footer>TEMPORARY THEME</footer>")

        dest_path = path(settings.COMPREHENSIVE_THEME_DIR) / tmp_theme
        create_symlink(themes_dir / tmp_theme, dest_path)

        @with_comprehensive_theme(tmp_theme)
        def do_the_test(self):
            """A function to do the work so we can use the decorator."""
            resp = self.client.get('/')
            self.assertEqual(resp.status_code, 200)
            self.assertContains(resp, "TEMPORARY THEME")

        do_the_test(self)
        # remove symlinks before running subsequent tests
        delete_symlink(dest_path)
Example #21
    def on_step_swapped(self, original_step_number, new_step_number):
        if not self.get_step_value('grab_frame'):
            print '[VideoFrameGrabber] on_step_swapped():'\
                    'frame grab disabled for step %d' % (new_step_number)
            return

        if self.get_app_value('frame_output_dir'):
            output_dir = path(self.get_app_value('frame_output_dir'))
        else:
            output_dir = path('.')
        filename = '%d-%d.jpg' % (original_step_number, new_step_number)
        filename = output_dir.joinpath(filename)

        print '[VideoFrameGrabber] on_step_swapped():'\
                'save image of %d -> %d to %s' % (original_step_number,
                        new_step_number, filename)

        # Grab frame from dmf_device_controller plugin
        service = get_service_instance_by_name(
                'microdrop.gui.dmf_device_controller', env='microdrop')
        frame = service.grab_frame()

        if frame:
            # Save frame to file
            cv.SaveImage(filename, frame)
Example #22
def edit_path(path_or_obj, start_text):
    f = path.path(path_or_obj)
    editor = os.getenv("VISUAL") or os.getenv("EDITOR")
    if not editor:
        if platform.system() == "Windows":
            editor = "Notepad.exe"
        else:
            editor = "vi"
    f.write_text(start_text)

    # If absolute, then use the path
    # as is (ticket:4246). Otherwise,
    # use which.py to find it.
    editor_obj = path.path(editor)
    if editor_obj.isabs():
        editor_path = editor
    else:
        from omero_ext.which import which
        editor_path = which(editor)

    pid = os.spawnl(os.P_WAIT, editor_path, editor_path, f)
    if pid:
        re = RuntimeError("Couldn't spawn editor: %s" % editor)
        re.pid = pid
        raise re
Example #23
def read_file(asg, spelling, decl):
    ast = decl.get_ast_context()
    sm = ast.get_source_manager()
    loc = decl.get_location()
    fid = sm.get_file_id(loc)
    if not fid.is_invalid():
        filename = sm.get_filename(loc).str()
        if filename:
            filename = str(path(filename).abspath())
            filenode = asg.add_file(filename, proxy=HeaderProxy)
            filenode.language = asg._language
            asg._nodes[spelling]['_header'] = filenode.globalname
            while True:
                loc = sm.get_include_loc(fid)
                fid = sm.get_file_id(loc)
                if not fid.is_invalid():
                    includename = sm.get_filename(loc).str()
                    if includename:
                        includename = str(path(includename).abspath())
                        filenode = asg.add_file(includename, proxy=HeaderProxy)
                        asg._include_edges[filename] = filenode._node
                        filename = includename
                    else:
                        break
                else:
                    break
Example #24
def load(find_functions, search_path=None):
    """
    Load the modules in the search_path.
    If search_path is None, then load modules in the same folder as the function looking for them.
    """
    caller_module = inspect.getmodule(inspect.stack()[1][0])
    system_files = [caller_module.__file__]
    module_path = path(caller_module.__file__).abspath().dirname()
    sys_files = set()
    for f in system_files:
        if f.endswith(".pyo") or f.endswith(".pyc"):
            f = f[:-3] + "py"
        sys_files.add(path(f).abspath())
    if search_path is None:
        search_path = module_path
    else:
        search_path = path(search_path).abspath()
    fcts = {}
    # Search for python, cython and modules
    for f in (search_path.files("*.py") +
              search_path.files("*.pyx") +
              search_path.files(sys_modules)):
        if f not in sys_files:
            module_name = f.namebase
            pack_name = '%s.%s_%s' % (caller_module.__name__,
                                      bad_chars.sub('_', module_path),
                                      module_name)
            try:
                mod_desc = imp.find_module(module_name, [search_path])
                mod = imp.load_module(pack_name, *mod_desc)
                fcts.update(find_functions(mod))
            except ImportError as ex:
                print("Warning, cannot import module '{0}' from {1}: {2}"
                      .format(module_name, caller_module.__name__, ex), file=sys.stderr)
    return fcts
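
# Hedged sketch (an assumption, not from the source): a find_functions
# callback of the shape load() expects -- it receives an imported module and
# returns a mapping of names to callables, which load() merges into its result.
def find_functions(mod):
    return dict((name, obj) for name, obj in vars(mod).items()
                if callable(obj) and not name.startswith('_'))

# plugins = load(find_functions)  # scan the caller's own directory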
Example #25
    def factory(reserve_only=False):
        temp_dir = mkdtemp()
        if reserve_only:
            # It'll still be cleaned at the end
            path(temp_dir).rmtree(ignore_errors=True)
        created.add(temp_dir)
        return path(temp_dir)
Example #26
    def default_avatar(self, db_settings):
        if db_settings['default_avatar'] == 'gallery':
            try:
                avatars_list = []
                try:
                    # First try, _default path
                    galleries = path(settings.STATICFILES_DIRS[0]).joinpath('avatars').joinpath('_default')
                    avatars_list += galleries.files('*.gif')
                    avatars_list += galleries.files('*.jpg')
                    avatars_list += galleries.files('*.jpeg')
                    avatars_list += galleries.files('*.png')
                except Exception as e:
                    pass
                # Second try, all paths
                if not avatars_list:
                    avatars_list = []
                    for directory in path(settings.STATICFILES_DIRS[0]).joinpath('avatars').dirs():
                        if not directory[-7:] == '_locked' and not directory[-7:] == '_thumbs':
                            avatars_list += directory.files('*.gif')
                            avatars_list += directory.files('*.jpg')
                            avatars_list += directory.files('*.jpeg')
                            avatars_list += directory.files('*.png')
                if avatars_list:
                    # Pick random avatar from list
                    self.avatar_type = 'gallery'
                    self.avatar_image = '/'.join(path(choice(avatars_list)).splitall()[-2:])
                    return True
            except Exception as e:
                pass

        self.avatar_type = 'gravatar'
        self.avatar_image = None
        return True
Example #27
    def setup_method(self, method):
        # Non-temp directories
        build_dir = path() / "build"
        top_dir = path() / ".." / ".." / ".."
        etc_dir = top_dir / "etc"

        # Necessary files
        prefs_file = build_dir / "prefs.class"
        internal_cfg = etc_dir / "internal.cfg"
        master_cfg = etc_dir / "master.cfg"

        # Temp directories
        tmp_dir = create_path(folder=True)
        tmp_etc_dir = tmp_dir / "etc"
        tmp_grid_dir = tmp_etc_dir / "grid"
        tmp_lib_dir = tmp_dir / "lib"
        tmp_var_dir = tmp_dir / "var"

        # Setup tmp dir
        [x.makedirs() for x in (tmp_grid_dir, tmp_lib_dir, tmp_var_dir)]
        prefs_file.copy(tmp_lib_dir)
        master_cfg.copy(tmp_etc_dir)
        internal_cfg.copy(tmp_etc_dir)

        # Other setup
        self.cli = MockCLI()
        self.cli.dir = tmp_dir
        self.cli.register("admin", AdminControl, "TEST")
        self.cli.register("config", PrefsControl, "TEST")
Example #28
def pypi():
    module_name, dry_run, _ = config.common_arguments()

    tmp_dir = make_virtualenv()
    install_cmd = '%s/bin/pip install %s' % (tmp_dir, module_name)

    package_index = 'pypi'
    pypi = config.arguments['--pypi']
    if pypi:
        install_cmd += ' -i %s' % pypi
        package_index = pypi

    try:
        result = shell.execute(install_cmd, dry_run=dry_run)
        if result:
            log.info('Successfully installed %s from %s',
                     module_name, package_index)
        else:
            log.error('Failed to install %s from %s',
                      module_name, package_index)

        verification.run_test_command()
    except Exception:
        log.exception(
            'error installing %s from %s', module_name, package_index
        )
        raise Exception(
            'Error installing %s from %s' % (module_name, package_index)
        )

    path(tmp_dir).rmtree()
Example #29
    def testOldTemplates(self):
        old_templates = path(__file__).dirname() / ".." / "old_templates.xml"
        old_templates.copy(
            path(self.cli.dir) / "etc" / "templates" / "grid" /
            "templates.xml")
        with pytest.raises(NonZeroReturnCode):
            self.cli.invoke(self.args, strict=True)
Example #30
	def __init__(self, call, action, archive=None, filename=None, tempdir=None,
			user=None, spool_dir=None):
		"""Create a new `CallFile` obeject.

		:param obj call: A `pycall.Call` instance.
		:param obj action: Either a `pycall.actions.Application` instance
			or a `pycall.actions.Context` instance.
		:param bool archive: Should Asterisk archive the call file?
		:param str filename: Filename of the call file.
		:param str tempdir: Temporary directory to store the call file before
			spooling.
		:param str user: Username to spool the call file as.
		:param str spool_dir: Directory to spool the call file to.
		:rtype: `CallFile` object.
		"""
		self.call = call
		self.action = action
		self.archive = archive
		self.user = user
		self.spool_dir = spool_dir or self.DEFAULT_SPOOL_DIR

		if filename and tempdir:
			self.filename = path(filename)
			self.tempdir = path(tempdir)
		else:
			f = path(mkstemp(suffix='.call')[1])
			self.filename = f.name
			self.tempdir = f.parent
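
# Usage sketch following the docstring above; Call and Application are the
# pycall classes it names, and the channel/application values are hypothetical.
from pycall import Application, Call
cf = CallFile(Call('SIP/flowroute/18882223333'),
              Application('Playback', 'hello-world'))
cf.spool()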
Example #31
TEST_RUNNER = 'django_nose.NoseTestSuiteRunner'

_system = 'lms'

_report_dir = REPO_ROOT / 'reports' / _system
_report_dir.makedirs_p()

NOSE_ARGS = [
    '--id-file',
    REPO_ROOT / '.testids' / _system / 'noseids',
    '--xunit-file',
    _report_dir / 'nosetests.xml',
]

# Local Directories
TEST_ROOT = path("test_root")
# Want static files in the same dir for running on jenkins.
STATIC_ROOT = TEST_ROOT / "staticfiles"

STATUS_MESSAGE_PATH = TEST_ROOT / "status_message.json"

COURSES_ROOT = TEST_ROOT / "data"
DATA_DIR = COURSES_ROOT

COMMON_TEST_DATA_ROOT = COMMON_ROOT / "test" / "data"
# Where the content data is checked out.  This may not exist on jenkins.
GITHUB_REPO_ROOT = ENV_ROOT / "data"

USE_I18N = True
LANGUAGE_CODE = 'en'  # tests assume they will get English.
Example #32
import argparse
import shutil
import os

from path import path

#import util # ~maxwl/util/lib/python/util.py
#import measure_prop # ~maxwl/mp/lib/python/measure_prop.py

parser = argparse.ArgumentParser()
parser.add_argument("workdir", type=path)
args = parser.parse_args()
workdir = args.workdir

if not workdir.exists():
    workdir.makedirs()

shutil.copy(__file__, workdir / "run.py")
experiment_dir = path(os.getcwd())
project_dir = experiment_dir / ".." / ".."
os.chdir(workdir)
workdir = path(".")

import logging
logging.basicConfig(format="%(asctime)s %(levelname)s:%(message)s")
logger = logging.getLogger('log')
logger.setLevel(logging.DEBUG)
# create file handler which logs even debug messages
fh = logging.FileHandler("stdout.txt")
fh.setLevel(logging.DEBUG)  # >> this determines the file level
# create console handler with a higher log level
ch = logging.StreamHandler()
ch.setLevel(logging.ERROR)  # >> this determines the output level
# create formatter and add it to the handlers
Example #33
def _write_chunk(request, courselike_key):
    """
    Write the OLX file data chunk from the given request to the local filesystem.
    """
    # Upload .tar.gz to local filesystem for one-server installations not using S3 or Swift
    data_root = path(settings.GITHUB_REPO_ROOT)
    subdir = base64.urlsafe_b64encode(repr(courselike_key))
    course_dir = data_root / subdir
    filename = request.FILES['course-data'].name

    courselike_string = text_type(courselike_key) + filename
    # Do everything in a try-except block to make sure everything is properly cleaned up.
    try:
        # Use sessions to keep info about import progress
        _save_request_status(request, courselike_string, 0)

        if not filename.endswith('.tar.gz'):
            _save_request_status(request, courselike_string, -1)
            return JsonResponse(
                {
                    'ErrMsg': _('We only support uploading a .tar.gz file.'),
                    'Stage': -1
                },
                status=415)

        temp_filepath = course_dir / filename
        if not course_dir.isdir():
            os.mkdir(course_dir)

        logging.debug(u'importing course to {0}'.format(temp_filepath))

        # Get upload chunks byte ranges
        try:
            matches = CONTENT_RE.search(request.META["HTTP_CONTENT_RANGE"])
            content_range = matches.groupdict()
        except KeyError:  # Single chunk
            # no Content-Range header, so make one that will work
            content_range = {'start': 0, 'stop': 1, 'end': 2}

        # stream out the uploaded files in chunks to disk
        if int(content_range['start']) == 0:
            mode = "wb+"
        else:
            mode = "ab+"
            size = os.path.getsize(temp_filepath)
            # Check to make sure we haven't missed a chunk
            # This shouldn't happen, even if different instances are handling
            # the same session, but it's always better to catch errors earlier.
            if size < int(content_range['start']):
                _save_request_status(request, courselike_string, -1)
                log.warning(
                    "Reported range %s does not match size downloaded so far %s",
                    content_range['start'], size)
                return JsonResponse(
                    {
                        'ErrMsg': _('File upload corrupted. Please try again'),
                        'Stage': -1
                    },
                    status=409)
            # The last request sometimes comes twice. This happens because
            # nginx sends a 499 error code when the response takes too long.
            elif size > int(content_range['stop']) and size == int(
                    content_range['end']):
                return JsonResponse({'ImportStatus': 1})

        with open(temp_filepath, mode) as temp_file:
            for chunk in request.FILES['course-data'].chunks():
                temp_file.write(chunk)

        size = os.path.getsize(temp_filepath)

        if int(content_range['stop']) != int(content_range['end']) - 1:
            # More chunks coming
            return JsonResponse({
                "files": [{
                    "name": filename,
                    "size": size,
                    "deleteUrl": "",
                    "deleteType": "",
                    "url": reverse_course_url('import_handler', courselike_key),
                    "thumbnailUrl": ""
                }]
            })

        log.info(u"Course import %s: Upload complete", courselike_key)
        with open(temp_filepath, 'rb') as local_file:
            django_file = File(local_file)
            storage_path = course_import_export_storage.save(
                u'olx_import/' + filename, django_file)
        import_olx.delay(request.user.id, text_type(courselike_key),
                         storage_path, filename, request.LANGUAGE_CODE)

    # Send errors to client with stage at which error occurred.
    except Exception as exception:  # pylint: disable=broad-except
        _save_request_status(request, courselike_string, -1)
        if course_dir.isdir():
            shutil.rmtree(course_dir)
            log.info(u"Course import %s: Temp data cleared", courselike_key)

        log.exception("error importing course")
        return JsonResponse(
            {
                'ErrMsg': str(exception),
                'Stage': -1
            },
            status=400)

    return JsonResponse({'ImportStatus': 1})
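
# CONTENT_RE is referenced above but not shown; a plausible definition (an
# assumption, not the project's actual regex) that matches headers like
# "bytes 0-1023/2048" and yields the 'start'/'stop'/'end' groups the chunk
# logic consumes:
import re
CONTENT_RE = re.compile(r"(?P<start>\d+)-(?P<stop>\d+)/(?P<end>\d+)")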
Example #34
"""
Settings for bok choy tests
"""

import os
from path import path

########################## Prod-like settings ###################################
# These should be as close as possible to the settings we use in production.
# As in prod, we read in environment and auth variables from JSON files.
# Unlike in prod, we use the JSON files stored in this repo.
# This is a convenience for ensuring (a) that we can consistently find the files
# and (b) that the files are the same in Jenkins as in local dev.
os.environ['SERVICE_VARIANT'] = 'bok_choy'
os.environ['CONFIG_ROOT'] = path(__file__).abspath().dirname()  #pylint: disable=E1120

from .aws import *  # pylint: disable=W0401, W0614
from xmodule.x_module import prefer_xmodules

######################### Testing overrides ####################################

# Needed for the `reset_db` management command
INSTALLED_APPS += ('django_extensions', )

# Redirect to the test_root folder within the repo
TEST_ROOT = CONFIG_ROOT.dirname().dirname() / "test_root"  #pylint: disable=E1120
GITHUB_REPO_ROOT = (TEST_ROOT / "data").abspath()
LOG_DIR = (TEST_ROOT / "log").abspath()

# Configure Mongo modulestore to use the test folder within the repo
for store in ["default", "direct"]:
Example #35
def test_mongo_proc(mongo_proc, mongo_proc2, mongo_proc3):
    for m in (mongo_proc, mongo_proc2, mongo_proc3):
        assert path('/tmp/mongo.{port}.log'.format(port=m.port)).isfile()
Example #36
import re
import os
import pandas as pd
from path import path

exp_name = 'exp9'

# define directories:
home_dir = '/share/ScratchGeneral/jamtor/'
project_dir = home_dir + '/projects/hgsoc_repeats/RNA-seq/'
results_dir = project_dir + '/results/'

report_dir = project_dir + '/reports/' + exp_name + '/'
ribo_dir = results_dir + '/star/ribo/' + exp_name + '/'
gc_dir = results_dir + '/star/GC/' + exp_name + '/'
htseq_dir = path(results_dir + '/htseq/' + exp_name + '/')
rkey_dir = project_dir + '/raw_files/fullsamples/bowtell_primary/'
record_dir = project_dir + '/record/'

if not os.path.exists(record_dir):
    os.makedirs(record_dir)

print('The report_dir is ' + report_dir)
print('The ribo_dir is ' + ribo_dir)
print('The gc_dir is ' + gc_dir)
print('The htseq_dir is ' + htseq_dir)
print('The record_dir is ' + record_dir)

### 1. Fetch directory names in reports directory ###

rep = []
Example #37
# This is a minimal settings file allowing us to run "update_assets"
# in the Dockerfile for the production image of the edxapp CMS

from openedx.core.lib.derived import derive_settings

from path import Path as path

from ..common import *

DATABASES = {"default": {}}

XQUEUE_INTERFACE = {"url": None, "django_auth": None}

# We need to override STATIC_ROOT because for CMS, edX appends the value of
# "EDX_PLATFORM_REVISION" to it by default and we don't want to use this.
# We should use Django's ManifestStaticFilesStorage for this purpose.
STATIC_URL = "/static/studio/"
STATIC_ROOT = path("/edx/app/edxapp/staticfiles/studio")

########################## Derive Any Derived Settings  #######################

derive_settings(__name__)
Example #38
    def test_report_dir_without_files(self):
        os.remove(self.test_file.name)
        pavelib.quality._prepare_report_dir(path(self.test_dir))  # pylint: disable=protected-access
        assert os.listdir(path(self.test_dir)) == []
Example #39
    def setUp(self):
        super().setUp()
        self.report_dir = path(tempfile.mkdtemp())
        self.addCleanup(shutil.rmtree, self.report_dir)
Example #40
    def __init__(self, filepath):
        self.filepath = path(filepath)
        self.uncommitted = {}
        if self.filepath.exists():
            with open(self.filepath) as stream:
                self.uncommitted = json.load(stream)
Example #41
    'ALLOW_PUBLIC_ACCOUNT_CREATION': True,

    # Whether or not the dynamic EnrollmentTrackUserPartition should be registered.
    'ENABLE_ENROLLMENT_TRACK_USER_PARTITION': True,
}

ENABLE_JASMINE = False

############################# SOCIAL MEDIA SHARING #############################
SOCIAL_SHARING_SETTINGS = {
    # Note: Ensure 'CUSTOM_COURSE_URLS' has a matching value in lms/envs/common.py
    'CUSTOM_COURSE_URLS': False
}

############################# SET PATH INFORMATION #############################
PROJECT_ROOT = path(__file__).abspath().dirname().dirname()  # /edx-platform/cms
REPO_ROOT = PROJECT_ROOT.dirname()
COMMON_ROOT = REPO_ROOT / "common"
OPENEDX_ROOT = REPO_ROOT / "openedx"
CMS_ROOT = REPO_ROOT / "cms"
LMS_ROOT = REPO_ROOT / "lms"
ENV_ROOT = REPO_ROOT.dirname()  # virtualenv dir /edx-platform is in

GITHUB_REPO_ROOT = ENV_ROOT / "data"

sys.path.append(REPO_ROOT)
sys.path.append(PROJECT_ROOT / 'djangoapps')
sys.path.append(COMMON_ROOT / 'djangoapps')

# For geolocation ip database
GEOIP_PATH = REPO_ROOT / "common/static/data/geoip/GeoIP.dat"
Example #42
    def test_report_dir_with_files(self):
        assert os.path.exists(self.test_file.name)
        pavelib.quality._prepare_report_dir(path(self.test_dir))  # pylint: disable=protected-access
        assert not os.path.exists(self.test_file.name)
Example #43
def import_olx(self, user_id, course_key_string, archive_path, archive_name,
               language):
    """
    Import a course or library from a provided OLX .tar.gz archive.
    """
    courselike_key = CourseKey.from_string(course_key_string)
    try:
        user = User.objects.get(pk=user_id)
    except User.DoesNotExist:
        with respect_language(language):
            self.status.fail(_(u'Unknown User ID: {0}').format(user_id))
        return
    if not has_course_author_access(user, courselike_key):
        with respect_language(language):
            self.status.fail(_(u'Permission denied'))
        return

    is_library = isinstance(courselike_key, LibraryLocator)
    is_course = not is_library
    if is_library:
        root_name = LIBRARY_ROOT
        courselike_module = modulestore().get_library(courselike_key)
        import_func = import_library_from_xml
    else:
        root_name = COURSE_ROOT
        courselike_module = modulestore().get_course(courselike_key)
        import_func = import_course_from_xml

    # Locate the uploaded OLX archive (and download it from S3 if necessary)
    # Do everything in a try-except block to make sure everything is properly cleaned up.
    data_root = path(settings.GITHUB_REPO_ROOT)
    subdir = base64.urlsafe_b64encode(
        repr(courselike_key).encode('utf-8')).decode('utf-8')
    course_dir = data_root / subdir
    try:
        self.status.set_state(u'Unpacking')

        if not archive_name.endswith(u'.tar.gz'):
            with respect_language(language):
                self.status.fail(
                    _(u'We only support uploading a .tar.gz file.'))
                return

        temp_filepath = course_dir / get_valid_filename(archive_name)
        if not course_dir.isdir():
            os.mkdir(course_dir)

        LOGGER.debug(u'importing course to {0}'.format(temp_filepath))

        # Copy the OLX archive from where it was uploaded to (S3, Swift, file system, etc.)
        if not course_import_export_storage.exists(archive_path):
            LOGGER.info(u'Course import %s: Uploaded file %s not found',
                        courselike_key, archive_path)
            with respect_language(language):
                self.status.fail(_(u'Tar file not found'))
            return
        with course_import_export_storage.open(archive_path, 'rb') as source:
            with open(temp_filepath, 'wb') as destination:

                def read_chunk():
                    """
                    Read and return a sequence of bytes from the source file.
                    """
                    return source.read(FILE_READ_CHUNK)

                for chunk in iter(read_chunk, b''):
                    destination.write(chunk)
        LOGGER.info(u'Course import %s: Download from storage complete',
                    courselike_key)
        # Delete from source location
        course_import_export_storage.delete(archive_path)

        # If the course has an entrance exam, remove it and its corresponding
        # milestone from the current course state before import.
        if is_course:
            if courselike_module.entrance_exam_enabled:
                fake_request = RequestFactory().get(u'/')
                fake_request.user = user
                from contentstore.views.entrance_exam import remove_entrance_exam_milestone_reference
                # TODO: Is this really ok?  Seems dangerous for a live course
                remove_entrance_exam_milestone_reference(
                    fake_request, courselike_key)
                LOGGER.info(
                    u'entrance exam milestone content reference for course %s has been removed',
                    courselike_module.id)
    # Send errors to client with stage at which error occurred.
    except Exception as exception:  # pylint: disable=broad-except
        if course_dir.isdir():
            shutil.rmtree(course_dir)
            LOGGER.info(u'Course import %s: Temp data cleared', courselike_key)

        LOGGER.exception(u'Error importing course %s',
                         courselike_key,
                         exc_info=True)
        self.status.fail(text_type(exception))
        return

    # try-finally block for proper clean up after receiving file.
    try:
        tar_file = tarfile.open(temp_filepath)
        try:
            safetar_extractall(tar_file, (course_dir + u'/'))
        except SuspiciousOperation as exc:
            LOGGER.info(u'Course import %s: Unsafe tar file - %s',
                        courselike_key, exc.args[0])
            with respect_language(language):
                self.status.fail(_(u'Unsafe tar file. Aborting import.'))
            return
        finally:
            tar_file.close()

        LOGGER.info(u'Course import %s: Uploaded file extracted',
                    courselike_key)
        self.status.set_state(u'Verifying')
        self.status.increment_completed_steps()

        # find the 'course.xml' file
        def get_all_files(directory):
            """
            For each file in the directory, yield a 2-tuple of (file-name,
            directory-path)
            """
            for directory_path, _dirnames, filenames in os.walk(directory):
                for filename in filenames:
                    yield (filename, directory_path)

        def get_dir_for_filename(directory, filename):
            """
            Returns the directory path for the first file found in the directory
            with the given name.  If there is no file in the directory with
            the specified name, return None.
            """
            for name, directory_path in get_all_files(directory):
                if name == filename:
                    return directory_path
            return None

        dirpath = get_dir_for_filename(course_dir, root_name)
        if not dirpath:
            with respect_language(language):
                self.status.fail(
                    _(u'Could not find the {0} file in the package.').format(
                        root_name))
                return

        dirpath = os.path.relpath(dirpath, data_root)
        LOGGER.debug(u'found %s at %s', root_name, dirpath)

        LOGGER.info(u'Course import %s: Extracted file verified',
                    courselike_key)
        self.status.set_state(u'Updating')
        self.status.increment_completed_steps()

        courselike_items = import_func(modulestore(),
                                       user.id,
                                       settings.GITHUB_REPO_ROOT, [dirpath],
                                       load_error_modules=False,
                                       static_content_store=contentstore(),
                                       target_id=courselike_key)

        new_location = courselike_items[0].location
        LOGGER.debug(u'new course at %s', new_location)

        LOGGER.info(u'Course import %s: Course import successful',
                    courselike_key)
    except Exception as exception:  # pylint: disable=broad-except
        LOGGER.exception(u'error importing course', exc_info=True)
        self.status.fail(text_type(exception))
    finally:
        if course_dir.isdir():
            shutil.rmtree(course_dir)
            LOGGER.info(u'Course import %s: Temp data cleared', courselike_key)

        if self.status.state == u'Updating' and is_course:
            # Reload the course so we have the latest state
            course = modulestore().get_course(courselike_key)
            if course.entrance_exam_enabled:
                entrance_exam_chapter = modulestore().get_items(
                    course.id,
                    qualifiers={u'category': u'chapter'},
                    settings={u'is_entrance_exam': True})[0]

                metadata = {
                    u'entrance_exam_id':
                    text_type(entrance_exam_chapter.location)
                }
                CourseMetadata.update_from_dict(metadata, course, user)
                from contentstore.views.entrance_exam import add_entrance_exam_milestone
                add_entrance_exam_milestone(course.id, entrance_exam_chapter)
                LOGGER.info(u'Course %s Entrance exam imported', course.id)
Example #44
import os
from werkzeug.wsgi import DispatcherMiddleware
from paste.cgiapp import CGIApplication
from path import path
from webob.dec import wsgify

VIEWER_HOME = path(__file__).abspath().parent / 'maps'


def create_mapserver_app():
    mapserv_cgi = CGIApplication({}, os.environ.get('MAPSERV_BIN', 'mapserv'))

    @wsgify
    def mapserv_wrapper(request):
        request.GET['map'] = VIEWER_HOME / 'money.map'
        request.GET['SRS'] = 'EPSG:3857'
        return request.get_response(mapserv_cgi)

    return mapserv_wrapper


def initialize(app):
    app.wsgi_app = DispatcherMiddleware(app.wsgi_app, {
        '/mapserv': create_mapserver_app(),
    })
Example #45
class RequirementDownloader(object):
    build_dir = path(build_prefix).abspath()
    pkginfo_from_file = IndexManager.pkginfo_from_file
    parse_requirements = staticmethod(parse_requirements)

    def __init__(self, req_set, finder=None, upgrade=False, seen=None):
        #@@ start with req_set??
        self.req_set = req_set
        self.upgrade = upgrade
        download_dir = req_set.download_dir
        self.download_dir = path(download_dir)
        self.finder = finder
        self.seen = seen
        self.skip = []
        if self.seen is None:
            self.seen = set()
        self.errors = []

    # toupee for pip: a minimal stand-in for its options object
    options = type('Options', (),
                   dict(skip_requirements_regex='', default_vcs=''))

    @classmethod
    def req_set_from_file(cls, filename, download_dir, deplinks=None):
        src_dir = path(src_prefix).abspath()

        finder = cls.package_finder(deplinks)

        requirement_set = RequirementSet(build_dir=cls.build_dir,
                                         src_dir=src_dir,
                                         download_dir=download_dir,
                                         download_cache=None,
                                         upgrade=False,
                                         ignore_installed=True,
                                         ignore_dependencies=False)

        options = cls.options()
        names = []
        for req in cls.parse_requirements(filename,
                                          finder=finder,
                                          options=options):
            requirement_set.add_requirement(req)
            names.append(req.req)
        yield requirement_set
        yield finder

    @staticmethod
    def readzip(archive, name):
        return archive.read(name)

    @staticmethod
    def readtar(archive, name):
        return archive.extractfile(name).read()

    @staticmethod
    def find_file(names, tail):
        try:
            return next(n for n in names if n.endswith(tail))
        except StopIteration:
            return None

    @classmethod
    def depinfo_for_file(cls, filename):
        if filename.endswith('.zip'):
            archive = zipfile.ZipFile(filename)
            names = archive.namelist()
            read = partial(cls.readzip, archive)
        elif filename.endswith('gz') or filename.endswith('bz2'):
            archive = tarfile.TarFile.open(filename)
            names = archive.getnames()
            read = partial(cls.readtar, archive)
        dl_file = cls.find_file(names, '.egg-info/dependency_links.txt')
        reqs_file = cls.find_file(names, '.egg-info/requires.txt')
        deplinks = dl_file and read(dl_file) or ''
        requires = reqs_file and read(reqs_file) or ''
        return [x.strip() for x in deplinks.split('\n') if x],\
               [x.strip() for x in requires.split('\n') if x and not x.startswith('[')]

    def download_url(self, link):
        target_url = link.url.split('#', 1)[0]
        logger.info('Downloading: %s', target_url)
        resp = requests.get(target_url)
        outfile = self.download_dir / link.filename
        outfile.write_bytes(resp.content)
        # requests.iter_content
        pkginfo = self.pkginfo_from_file(outfile)
        return pkginfo, outfile

    def temp_req(self, name, content=None):
        fp = path(tempfile.gettempdir()) / ('temp-req-%s.txt' % name)
        if content:
            logger.debug("Reqs for %s:\n%s", name, content)
            fp.write_text(content)
        return fp

    def handle_requirement(self, req, finder):
        """
        Download requirement, return a new requirement set of
        requirements dependencies.
        """
        if req.editable:
            msg = "Editables not supported: %s" % req
            logger.warn(msg)
            self.errors.append("%s: %s" % (req, msg))
            return

        try:
            url = finder.find_requirement(req, self.upgrade)
        except DistributionNotFound:
            msg = "No distribution found for %s" % req.name
            logger.warn(msg)
            self.errors.append(msg)
            return

        if url.hash in self.seen:
            logger.debug('Seen: %s', url)
            self.skip.append(url)
            return

        try:
            pkginfo, outfile = self.download_url(url)
        except HTTPError as e:
            msg = "Issue with download: %s" % e
            logger.error(msg)
            self.errors.append("%s: %s" % (req, msg))
            return
        except TypeError:
            # let unexpected TypeErrors propagate unchanged
            raise
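
A hedged driver sketch for the downloader above; the file name and download directory are illustrative, and the RequirementSet API is assumed to be the old pip 1.x one this class imports:

# Hypothetical usage (names are placeholders)
gen = RequirementDownloader.req_set_from_file('requirements.txt',
                                              download_dir='downloads')
req_set = next(gen)
finder = next(gen)
dl = RequirementDownloader(req_set, finder=finder)
for req in req_set.requirements.values():
    dl.handle_requirement(req, finder)
for msg in dl.errors:
    logger.error(msg)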
Example #46
0
# Include a non-ascii character in STUDIO_NAME and STUDIO_SHORT_NAME to uncover possible
# UnicodeEncodeErrors in tests. Also use lazy text to reveal possible json.dumps errors.
STUDIO_NAME = ugettext_lazy(u"Your Platform 𝓢𝓽𝓾𝓭𝓲𝓸")
STUDIO_SHORT_NAME = ugettext_lazy(u"𝓢𝓽𝓾𝓭𝓲𝓸")

# Allow all hosts during tests; we use a lot of different ones all over the codebase.
ALLOWED_HOSTS = ['*']

# mongo connection settings
MONGO_PORT_NUM = int(os.environ.get('EDXAPP_TEST_MONGO_PORT', '27017'))
MONGO_HOST = os.environ.get('EDXAPP_TEST_MONGO_HOST', 'localhost')

THIS_UUID = uuid4().hex[:5]

TEST_ROOT = path('test_root')

# We want static files in the same dir when running on Jenkins.
STATIC_ROOT = TEST_ROOT / "staticfiles"
WEBPACK_LOADER['DEFAULT']['STATS_FILE'] = STATIC_ROOT / "webpack-stats.json"

GITHUB_REPO_ROOT = TEST_ROOT / "data"
DATA_DIR = TEST_ROOT / "data"
COMMON_TEST_DATA_ROOT = COMMON_ROOT / "test" / "data"

# For testing "push to lms"
FEATURES['ENABLE_EXPORT_GIT'] = True
GIT_REPO_EXPORT_DIR = TEST_ROOT / "export_course_repos"

# TODO (cpennington): We need to figure out how envs/test.py can inject things into common.py so that we don't have to repeat this sort of thing  # lint-amnesty, pylint: disable=line-too-long
STATICFILES_DIRS = [
Example #47
0
from .common import *

from openedx.core.lib.logsettings import get_logger_config
import os

from path import Path as path
from xmodule.modulestore.modulestore_settings import convert_module_store_setting_if_needed

# SERVICE_VARIANT specifies name of the variant used, which decides what JSON
# configuration files are read during startup.
SERVICE_VARIANT = os.environ.get('SERVICE_VARIANT', None)

# CONFIG_ROOT specifies the directory where the JSON configuration
# files are expected to be found. If not specified, use the project
# directory.
CONFIG_ROOT = path(os.environ.get('CONFIG_ROOT', ENV_ROOT))

# CONFIG_PREFIX specifies the prefix of the JSON configuration files,
# based on the service variant. If no variant is used, don't use a
# prefix.
CONFIG_PREFIX = SERVICE_VARIANT + "." if SERVICE_VARIANT else ""
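
# For example (illustrative names only): with SERVICE_VARIANT='cms', a
# configuration file such as 'env.json' would be looked up as
# CONFIG_ROOT / 'cms.env.json'; with no variant, as CONFIG_ROOT / 'env.json'.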

############### ALWAYS THE SAME ################################

DEBUG = False

EMAIL_BACKEND = 'django_ses.SESBackend'
SESSION_ENGINE = 'django.contrib.sessions.backends.cache'

# IMPORTANT: With this enabled, the server must always be behind a proxy that
# strips the header HTTP_X_FORWARDED_PROTO from client requests. Otherwise,
Example #48
0
def create_export_tarball(course_module, course_key, context, status=None):
    """
    Generates the export tarball, or returns None if there was an error.

    Updates the context with any error information if applicable.
    """
    name = course_module.url_name
    export_file = NamedTemporaryFile(prefix=name + '.', suffix=".tar.gz")
    root_dir = path(mkdtemp())

    try:
        if isinstance(course_key, LibraryLocator):
            export_library_to_xml(modulestore(), contentstore(), course_key,
                                  root_dir, name)
        else:
            export_course_to_xml(modulestore(), contentstore(),
                                 course_module.id, root_dir, name)

        if status:
            status.set_state(u'Compressing')
            status.increment_completed_steps()
        LOGGER.debug(u'tar file being generated at %s', export_file.name)
        with tarfile.open(name=export_file.name, mode='w:gz') as tar_file:
            tar_file.add(root_dir / name, arcname=name)

    except SerializationError as exc:
        LOGGER.exception(u'There was an error exporting %s',
                         course_key,
                         exc_info=True)
        parent = None
        try:
            failed_item = modulestore().get_item(exc.location)
            parent_loc = modulestore().get_parent_location(
                failed_item.location)

            if parent_loc is not None:
                parent = modulestore().get_item(parent_loc)
        except:  # pylint: disable=bare-except
            # if we have a nested exception, then we'll show the more generic error message
            pass

        context.update({
            'in_err': True,
            'raw_err_msg': str(exc),
            'edit_unit_url': (reverse_usage_url("container_handler", parent.location)
                              if parent else ""),
        })
        if status:
            status.fail(
                json.dumps({
                    'raw_error_msg': context['raw_err_msg'],
                    'edit_unit_url': context['edit_unit_url']
                }))
        raise
    except Exception as exc:
        LOGGER.exception(u'There was an error exporting %s',
                         course_key,
                         exc_info=True)
        context.update({
            'in_err': True,
            'edit_unit_url': None,
            'raw_err_msg': str(exc)
        })
        if status:
            status.fail(json.dumps({'raw_error_msg': context['raw_err_msg']}))
        raise
    finally:
        if os.path.exists(root_dir / name):
            shutil.rmtree(root_dir / name)

    return export_file
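
A short, hedged sketch of calling create_export_tarball; the course lookup and course_key here are placeholders:

# Hypothetical call site (e.g. a view or task)
course_module = modulestore().get_course(course_key)
tarball = create_export_tarball(course_module, course_key, context={})
try:
    # tarball is a NamedTemporaryFile; consume it before closing
    with open(tarball.name, 'rb') as f:
        data = f.read()
finally:
    tarball.close()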
Example #49
0
def export_handler(request, tag=None, package_id=None, branch=None, version_guid=None, block=None):
    """
    The restful handler for exporting a course.

    GET
        html: return html page for import page
        application/x-tgz: return tar.gz file containing exported course
        json: not supported

    Note that there are 2 ways to request the tar.gz file. The request header can specify
    application/x-tgz via HTTP_ACCEPT, or a query parameter can be used (?_accept=application/x-tgz).

    If the tar.gz file has been requested but the export operation fails, an HTML page will be returned
    which describes the error.
    """
    location = BlockUsageLocator(package_id=package_id, branch=branch, version_guid=version_guid, block_id=block)
    if not has_course_access(request.user, location):
        raise PermissionDenied()

    old_location = loc_mapper().translate_locator_to_location(location)
    course_module = modulestore().get_item(old_location)

    # an _accept URL parameter will be preferred over HTTP_ACCEPT in the header.
    requested_format = request.REQUEST.get('_accept', request.META.get('HTTP_ACCEPT', 'text/html'))

    export_url = location.url_reverse('export') + '?_accept=application/x-tgz'
    if 'application/x-tgz' in requested_format:
        name = old_location.name
        export_file = NamedTemporaryFile(prefix=name + '.', suffix=".tar.gz")
        root_dir = path(mkdtemp())

        try:
            export_to_xml(modulestore('direct'), contentstore(), old_location, root_dir, name, modulestore())

            logging.debug('tar file being generated at {0}'.format(export_file.name))
            with tarfile.open(name=export_file.name, mode='w:gz') as tar_file:
                tar_file.add(root_dir / name, arcname=name)
        except SerializationError as e:
            logging.exception('There was an error exporting course {0}. {1}'.format(course_module.location, unicode(e)))
            unit = None
            failed_item = None
            parent = None
            try:
                failed_item = modulestore().get_instance(course_module.location.course_id, e.location)
                parent_locs = modulestore().get_parent_locations(failed_item.location, course_module.location.course_id)

                if len(parent_locs) > 0:
                    parent = modulestore().get_item(parent_locs[0])
                    if parent.location.category == 'vertical':
                        unit = parent
            except:
                # if we have a nested exception, then we'll show the more generic error message
                pass

            unit_locator = None
            if parent:
                unit_locator = loc_mapper().translate_location(old_location.course_id, parent.location, False, True)

            return render_to_response('export.html', {
                'context_course': course_module,
                'in_err': True,
                'raw_err_msg': str(e),
                'failed_module': failed_item,
                'unit': unit,
                'edit_unit_url': unit_locator.url_reverse("unit") if parent else "",
                'course_home_url': location.url_reverse("course"),
                'export_url': export_url
            })
        except Exception as e:
            logging.exception('There was an error exporting course {0}. {1}'.format(course_module.location, unicode(e)))
            return render_to_response('export.html', {
                'context_course': course_module,
                'in_err': True,
                'unit': None,
                'raw_err_msg': str(e),
                'course_home_url': location.url_reverse("course"),
                'export_url': export_url
            })
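
As the docstring above notes, the tarball can be requested either through the Accept header or the `_accept` query parameter. A hedged client-side sketch using the requests library (the URL is illustrative):

import requests

url = 'https://studio.example.com/export/org.course.run'  # placeholder
# Option 1: query parameter
resp = requests.get(url, params={'_accept': 'application/x-tgz'})
# Option 2: Accept header
resp = requests.get(url, headers={'Accept': 'application/x-tgz'})
with open('course.tar.gz', 'wb') as out:
    out.write(resp.content)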
Example #51
0
def decompresser_base(base, date_fond, dates_majo, cache='cache'):

    # Validate the parameters
    base = base.upper()
    if base not in bases:
        raise NomBaseException()
    if not isinstance(date_fond, datetime) or not isinstance(dates_majo, list):
        raise ValueError()
    for date in dates_majo:
        if not isinstance(date, datetime):
            raise ValueError()

    # Create the directory for this full dump
    rep = os.path.join(cache, 'bases-xml', date_fond.strftime('%Y%m%d-%H%M%S'))
    path(rep).mkdir_p()

    # Extract the full dump. An 'erreur-tar' marker file is created before
    # extraction and removed on success; a leftover marker means a previous
    # extraction was interrupted and must be redone.
    date = date_fond.strftime('%Y%m%d-%H%M%S')
    if not os.path.exists(os.path.join(rep, 'fond-' + date)) or \
       os.path.exists(os.path.join(rep, 'fond-' + date, 'erreur-tar')):

        if os.path.exists(os.path.join(rep, 'fond-' + date, 'erreur-tar')):
            shutil.rmtree(os.path.join(rep, 'fond-' + date))
        path(os.path.join(rep, 'fond-' + date)).mkdir_p()
        open(os.path.join(rep, 'fond-' + date, 'erreur-tar'), 'w').close()
        subprocess.call([
            'tar', 'xzf',
            os.path.join(cache, 'tar', base + '-fond-' + date + '.tar.gz'),
            '-C',
            os.path.join(rep, 'fond-' + date)
        ])
        os.remove(os.path.join(rep, 'fond-' + date, 'erreur-tar'))

    # Record this delivery in the database
    try:
        entree_livraison = Livraison.get(Livraison.date == date_fond)
    except Livraison.DoesNotExist:
        entree_livraison = Livraison.create(date=date_fond,
                                            type='fondation',
                                            base=base,
                                            precedent=None,
                                            fondation=None)
    entree_livraison_fondation = entree_livraison

    # Extract the incremental dumps
    for date_majo in dates_majo:

        date = date_majo.strftime('%Y%m%d-%H%M%S')
        if not os.path.exists(os.path.join(rep, 'majo-' + date)) or \
           os.path.exists(os.path.join(rep, date)) or \
           os.path.exists(os.path.join(rep, 'majo-' + date, 'erreur-tar')):

            if os.path.exists(os.path.join(rep, date)):
                shutil.rmtree(os.path.join(rep, date), True)
                shutil.rmtree(os.path.join(rep, 'majo-' + date), True)
            if os.path.exists(os.path.join(rep, 'majo-' + date, 'erreur-tar')):
                shutil.rmtree(os.path.join(rep, 'majo-' + date), True)
            path(os.path.join(rep, date)).mkdir_p()
            open(os.path.join(rep, date, 'erreur-tar'), 'w').close()
            subprocess.call([
                'tar', 'xzf',
                os.path.join(cache, 'tar', base + '-majo-' + date + '.tar.gz'),
                '-C', rep
            ])
            os.rename(os.path.join(rep, date),
                      os.path.join(rep, 'majo-' + date))
            os.remove(os.path.join(rep, 'majo-' + date, 'erreur-tar'))

        # Record this delivery in the database
        try:
            entree_livraison = Livraison.get(Livraison.date == date_majo)
        except Livraison.DoesNotExist:
            entree_livraison = Livraison.create(
                date=date_majo,
                type='miseajour',
                base=base,
                precedent=entree_livraison,
                fondation=entree_livraison_fondation)
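
The 'erreur-tar' marker file used above is a simple crash-safety protocol: create the marker before extracting, remove it only on success, and treat a leftover marker on the next run as a partial extraction to redo. A generic sketch of the same pattern (names are illustrative):

import os
import shutil
import subprocess

def extract_once(archive, dest):
    marker = os.path.join(dest, 'in-progress')
    if os.path.exists(dest) and not os.path.exists(marker):
        return  # already extracted successfully
    if os.path.exists(dest):
        shutil.rmtree(dest)  # redo the interrupted extraction
    os.makedirs(dest)
    open(marker, 'w').close()
    subprocess.check_call(['tar', 'xzf', archive, '-C', dest])
    os.remove(marker)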
Example #52
0
def import_handler(request, tag=None, package_id=None, branch=None, version_guid=None, block=None):
    """
    The restful handler for importing a course.

    GET
        html: return html page for import page
        json: not supported
    POST or PUT
        json: import a course via the .tar.gz file specified in request.FILES
    """
    location = BlockUsageLocator(package_id=package_id, branch=branch, version_guid=version_guid, block_id=block)
    if not has_course_access(request.user, location):
        raise PermissionDenied()

    old_location = loc_mapper().translate_locator_to_location(location)

    if 'application/json' in request.META.get('HTTP_ACCEPT', 'application/json'):
        if request.method == 'GET':
            raise NotImplementedError('coming soon')
        else:
            data_root = path(settings.GITHUB_REPO_ROOT)
            course_subdir = "{0}-{1}-{2}".format(old_location.org, old_location.course, old_location.name)
            course_dir = data_root / course_subdir

            filename = request.FILES['course-data'].name
            if not filename.endswith('.tar.gz'):
                return JsonResponse(
                    {
                        'ErrMsg': _('We only support uploading a .tar.gz file.'),
                        'Stage': 1
                    },
                    status=415
                )
            temp_filepath = course_dir / filename

            if not course_dir.isdir():
                os.mkdir(course_dir)

            logging.debug('importing course to {0}'.format(temp_filepath))

            # Get upload chunks byte ranges
            try:
                matches = CONTENT_RE.search(request.META["HTTP_CONTENT_RANGE"])
                content_range = matches.groupdict()
            except KeyError:    # Single chunk
                # no Content-Range header, so make one that will work
                content_range = {'start': 0, 'stop': 1, 'end': 2}

            # stream out the uploaded files in chunks to disk
            if int(content_range['start']) == 0:
                mode = "wb+"
            else:
                mode = "ab+"
                size = os.path.getsize(temp_filepath)
                # Check to make sure we haven't missed a chunk
                # This shouldn't happen, even if different instances are handling
                # the same session, but it's always better to catch errors earlier.
                if size < int(content_range['start']):
                    log.warning(
                        "Reported range %s does not match size downloaded so far %s",
                        content_range['start'],
                        size
                    )
                    return JsonResponse(
                        {
                            'ErrMsg': _('File upload corrupted. Please try again'),
                            'Stage': 1
                        },
                        status=409
                    )
                # The last request sometimes comes twice. This happens because
                # nginx sends a 499 error code when the response takes too long.
                elif size > int(content_range['stop']) and size == int(content_range['end']):
                    return JsonResponse({'ImportStatus': 1})

            with open(temp_filepath, mode) as temp_file:
                for chunk in request.FILES['course-data'].chunks():
                    temp_file.write(chunk)

            size = os.path.getsize(temp_filepath)

            if int(content_range['stop']) != int(content_range['end']) - 1:
                # More chunks coming
                return JsonResponse({
                    "files": [{
                                  "name": filename,
                                  "size": size,
                                  "deleteUrl": "",
                                  "deleteType": "",
                                  "url": location.url_reverse('import'),
                                  "thumbnailUrl": ""
                              }]
                })

            else:   # This was the last chunk.

                # Use sessions to keep info about import progress
                session_status = request.session.setdefault("import_status", {})
                key = location.package_id + filename
                session_status[key] = 1
                request.session.modified = True

                # Do everything from now on in a try-finally block to make sure
                # everything is properly cleaned up.
                try:

                    tar_file = tarfile.open(temp_filepath)
                    try:
                        safetar_extractall(tar_file, (course_dir + '/').encode('utf-8'))
                    except SuspiciousOperation as exc:
                        return JsonResponse(
                            {
                                'ErrMsg': 'Unsafe tar file. Aborting import.',
                                'SuspiciousFileOperationMsg': exc.args[0],
                                'Stage': 1
                            },
                            status=400
                        )
                    finally:
                        tar_file.close()

                    session_status[key] = 2
                    request.session.modified = True

                    # find the 'course.xml' file
                    def get_all_files(directory):
                        """
                        For each file in the directory, yield a 2-tuple of (file-name,
                        directory-path)
                        """
                        for dirpath, _dirnames, filenames in os.walk(directory):
                            for filename in filenames:
                                yield (filename, dirpath)

                    def get_dir_for_fname(directory, filename):
                        """
                        Returns the dirpath for the first file found in the directory
                        with the given name.  If there is no file in the directory with
                        the specified name, return None.
                        """
                        for fname, dirpath in get_all_files(directory):
                            if fname == filename:
                                return dirpath
                        return None

                    fname = "course.xml"

                    dirpath = get_dir_for_fname(course_dir, fname)

                    if not dirpath:
                        return JsonResponse(
                            {

                                'ErrMsg': _('Could not find the course.xml file in the package.'),
                                'Stage': 2
                            },
                            status=415
                        )

                    logging.debug('found course.xml at {0}'.format(dirpath))

                    if dirpath != course_dir:
                        for fname in os.listdir(dirpath):
                            # dirpath comes from os.walk and is a plain str,
                            # so join explicitly rather than using '/'
                            shutil.move(os.path.join(dirpath, fname), course_dir)

                    _module_store, course_items = import_from_xml(
                        modulestore('direct'),
                        settings.GITHUB_REPO_ROOT,
                        [course_subdir],
                        load_error_modules=False,
                        static_content_store=contentstore(),
                        target_location_namespace=old_location,
                        draft_store=modulestore()
                    )

                    new_location = course_items[0].location
                    logging.debug('new course at {0}'.format(new_location))

                    session_status[key] = 3
                    request.session.modified = True

                    auth.add_users(request.user, CourseInstructorRole(new_location), request.user)
                    auth.add_users(request.user, CourseStaffRole(new_location), request.user)
                    logging.debug('created all course groups at {0}'.format(new_location))

                # Send errors to client with stage at which error occurred.
                except Exception as exception:   # pylint: disable=W0703
                    log.exception(
                        "error importing course"
                    )
                    return JsonResponse(
                        {
                            'ErrMsg': str(exception),
                            'Stage': session_status[key]
                        },
                        status=400
                    )

                finally:
                    shutil.rmtree(course_dir)

                return JsonResponse({'Status': 'OK'})
    elif request.method == 'GET':  # assume html
        course_module = modulestore().get_item(old_location)
        return render_to_response('import.html', {
            'context_course': course_module,
            'successful_import_redirect_url': location.url_reverse("course"),
            'import_status_url': location.url_reverse("import_status", "fillerName"),
        })
    else:
        return HttpResponseNotFound()
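
A hedged sketch of the chunked upload this handler expects: each POST carries a slice of the .tar.gz plus a Content-Range header, and the final chunk satisfies stop == end - 1 (URL and chunk size are placeholders):

import os
import requests

url = 'https://studio.example.com/import/org.course.run'  # placeholder
fname = 'course.tar.gz'
total = os.path.getsize(fname)
chunk = 2 * 1024 * 1024
with open(fname, 'rb') as f:
    for start in range(0, total, chunk):
        data = f.read(chunk)
        stop = start + len(data) - 1  # inclusive upper bound
        requests.post(
            url,
            files={'course-data': (fname, data)},
            headers={'Content-Range': 'bytes %d-%d/%d' % (start, stop, total)},
        )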
Example #53
0
    def test_tester_layer(self):
        bu = build.Builder()
        bu.log_level = "WARNING"
        bu.output_dir = "out"
        bu.series = "trusty"
        bu.name = "foo"
        bu.charm = "trusty/tester"
        bu.hide_metrics = True
        bu.report = False
        remove_layer_file = self.dirname / 'trusty/tester/to_remove'
        remove_layer_file.touch()
        self.addCleanup(remove_layer_file.remove_p)
        with mock.patch.object(build.builder, 'log') as log:
            bu()
            log.warn.assert_called_with(
                'Please add a `repo` key to your layer.yaml, '
                'with a url from which your layer can be cloned.')
        base = path('out/trusty/foo')
        self.assertTrue(base.exists())

        # Verify ignore rules applied
        self.assertFalse((base / ".bzr").exists())
        self.assertEqual((base / "ignore").text(), "mysql\n")
        self.assertEqual((base / "exclude").text(), "test-base\n")
        self.assertEqual((base / "override-ignore").text(), "tester\n")
        self.assertEqual((base / "override-exclude").text(), "tester\n")
        self.assertFalse((base / "tests/00-setup").exists())
        self.assertFalse((base / "tests/15-configs").exists())
        self.assertTrue((base / "tests/20-deploy").exists())
        actions = yaml.load((base / "actions.yaml").text())
        resources = yaml.load((base / "resources.yaml").text())
        self.assertNotIn("test-base", actions)
        self.assertIn("mysql", actions)
        self.assertIn("tester", actions)
        self.assertIn("test-base", resources)
        self.assertNotIn("mysql", resources)
        self.assertIn("tester", resources)

        # Metadata should have combined provides fields
        metadata = base / "metadata.yaml"
        self.assertTrue(metadata.exists())
        metadata_data = yaml.load(metadata.open())
        self.assertIn("shared-db", metadata_data['provides'])
        self.assertIn("storage", metadata_data['provides'])
        # The maintainer, maintainers values should only be from the top layer.
        self.assertIn("maintainer", metadata_data)
        self.assertEqual(metadata_data['maintainer'], "Tester <*****@*****.**>")
        self.assertNotIn("maintainers", metadata_data)
        # The tags list must be de-duplicated.
        self.assertEqual(metadata_data['tags'], ["databases"])

        # Config should have keys but not the ones in deletes
        config = base / "config.yaml"
        self.assertTrue(config.exists())
        config_data = yaml.load(config.open())['options']
        self.assertIn("bind-address", config_data)
        self.assertNotIn("vip", config_data)
        self.assertIn("key", config_data)
        self.assertEqual(config_data["key"]["default"], None)
        # Issue #99 where strings lose their quotes in a charm build.
        self.assertIn("numeric-string", config_data)
        default_value = config_data['numeric-string']['default']
        self.assertEqual(default_value, "0123456789", "value must be a string")
        # Issue 218, ensure proper order of layer application
        self.assertEqual(config_data['backup_retention_count']['default'], 7,
                         'Config from layers was merged in wrong order')

        cyaml = base / "layer.yaml"
        self.assertTrue(cyaml.exists())
        cyaml_data = yaml.load(cyaml.open())
        self.assertEquals(cyaml_data['includes'],
                          ['trusty/test-base', 'trusty/mysql'])
        self.assertEquals(cyaml_data['is'], 'foo')
        self.assertEquals(cyaml_data['options']['mysql']['qux'], 'one')

        self.assertTrue((base / "hooks/config-changed").exists())

        # Files from the top layer as overrides
        start = base / "hooks/start"
        self.assertTrue(start.exists())
        self.assertIn("Overridden", start.text())

        self.assertTrue((base / "README.md").exists())
        self.assertEqual("dynamic tactics", (base / "README.md").text())

        sigs = base / ".build.manifest"
        self.assertTrue(sigs.exists())
        data = json.load(sigs.open())
        self.assertEquals(data['signatures']["README.md"], [
            u'foo', "static", u'cfac20374288c097975e9f25a0d7c81783acdbc81'
            '24302ff4a731a4aea10de99'
        ])

        self.assertEquals(data["signatures"]['metadata.yaml'], [
            u'foo', "dynamic",
            u'5691b0c0aaf43d0f27d8eca6afbd1145aa3f4307456757e592827f002cf603f2'
        ])

        storage_attached = base / "hooks/data-storage-attached"
        storage_detaching = base / "hooks/data-storage-detaching"
        self.assertTrue(storage_attached.exists())
        self.assertTrue(storage_detaching.exists())
        self.assertIn("Hook: data", storage_attached.text())
        self.assertIn("Hook: data", storage_detaching.text())

        # confirm that files removed from a base layer get cleaned up
        self.assertTrue((base / 'to_remove').exists())
        remove_layer_file.remove()
        bu()
        self.assertFalse((base / 'to_remove').exists())
Example #54
0
 def get_id(self, dump_file):
     """ Returns restore point ID for a given filepath
     """
     return md5.new(path(dump_file).basename()).hexdigest()
Example #55
0
    def test_regenerate_inplace(self):
        # take a generated example where a base layer has changed
        # regenerate in place
        # make some assertions
        bu = build.Builder()
        bu.log_level = "WARNING"
        bu.output_dir = "out"
        bu.series = "trusty"
        bu.name = "foo"
        bu.charm = "trusty/b"
        bu.hide_metrics = True
        bu.report = False
        bu()
        base = path('out/trusty/foo')
        self.assertTrue(base.exists())

        # verify the 1st gen worked
        self.assertTrue((base / "a").exists())
        self.assertTrue((base / "README.md").exists())

        # now regenerate from the target
        with utils.cd("out/trusty/foo"):
            bu = build.Builder()
            bu.log_level = "WARNING"
            bu.output_dir = path(os.getcwd())
            bu.series = "trusty"
            # The generate target and source are now the same
            bu.name = "foo"
            bu.charm = "."
            bu.hide_metrics = True
            bu.report = False
            bu()
            base = bu.output_dir
            self.assertTrue(base.exists())

            # Check that the generated layer.yaml makes sense
            cy = base / "layer.yaml"
            config = yaml.load(cy.open())
            self.assertEquals(config["includes"],
                              ["trusty/a", "interface:mysql"])
            self.assertEquals(config["is"], "foo")

            # We can even run it more than once
            bu()
            cy = base / "layer.yaml"
            config = yaml.load(cy.open())
            self.assertEquals(config["includes"],
                              ["trusty/a", "interface:mysql"])
            self.assertEquals(config["is"], "foo")

            # We included an interface, we should be able to assert things about it
            # in its final form as well
            provides = base / "hooks/relations/mysql/provides.py"
            requires = base / "hooks/relations/mysql/requires.py"
            self.assertTrue(provides.exists())
            self.assertTrue(requires.exists())

            # and that we generated the hooks themselves
            for kind in ["joined", "changed", "broken", "departed"]:
                self.assertTrue((base / "hooks" /
                                 "mysql-relation-{}".format(kind)).exists())

            # and ensure we have an init file (the interface doesn't ship
            # one, so it's added)
            init = base / "hooks/relations/mysql/__init__.py"
            self.assertTrue(init.exists())
Example #56
0
def telecharger_base(base, livraison='tout', cache='cache'):

    # Validate the parameters
    base = base.upper()
    if base not in bases:
        raise NomBaseException()
    if livraison not in ['fondation', 'tout'] and \
       not isinstance(livraison, datetime):
        livraison = datetime.strptime(livraison, '%Y%m%d-%H%M%S')
    if serveurs[base][0] != 'ftp':
        raise NonImplementeException()

    # Create the cache folder for downloaded files
    path(os.path.join(cache, 'tar')).mkdir_p()

    # FTP connection
    serveur = serveurs[base][0] + ':' + \
              '//' + serveurs[base][2] + ':' + serveurs[base][3] + \
              '@' + serveurs[base][1] + serveurs[base][4]

    connexion_ftp = ftplib.FTP(serveurs[base][1], \
                               serveurs[base][2], \
                               serveurs[base][3])

    # Parse the dates out of the file names
    connexion_ftp.cwd(serveurs[base][4])
    fichiers = connexion_ftp.nlst()
    date_fond = None
    dates_majo = []

    for fichier in fichiers:

        # If it is a full-dump file
        try:
            datetime.strptime(fichier, fichiers_fond[base])
            if date_fond: raise FondationNonUniqueException()
            date_fond = datetime.strptime(fichier, fichiers_fond[base])
        except ValueError:
            pass

        # If it is an incremental-dump file
        try:
            dates_majo.append(datetime.strptime(fichier, fichiers_majo[base]))
        except ValueError:
            pass

    # Normalize the date parameters
    dates_majo.sort()
    if not date_fond:
        raise FondationNonTrouveeException()
    if livraison == 'fondation':
        livraison = date_fond
    if livraison == 'tout':
        livraison = dates_majo[-1]
    dates_majo = [date for date in dates_majo if date <= livraison]

    # Download the full dump
    telecharger_cache(
        serveur + date_fond.strftime(fichiers_fond[base]),
        os.path.join(cache, 'tar',
                     base + date_fond.strftime('-fond-%Y%m%d-%H%M%S.tar.gz')))

    # Download the incremental dumps
    for date_majo in dates_majo:

        telecharger_cache(
            serveur + date_majo.strftime(fichiers_majo[base]),
            os.path.join(
                cache, 'tar',
                base + date_majo.strftime('-majo-%Y%m%d-%H%M%S.tar.gz')))

    # Close the FTP connection
    connexion_ftp.close()

    return date_fond, dates_majo
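
A hypothetical driver chaining this function with decompresser_base above: download all dumps for a base, then extract and register them (the base name is a placeholder):

date_fond, dates_majo = telecharger_base('LEGI', livraison='tout')
decompresser_base('LEGI', date_fond, dates_majo)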
Example #57
0
            # Mongo modulestore beneath mixed.
            # Returns the entire collection with *all* courses' asset metadata.
            return store.asset_collection
        else:
            # Split modulestore beneath mixed.
            # Split stores all asset metadata in the structure collection.
            return store.db_connection.structures


THIS_UUID = uuid4().hex

COMMON_DOCSTORE_CONFIG = {
    'host': MONGO_HOST,
    'port': MONGO_PORT_NUM,
}
DATA_DIR = path(__file__).dirname().parent.parent / "tests" / "data" / "xml-course-root"
TEST_DATA_DIR = 'common/test/data/'

XBLOCK_MIXINS = (InheritanceMixin, XModuleMixin)


MIXED_MODULESTORE_BOTH_SETUP = MixedModulestoreBuilder([
    ('draft', MongoModulestoreBuilder()),
    ('split', VersioningModulestoreBuilder())
])
DRAFT_MODULESTORE_SETUP = MixedModulestoreBuilder([('draft', MongoModulestoreBuilder())])
SPLIT_MODULESTORE_SETUP = MixedModulestoreBuilder([('split', VersioningModulestoreBuilder())])
MIXED_MODULESTORE_SETUPS = (
    DRAFT_MODULESTORE_SETUP,
    SPLIT_MODULESTORE_SETUP,
)
Example #58
0
 def tearDown(self):
     path("out").rmtree_p()
Example #59
0
    def on_task_filter(self, task, config):
        if not task.accepted:
            log.debug('Scanning not needed')
            return
        config = self.prepare_config(config)
        accepted_series = {}
        paths = set()
        for entry in task.accepted:
            if 'series_parser' in entry:
                if entry['series_parser'].valid:
                    accepted_series.setdefault(entry['series_parser'].name,
                                               []).append(entry)
                    for folder in config['path']:
                        try:
                            paths.add(entry.render(folder))
                        except RenderError as e:
                            log.error('Error rendering path `%s`: %s', folder,
                                      e)
                else:
                    log.debug('entry %s series_parser invalid', entry['title'])
        if not accepted_series:
            log.warning(
                'No accepted entries have series information. exists_series cannot filter them'
            )
            return

        # scan through
        # For speed, only test accepted entries since our priority should be after everything is accepted.
        for series in accepted_series:
            # make new parser from parser in entry
            series_parser = accepted_series[series][0]['series_parser']
            for folder in paths:
                folder = path(folder).expanduser()
                if not folder.isdir():
                    log.warning('Directory %s does not exist', folder)
                    continue

                for filename in folder.walk(errors='warn'):
                    # run parser on filename data
                    try:
                        disk_parser = get_plugin_by_name(
                            'parsing').instance.parse_series(
                                data=filename.name, name=series_parser.name)
                    except ParseWarning as pw:
                        disk_parser = pw.parsed
                        log_once(pw.value, logger=log)
                    if disk_parser.valid:
                        log.debug('name %s is same series as %s',
                                  filename.name, series)
                        log.debug('disk_parser.identifier = %s',
                                  disk_parser.identifier)
                        log.debug('disk_parser.quality = %s',
                                  disk_parser.quality)
                        log.debug('disk_parser.proper_count = %s',
                                  disk_parser.proper_count)

                        for entry in accepted_series[series]:
                            log.debug('series_parser.identifier = %s',
                                      entry['series_parser'].identifier)
                            if disk_parser.identifier != entry[
                                    'series_parser'].identifier:
                                log.trace('wrong identifier')
                                continue
                            log.debug('series_parser.quality = %s',
                                      entry['series_parser'].quality)
                            if config.get(
                                    'allow_different_qualities') == 'better':
                                if entry[
                                        'series_parser'].quality > disk_parser.quality:
                                    log.trace('better quality')
                                    continue
                            elif config.get('allow_different_qualities'):
                                if disk_parser.quality != entry[
                                        'series_parser'].quality:
                                    log.trace('wrong quality')
                                    continue
                            log.debug('entry parser.proper_count = %s',
                                      entry['series_parser'].proper_count)
                            if disk_parser.proper_count >= entry[
                                    'series_parser'].proper_count:
                                entry.reject('proper already exists')
                                continue
                            else:
                                log.trace('new one is better proper, allowing')
                                continue
Example #60
0
 def setUp(self):
     self.dirname = path(pkg_resources.resource_filename(__name__, ""))
     os.environ["LAYER_PATH"] = self.dirname
     os.environ["INTERFACE_PATH"] = self.dirname / "interfaces"
     path("out").rmtree_p()