def getScenario(self):
     if not self.isComplete(): return None
     QtGui.QApplication.setOverrideCursor(QtGui.QCursor(QtCore.Qt.WaitCursor))
     try:
         checkedid = self.bngroup.checkedId()
         if   checkedid==0:
             index = self.comboTemplates.currentIndex()
             defname = unicode(self.comboTemplates.itemData(index).toString())
             defscenario = scenario.Scenario.getDefault(defname,scenario.guiscenarioversion)
             xmldom = defscenario.toXmlDom()
             scen = scenario.Scenario.fromSchemaName(scenario.guiscenarioversion)
             scen.setStore(xmldom)
         elif checkedid==1:
             path = self.pathOpen.path()
             if path.endswith('.gotmresult'):
                 try:
                     result = data.Result()
                     result.load(path)
                 except Exception,e:
                     raise Exception('An error occurred while loading the result: '+str(e))
                 scen = result.scenario
                 result.unlink()
             elif path.endswith('.xml'):
                 try:
                     scen = scenario.Scenario.fromSchemaName(scenario.guiscenarioversion)
                     scen.load(path)
                 except Exception,e:
                     raise Exception('An error occurred while loading the scenario: '+str(e))
             else:
Example #2
def extract_setup_py_cfg(path):
    try:
        if path.endswith('.tar.gz'):
            t = tarfile.open(name=path, mode='r')
            try:
                ti = t.next()
                while ti:
                    # Issue 14160 appears to still be around in some form.
                    if not ti.issym():
                        name = os.path.basename(ti.name)
                        if name in ('setup.py', 'setup.cfg'):
                            content = t.extractfile(ti)
                            if content:
                                yield name, content.read()
                    ti = t.next()
            finally:
                t.close()
        elif path.endswith('.zip'):
            z = zipfile.ZipFile(path, 'r')
            try:
                for n in ('setup.py', 'setup.cfg'):
                    try:
                        yield n, z.open(n).read()
                    except:
                        pass
            finally:
                z.close()
        else:
            yield 'failed', 'Cannot handle path %s' % path
    except Exception as e:
        yield 'failed', str(e)
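A minimal usage sketch for the generator above; the archive path and the way the (name, content) pairs are consumed are illustrative only.

for name, content in extract_setup_py_cfg('dist/example-1.0.tar.gz'):  # hypothetical sdist path
    if name == 'failed':
        print('extraction failed:', content)
    else:
        print(name, len(content), 'bytes')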
Example #3
    def index(self, environ, start_response):

        path = environ.get('PATH_INFO')

        path = os.path.basename(path)

        if path == '':
            path = 'display.html'

        stuff_i_care_about = ('display.html', 'display.css', 'display.js',
                              'jquery-1.4.3.min.js', 'jquery-textfill-0.1.js')

        if not path in stuff_i_care_about:
            start_response('404 NOT FOUND',
                           [('Content-Type', 'text/plain'),
                            ('Content-Length', '0')])
            return []

        data = open(os.path.join(
                os.path.dirname(__file__),
                path)).read()

        data = data % environ

        if path.endswith(".js"):
            type = "text/javascript"
        elif path.endswith(".css"):
            type = "text/css"
        else:
            type = "text/html"

        start_response('200 OK', [('Content-Type', type),
                                  ('Content-Length', str(len(data)))])
        return [data]
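The if/elif chain above maps extensions to content types by hand; the standard library's mimetypes module can do the same lookup. A hedged sketch of that variant (the fallback 'text/html' mirrors the handler above):

import mimetypes

def guess_content_type(path, default='text/html'):
    # mimetypes.guess_type returns (type, encoding); type is None for unknown extensions
    ctype, _encoding = mimetypes.guess_type(path)
    return ctype or default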
Example #4
def tree(directory):
    # print(os.listdir(directory))

    x, y, z = 0,0,0
    for filename in os.listdir(directory):
        path = os.path.join(directory, filename)
        if os.path.isdir(path):

            print('dir ' + path)
            tt = tree(path)
            x += tt[0]
            y += tt[1]
            z += tt[2]
        elif os.path.isfile(path):

            if  path.endswith('.py') or path.endswith('.java') :

                print('file ' + path)
                tt = lines(path)
                x += tt[0]
                y += tt[1]
                z += tt[2]
        else:
            print('else ' + path)
    return [x,y,z]
Example #5
    def from_url(url):
        """Assumes a valid URL if the scheme is specified.  For example,
        'file:///C:/My%20Documents/test.vt'.  If only a filename is
        specified, it converts the filename to a URL.

        """
        if '://' in url:
            scheme = url.split('://', 1)[0]
        elif url.startswith('untitled:'):
            scheme = 'untitled'
        else:
            scheme = 'file'
            url = BaseLocator.convert_filename_to_url(url)
        if scheme == 'untitled':
            return UntitledLocator.from_url(url)
        elif scheme == 'db':
            return DBLocator.from_url(url)
        elif scheme == 'file':
            old_uses_query = urlparse.uses_query
            urlparse.uses_query = urlparse.uses_query + ['file']
            scheme, host, path, query, fragment = urlparse.urlsplit(str(url))
            urlparse.uses_query = old_uses_query
            path = url2pathname(path)
            if path.endswith(".vt"):
                return ZIPFileLocator.from_url(url)
            elif path.endswith(".xml"):
                return XMLFileLocator.from_url(url)
        return None
Example #6
def f_reload(phenny, input): 
   """Reloads a module, for use by admins only.""" 
   if not input.admin: return

   name = input.group(2)
   if name == phenny.config.owner: 
      return phenny.reply('What?')

   if (not name) or (name == '*'): 
      phenny.setup()
      return phenny.reply('done')

   if not sys.modules.has_key(name): 
      return phenny.reply('%s: no such module!' % name)

   # Thanks to moot for prodding me on this
   path = sys.modules[name].__file__
   if path.endswith('.pyc') or path.endswith('.pyo'): 
      path = path[:-1]
   if not os.path.isfile(path): 
      return phenny.reply('Found %s, but not the source file' % name)

   module = imp.load_source(name, path)
   sys.modules[name] = module
   if hasattr(module, 'setup'): 
      module.setup(phenny)

   mtime = os.path.getmtime(module.__file__)
   modified = time.strftime('%Y-%m-%d %H:%M:%S', time.gmtime(mtime))

   phenny.register(vars(module))
   phenny.bind_commands()

   phenny.reply('%r (version: %s)' % (module, modified))
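Several reload examples in this listing repeat the same idiom for mapping a compiled module file back to its source: if __file__ ends in '.pyc' or '.pyo', drop the trailing character. A small standalone sketch of that pattern (the helper name is ours):

import os

def source_path(path):
    # module.pyc / module.pyo -> module.py, if the source actually exists
    if path.endswith('.pyc') or path.endswith('.pyo'):
        path = path[:-1]
    return path if os.path.isfile(path) else None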
 def getScenario(self,callback=None,completecallback=None):
     if not self.isComplete(): return None
     QtGui.QApplication.setOverrideCursor(QtGui.QCursor(QtCore.Qt.WaitCursor))
     try:
         checkedid = self.bngroup.checkedId()
         if   checkedid==0:
             #index = self.comboTemplates.currentIndex()
             #defname = unicode(self.comboTemplates.itemData(index).toString())
             #defscenario = core.scenario.Scenario.getDefault(defname,core.scenario.guiscenarioversion)
             #xmldom = defscenario.toXmlDom()
             scen = core.scenario.Scenario.fromSchemaName(core.scenario.guiscenarioversion)
             #scen.setStore(xmldom)
             scen.fillMissingValues()
             scen.resetChanged()
         elif checkedid==1:
             path = self.pathOpen.path()
             if path.endswith('.gotmresult'):
                 try:
                     res = core.result.Result()
                     res.load(path)
                 except Exception,e:
                     raise Exception('An error occurred while loading the result: '+str(e))
                 scen = res.scenario.addref()  # Note: the scenario version will be guiscenarioversion, set by Result
                 res.release()
             elif path.endswith('.xml'):
                 try:
                     scen = core.scenario.Scenario.fromSchemaName(core.scenario.guiscenarioversion)
                     scen.load(path)
                 except Exception,e:
                     raise Exception('An error occurred while loading the scenario: '+str(e))
             else:
Example #8
def sendpkm():

    print 'Note: you must exit the GTS before sending a pkm'
    print '4th Gen Pokemon files are currently unsupported.'
    print 'Enter the path or drag the pkm file here'
    print '(Type Back to go back)'

    while True:
        path = raw_input().strip()

        if path == "Back" or path == "back": return
               
        path = os.path.normpath(path)
        if system() != 'Windows':
            path = path.replace('\\', '')

        if path.startswith('"') or path.startswith("'"):
            path = path[1:]
        if path.endswith('"') or path.endswith("'"):
            path = path[:-1]
        if os.path.exists(path) and path.lower().endswith('.pkm'): break
        else:
            print 'Invalid file name, try again'
            continue
        
    sendingpkm(path)
Example #9
 def read_file(self, path):
     try:
         if not path.endswith('.htm') and not path.endswith('.html'):
             path += '.html'
         page = get_static_page_by_path(path)
         if path == 'NLTK Wordnet Browser Help.html':
             word = '* Help *'
         else:
             txt = '<title>' + frame_title + ' display of: '
             ind_0 = page.find(txt)
             if ind_0 == -1:
                 err_mess = 'This file is not in NLTK Browser format!'
                 self.panel.nb.h_w.show_msg(err_mess)
                 return
             ind_1 = page.find('of: ') + len('of: ')
             ind_2 = page.find('</title>')
             word = page[ind_1:ind_2]
             page = page[:ind_0] + page[ind_2+len('</title>'):]
         current_page = self.panel.nb.add_html_page()
         self.panel.nb.SetPageText(current_page,word)
         self.panel.show_page_and_word(page, word)
         return current_page
     except:
         excpt = str(sys.exc_info())
         self.panel.nb.h_w.show_msg('Unexpected error; File: ' + \
                                             path + ' ; ' + excpt)
Example #10
def f_reload(ircbot, input):
    """Reloads a module, for use by admins only."""
    if not input.admin:
        return

    name = input.group(2)
    if name == ircbot.config.owner:
        return ircbot.reply("What?")

    if (not name) or (name == "*"):
        ircbot.setup()
        return ircbot.reply("done")

    if not sys.modules.has_key(name):
        return ircbot.reply("%s: no such module!" % name)

    # Thanks to moot for prodding me on this
    path = sys.modules[name].__file__
    if path.endswith(".pyc") or path.endswith(".pyo"):
        path = path[:-1]
    if not os.path.isfile(path):
        return ircbot.reply("Found %s, but not the source file" % name)

    module = imp.load_source(name, path)
    sys.modules[name] = module
    if hasattr(module, "setup"):
        module.setup(ircbot)

    mtime = os.path.getmtime(module.__file__)
    modified = time.strftime("%Y-%m-%d %H:%M:%S", time.gmtime(mtime))

    ircbot.register(vars(module))
    ircbot.bind_commands()

    ircbot.reply("%r (version: %s)" % (module, modified))
Example #11
def multisend():

    print 'Note: you must exit the GTS before sending each Pokemon'
    print '4th Gen Pokemon files are currently unsupported.\n'
    print 'Enter the path or drag the pkm file here, then\npress Enter, and enter another path. Finish by typing\nDone then press Enter.'
    print '(Type Back to go back)'

    multi = list()

    while True:
        path = raw_input().strip()

        if path == "Back" or path == "back": return
        
        path = os.path.normpath(path)
        if system() != 'Windows':
            path = path.replace('\\', '')

        if path == 'done' or path == 'Done':
            multisender(multi)
            break

        if path.startswith('"') or path.startswith("'"):
            path = path[1:]
        if path.endswith('"') or path.endswith("'"):
            path = path[:-1]
        if os.path.exists(path) and path.lower().endswith('.pkm'):
            multi.append(path)
        else:
            print 'Invalid file name, try again'
            continue
Example #12
def warn(message, category=None, stacklevel=1):
    """Intended to replace warnings.warn in tests.

    Modified copy from zope.deprecation.tests to:

      * make the signature identical to warnings.warn
      * to check for *.pyc and *.pyo files.

    When zope.deprecation is fixed, this warn function can be removed again.
    """
    print "From grok.testing's warn():"

    frame = sys._getframe(stacklevel)
    path = frame.f_globals['__file__']
    if path.endswith('.pyc') or path.endswith('.pyo'):
        path = path[:-1]

    file = open(path)
    lineno = frame.f_lineno
    for i in range(lineno):
        line = file.readline()

    print "%s:%s: %s: %s\n  %s" % (
        path,
        frame.f_lineno,
        category.__name__,
        message,
        line.strip(),
        )
 def isFile(self, path):
     # Case-insensitive check for the static file types we serve.
     path = path.lower()
     if (path.endswith('.html') or
         path.endswith('.css') or
         path.endswith('.ico')):
         return True
     else:
         return False
    def __call__(self, environ, start_response):
        path = environ['PATH_INFO'].strip('/') or 'index.html'

        if path.startswith('/static') or path == 'index.html':
            try:
                data = open(path).read()
            except Exception:
                return not_found(start_response)

            if path.endswith(".js"):
                content_type = "text/javascript"
            elif path.endswith(".css"):
                content_type = "text/css"
            elif path.endswith(".swf"):
                content_type = "application/x-shockwave-flash"
            else:
                content_type = "text/html"

            start_response('200 OK', [('Content-Type', content_type)])
            return [data]
        if path.startswith("socket.io"):
            environ['scan_ts'] = self.scan_ts
            environ['scan_interval'] = self.scan_interval
            cur_ts = datetime.utcnow()
            socketio_manage(environ, {'/services': ServicesNamespace,
                                      '/sysinfo': SysinfoNamespace,
                                      '/cpu-widget': CPUWidgetNamespace,
                                      '/memory-widget': MemoryWidgetNamespace,
                                      '/network-widget': NetworkWidgetNamespace,
                                      '/disk-widget': DisksWidgetNamespace,
                                      '/logmanager': LogManagerNamespace,
                                      '/pincardmanager': PincardManagerNamespace
            })
            if ((cur_ts - self.scan_ts).total_seconds() > self.scan_interval):
                self.scan_ts = cur_ts
Example #15
 def has_volume_data(self, path):
     """
     Checks the file name <path> to see if it indicates a file or
     directory with NetCDF volume data.
     """
     return (path.endswith(".nc") or path.endswith("_nc") or
             path.endswith(".nc.bz2") or path.endswith("_nc.bz2"))
Example #16
 def read_file(self, path):
     try:
         if not path.endswith(".htm") and not path.endswith(".html"):
             path += ".html"
         f = open(path)
         page = f.read()
         f.close()
         if path == "NLTK Wordnet Browser Help.html":
             word = "* Help *"
         else:
             txt = "<title>" + frame_title + " display of: "
             ind_0 = page.find(txt)
             if ind_0 == -1:
                 err_mess = "This file is not in NLTK Browser format!"
                 self.panel.nb.h_w.show_msg(err_mess)
                 return
             ind_1 = page.find("of: ") + len("of: ")
             ind_2 = page.find("</title>")
             word = page[ind_1:ind_2]
             page = page[:ind_0] + page[ind_2 + len("</title>") :]
         current_page = self.panel.nb.add_html_page()
         self.panel.nb.SetPageText(current_page, word)
         self.panel.show_page_and_word(page, word)
         return current_page
     except:
         excpt = str(sys.exc_info())
         self.panel.nb.h_w.show_msg("Unexpected error; File: " + path + " ; " + excpt)
Example #17
def find_modules(modules_dir) -> List[Builtin]:
    builtins = [] # type: List[Builtin]
    cfiles = get_c_source_files(modules_dir)

    for i, cfile in enumerate(cfiles):
        sys.stderr.write('processing file {}: {}/{}\n'.format(cfile, i, len(cfiles)))

        modules = []
        with open(cfile) as f:
            text = f.read()
        matches = module_def_pattern.finditer(text)
        for match in matches:
            if match.group(1) == 'xx': # false positives
                continue
            modules.append(Builtin(
                path = match.group(1),
                start = match.start(1),
                end = match.end(1),
                filename = cfile,
            ))
        builtins.extend(modules)
        for module in modules:
            def_pattern = r'"({}(?:\.[A-Za-z0-9_]+)+)"'.format(module.path)
            for match in re.compile(def_pattern).finditer(text):
                path = match.group(1)
                if path.endswith('.c') or path.endswith('.h') or path == 'xx': # false positives
                    continue
                builtins.append(Builtin(
                    path = path,
                    start = match.start(1),
                    end = match.end(1),
                    filename = cfile,
                ))
    return builtins
def main():
    start_path = os.path.abspath(os.getcwd())
    abs_file = os.path.abspath(__file__)
    abs_dir = os.path.dirname(abs_file)
    dist_files = []
    for root, dirs, files in os.walk(os.path.dirname(__file__)):
        for name in files:
            path = os.path.abspath(os.path.join(root, name))
            if not path.endswith("~") and not path.endswith("#") and \
               not path.endswith(".tar.gz") and not path.endswith(".zip") and \
               path != abs_file:
                dist_files.append(os.path.abspath(os.path.join(root, name)))
        if ".svn" in dirs:
            dirs.remove(".svn")
    parent_dir = os.path.dirname(abs_dir) + os.sep
    full_paths = [p[len(parent_dir):] for p in dist_files]
    os.chdir(parent_dir)

    output = os.path.join(start_path, "friendfeed-api.tar.gz")
    command = "tar cvzf " + output + " " + " ".join(full_paths)
    print command
    os.system(command)

    output = os.path.join(start_path, "friendfeed-api.zip")
    command = "zip " + output + " " + " ".join(full_paths)
    print command
    os.system(command)
Example #19
    def __call__(self, environ, start_response):
        path = environ['PATH_INFO'].strip('/') or 'index.html'

        if path.startswith('static/') or path == 'index.html':
            try:
                data = open(path).read()
                lmt = os.path.getmtime(path)
            except Exception:
                return not_found(start_response)
            # format the mtime before it goes into the header list
            lmt = time.strftime("%a, %d %b %Y %H:%M:%S +0545", time.gmtime(lmt))
            headers = [('Last-Modified', lmt)]
            if path.endswith(".js"):
                content_type = "text/javascript"
            elif path.endswith(".css"):
                content_type = "text/css"
            elif path.endswith(".jpg"):
                content_type = "image/jpeg"
                headers += [('Cache-Control', 'max-age=86400')]
            elif path.endswith(".png"):
                content_type = "image/png"
                headers += [('Cache-Control', 'max-age=86400')]
            else:
                content_type = "text/html"

            start_response('200 OK', headers + [('Content-Type', content_type)])
            return [data]

        if path.startswith("socket.io"):
            socketio_manage(environ, {'/cpu': CPUNamespace})
        else:
            return not_found(start_response)
def _fix_path(path, sep):
    if path.endswith('/') or path.endswith('\\'):
        path = path[:-1]

    if sep != '/':
        path = path.replace('/', sep)
    return path
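A quick illustration of what _fix_path does with the separator argument; the inputs are made up:

print(_fix_path('data/models/', '\\'))  # prints data\models  (trailing slash dropped, separators converted)
print(_fix_path('data/models', '/'))    # prints data/models  (already normalized, returned unchanged)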
Example #21
def render_default(path, cp):
    """ This is the default function that will render a template to a string of HTML. The
    string will be for a drop-down tab that contains a link to the file.

    If the file extension requires information to be read, then that is passed to the
    content variable (eg. a segmentlistdict).
    """

    # define filename and slug from path
    filename = os.path.basename(path)
    slug = filename.replace('.', '_')

    # initializations
    content = None

    if path.endswith('.xml') or path.endswith('.xml.gz'):
        # segment or veto file return a segmentslistdict instance
        if 'SEG' in path or 'VETO' in path:
            with open(path, 'r') as xmlfile:
                content = fromsegmentxml(xmlfile, return_dict=True)

    # render template
    template_dir = pycbc.results.__path__[0] + '/templates/files'
    env = Environment(loader=FileSystemLoader(template_dir))
    env.globals.update(abs=abs)
    template = env.get_template('file_default.html')
    context = {'filename' : filename,
               'slug'     : slug,
               'cp'       : cp,
               'content'  : content}
    output = template.render(context)

    return output
Example #22
def db_open(path, timetype=None):
    """
    Create timeline database that can read and write timeline data from and to
    persistent storage identified by path.

    Throw a TimelineIOError exception if not able to read from the given path.

    Valid values for path:

      - special string ":tutorial:"
      - string with suffix .timeline
      - string with suffix .ics
      - string denoting a directory
    """
    if path == ":tutorial:":
        return open_gregorian_tutorial_timeline(path)
    elif path == ":numtutorial:":
        return open_numeric_tutorial_timeline(path)
    elif os.path.isdir(path):
        return open_directory_timeline(path)
    elif path.endswith(".timeline"):
        return db_open_timeline(path, timetype)
    elif path.endswith(".ics"):
        return db_open_ics(path)
    else:
        msg_template = (_("Unable to open timeline '%s'.") + "\n\n" +
                        _("Unknown format."))
        raise TimelineIOError(msg_template % path)
 def filepath(self, url, old_url, mimetype):
     # decode url, normalize url and query
     rurl = self.filepaths.get(old_url)
     if rurl is not None:
         return rurl
     o = urlparse.urlparse(url)
     path, ext = os.path.splitext(o.path)
     if not ext:
         ext = ".%s" % mime_extensions.get(mimetype, 'html')
     rurl = "%s://%s%s" % (o.scheme, o.netloc, path)
     if o.query:
         rurl += '/%s' % o.query
     if mimetype == 'text/html':
         if ext in ('.css', '.js', '.kss'):
             rurl += ext
         else:
             rurl = os.path.join(rurl, 'index.html')
     elif mimetype.startswith('image/'):
         # scaling needs special handling
         if image_view_pattern.search(path):
             rurl += ext
         else:
             rurl = os.path.join('%s%s' % (rurl, ext), "view%s" % ext)
     else:
         if path.endswith('view') or path.endswith('download'):
             rurl += ext
         else:
             rurl = os.path.join('%s%s' % (rurl, ext), "view%s" % ext)
     rurl = rurl.replace('%', '_').replace('&amp;', '_').replace('&', '_').replace('=', '_').replace('+', '_')
     rurl = rurl.replace(self.base, '')
     if rurl.startswith('/'):
         rurl = rurl[1:]
     self.filepaths[old_url] = rurl
     self.filepaths[url] = rurl
     return rurl
    def _run_part(self, mod, part, output):
        print " * Running part %s %s %s..." % (self.name, mod, part)
        self.on_run_part(mod, part, output)

        if self.console_output:
            outtype = None
        else:
            outtype = subprocess.PIPE

        path = os.path.join(self.osb.moddir, mod, part)
        if path.endswith(".inc"):
            fd = open(path)
            for line in fd:
                output.write(line)
        elif path.endswith(".sh"):
            proc = subprocess.Popen(["/bin/bash", path], shell=False,
                                    stdout=outtype, env=self.osb.env)
            try:
                (out, err) = proc.communicate()
            except (Exception, KeyboardInterrupt), e:
                proc.terminate()
                raise StageException(mod, part, repr(e))

            if not self.ignore_failures and proc.returncode != 0:
                raise StageException(mod, part, proc.returncode)
            if not self.console_output:
                output.write(out)
Example #25
def choose_cat(path):
    if path.endswith('.gz'):
        return 'zcat'
    elif path.endswith('.bz2'):
        return 'bzcat'
    else:
        return 'cat'
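A possible way to use choose_cat, pairing the selected command with subprocess to stream a file regardless of compression. This assumes zcat/bzcat are available on PATH; the helper name is ours:

import subprocess

def stream_lines(path):
    cat = choose_cat(path)  # 'zcat', 'bzcat' or 'cat', as chosen above
    proc = subprocess.Popen([cat, path], stdout=subprocess.PIPE)
    for line in proc.stdout:
        yield line
    proc.wait()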
Example #26
def open_path(quteproc, path):
    """Open a URL.

    If used like "When I open ... in a new tab", the URL is opened in a new
    tab. With "... in a new window", it's opened in a new window.
    """
    new_tab = False
    new_window = False
    wait_for_load_finished = True

    new_tab_suffix = ' in a new tab'
    new_window_suffix = ' in a new window'
    do_not_wait_suffix = ' without waiting'

    if path.endswith(new_tab_suffix):
        path = path[:-len(new_tab_suffix)]
        new_tab = True
    elif path.endswith(new_window_suffix):
        path = path[:-len(new_window_suffix)]
        new_window = True

    if path.endswith(do_not_wait_suffix):
        path = path[:-len(do_not_wait_suffix)]
        wait_for_load_finished = False

    quteproc.open_path(path, new_tab=new_tab, new_window=new_window)

    if wait_for_load_finished:
        quteproc.wait_for_load_finished(path)
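The suffix handling above (endswith followed by path[:-len(suffix)]) is the pre-3.9 way to strip a known suffix; on Python 3.9+ str.removesuffix expresses the same thing and is a no-op when the suffix is absent. A small sketch:

path = 'http://localhost/ in a new tab'
path = path.removesuffix(' in a new tab')     # -> 'http://localhost/'
path = path.removesuffix(' without waiting')  # unchanged, suffix not present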
Example #27
    def getLines( self, path, extensions = [ '.py', '.py3', '.pyw' ] ):
        " Accumulates lines for a file or directory "

        if not os.path.exists( path ):
            raise Exception( "Lines counter cannot open " + path )

        self.__reset()

        if os.path.isfile( path ):
            for ext in extensions:
                if path.endswith( ext ):
                    # It's OK
                    self.__processFile( path )
                    self.files = 1
                    self.filesSize = os.path.getsize( path )
                    return
            raise Exception( "Lines counter detected inconsistency. " \
                             "The file " + path + " does not have expected " \
                             "extension (" + ", ".join( extensions ) + ")" )

        # It's a directory
        if not path.endswith( os.path.sep ):
            path += os.path.sep

        self.__processDir( path, extensions )
        return
Example #28
def decompress_file_in_place(path, remove=False):

    """
    This function ...
    :param path:
    :param remove:
    :return:
    """

    from ..basics.log import log

    # Inform the user
    log.info("Decompressing '" + path + "' ...")

    # Check extension
    if path.endswith(".bz2"):
        new_path = path.rstrip(".bz2")
        decompress_bz2(path, new_path)
    elif path.endswith(".gz"):
        new_path = path.rstrip(".gz")
        if new_path.endswith(".tar"): new_path = new_path.split(".tar")[0]
        decompress_gz(path, new_path)
    elif path.endswith(".zip"):
        new_path = path.rstrip(".zip")
        decompress_zip(path, new_path)
    else: raise ValueError("Unrecognized archive type (must be bz2, gz [or tar.gz] or zip)")

    # Remove the original file if requested
    if remove: fs.remove_file(path)

    # Return the new path
    return new_path
def crawl(source, **kwargs):

    validate = kwargs.get('validate', False)
    inflate = kwargs.get('inflate', False)

    ensure = kwargs.get('ensure_placetype', [])
    skip = kwargs.get('skip_placetype', [])

    for (root, dirs, files) in os.walk(source):

        for f in files:
            path = os.path.join(root, f)
            path = os.path.abspath(path)

            ret = path

            if not path.endswith('geojson'):
                continue

            if path.endswith('-alt.geojson'):
                continue

            if validate or inflate or len(skip) or len(ensure):

                try:
                    fh = open(path, 'r')
                    data = geojson.load(fh)

                except Exception, e:
                    logging.error("failed to load %s, because %s" % (path, e))
                    continue

                if len(ensure):

                    props = data['properties']
                    pt = props.get('wof:placetype', None)

                    if not pt in ensure:
                        logging.debug("skipping %s because it is a %s" % (path, pt))
                        continue

                elif len(skip):

                    props = data['properties']
                    pt = props.get('wof:placetype', None)

                    if pt in skip:
                        logging.debug("skipping %s because it is a %s" % (path, pt))
                        continue

                    if not pt:
                        logging.error("can not determine placetype for %s" % path)

                if not inflate:
                    ret = path
                else:
                    ret = data

            yield ret
Example #30
	def setlibrary(self,path):
		if (path.startswith('"') and path.endswith('"')) or (path.startswith("'") and path.endswith("'")):
			path = path[1:-1]
		if os.path.isdir(path):
			Playlist.root = path
			return "Set library PATH to %s" % path
		else:
			return "Error PATH not found"
Example #31
    def do_save(self, line):
        """Save an item to the filesystem.
'save n filename' saves menu item n to the specified filename.
'save filename' saves the last viewed item to the specified filename.
'save n' saves menu item n to an automagic filename."""
        args = line.strip().split()

        # First things first, figure out what our arguments are
        if len(args) == 0:
            # No arguments given at all
            # Save current item, if there is one, to a file whose name is
            # inferred from the gopher path
            if not self.tmp_filename:
                print("You need to visit an item first!")
                return
            else:
                index = None
                filename = None
        elif len(args) == 1:
            # One argument given
            # If it's numeric, treat it as an index, and infer the filename
            try:
                index = int(args[0])
                filename = None
            # If it's not numeric, treat it as a filename and
            # save the current item
            except ValueError:
                index = None
                filename = os.path.expanduser(args[0])
        elif len(args) == 2:
            # Two arguments given
            # Treat first as an index and second as filename
            index, filename = args
            try:
                index = int(index)
            except ValueError:
                print("First argument is not a valid item index!")
                return
            filename = os.path.expanduser(filename)
        else:
            print("You must provide an index, a filename, or both.")
            return

        # Next, fetch the item to save, if it's not the current one.
        if index != None:
            last_gi = self.gi
            try:
                gi = self.lookup[index - 1]
                self._go_to_gi(gi, update_hist=False, handle=False)
            except IndexError:
                print("Index too high!")
                self.gi = last_gi
                return

        # Derive filename from current GI's path, if one hasn't been set
        if not filename:
            if self.gi.itemtype == '1':
                path = self.gi.path
                if path in ("", "/"):
                    # Attempt to derive a nice filename from the gopher
                    # item name
                    filename = gi.name.lower().replace(" ", "_") + ".txt"
                else:
                    # Derive a filename from the last component of the
                    # path
                    if path.endswith("/"):
                        path = path[0:-1]
                    filename = os.path.split(path)[1]
            else:
                filename = os.path.basename(self.gi.path)
            print("Set filename to: " + filename)

        # Check for filename collisions and actually do the save if safe
        if os.path.exists(filename):
            print("File already exists!")
        else:
            # Don't use _get_active_tmpfile() here, because we want to save the
            # "source code" of menus, not the rendered view - this way VF-1
            # can navigate to it later.
            shutil.copyfile(self.tmp_filename, filename)
            print("Saved to %s" % filename)

        # Restore gi if necessary
        if index != None:
            self._go_to_gi(last_gi)
Example #32
    def test_find_tests_customize_via_package_pattern(self):
        # This test uses the example 'do-nothing' load_tests from
        # https://docs.python.org/3/library/unittest.html#load-tests-protocol
        # to make sure that that actually works.
        # Housekeeping
        original_listdir = os.listdir
        def restore_listdir():
            os.listdir = original_listdir
        self.addCleanup(restore_listdir)
        original_isfile = os.path.isfile
        def restore_isfile():
            os.path.isfile = original_isfile
        self.addCleanup(restore_isfile)
        original_isdir = os.path.isdir
        def restore_isdir():
            os.path.isdir = original_isdir
        self.addCleanup(restore_isdir)
        self.addCleanup(sys.path.remove, abspath('/foo'))

        # Test data: we expect the following:
        # a listdir to find our package, and isfile and isdir checks on it.
        # a module-from-name call to turn that into a module
        # followed by load_tests.
        # then our load_tests will call discover() which is messy
        # but that finally chains into find_tests again for the child dir -
        # which is why we don't have an infinite loop.
        # We expect to see:
        # the module load tests for both package and plain module called,
        # and the plain module result nested by the package module load_tests
        # indicating that it was processed and could have been mutated.
        vfs = {abspath('/foo'): ['my_package'],
               abspath('/foo/my_package'): ['__init__.py', 'test_module.py']}
        def list_dir(path):
            return list(vfs[path])
        os.listdir = list_dir
        os.path.isdir = lambda path: not path.endswith('.py')
        os.path.isfile = lambda path: path.endswith('.py')

        class Module(object):
            paths = []
            load_tests_args = []

            def __init__(self, path):
                self.path = path
                self.paths.append(path)
                if path.endswith('test_module'):
                    def load_tests(loader, tests, pattern):
                        self.load_tests_args.append((loader, tests, pattern))
                        return [self.path + ' load_tests']
                else:
                    def load_tests(loader, tests, pattern):
                        self.load_tests_args.append((loader, tests, pattern))
                        # top level directory cached on loader instance
                        __file__ = '/foo/my_package/__init__.py'
                        this_dir = os.path.dirname(__file__)
                        pkg_tests = loader.discover(
                            start_dir=this_dir, pattern=pattern)
                        return [self.path + ' load_tests', tests
                            ] + pkg_tests
                self.load_tests = load_tests

            def __eq__(self, other):
                return self.path == other.path

        loader = unittest.TestLoader()
        loader._get_module_from_name = lambda name: Module(name)
        loader.suiteClass = lambda thing: thing

        loader._top_level_dir = abspath('/foo')
        # this time no '.py' on the pattern so that it can match
        # a test package
        suite = list(loader._find_tests(abspath('/foo'), 'test*.py'))

        # We should have loaded tests from both my_package and
        # my_package.test_module, and also run the load_tests hook in both.
        # (normally this would be nested TestSuites.)
        self.assertEqual(suite,
                         [['my_package load_tests', [],
                          ['my_package.test_module load_tests']]])
        # Parents before children.
        self.assertEqual(Module.paths,
                         ['my_package', 'my_package.test_module'])

        # load_tests should have been called twice with loader, tests and pattern
        self.assertEqual(Module.load_tests_args,
                         [(loader, [], 'test*.py'),
                          (loader, [], 'test*.py')])
Example #33
def findFirstIsoImage(path):
    """
    Find the first iso image in path
    This also supports specifying a specific .iso image

    Returns the basename of the image
    """
    try:
        os.stat(path)
    except OSError:
        return None

    arch = _arch

    if os.path.isfile(path) and path.endswith(".iso"):
        files = [os.path.basename(path)]
        path = os.path.dirname(path)
    else:
        files = os.listdir(path)

    for fn in files:
        what = path + '/' + fn
        log.debug("Checking %s", what)
        if not isys.isIsoImage(what):
            continue

        log.debug("mounting %s on /mnt/install/cdimage", what)
        try:
            blivet.util.mount(what,
                              "/mnt/install/cdimage",
                              fstype="iso9660",
                              options="ro")
        except OSError:
            continue

        if not os.access("/mnt/install/cdimage/.discinfo", os.R_OK):
            blivet.util.umount("/mnt/install/cdimage")
            continue

        log.debug("Reading .discinfo")
        f = open("/mnt/install/cdimage/.discinfo")
        f.readline()  # skip timestamp
        f.readline()  # skip release description
        discArch = f.readline().strip()  # read architecture
        f.close()

        log.debug("discArch = %s", discArch)
        if discArch != arch:
            log.warning("findFirstIsoImage: architectures mismatch: %s, %s",
                        discArch, arch)
            blivet.util.umount("/mnt/install/cdimage")
            continue

        # If there's no repodata, there's no point in trying to
        # install from it.
        if not os.access("/mnt/install/cdimage/repodata", os.R_OK):
            log.warning("%s doesn't have repodata, skipping", what)
            blivet.util.umount("/mnt/install/cdimage")
            continue

        # warn user if images appears to be wrong size
        if os.stat(what)[stat.ST_SIZE] % 2048:
            log.warning("%s appears to be corrupted", what)
            exn = InvalidImageSizeError("size is not a multiple of 2048 bytes",
                                        what)
            if errorHandler.cb(exn) == ERROR_RAISE:
                raise exn

        log.info("Found disc at %s", fn)
        blivet.util.umount("/mnt/install/cdimage")
        return fn

    return None
Example #34
def postprocess_detailed_halo_data(path=None):
    """
    
    postprocess_detailed_halo_data : function
	----------

    Collates all the detailed snapshot halo data in given path into one combined file. 

    Parameters
    ----------

    path : str

        Path which contains the detailed halo data files for each snapshot. 

    Returns
    --------
    None

    saves to file:
    B3_HaloData_outname.dat : list of dict

    A list (for each snap desired) of dictionaries which contain halo data with the following fields:
        'ID'
        'hostHaloID'
        'Snap'
        'Head'
        'Tail'

        'SimulationInfo'
            'h_val'
            'Hubble_unit'
            'Omega_Lambda'
            'ScaleFactor'
            'z'
            'LookbackTime'

        'UnitInfo'
        'VR_FilePath'
        'VR_FileType'
        'Part_FilePath'
        'Part_FileType'

        AND ANY EXTRAS from vr_property_fields

        """

    if path is None:
        path='halo_data/'
    
    if not path.endswith('/'):
        path=path+'/'
    
    halo_data_files=sorted(os.listdir(path))
    halo_data_files_trunc=[halo_data_file for halo_data_file in halo_data_files if 'HaloData' in halo_data_file]
    halo_data_files_wdir=[path+halo_data_file for halo_data_file in halo_data_files_trunc]
    outfilename=halo_data_files_trunc[-1][:-8]+'.dat'
    if os.path.exists(outfilename):
        print('Removing existing detailed halo data ...')
        os.remove(outfilename)
    print('Will save full halo data to: ',outfilename)
    print(f'Number of halo data snaps: {len(halo_data_files_wdir)}')
    full_halo_data=[[] for i in range(len(halo_data_files_wdir))]
    for isnap,halo_data_file in enumerate(halo_data_files_wdir):
        print(f'Adding to full halo data for isnap {isnap}')
        halo_data_snap=open_pickle(halo_data_file)
        full_halo_data[isnap]=halo_data_snap
        
    dump_pickle(data=full_halo_data,path=outfilename)
    return full_halo_data
Example #35
    def __onDirChanged(self, path):
        """Triggered when the dir is changed"""
        if not path.endswith(os.path.sep):
            path = path + os.path.sep

        # Check if it is a top level dir
        try:
            oldSet = self.__fsTopLevelSnapshot[path]

            # Build a new set of what is in that top level dir
            newSet = set()
            for item in os.listdir(path):
                if not os.path.isdir(path + item):
                    continue    # Only dirs are of interest for the top level
                item = item + os.path.sep
                if item in oldSet:
                    newSet.add(item)
            # Now we have an old set and a new one with those from the old
            # which actually exist
            diff = oldSet - newSet

            # diff are those which disappeared. We need to do the following:
            # - build a list of all the items in the fs snapshot which start
            #   from this dir
            # - build a list of dirs which should be deregistered from the
            #   watcher. This list includes both top level and project level
            # - deregister dirs from the watcher
            # - emit a signal of what disappeared
            if not diff:
                return  # no changes

            self.__fsTopLevelSnapshot[path] = newSet

            dirsToBeRemoved = []
            itemsToReport = []

            for item in diff:
                self.__processRemoveTopDir(path + item, dirsToBeRemoved,
                                           itemsToReport)

            # Here: it is possible that the last dir to watch disappeared
            if not newSet:
                # There is nothing to watch here anymore
                dirsToBeRemoved.append(path)
                del self.__fsTopLevelSnapshot[path]

                parts = path[1:-1].split(os.path.sep)
                for index in range(len(parts) - 2, 0, -1):
                    candidate = os.path.sep + \
                                os.path.sep.join(parts[0:index]) + \
                                os.path.sep
                    dirSet = self.__fsTopLevelSnapshot[candidate]
                    dirSet.remove(parts[index + 1] + os.path.sep)
                    if not dirSet:
                        dirsToBeRemoved.append(candidate)
                        del self.__fsTopLevelSnapshot[candidate]
                        continue
                    break   # it is not the last item in the set

            # Update the watcher
            if dirsToBeRemoved:
                self.__dirWatcher.removePaths(dirsToBeRemoved)

            # Report
            if itemsToReport:
                self.sigFSChanged.emit(itemsToReport)
            return
        except:
            # it is not a top level dir - no key
            pass

        # Here: the change is in the project level dir
        try:
            oldSet = self.__fsSnapshot[path]

            # Build a new set of what is in that top level dir
            newSet = set()
            for item in os.listdir(path):
                if self.__shouldExclude(item):
                    continue
                if os.path.isdir(path + item):
                    newSet.add(item + os.path.sep)
                else:
                    newSet.add(item)

            # Here: we have a new and old snapshots
            # Lets calculate the difference
            deletedItems = oldSet - newSet
            addedItems = newSet - oldSet

            if not deletedItems and not addedItems:
                return  # No changes

            # Update the changed dir set
            self.__fsSnapshot[path] = newSet

            # We need to build some lists:
            # - list of files which were added
            # - list of dirs which were added
            # - list of files which were deleted
            # - list of dirs which were deleted
            # The deleted dirs must be unregistered in the watcher
            # The added dirs must be registered
            itemsToReport = []
            dirsToBeAdded = []
            dirsToBeRemoved = []

            for item in addedItems:
                if item.endswith(os.path.sep):
                    # directory was added
                    self.__processAddedDir(path + item,
                                           dirsToBeAdded, itemsToReport)
                else:
                    itemsToReport.append("+" + path + item)

            for item in deletedItems:
                if item.endswith(os.path.sep):
                    # directory was deleted
                    self.__processRemovedDir(path + item,
                                             dirsToBeRemoved, itemsToReport)
                else:
                    itemsToReport.append("-" + path + item)

            # Update the watcher
            if dirsToBeRemoved:
                self.__dirWatcher.removePaths(dirsToBeRemoved)
            if dirsToBeAdded:
                self.__dirWatcher.addPaths(dirsToBeAdded)

            # Report
            self.sigFSChanged.emit(itemsToReport)
        except:
            # It could be a queued signal about what was already reported
            pass

        # self.debug()
        return
Example #36
 def wpath(path):
     if (path.endswith('/') or path.endswith(os.path.sep)):
         path = path[:-1]
     path = path.replace('/', '\\')
     return path
    bin = os.path.dirname(whoami)
    root = os.path.dirname(bin)

    placetypes = os.path.join(root, 'placetypes')

    tmp = {}
    placetype_map = {}

    for (root, dirs, files) in os.walk(placetypes):

        for f in files:

            path = os.path.join(root, f)

            if not path.endswith('.json'):
                continue

            fh = open(path, 'r')
            data = json.load(fh)

            id = data.get('wof:id', None)
            placetype = data.get('wof:name', None)
            role = data.get('wof:role', None)
            parent = data.get('wof:parent', [])

            placetype_map[placetype] = id

            tmp[placetype] = {
                'role': role,
                'parent': parent,
Example #38
 def is_dir_path(path):
     # type: (RecordPath) -> bool
     return path.endswith("/")
def is_pdf_note(path):
    if not path.endswith('.pdf') or os.path.exists(path):
        return False
    return os.path.exists(path[:-3] + 'note')
Example #40
 def _ends_with_slash(path):
     return path.endswith('/') or path.endswith('\\')
 def _pathEndsWithSlash(self, path):
     return path.endswith("/")
Example #42
    def run(self):
        """
        Verbatim copy of docutils.parsers.rst.directives.misc.Include.run()
        that just calls to our Code instead of builtin CodeBlock but otherwise
        just passes it back to the parent implementation.
        """
        if not 'code' in self.options:
            return docutils.parsers.rst.directives.misc.Include.run(self)

        source = self.state_machine.input_lines.source(
            self.lineno - self.state_machine.input_offset - 1)
        source_dir = os.path.dirname(os.path.abspath(source))
        path = directives.path(self.arguments[0])
        if path.startswith('<') and path.endswith('>'):
            path = os.path.join(self.standard_include_path, path[1:-1])
        path = os.path.normpath(os.path.join(source_dir, path))
        path = utils.relative_path(None, path)
        path = nodes.reprunicode(path)
        encoding = self.options.get(
            'encoding', self.state.document.settings.input_encoding)
        e_handler = self.state.document.settings.input_encoding_error_handler
        tab_width = self.options.get('tab-width',
                                     self.state.document.settings.tab_width)
        try:
            self.state.document.settings.record_dependencies.add(path)
            include_file = io.FileInput(source_path=path,
                                        encoding=encoding,
                                        error_handler=e_handler)
        except UnicodeEncodeError as error:
            raise self.severe('Problems with "%s" directive path:\n'
                              'Cannot encode input file path "%s" '
                              '(wrong locale?).' %
                              (self.name, SafeString(path)))
        except IOError as error:
            raise self.severe('Problems with "%s" directive path:\n%s.' %
                              (self.name, ErrorString(error)))
        startline = self.options.get('start-line', None)
        endline = self.options.get('end-line', None)
        try:
            if startline or (endline is not None):
                lines = include_file.readlines()
                rawtext = ''.join(lines[startline:endline])
            else:
                rawtext = include_file.read()
        except UnicodeError as error:
            raise self.severe('Problem with "%s" directive:\n%s' %
                              (self.name, ErrorString(error)))
        # start-after/end-before: no restrictions on newlines in match-text,
        # and no restrictions on matching inside lines vs. line boundaries
        after_text = self.options.get('start-after', None)
        if after_text:
            # skip content in rawtext before *and incl.* a matching text
            after_index = rawtext.find(after_text)
            if after_index < 0:
                raise self.severe('Problem with "start-after" option of "%s" '
                                  'directive:\nText not found.' % self.name)
            rawtext = rawtext[after_index + len(after_text):]
        before_text = self.options.get('end-before', None)
        if before_text:
            # skip content in rawtext after *and incl.* a matching text
            before_index = rawtext.find(before_text)
            if before_index < 0:
                raise self.severe('Problem with "end-before" option of "%s" '
                                  'directive:\nText not found.' % self.name)
            rawtext = rawtext[:before_index]

        include_lines = statemachine.string2lines(rawtext,
                                                  tab_width,
                                                  convert_whitespace=True)

        self.options['source'] = path
        codeblock = Code(
            self.name,
            [self.options.pop('code')],  # arguments
            self.options,
            include_lines,  # content
            self.lineno,
            self.content_offset,
            self.block_text,
            self.state,
            self.state_machine)
        return codeblock.run()
Example #43
def f_reload(bot, trigger):
    """Reloads a module, for use by admins only."""
    if not trigger.admin:
        stderr('someone (dammit) tried to reload')
        return

    name = trigger.group(2)
    if name == bot.config.owner:
        return bot.reply('What?')

    if (not name) or (name == '*') or (name.upper() == 'ALL THE THINGS'):
        # Calling the shutdown methods of the modules
        for moduleName in bot.config.enumerate_modules():
            module = sys.modules[moduleName]
            if hasattr(module, "shutdown"):
                module.shutdown(bot)
        bot.callables = None
        bot.commands = None
        bot.setup()
        return bot.reply('done reloading')

    if not name in sys.modules:
        return bot.reply('%s: no such module!' % name)

    old_module = sys.modules[name]

    old_callables = {}
    for obj_name, obj in iteritems(vars(old_module)):
        if bot.is_callable(obj) or bot.is_shutdown(obj):
            old_callables[obj_name] = obj

    # Call the shutdown method of the module
    if hasattr(old_module, "shutdown"):
        old_module.shutdown(bot)

    bot.unregister(old_callables)
    # Also remove all references to willie callables from top level of the
    # module, so that they will not get loaded again if reloading the
    # module does not override them.

    for obj_name in old_callables.keys():
        delattr(old_module, obj_name)

    # Also delete the setup and shutdown function
    if hasattr(old_module, "setup"):
        delattr(old_module, "setup")
    if hasattr(old_module, "shutdown"):
        delattr(old_module, "shutdown")

    # Thanks to moot for prodding me on this
    path = old_module.__file__
    if path.endswith('.pyc') or path.endswith('.pyo'):
        path = path[:-1]
    if not os.path.isfile(path):
        return bot.reply('Found %s, but not the source file' % name)

    module = imp.load_source(name, path)
    sys.modules[name] = module
    if hasattr(module, 'setup'):
        module.setup(bot)

    mtime = os.path.getmtime(module.__file__)
    modified = time.strftime('%Y-%m-%d %H:%M:%S', time.gmtime(mtime))

    bot.register(vars(module))
    bot.bind_commands()

    bot.reply('%r (version: %s)' % (module, modified))
Example #44
def is_cpp_file(path):
    return path.endswith('.cc') or path.endswith('.h')
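Many snippets in this listing chain several endswith calls with or; str.endswith also accepts a tuple of suffixes, which collapses such checks into a single call. is_cpp_file above could be written as:

def is_cpp_file(path):
    # endswith with a tuple returns True if any of the suffixes matches
    return path.endswith(('.cc', '.h'))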
def get_path(path):
    if path.endswith('/'):
        path = path[:-1]
    return(path)
def compare_dict_blobs(path, ablob, bblob, report_all, report_ver):
    adict = blob_to_dict(ablob)
    bdict = blob_to_dict(bblob)

    pkgname = os.path.basename(path)

    defaultvals = {}
    defaultvals['PKG'] = pkgname
    defaultvals['PKGE'] = '0'

    changes = []
    keys = list(
        set(adict.keys()) | set(bdict.keys()) | set(defaultval_map.keys()))
    for key in keys:
        astr = adict.get(key, '')
        bstr = bdict.get(key, '')
        if key in ver_monitor_fields:
            monitored = report_ver or astr or bstr
        else:
            monitored = key in monitor_fields
        mapped_key = defaultval_map.get(key, '')
        if mapped_key:
            if not astr:
                astr = '%s [default]' % adict.get(mapped_key,
                                                  defaultvals.get(key, ''))
            if not bstr:
                bstr = '%s [default]' % bdict.get(mapped_key,
                                                  defaultvals.get(key, ''))

        if astr != bstr:
            if (not report_all) and key in numeric_fields:
                aval = int(astr or 0)
                bval = int(bstr or 0)
                if aval != 0:
                    percentchg = ((bval - aval) / float(aval)) * 100
                else:
                    percentchg = 100
                if abs(percentchg) < monitor_numeric_threshold:
                    continue
            elif (not report_all) and key in list_fields:
                if key == "FILELIST" and path.endswith(
                        "-dbg") and bstr.strip() != '':
                    continue
                if key in [
                        'RPROVIDES', 'RDEPENDS', 'RRECOMMENDS', 'RSUGGESTS',
                        'RREPLACES', 'RCONFLICTS'
                ]:
                    (depvera, depverb) = compare_pkg_lists(astr, bstr)
                    if depvera == depverb:
                        continue
                if key == 'FILELIST':
                    alist = shlex.split(astr)
                    blist = shlex.split(bstr)
                else:
                    alist = astr.split()
                    blist = bstr.split()
                alist.sort()
                blist.sort()
                # We don't care about the removal of self-dependencies
                if pkgname in alist and not pkgname in blist:
                    alist.remove(pkgname)
                if ' '.join(alist) == ' '.join(blist):
                    continue

            if key == 'PKGR' and not report_all:
                vers = []
                # strip leading 'r' and dots
                for ver in (astr.split()[0], bstr.split()[0]):
                    if ver.startswith('r'):
                        ver = ver[1:]
                    vers.append(ver.replace('.', ''))
                maxlen = max(len(vers[0]), len(vers[1]))
                try:
                    # pad with '0' and convert to int
                    vers = [int(ver.ljust(maxlen, '0')) for ver in vers]
                except ValueError:
                    pass
                else:
                    # skip decrements and increments
                    if abs(vers[0] - vers[1]) == 1:
                        continue

            chg = ChangeRecord(path, key, astr, bstr, monitored)
            changes.append(chg)
    return changes
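The PKGR branch above normalizes package-revision strings before comparing them; below is a standalone sketch of that normalization (the helper name is mine, not from the original code).
def normalize_revisions(astr, bstr):
    # Strip a leading 'r', drop the dots and right-pad with '0' so both
    # revisions have the same number of digits, as compare_dict_blobs does.
    vers = []
    for ver in (astr.split()[0], bstr.split()[0]):
        if ver.startswith('r'):
            ver = ver[1:]
        vers.append(ver.replace('.', ''))
    maxlen = max(len(vers[0]), len(vers[1]))
    return [int(ver.ljust(maxlen, '0')) for ver in vers]

# normalize_revisions('r1.2', 'r1.3') -> [12, 13]; the values differ by 1,
# so compare_dict_blobs treats it as a routine revision bump and skips it.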
def naijaPathmaker(dbname, path):
    if path.endswith('/'):
        return (path + dbname + '.db')
    else:
        return (path + '/' + dbname + '.db')
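A quick usage check of the two path helpers above; the directory names are made up.
# get_path() strips a single trailing slash; naijaPathmaker() produces the
# same .db path whether or not the directory already ends in '/'.
assert get_path('/var/data/') == '/var/data'
assert naijaPathmaker('users', '/var/data/') == '/var/data/users.db'
assert naijaPathmaker('users', '/var/data') == '/var/data/users.db'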
Exemple #48
0
def is_fileinfo(path):
    return path.endswith(".json")
Exemple #49
0
    def main(self):
        keys = self.env.get("plist_keys")

        # Many types of paths are accepted. Figure out which kind we have.
        path = os.path.normpath(self.env["info_path"])

        try:
            # Wrap all other actions in a try/finally so if we mount an image,
            # it will always be unmounted.

            # Check if we're trying to read something inside a dmg.
            (dmg_path, dmg, dmg_source_path) = self.parsePathForDMG(path)
            if dmg:
                mount_point = self.mount(dmg_path)
                path = os.path.join(mount_point, dmg_source_path.lstrip("/"))

            # Finally check whether this is at least a valid path
            if not os.path.exists(path):
                raise ProcessorError(f"Path '{path}' doesn't exist!")

            # Is the path a bundle?
            info_plist_path = self.get_bundle_info_path(path)
            if info_plist_path:
                path = info_plist_path

            # Does it have a 'plist' extension
            # (naively assuming 'plist' only names, for now)
            elif path.endswith(".plist"):
                # Full path to a plist was supplied, move on.
                pass

            # Might the path contain a bundle at its root?
            else:
                path = self.find_bundle(path)

            # Try to read the plist
            self.output(f"Reading: {path}")
            try:
                with open(path, "rb") as f:
                    info = plistlib.load(f)
            except Exception as err:
                raise ProcessorError(err)

            # Copy each plist_keys' values and assign to new env variables
            self.env["plist_reader_output_variables"] = {}
            for key, val in list(keys.items()):
                try:
                    self.env[val] = info[key]
                    self.output(
                        f"Assigning value of '{self.env[val]}' to output "
                        f"variable '{val}'"
                    )
                    # This one is for documentation/recordkeeping
                    self.env["plist_reader_output_variables"][val] = self.env[val]
                except KeyError:
                    raise ProcessorError(
                        f"Key '{key}' could not be found in the plist {path}!"
                    )

        finally:
            if dmg:
                self.unmount(dmg_path)
Exemple #50
0
 def isfile(path):
     # another_dir is not a package and so shouldn't be recursed into
     return not path.endswith('dir') and not 'another_dir' in path
Exemple #51
0
def open_file(path, mode='r'):
    """Open a file, which may be gzip-compressed."""
    if path.endswith('.gz'):
        return gzip.open(path, mode)
    else:
        return open(path, mode)
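A usage sketch for open_file(); it writes a throwaway gzip file into a temporary directory, so no real data files are assumed.
import gzip
import os
import tempfile

tmpdir = tempfile.mkdtemp()
plain = os.path.join(tmpdir, 'sample.txt')
packed = os.path.join(tmpdir, 'sample.txt.gz')

with open(plain, 'w') as fh:
    fh.write('hello\n')
with gzip.open(packed, 'wb') as fh:
    fh.write(b'hello\n')

print(open_file(plain).read())    # 'hello\n'
print(open_file(packed).read())   # b'hello\n' on Python 3 (gzip.open defaults to binary mode)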
Exemple #52
0
 def isdir(path):
     return path.endswith('dir')
  testroot = join(root, dir)
  suite.addTestsByExt(testroot, '.xht')
  suite.addTestsByExt(testroot, '.html')
  if exists(join(testroot, reftestPath)):
    suite.addReftests(testroot, reftestPath)
suite.addTestsByExt(root, '.xht')
suite.addTestsByExt(root, '.html')
if exists(join(root, reftestPath)):
  suite.addReftests(root, reftestPath)
for src, dst in rawDirs.items():
  if exists(join(root,src)):
    suite.addRaw(join(root,src), dst)

# Add unreviewed tests
for path in unreviewed:
  if path.endswith('.list'):
    print "Adding unreviewed reftests from %s" % path
    suite.addReftests(basepath(path), basename(path))
  else:
    def grep(file):
      if not (file.endswith('.xht') or file.endswith('.html')):
        return False
      for line in open(join(path, file)):
        if line.find(suite.specroot) != -1:
          return True
      return False
    files = listfiles(path)
    files = filter(grep, files)
    print "Adding %d unreviewed selftests from %s" % (len(files), path)
    suite.addTestsByList(path, files)
Exemple #54
0
    def get_source_info(self, source_spec):
        """
        This turns an argv string into a (Local|Tahoe)(File|Directory)Source.
        """
        precondition(isinstance(source_spec, unicode), source_spec)
        rootcap, path_utf8 = get_alias(self.aliases, source_spec, None)
        path = path_utf8.decode("utf-8")
        # any trailing slash is removed in abspath_expanduser_unicode(), so
        # make a note of it here, to throw an error later
        had_trailing_slash = path.endswith("/")
        if rootcap == DefaultAliasMarker:
            # no alias, so this is a local file
            pathname = abspath_expanduser_unicode(path)
            name = os.path.basename(pathname)
            if not os.path.exists(pathname):
                raise MissingSourceError(source_spec,
                                         quotefn=quote_local_unicode_path)
            if os.path.isdir(pathname):
                t = LocalDirectorySource(self.progress, pathname, name)
            else:
                if had_trailing_slash:
                    raise FilenameWithTrailingSlashError(
                        source_spec, quotefn=quote_local_unicode_path)
                if not os.path.isfile(pathname):
                    raise WeirdSourceError(pathname)
                t = LocalFileSource(pathname, name)  # non-empty
        else:
            # this is a tahoe object
            url = self.nodeurl + "uri/%s" % urllib.quote(rootcap)
            name = None
            if path:
                if path.endswith("/"):
                    path = path[:-1]
                url += "/" + escape_path(path)
                last_slash = path.rfind(u"/")
                name = path
                if last_slash != -1:
                    name = path[last_slash + 1:]

            resp = do_http("GET", url + "?t=json")
            if resp.status == 404:
                raise MissingSourceError(source_spec)
            elif resp.status != 200:
                raise HTTPError(
                    "Error examining source %s" % quote_output(source_spec),
                    resp)
            parsed = json.loads(resp.read())
            nodetype, d = parsed
            if nodetype == "dirnode":
                t = TahoeDirectorySource(self.nodeurl, self.cache,
                                         self.progress, name)
                t.init_from_parsed(parsed)
            else:
                if had_trailing_slash:
                    raise FilenameWithTrailingSlashError(source_spec)
                writecap = to_bytes(d.get("rw_uri"))
                readcap = to_bytes(d.get("ro_uri"))
                mutable = d.get("mutable",
                                False)  # older nodes don't provide it
                t = TahoeFileSource(self.nodeurl, mutable, writecap, readcap,
                                    name)
        return t
Exemple #55
0
def find_file(filename,
              env_vars=(),
              searchpath=(),
              file_names=None,
              url=None,
              verbose=True):
    """
    Search for a file to be used by nltk.

    :param filename: The name or path of the file.
    :param env_vars: A list of environment variable names to check.
    :param file_names: A list of alternative file names to check.
    :param searchpath: List of directories to search.
    :param url: URL presented to user for download help.
    :param verbose: Whether or not to print path when a file is found.
    """
    if file_names is None: file_names = [filename]
    assert isinstance(filename, basestring)
    assert not isinstance(file_names, basestring)
    assert not isinstance(searchpath, basestring)
    if isinstance(env_vars, basestring):
        env_vars = env_vars.split()

    # File exists, no magic
    if os.path.isfile(filename):
        if verbose: print '[Found %s: %s]' % (filename, filename)
        return filename
    for alternative in file_names:
        path_to_file = os.path.join(filename, alternative)
        if os.path.isfile(path_to_file):
            if verbose: print '[Found %s: %s]' % (filename, path_to_file)
            return path_to_file
        path_to_file = os.path.join(filename, 'file', alternative)
        if os.path.isfile(path_to_file):
            if verbose: print '[Found %s: %s]' % (filename, path_to_file)
            return path_to_file

    # Check environment variables
    for env_var in env_vars:
        if env_var in os.environ:
            path_to_file = os.environ[env_var]
            if os.path.isfile(path_to_file):
                if verbose: print '[Found %s: %s]' % (filename, path_to_file)
                return path_to_file
            else:
                for alternative in file_names:
                    path_to_file = os.path.join(os.environ[env_var],
                                                alternative)
                    if os.path.isfile(path_to_file):
                        if verbose:
                            print '[Found %s: %s]' % (filename, path_to_file)
                        return path_to_file
                    path_to_file = os.path.join(os.environ[env_var], 'file',
                                                alternative)
                    if os.path.isfile(path_to_file):
                        if verbose:
                            print '[Found %s: %s]' % (filename, path_to_file)
                        return path_to_file

    # Check the path list.
    for directory in searchpath:
        for alternative in file_names:
            path_to_file = os.path.join(directory, alternative)
            if os.path.isfile(path_to_file):
                return path_to_file

    # If we're on a POSIX system, then try using the 'which' command
    # to find the file.
    if os.name == 'posix':
        for alternative in file_names:
            try:
                p = subprocess.Popen(['which', alternative],
                                     stdout=subprocess.PIPE)
                stdout, stderr = p.communicate()
                path = stdout.strip()
                if path.endswith(alternative) and os.path.exists(path):
                    if verbose: print '[Found %s: %s]' % (filename, path)
                    return path
            except (KeyboardInterrupt, SystemExit):
                raise
            except:
                pass
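A hedged usage sketch for find_file(); the binary name, environment variable and directories below are illustrative examples, not nltk conventions.
# Look for a 'tar' executable: first as a literal path, then via $TAR_PATH,
# then in the two candidate directories, and finally with `which` on POSIX.
path_to_tar = find_file('tar',
                        env_vars=('TAR_PATH',),
                        searchpath=('/usr/bin', '/usr/local/bin'),
                        verbose=False)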
Exemple #56
0
    def begin(self,
              scope="",
              response_type="",
              use_nonce=False,
              path="",
              **kwargs):
        """
        Begin the OIDC flow.

        :param scope: Defines which user info claims are wanted
        :param response_type: Controls the parameters returned in the response from the Authorization Endpoint
        :param use_nonce: The nonce is only required for the implicit flow; setting this sends one anyway.
        :param path: The path part of the redirect URL
        :return: A 2-tuple, session identifier and URL to which the user should be redirected
        """
        _log_info = logger.info

        if self.debug:
            _log_info("- begin -")

        _page = self.consumer_config["authz_page"]
        if not path.endswith("/"):
            if _page.startswith("/"):
                self.redirect_uris = [path + _page]
            else:
                self.redirect_uris = ["%s/%s" % (path, _page)]
        else:
            if _page.startswith("/"):
                self.redirect_uris = [path + _page[1:]]
            else:
                self.redirect_uris = ["%s/%s" % (path, _page)]

        # Put myself in the dictionary of sessions, keyed on session-id
        if not self.seed:
            self.seed = rndstr()

        if not scope:
            scope = self.consumer_config["scope"]
        if not response_type:
            response_type = self.consumer_config["response_type"]

        sid = stateID(path, self.seed)
        self.grant[sid] = Grant(seed=self.seed)

        self._backup(sid)
        self.sdb["seed:%s" % self.seed] = sid
        self.sso_db[sid] = {}

        args = {
            "client_id": self.client_id,
            "state": sid,
            "response_type": response_type,
            "scope": scope,
        }

        # nonce is REQUIRED in implicit flow,
        # OPTIONAL on code flow.
        if "token" in response_type or use_nonce:
            args["nonce"] = rndstr(12)
            self.state2nonce[sid] = args["nonce"]

        if "max_age" in self.consumer_config:
            args["max_age"] = self.consumer_config["max_age"]

        _claims = None
        if "user_info" in self.consumer_config:
            _claims = ClaimsRequest(userinfo=Claims(
                **self.consumer_config["user_info"]))
        if "id_token" in self.consumer_config:
            if _claims:
                _claims["id_token"] = Claims(
                    **self.consumer_config["id_token"])
            else:
                _claims = ClaimsRequest(id_token=Claims(
                    **self.consumer_config["id_token"]))

        if _claims:
            args["claims"] = _claims

        if "request_method" in self.consumer_config:
            areq = self.construct_AuthorizationRequest(request_args=args,
                                                       extra_args=None,
                                                       request_param="request")

            if self.consumer_config["request_method"] == "file":
                id_request = areq["request"]
                del areq["request"]
                _filedir = self.consumer_config["temp_dir"]
                _webpath = self.consumer_config["temp_path"]
                _name = rndstr(10)
                filename = os.path.join(_filedir, _name)
                while os.path.exists(filename):
                    _name = rndstr(10)
                    filename = os.path.join(_filedir, _name)
                fid = open(filename, mode="w")
                fid.write(id_request)
                fid.close()
                _webname = "%s%s/%s" % (path, _webpath, _name)
                areq["request_uri"] = _webname
                self.request_uri = _webname
                self._backup(sid)
        else:
            if "userinfo_claims" in args:  # can only be carried in an IDRequest
                raise PyoidcError("Need a request method")

            areq = self.construct_AuthorizationRequest(AuthorizationRequest,
                                                       request_args=args)

        location = areq.request(self.authorization_endpoint)

        if self.debug:
            _log_info("Redirecting to: %s" % location)

        self.authz_req[areq["state"]] = areq
        return sid, location
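The redirect-URI construction at the top of begin() reduces to joining path and authz_page with exactly one slash; here is a standalone sketch of that rule (the helper name is mine, not pyoidc's).
def join_redirect_uri(path, page):
    # Join the client base path and the authz page with a single '/'.
    if path.endswith("/"):
        path = path[:-1]
    if page.startswith("/"):
        page = page[1:]
    return "%s/%s" % (path, page)

# join_redirect_uri("https://client.example.org/", "/authz") and
# join_redirect_uri("https://client.example.org", "authz") both yield
# "https://client.example.org/authz".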
    def main(self):
        keys = self.env.get('plist_keys')
        regexs = self.env.get('plist_regex')

        # Many types of paths are accepted. Figure out which kind we have.
        path = os.path.normpath(self.env['info_path'])

        try:
            # Wrap all other actions in a try/finally so if we mount an image,
            # it will always be unmounted.

            # Check if we're trying to read something inside a dmg.
            (dmg_path, dmg, dmg_source_path) = self.parsePathForDMG(path)
            if dmg:
                mount_point = self.mount(dmg_path)
                path = os.path.join(mount_point, dmg_source_path.lstrip('/'))

            # Finally check whether this is at least a valid path
            if not os.path.exists(path):
                raise ProcessorError("Path '%s' doesn't exist!" % path)

            # Is the path a bundle?
            info_plist_path = self.get_bundle_info_path(path)
            if info_plist_path:
                path = info_plist_path

            # Does it have a 'plist' extension
            # (naively assuming 'plist' only names, for now)
            elif path.endswith('.plist'):
                # Full path to a plist was supplied, move on.
                pass

            # Might the path contain a bundle at its root?
            else:
                path = self.find_bundle(path)

            # Try to read the plist
            self.output("Reading: %s" % path)
            try:
                info = FoundationPlist.readPlist(path)
            except (FoundationPlist.NSPropertyListSerializationException,
                    UnicodeEncodeError) as err:
                raise ProcessorError(err)

            # Copy each plist_keys' values and assign to new env variables
            self.env["plist_reader_output_variables"] = {}
            for key, val in keys.items():
                try:
                    # Read info[key] inside the try block so a missing key
                    # actually triggers the ProcessorError below.
                    if regexs.get(key, '') == '':
                        self.env[val] = info[key]
                    else:
                        myreg = regexs.get(key)
                        self.env[val] = re.sub(myreg['pattern'], myreg['repl'],
                                               info[key])
                    self.output(
                        "Assigning value of '%s' to output variable '%s'" %
                        (self.env[val], val))
                    # This one is for documentation/recordkeeping
                    self.env["plist_reader_output_variables"][val] = (
                        self.env[val])
                except KeyError:
                    raise ProcessorError(
                        "Key '%s' could not be found in the plist %s!" %
                        (key, path))

        finally:
            if dmg:
                self.unmount(dmg_path)
Exemple #58
0
def load_settings(  # pylint: disable=too-many-locals,too-many-branches,too-many-statements
        fm, clean):
    from ranger.core.actions import Actions
    import ranger.core.shared
    import ranger.api.commands
    from ranger.config import commands as commands_default

    # Load default commands
    fm.commands = ranger.api.commands.CommandContainer()
    include = [name for name in dir(Actions) if name not in COMMANDS_EXCLUDE]
    fm.commands.load_commands_from_object(fm, include)
    fm.commands.load_commands_from_module(commands_default)

    if not clean:
        system_confdir = os.path.join(os.sep, 'etc', 'ranger')
        if os.path.exists(system_confdir):
            sys.path.append(system_confdir)
        allow_access_to_confdir(ranger.args.confdir, True)

        # Load custom commands
        def import_file(name, path):  # From https://stackoverflow.com/a/67692
            # pragma pylint: disable=no-name-in-module,import-error,no-member, deprecated-method
            if sys.version_info >= (3, 5):
                import importlib.util as util
                spec = util.spec_from_file_location(name, path)
                module = util.module_from_spec(spec)
                spec.loader.exec_module(module)
            elif (3, 3) <= sys.version_info < (3, 5):
                from importlib.machinery import SourceFileLoader
                module = SourceFileLoader(name, path).load_module()
            else:
                import imp
                module = imp.load_source(name, path)
            # pragma pylint: enable=no-name-in-module,import-error,no-member
            return module

        def load_custom_commands(*paths):
            old_bytecode_setting = sys.dont_write_bytecode
            sys.dont_write_bytecode = True
            for custom_comm_path in paths:
                if os.path.exists(custom_comm_path):
                    try:
                        commands_custom = import_file('commands',
                                                      custom_comm_path)
                        fm.commands.load_commands_from_module(commands_custom)
                    except ImportError as ex:
                        LOG.debug("Failed to import custom commands from '%s'",
                                  custom_comm_path)
                        LOG.exception(ex)
                    else:
                        LOG.debug("Loaded custom commands from '%s'",
                                  custom_comm_path)
            sys.dont_write_bytecode = old_bytecode_setting

        system_comm_path = os.path.join(system_confdir, 'commands.py')
        custom_comm_path = fm.confpath('commands.py')
        load_custom_commands(system_comm_path, custom_comm_path)

        # XXX Load plugins (experimental)
        plugindir = fm.confpath('plugins')
        try:
            plugin_files = os.listdir(plugindir)
        except OSError:
            LOG.debug('Unable to access plugin directory: %s', plugindir)
        else:
            plugins = []
            for path in plugin_files:
                if not path.startswith('_'):
                    if path.endswith('.py'):
                        # remove trailing '.py'
                        plugins.append(path[:-3])
                    elif os.path.isdir(os.path.join(plugindir, path)):
                        plugins.append(path)

            if not os.path.exists(fm.confpath('plugins', '__init__.py')):
                LOG.debug(
                    "Creating missing '__init__.py' file in plugin folder")
                fobj = open(fm.confpath('plugins', '__init__.py'), 'w')
                fobj.close()

            ranger.fm = fm
            for plugin in sorted(plugins):
                try:
                    try:
                        # importlib does not exist before python2.7.  It's
                        # required for loading commands from plugins, so you
                        # can't use that feature in python2.6.
                        import importlib
                    except ImportError:
                        module = __import__('plugins', fromlist=[plugin])
                    else:
                        module = importlib.import_module('plugins.' + plugin)
                        fm.commands.load_commands_from_module(module)
                    LOG.debug("Loaded plugin '%s'", plugin)
                except Exception as ex:  # pylint: disable=broad-except
                    ex_msg = "Error while loading plugin '{0}'".format(plugin)
                    LOG.error(ex_msg)
                    LOG.exception(ex)
                    fm.notify(ex_msg, bad=True)
            ranger.fm = None

        allow_access_to_confdir(ranger.args.confdir, False)
        # Load rc.conf
        custom_conf = fm.confpath('rc.conf')
        system_conf = os.path.join(system_confdir, 'rc.conf')
        default_conf = fm.relpath('config', 'rc.conf')

        custom_conf_is_readable = os.access(custom_conf, os.R_OK)
        system_conf_is_readable = os.access(system_conf, os.R_OK)
        if (os.environ.get('RANGER_LOAD_DEFAULT_RC', 'TRUE').upper() != 'FALSE'
                or not (custom_conf_is_readable or system_conf_is_readable)):
            fm.source(default_conf)
        if system_conf_is_readable:
            fm.source(system_conf)
        if custom_conf_is_readable:
            fm.source(custom_conf)

    else:
        fm.source(fm.relpath('config', 'rc.conf'))
Exemple #59
0
 def IsTestCase(self, path):
     return path.endswith('.js')
Exemple #60
0
def uncrustify(ui, repo, *patterns, **options):
    """Run uncrustify on the specified files or directories.

    If no files are specified, operates on the whole working
    directory.

    Note: Files that don't have a .cc or .h suffix are always ignored,
    even if specified on the command line explicitly.

    By default, prints a list of files that are not clean according to
    uncrustify, using a similar output format as with hg status. No
    changes are made to the working directory.

    With the --diff option, prints the changes suggested by uncrustify
    in unified diff format. No changes are made to the working
    directory.

    With the --modify option, actually performs the changes suggested
    by uncrustify. The original (dirty) files are backed up with a
    .crusty suffix. Existing files with such a suffix are silently
    overwritten. To disable these backups, use --no-backup.

    This command always operates on the working directory, not on
    arbitrary repository revisions.

    Returns 0 on success.
    """
    if options["diff"] and options["modify"]:
        raise util.Abort("cannot specify --diff and --modify at the same time")

    if options["diff"]:
        mode = "diff"
    elif options["modify"]:
        mode = "modify"
    else:
        mode = "status"

    no_backup = options["no_backup"]
    show_clean = options["show_clean"]

    paths = [
        path for path in _get_files(repo, patterns, options)
        if path.endswith((".cc", ".h"))
    ]

    uncrustify_cfg = repo.pathto(".uncrustify.cfg")
    relpaths = [repo.pathto(path) for path in paths]
    if not os.path.exists(uncrustify_cfg):
        raise util.Abort("could not find .uncrustify.cfg in repository root")
    _run_uncrustify(uncrustify_cfg, relpaths)

    ctx = repo[None]
    for path in paths:
        relpath = repo.pathto(path)
        uncr_path = path + SUFFIX
        uncr_relpath = relpath + SUFFIX
        have_changes = (ctx[path].data() != ctx[uncr_path].data())

        if have_changes:
            if mode == "status":
                ui.write("M %s\n" % relpath, label="status.modified")
                util.unlink(uncr_relpath)
            elif mode == "diff":
                _run_diff(relpath, uncr_relpath)
                util.unlink(uncr_relpath)
            elif mode == "modify":
                if not no_backup:
                    util.rename(relpath, relpath + ".crusty")
                util.rename(uncr_relpath, relpath)
                if not ui.quiet:
                    ui.write("%s uncrustified\n" % relpath)
        else:
            if show_clean:
                if mode == "status":
                    ui.write("C %s\n" % relpath, label="status.clean")
                elif mode == "modify":
                    ui.write("%s is clean\n" % relpath)
            util.unlink(uncr_relpath)
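The paths filter in uncrustify() relies on str.endswith() accepting a tuple of suffixes; a minimal standalone illustration with invented file names:
candidates = ['src/main.cc', 'src/main.h', 'README.md', 'build/main.o']
# endswith() with a tuple matches any of the listed suffixes, which is how
# uncrustify() keeps only C++ sources and headers.
cpp_sources = [p for p in candidates if p.endswith(('.cc', '.h'))]
# cpp_sources == ['src/main.cc', 'src/main.h']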