Example #1
0
def get_template(archivist, context):
    """Return the correct Jinja2 Template object for this archetype.

    Builds a candidate list, most specific first, and lets Jinja select
    the first template that actually exists.
    """
    candidates = []

    def add(value):
        # Config values may be a single template name or a sequence of them.
        if is_sequence(value):
            candidates.extend(value)
        elif value:
            candidates.append(value)

    # 1. A template explicitly requested by the archetype.
    add(context.get('template'))

    # 2. Templates tied to the itemtype, walking up the path hierarchy so
    #    parent types serve as fallbacks.
    if 'itemtype' in context:
        candidates.append(context['itemtype'])
        parent = path.split(context['itemtype'])[0]
        while parent:
            candidates.append(parent)
            parent = path.split(parent)[0]

    # 3. The site-wide default template, if configured.
    add(archivist.siteconfig.get('default_template'))

    # 4. A built-in "emergency" template so selection can never fail.
    candidates.append(Template(fallback_template))

    return archivist.jinja.select_template(candidates)
Example #2
0
    def __init__(self, cmpFullPath, includes):
        """Compile one .cmp script into paired .cpp/.h outputs.

        Derives the output paths from *cmpFullPath*, reads any previously
        generated files so their embedded source hashes can be compared,
        runs the compile step, then renders the final file contents.
        """
        self.cmpFullPath = cmpFullPath
        # Map .../src/scripts/cmp/X.cmp -> /cmp-relative /cpp/X.cpp path,
        # then root it at bindir().
        cppPath = cmpFullPath[cmpFullPath.index('/src/scripts/cmp')+len('/src/scripts'):].replace('/cmp/', '/cpp/').replace('.cmp', '.cpp')
        self.cppFullPath = bindir() + cppPath
        self.hFullPath = self.cppFullPath.replace('.cpp', '.h')
        self.cmpFilename = posixpath.split(self.cmpFullPath)[1]
        self.cppFilename = posixpath.split(self.cppFullPath)[1]
        self.hFilename = posixpath.split(self.hFullPath)[1]

        # (exists, mtime) pairs for the three files involved.
        self.cppExists, self.cppModTime = check_file(self.cppFullPath)
        self.cmpExists, self.cmpModTime = check_file(self.cmpFullPath)
        self.hExists, self.hModTime = check_file(self.hFullPath)

        if (self.cppExists):
            # Previously generated .cpp: recover its embedded source and
            # recorded hash, and recompute the actual hash of that source
            # (md5.new => this is Python 2 code).
            self.cppOutputOld, self.cppSourceOld, self.cppSourceOldHash = read_cpp_file(self.cppFullPath)
            self.cppSourceOldHashActual = md5.new(self.cppSourceOld).hexdigest()

        if (self.hExists):
            self.hOutputOld, self.hSourceOld, self.hSourceOldHash = read_cpp_file(self.hFullPath)
            self.hSourceOldHashActual = md5.new(self.hSourceOld).hexdigest()

        # Produces self.cppSource / self.cppSourceHash / self.hSource etc.
        self._compile(includes)

        # TEMPLATE fields: output name, source .cmp name, license, hash, body.
        self.cppOutput = TEMPLATE % (self.cppFilename, self.cmpFilename, license_text('//', self.cppFullPath), self.cppSourceHash, self.cppSource)

        if (self.hSource is not None):
            self.hOutput = TEMPLATE % (self.hFilename, self.cmpFilename, license_text('//', self.hFullPath), self.hSourceHash, self.hSource)
Example #3
0
def write_latex(outdir,images,prefix,query_image):
  """Render a LaTeX report '<prefix>.tex' into *outdir*.

  Copies *query_image* and each entry of *images* into *outdir* under
  deterministic names, then writes a document showing the query figure
  followed by a grid of result subfigures.
  """
  otex=posixpath.join(outdir,'{}.tex'.format(prefix))
  with open(otex,'w') as f:
    # Preamble and document start.
    print(r'''\documentclass{article}
\usepackage{graphicx}
\usepackage{fullpage}
\usepackage{paralist}
\usepackage{multirow}
\usepackage{caption}
\usepackage{subcaption}
\usepackage{amssymb,amsmath}
\usepackage{tikz}
\usetikzlibrary{arrows}
\begin{document}''',file=f)
    x=query_image
    # Copy the query image to "<prefix>query<ext>" so the .tex is
    # self-contained within outdir.
    pname=posixpath.join(outdir,'{}query{}'.format(prefix,posixpath.splitext(x)[1]))
    shutil.copyfile(x,pname)
    print(r'''\begin{figure}[h]
\centering
\includegraphics[width=2.0in]{%s}
\caption{query} \label{fig:%s}
\end{figure}''' % (posixpath.split(pname)[1],prefix+'query'),file=f)
    print(r'\begin{figure}',file=f)
    # One zero-padded numbered copy plus subfigure block per result image.
    for i,x in enumerate(images):
      pname=posixpath.join(outdir,'{}{:03}{}'.format(prefix,i,posixpath.splitext(x)[1]))
      shutil.copyfile(x,pname)
      print(r'''\begin{minipage}[b]{.5\linewidth}
\centering \includegraphics[width=1.0in]{%s}
\subcaption{A subfigure}\label{fig:%s}
\end{minipage}''' % (posixpath.split(pname)[1],prefix+str(i)),file=f)
    print(r'\end{figure}',file=f)
    print(r'''\end{document}''',file=f)
Example #4
0
    def lookup(self, path=None, inode=None, inode_id=None):
        """Resolve a VFS entry given one of *path*, *inode_id* or *inode*
        (checked in that order).

        path     -> returns the matching inode_id
        inode_id -> returns the full "path/name" string
        inode    -> returns (path, inode_id)

        Raises RuntimeError when a path/inode is unknown, IOError for an
        unknown inode_id.
        """
        dbh=DB.DBO(self.case)
        if path:
            dir,name = posixpath.split(path)
            if not name:
                # *path* ended with a trailing slash; split its parent.
                dir,name = posixpath.split(path[:-1])
            if dir == '/':
                dir = ''

            # BUG FIX: the query has exactly two placeholders but three
            # parameters (dir, name, name) were being supplied; pass the
            # matching (dir, name) pair.
            dbh.execute("select inode_id from vfs "
                        "where path=%r and name=%r and "
                        "not isnull(inode_id) limit 1", (dir, name))
            res = dbh.fetch()
            if not res:
                raise RuntimeError("VFS path not found %s/%s" % (dir,name))
            return res['inode_id']

        elif inode_id:
            dbh.execute("select inode_id, concat(path,'/',name) as path from vfs  "
                        "where vfs.inode_id=%r limit 1", inode_id)
            res = dbh.fetch()
            if not res: raise IOError("Inode ID %s not found" % inode_id)
            return res['path']

        else:
            dbh.execute("select vfs.inode_id, concat(path,'/',name) as path from vfs "
                        "where urn=%r limit 1", inode)
            res = dbh.fetch()
            if not res:
                raise RuntimeError("VFS Inode %s not known" % inode)
            return res["path"], res['inode_id']
Example #5
0
File: os.py Project: ztane/jython3
def makedirs(name, mode=0o777, exist_ok=False):
    """makedirs(name [, mode=0o777][, exist_ok=False])

    Recursively create directory *name*, creating every missing
    intermediate path segment first (not just the rightmost).  If the
    leaf already exists, an OSError is raised unless *exist_ok* is true.
    """
    head, tail = path.split(name)
    if not tail:
        # *name* ended in a separator; re-split to find the real leaf.
        head, tail = path.split(head)
    if head and tail and not path.exists(head):
        try:
            makedirs(head, mode, exist_ok)
        except FileExistsError:
            # Someone else created the parent first -- that is fine.
            pass
        this_dir = bytes(curdir, 'ASCII') if isinstance(tail, bytes) else curdir
        # "xxx/newdir/." exists as soon as "xxx/newdir" does.
        if tail == this_dir:
            return
    try:
        mkdir(name, mode)
    except OSError as exc:
        if not exist_ok or exc.errno != errno.EEXIST or not path.isdir(name):
            raise
Example #6
0
    def lookup(self, path=None,inode=None, inode_id=None):
        """Resolve a VFS entry by *path*, *inode_id* or *inode*, checked
        in that order; exactly one should be supplied.

        Returns a (path, inode, inode_id) triple in every branch.
        Raises RuntimeError when a path/inode is unknown, IOError for an
        unknown inode_id.  Side effect: the inode_id branch caches the
        row's mtime on self.mtime.
        """
        dbh=DB.DBO(self.case)
        if path:
            dir,name = posixpath.split(path)
            if not name:
                # Trailing slash: split the parent to get the leaf name.
                dir,name = posixpath.split(path[:-1])
            if dir == '/':
                dir = ''

            # Make sure the lookup columns are indexed before querying.
            dbh.check_index('file','path', 200)
            dbh.check_index('file','name', 200)
            # Directory entries may be stored with a trailing '/'.
            dbh.execute("select inode,inode_id from file where path=%r and (name=%r or name=concat(%r,'/')) limit 1", (dir+'/',name,name))
            res = dbh.fetch()
            if not res:
                raise RuntimeError("VFS path not found %s/%s" % (dir,name))
            return path, res["inode"], res['inode_id']

        elif inode_id:
            dbh.check_index('inode','inode_id')
            dbh.execute("select mtime, inode.inode, concat(path,name) as path from inode left join file on inode.inode_id=file.inode_id where inode.inode_id=%r order by file.status limit 1", inode_id)
            res = dbh.fetch()
            if not res: raise IOError("Inode ID %s not found" % inode_id)
            # Cache the modification time for later use by the caller.
            self.mtime = res['mtime']
            return res['path'],res['inode'], inode_id

        else:
            dbh.check_index('file','inode')
            dbh.execute("select inode.inode_id,concat(path,name) as path from file join inode on inode.inode_id = file.inode_id where inode.inode=%r order by file.status limit 1", inode)
            res = dbh.fetch()
            if not res:
                raise RuntimeError("VFS Inode %s not known" % inode)
            return res["path"], inode, res['inode_id']
  def StatAsync(self, path):
    """Stat *path* at the pinned commit (self._commit) via local git.

    Returns a Future wrapping a StatInfo carrying the object's version
    and, for directories, the versions of each child entry.
    Raises FileNotFoundError when the entry is absent at that commit.
    """

    def get_child_versions(path):
      # Map each entry name under *path* to its git object id.
      return dict((e['name'], e['id'])
                  for e in local_git_util.ListDir(path, self._commit))

    def get_file_version(dir, filename):
      # Git object id of *filename* inside *dir* at the pinned commit.
      try:
        return next(e['id'] for e in local_git_util.ListDir(dir, self._commit)
                    if e['name'] == filename)
      except StopIteration:
        raise FileNotFoundError('%s not found in revision %s' %
                                (path, self._commit))

    dir, filename = posixpath.split(path)
    if path == '':
      # Repository root: its version is the root tree itself.
      version = local_git_util.GetRootTree(self._commit)
      child_versions = get_child_versions('')
    elif IsDirectory(path):
      # A directory's version is its entry in its parent directory.
      parent_dir, stat_dir = posixpath.split(dir)
      version = get_file_version(parent_dir, stat_dir)
      child_versions = get_child_versions(dir)
    else:
      # Plain file: no child entries.
      version = get_file_version(dir, filename)
      child_versions = None

    #print 'Accessing local git for stat on %s (%s)' % (path, version)
    return Future(value=StatInfo(version, child_versions))
Example #8
0
def makedirs(name, mode=0o777, exist_ok=False):
    """makedirs(path [, mode=0o777][, exist_ok=False])

    Recursive directory creation: every missing intermediate path
    segment is created, not just the rightmost.  With *exist_ok* false,
    an already-existing target raises OSError; when true, the existing
    directory must additionally carry the requested (umask-adjusted)
    mode for the error to be suppressed.
    """
    head, tail = path.split(name)
    if not tail:
        # Trailing separator: split once more for the real leaf.
        head, tail = path.split(head)
    if head and tail and not path.exists(head):
        try:
            makedirs(head, mode, exist_ok)
        except OSError as exc:
            # A concurrent creator beat us to the parent; only EEXIST
            # is benign here.
            if exc.errno != errno.EEXIST:
                raise
        if tail == curdir:
            # "xxx/newdir/." exists whenever "xxx/newdir" does.
            return
    try:
        mkdir(name, mode)
    except OSError as exc:
        import stat as st
        acceptable = (exc.errno == errno.EEXIST and exist_ok
                      and path.isdir(name)
                      and st.S_IMODE(lstat(name).st_mode) == _get_masked_mode(mode))
        if not acceptable:
            raise
Example #9
0
def makedirs(name, mode=0o777, exist_ok=False):
    """makedirs(name [, mode=0o777][, exist_ok=False])

    Like mkdir, but any missing intermediate path segment is created as
    well (recursively).  If the target already exists, an OSError is
    raised unless *exist_ok* is true.
    """
    head, tail = path.split(name)
    if not tail:
        # *name* ended in a separator; re-split to find the leaf.
        head, tail = path.split(head)
    if head and tail and not path.exists(head):
        try:
            # Intermediate directories get the default mode; *mode* is
            # applied to the leaf only (matches the original call).
            makedirs(head, exist_ok=exist_ok)
        except FileExistsError:
            # Defeats the race where another thread created the path.
            pass
        dot = bytes(curdir, 'ASCII') if isinstance(tail, bytes) else curdir
        if tail == dot:
            # xxx/newdir/. exists iff xxx/newdir exists.
            return
    try:
        mkdir(name, mode)
    except OSError:
        # EEXIST alone is unreliable: the OS may report EACCES or EROFS
        # first, so just check whether the directory is now present.
        if not exist_ok or not path.isdir(name):
            raise
Example #10
0
def renames(old, new):
    """renames(old, new)

    Super-rename: missing directories for *new* are created first, and
    after the rename any rightmost directory components of *old* left
    empty are pruned away until a nonempty directory is found.

    Note: the rename itself may succeed while this function still fails
    later (e.g. lacking permission to unlink the leaf), leaving the new
    directory structure in place.
    """
    dst_dir, dst_name = path.split(new)
    if dst_dir and dst_name and not path.exists(dst_dir):
        makedirs(dst_dir)
    rename(old, new)
    src_dir, src_name = path.split(old)
    if src_dir and src_name:
        try:
            removedirs(src_dir)
        except error:
            # Best effort: stop pruning at the first non-removable dir.
            pass
Example #11
0
    def walk(self, path, refresh=False):
        """
        Directory tree generator, like os.walk

        Generator version of what is in s3fs, which yields a flattened list of
        files
        """
        # Strip the URL scheme; the underlying s3fs listing uses
        # bucket-relative keys.
        path = path.replace('s3://', '')
        directories = set()
        files = set()

        for key in list(self.fs._ls(path, refresh=refresh)):
            # NOTE(review): *path* is rebound to each listed key here, so
            # the value yielded below is the last key seen, not the
            # original argument -- looks suspicious but preserved as-is.
            path = key['Key']
            if key['StorageClass'] == 'DIRECTORY':
                directories.add(path)
            elif key['StorageClass'] == 'BUCKET':
                pass
            else:
                files.add(path)

        # s3fs creates duplicate 'DIRECTORY' entries
        files = sorted([posixpath.split(f)[1] for f in files
                        if f not in directories])
        directories = sorted([posixpath.split(x)[1]
                              for x in directories])

        yield path, directories, files

        for directory in directories:
            # NOTE(review): recursion receives only the directory's base
            # name (the full paths were discarded just above) -- confirm
            # against callers that relative walking is intended.
            for tup in self.walk(directory, refresh=refresh):
                yield tup
    def test_split(self):
        # posixpath.split behaviour on str paths, including the special
        # handling of root and of runs of leading slashes.
        cases = [
            ("/foo/bar", ("/foo", "bar")),
            ("/", ("/", "")),
            ("foo", ("", "foo")),
            ("////foo", ("////", "foo")),
            ("//foo//bar", ("//foo", "bar")),
        ]
        for given, expected in cases:
            self.assertEqual(posixpath.split(given), expected)

        # Calling split with no argument must raise TypeError.
        self.assertRaises(TypeError, posixpath.split)
Example #13
0
 def rename(self, old, new):
     """See IWriteFileSystem"""
     src = self._translate(old)
     dst = self._translate(new)
     src_dir, src_name = posixpath.split(src)
     dst_dir, dst_name = posixpath.split(dst)
     # Renames are only supported within a single directory.
     assert src_dir == dst_dir
     return self._execute(src_dir, 'rename', split=False,
                          old=src_name, new=dst_name)
Example #14
0
    def connect(self, identifier=None, active=False):
        '''
        POST initial survey content to kobocat and create a new project.
        store results in self.asset._deployment_data.
        '''
        # If no identifier was provided, construct one using
        # `settings.KOBOCAT_URL` and the uid of the asset
        if not identifier:
            # Use the external URL here; the internal URL will be substituted
            # in when appropriate
            if not settings.KOBOCAT_URL or not settings.KOBOCAT_INTERNAL_URL:
                raise ImproperlyConfigured(
                    'Both KOBOCAT_URL and KOBOCAT_INTERNAL_URL must be '
                    'configured before using KobocatDeploymentBackend'
                )
            server = settings.KOBOCAT_URL
            username = self.asset.owner.username
            id_string = self.asset.uid
            identifier = '{server}/{username}/forms/{id_string}'.format(
                server=server,
                username=username,
                id_string=id_string,
            )
        else:
            # Parse the provided identifier, which is expected to follow the
            # format http://kobocat_server/username/forms/id_string
            parsed_identifier = urlparse.urlparse(identifier)
            server = u'{}://{}'.format(
                parsed_identifier.scheme, parsed_identifier.netloc)
            # Walk the path right-to-left: id_string, then the literal
            # 'forms' segment, then the owner's username, then the root.
            path_head, path_tail = posixpath.split(parsed_identifier.path)
            id_string = path_tail
            path_head, path_tail = posixpath.split(path_head)
            if path_tail != 'forms':
                raise Exception('The identifier is not properly formatted.')
            path_head, path_tail = posixpath.split(path_head)
            if path_tail != self.asset.owner.username:
                raise Exception(
                    'The username in the identifier does not match the owner '
                    'of this asset.'
                )
            if path_head != '/':
                raise Exception('The identifier is not properly formatted.')

        # Upload the asset as an XLSForm (CSV representation) to kobocat.
        url = self.external_to_internal_url(u'{}/api/v1/forms'.format(server))
        csv_io = self.to_csv_io(self.asset.to_xls_io(versioned=True), id_string)
        valid_xlsform_csv_repr = csv_io.getvalue()
        payload = {
            u'text_xls_form': valid_xlsform_csv_repr,
            u'downloadable': active
        }
        json_response = self._kobocat_request('POST', url, payload)
        # Persist the deployment details on the asset.
        self.store_data({
            'backend': 'kobocat',
            'identifier': self.internal_to_external_url(identifier),
            'active': json_response['downloadable'],
            'backend_response': json_response,
            'version': self.asset.version_id,
        })
Example #15
0
 def _generate_file_diff(self, buf):
         """Write the diff for a single file into *buf*.

         The change is classified as an "add" (no source node), a
         "delete" (no target node), or None for a plain modification.
         """
         if self.src_kind == svn.core.svn_node_none:
             change = "add"
         elif self.tgt_kind == svn.core.svn_node_none:
             change = "delete"
         else:
             change = None
         tgt_dir, tgt_name = vcspath.split(self.tgt_path)
         src_dir, src_name = vcspath.split(self.src_path)
         self._generate_node_diff(
             buf, change, tgt_name, tgt_dir, src_name, src_dir)
Example #16
0
def renames(old, new):
    """Rename *old* to *new*, creating any missing directories for the
    new path first, then pruning emptied directories of the old path
    afterwards (best effort)."""
    target_dir, target_leaf = path.split(new)
    if target_dir and target_leaf and not path.exists(target_dir):
        makedirs(target_dir)
    rename(old, new)
    source_dir, source_leaf = path.split(old)
    if source_dir and source_leaf:
        try:
            removedirs(source_dir)
        except error:
            pass
Example #17
0
def removedirs(name):
    """Remove directory *name*, then prune each now-empty parent
    directory, stopping at the first one that cannot be removed."""
    rmdir(name)
    head, tail = path.split(name)
    if not tail:
        # Trailing separator: strip it by splitting once more.
        head, tail = path.split(head)
    while head and tail:
        try:
            rmdir(head)
        except error:
            # Parent is non-empty (or not removable); stop pruning.
            break
        head, tail = path.split(head)
    def test_split(self):
        # posixpath.split must behave identically for str and bytes
        # inputs, including root and multi-slash edge cases.
        for given, expected in [
            ("/foo/bar", ("/foo", "bar")),
            ("/", ("/", "")),
            ("foo", ("", "foo")),
            ("////foo", ("////", "foo")),
            ("//foo//bar", ("//foo", "bar")),
        ]:
            self.assertEqual(posixpath.split(given), expected)
            self.assertEqual(
                posixpath.split(given.encode('ascii')),
                tuple(part.encode('ascii') for part in expected))
 def get(self, url):
     """Return a shortened display form of *url*, memoized in self._cache.

     URLs longer than self._maxlen with more than three slashes are
     abbreviated by replacing middle path components with ".../".
     """
     cached = self._cache.get(url)
     if cached:
         return cached
     if len(url) <= self._maxlen or url.count("/") <= 3:
         short = url
     else:
         head, leaf = posixpath.split(url)
         # Drop components from the head until "head/.../leaf" (5 extra
         # chars) fits, but always keep at least the scheme/host part.
         while len(head) + len(leaf) + 5 > self._maxlen:
             if head.count("/") < 3:
                 break
             head, _ = posixpath.split(head)
         short = posixpath.join(head, ".../", leaf)
     self._cache[url] = short
     return short
Example #20
0
def makedirs(name, mode = 511):
    """Recursively create directory *name* (511 == 0o777), building any
    missing intermediate directories first."""
    head, tail = path.split(name)
    if not tail:
        # Trailing separator: split once more for the real leaf.
        head, tail = path.split(head)
    if head and tail and not path.exists(head):
        try:
            makedirs(head, mode)
        except OSError as exc:
            # A parent appearing concurrently (EEXIST) is benign.
            if exc.errno != errno.EEXIST:
                raise

        if tail == curdir:
            # "xxx/newdir/." exists whenever "xxx/newdir" does.
            return
    mkdir(name, mode)
 def Activated(self):
     """Re-import any parts whose source files changed since the last
     import, interactively repairing broken sourceFile links first
     (Python 2 / FreeCAD Assembly2 command; note the `<>` operator)."""
     #disable proxies solving the system as their objects are updated
     parms = FreeCAD.ParamGet("User parameter:BaseApp/Preferences/Mod/Assembly2")
     org_setting = parms.GetBool('autoSolveConstraintAttributesChanged', True)
     parms.SetBool('autoSolveConstraintAttributesChanged', False)
     solve_assembly_constraints = False
     for obj in FreeCAD.ActiveDocument.Objects:
         if hasattr(obj, 'sourceFile'):
             if not hasattr( obj, 'timeLastImport'):
                 obj.addProperty("App::PropertyFloat", "timeLastImport","importPart") #should default to zero which will force update.
                 obj.setEditorMode("timeLastImport",1)
             if not os.path.exists( obj.sourceFile ):
                 debugPrint( 3, '%s.sourceFile %s is missing, attempting to repair it' % (obj.Name,  obj.sourceFile) )
                 # Relocate the missing file: at each ancestor of the
                 # document's folder, test ever-shorter suffixes of the
                 # recorded source path and ask the user to confirm hits.
                 replacement = None
                 aFolder, aFilename = posixpath.split( FreeCAD.ActiveDocument.FileName )
                 sParts = path_split( posixpath, obj.sourceFile)
                 debugPrint( 3, '  obj.sourceFile parts %s' % sParts )
                 replacement = None
                 previousRejects = []
                 while replacement == None and aFilename <> '':
                     for i in reversed(range(len(sParts))):
                         newFn = aFolder
                         for j in range(i,len(sParts)):
                             newFn = posixpath.join( newFn,sParts[j] )
                         debugPrint( 4, '    checking %s' % newFn )
                         if os.path.exists( newFn ) and not newFn in previousRejects :
                             reply = QtGui.QMessageBox.question(
                                 QtGui.qApp.activeWindow(), "%s source file not found" % obj.Name,
                                 "Unable to find\n  %s \nUse \n  %s\n instead?" % (obj.sourceFile, newFn) , 
                                 QtGui.QMessageBox.Yes | QtGui.QMessageBox.No, QtGui.QMessageBox.Yes)
                             if reply == QtGui.QMessageBox.Yes:
                                 replacement = newFn
                                 break
                             else:
                                 previousRejects.append( newFn )
                     aFolder, aFilename = posixpath.split( aFolder )
                 if replacement <> None:
                     obj.sourceFile = replacement
                 else:
                     QtGui.QMessageBox.critical(  QtGui.qApp.activeWindow(), "Source file not found", "update of %s aborted!\nUnable to find %s" % (obj.Name, obj.sourceFile) )
                     obj.timeLastImport = 0 #force update if users repairs link
             if os.path.exists( obj.sourceFile ):
                 if os.path.getmtime( obj.sourceFile ) > obj.timeLastImport:
                     # Source is newer than the last import: re-import it.
                     importPart( obj.sourceFile, obj.Name )
                     solve_assembly_constraints = True
     if solve_assembly_constraints:
         solveConstraints( FreeCAD.ActiveDocument )
     FreeCAD.ActiveDocument.recompute()
     # Restore the user's original auto-solve preference.
     parms.SetBool('autoSolveConstraintAttributesChanged', org_setting )
 def Activated(self):
     """Re-import parts whose source files changed since the last import,
     interactively repairing missing sourceFile links first (Python 2 /
     FreeCAD command; paths are converted via path_convert for the OS)."""
     for obj in FreeCAD.ActiveDocument.Objects:
         if hasattr(obj, "sourceFile"):
             if not hasattr(obj, "timeLastImport"):
                 obj.addProperty(
                     "App::PropertyFloat", "timeLastImport", "importPart"
                 )  # should default to zero which will force update.
                 obj.setEditorMode("timeLastImport", 1)
             if not os.path.exists(path_convert(obj.sourceFile, posixpath, os.path)):
                 debugPrint(3, "%s.sourceFile %s is missing, attempting to repair it" % (obj.Name, obj.sourceFile))
                 # Search for the file: at each ancestor of the document's
                 # folder, try ever-shorter suffixes of the recorded path
                 # and ask the user to confirm candidates that exist.
                 replacement = None
                 aFolder, aFilename = posixpath.split(FreeCAD.ActiveDocument.FileName)
                 sParts = path_split(posixpath, obj.sourceFile)
                 debugPrint(3, "  obj.sourceFile parts %s" % sParts)
                 replacement = None
                 previousRejects = []
                 while replacement == None and aFilename <> "":
                     for i in reversed(range(len(sParts))):
                         newFn = aFolder
                         for j in range(i, len(sParts)):
                             newFn = posixpath.join(newFn, sParts[j])
                         # debugPrint( 3, '    checking %s' % newFn )
                         if os.path.exists(path_convert(newFn, posixpath, os.path)) and not newFn in previousRejects:
                             reply = QtGui.QMessageBox.question(
                                 QtGui.qApp.activeWindow(),
                                 "%s source file not found" % obj.Name,
                                 "Unable to find\n  %s \nUse \n  %s\n instead?" % (obj.sourceFile, newFn),
                                 QtGui.QMessageBox.Yes | QtGui.QMessageBox.No,
                                 QtGui.QMessageBox.Yes,
                             )
                             if reply == QtGui.QMessageBox.Yes:
                                 replacement = newFn
                                 break
                             else:
                                 previousRejects.append(newFn)
                     aFolder, aFilename = posixpath.split(aFolder)
                 if replacement <> None:
                     obj.sourceFile = replacement
                 else:
                     QtGui.QMessageBox.critical(
                         QtGui.qApp.activeWindow(),
                         "Source file not found",
                         "update of %s aborted!\nUnable to find %s" % (obj.Name, obj.sourceFile),
                     )
                     obj.timeLastImport = 0  # force update if users repairs link
             if os.path.exists(obj.sourceFile):
                 if os.path.getmtime(obj.sourceFile) > obj.timeLastImport:
                     # Source is newer than the last import: re-import it.
                     importPart(obj.sourceFile, obj.Name)
     FreeCAD.ActiveDocument.recompute()
Example #23
0
def makedirs(name, mode=0777):
    """makedirs(path [, mode=0777])

    Super-mkdir; create a leaf directory and all intermediate ones.
    Works like mkdir, except that any intermediate path segment (not
    just the rightmost) will be created if it does not exist.  This is
    recursive.

    Note: an already-existing leaf makes the final mkdir raise OSError
    (EEXIST); concurrent-creation races are not handled here either.
    (Python 2 code: octal literal 0777.)
    """
    head, tail = path.split(name)
    if not tail:
        # Path ended in a separator; split again for the real leaf.
        head, tail = path.split(head)
    if head and tail and not path.exists(head):
        # Recursively ensure the parent exists before creating the leaf.
        makedirs(head, mode)
    mkdir(name, mode)
Example #24
0
    def _createInitialFigure(self):
        extent = []
        sd = self.scan_data.measurement.scalar_data
        x_axis = sd.get_sorted_axes_list(1)[0]
        y_axis = sd.get_sorted_axes_list(2)[0]
        extent.extend(x_axis.range)
        extent.extend(y_axis.range)
        self.image = self.axes.imshow(self._elementData, extent=extent,
                                       interpolation='nearest',
                                       origin='lower')
        self._colorbar = self.figure.colorbar(self.image)

        self.axes.set_xlabel(posixpath.split(x_axis.name)[-1])
        try: self.axes.set_ylabel(posixpath.split(y_axis.name)[-1])
        except IndexError: pass
Example #25
0
def createdict(singlelist):
    """Build a two-level index of filenames.

    Maps the lower-cased first character of each file name to a dict of
    {function_name: "<parent_dir>/<file_name>"}, where parent_dir is the
    last directory component of the file's path, e.g.
    '/wired/path/Mat/func.m' -> {'f': {'func': 'Mat/func.m'}}.
    """
    newdict = {}
    for filename in singlelist:
        dirpath, name = posixpath.split(filename)
        # Keep only the last directory component ("Mat" from "/wired/path/Mat").
        _, shortdir = posixpath.split(dirpath)
        # str.lower replaces the external `lower` helper; works for '' too.
        index_char = name[0:1].lower()
        # Strip the extension to get the bare function name.
        func_name, _ext = posixpath.splitext(name)
        # setdefault replaces the Python-2-only dict.has_key check.
        newdict.setdefault(index_char, {})[func_name] = shortdir + '/' + name
    return newdict
Example #26
0
    def rename(self, old, new):
        "See zope.server.interfaces.ftp.IFileSystem"
        src_dir, src_name = posixpath.split(old)
        dst_dir, dst_name = posixpath.split(new)

        src_parent = self.getwdir(src_dir)
        dst_parent = self.getwdir(dst_dir)

        # The source entry must exist and the target name must be free.
        if src_name not in src_parent.files:
            raise AssertionError("Not exists:", src_name)
        if dst_name in dst_parent.files:
            raise AssertionError("Already exists:", dst_name)

        # Move the entry between the two directory tables.
        dst_parent.files[dst_name] = src_parent.files[src_name]
        del src_parent.files[src_name]
Example #27
0
def _execvpe(file, args, env=None):
    """Exec *file* with *args*, searching PATH when *file* carries no
    directory component (Python 2 helper; never returns on success)."""
    from errno import ENOENT, ENOTDIR

    if env is not None:
        func = execve
        argrest = (args, env)
    else:
        func = execv
        argrest = (args,)
        env = environ

    head, tail = path.split(file)
    if head:
        # Explicit directory given: do not search PATH.
        func(file, *argrest)
        return
    if 'PATH' in env:
        envpath = env['PATH']
    else:
        envpath = defpath
    PATH = envpath.split(pathsep)
    saved_exc = None
    saved_tb = None
    for dir in PATH:
        fullname = path.join(dir, file)
        try:
            func(fullname, *argrest)
        except error, e:
            tb = sys.exc_info()[2]
            # Remember the first "interesting" failure -- anything other
            # than the candidate simply not existing.
            if (e.errno != ENOENT and e.errno != ENOTDIR
                and saved_exc is None):
                saved_exc = e
                saved_tb = tb
    # BUG FIX: the original fell off the end and returned None when every
    # candidate failed; re-raise the most relevant error instead, as
    # CPython 2.7's os._execvpe does.
    if saved_exc:
        raise error, saved_exc, saved_tb
    raise error, e, tb
Example #28
0
 def iglob(self, pathname):
     """Yield the paths matching *pathname*, which may contain simple
     shell-style wildcards (fnmatch syntax), over the remote filesystem."""
     if not glob.has_magic(pathname):
         # Literal path: yield it only if it exists remotely.
         if self.ssh.lpath_exists(pathname):
             yield pathname
         return
     dirname, basename = posixpath.split(pathname)
     if not dirname:
         # Bare pattern: match within the current directory.
         for match in self.glob1(posixpath.curdir, basename):
             yield match
         return
     # Expand the directory part first (it may itself contain wildcards),
     # then match the basename pattern inside each resulting directory.
     dir_iter = self.iglob(dirname) if glob.has_magic(dirname) else [dirname]
     matcher = self.glob1 if glob.has_magic(basename) else self.glob0
     for directory in dir_iter:
         for match in matcher(directory, basename):
             yield posixpath.join(directory, match)
Example #29
0
def _execvpe(file, args, env=None):
    """Exec *file*, searching get_exec_path() when *file* has no directory
    component (Python 3 variant of os._execvpe)."""
    # execve is needed only when the caller supplies an explicit env;
    # otherwise execv inherits os.environ, which is also used for the
    # executable search below.
    if env is not None:
        exec_func = execve
        argrest = (args, env)
    else:
        exec_func = execv
        argrest = (args,)
        env = environ

    # Any directory part disables the search: exec the path as given.
    head, tail = path.split(file)
    if head:
        exec_func(file, *argrest)
        return
    last_exc = saved_exc = None
    saved_tb = None
    path_list = get_exec_path(env)
    # Outside Windows the exec* calls take bytes paths ('name' is os.name).
    # map() is lazy here, but the list is consumed exactly once below.
    if name != 'nt':
        file = fsencode(file)
        path_list = map(fsencode, path_list)
    for dir in path_list:
        fullname = path.join(dir, file)
        try:
            exec_func(fullname, *argrest)
        except OSError as e:
            last_exc = e
            tb = sys.exc_info()[2]
            # Keep the first failure that is NOT a plain "not found"
            # (e.g. a permission error) so it is surfaced preferentially.
            if (e.errno != errno.ENOENT and e.errno != errno.ENOTDIR
                and saved_exc is None):
                saved_exc = e
                saved_tb = tb
    if saved_exc:
        raise saved_exc.with_traceback(saved_tb)
    # NOTE(review): if path_list is empty, last_exc/tb are never bound and
    # this line raises NameError instead of a meaningful OSError -- confirm
    # whether an empty exec path can occur for callers of this helper.
    raise last_exc.with_traceback(tb)
  def StatAsync(self, path):
    '''Stats the directory given, or if a file is given, stats the file's parent
    directory to get info about the file.
    '''
    # Stat the parent directory rather than the file itself: the directory
    # stat already contains every child's stat, so one fetch covers the
    # whole directory at once.
    dir_path, file_path = posixpath.split(path)
    dir_path = ToDirectory(dir_path)

    def extract_stat(dir_stat):
      '''Turn a directory stat into the StatInfo for the original request;
      when the request was for a file, pull that file's entry out.
      '''
      if dir_path == path:
        return dir_stat
      child_version = dir_stat.child_versions.get(file_path)
      if child_version is None:
        raise FileNotFoundError('No stat found for %s in %s (found %s)' %
                                (path, dir_path, dir_stat.child_versions))
      return StatInfo(child_version)

    # Fast path: serve from the stat cache when possible.
    cached_stat = self._stat_cache.Get(dir_path).Get()
    if cached_stat is not None:
      return Future(callback=lambda: extract_stat(cached_stat))

    def cache_then_extract(dir_stat):
      assert dir_stat is not None  # should have raised a FileNotFoundError
      # We only ever need to cache the dir stat.
      self._stat_cache.Set(dir_path, dir_stat)
      return extract_stat(dir_stat)
    return self._MemoizedStatAsyncFromFileSystem(dir_path).Then(cache_then_extract)
Example #31
0
    def __init__(self, parent, session, source_name):
        """Build the log-download dialog for *source_name* and immediately
        start an asynchronous download of the remote file via the RED API."""
        QDialog.__init__(self, parent)

        self.setupUi(self)
        self.setModal(True)

        # Remote log name; the default local save target keeps its basename.
        self.source_name   = source_name
        self.last_filename = os.path.join(get_home_path(), posixpath.split(source_name)[1])
        self.log_file      = None  # REDFile handle while a download is in flight
        self.content       = None  # decoded file content once downloaded

        # Log basenames look like "continuous_<name>" or
        # "<prefix>_<timestamp>[+extra]_<name>" -- split on underscores.
        source_name_parts = posixpath.split(source_name)[1].split('_')

        if source_name_parts[0] == 'continuous':
            date_time       = 'Continuous ({0})'.format(source_name_parts[1])
            self.continuous = True
        else:
            try:
                # Divided by 1e6: timestamp is presumably microseconds -- TODO confirm.
                timestamp = int(source_name_parts[1].split('+')[0]) / 1000000
            except ValueError:
                timestamp = 0

            date_time       = '{0} ({1})'.format(timestamp_to_date_at_time(timestamp), source_name_parts[2])
            self.continuous = False

        # Wire up dialog signals; a (0, 0) progress range shows an
        # indeterminate busy indicator until the real length is known.
        self.rejected.connect(self.abort_download)
        self.progress_download.setRange(0, 0)
        self.label_date_time.setText(date_time)
        self.button_save.clicked.connect(self.save_content)
        self.button_close.clicked.connect(self.reject)

        # Saving only becomes possible after a successful download.
        self.button_save.setEnabled(False)

        def cb_open(dummy):
            # Remote file opened successfully: start reading it.
            def cb_read_status(bytes_read, max_length):
                self.progress_download.setValue(bytes_read)

            def cb_read(result):
                # Read finished (or failed): release the handle first.
                self.log_file.release()
                self.log_file = None

                self.label_download.setVisible(False)
                self.progress_download.setVisible(False)

                if result.error != None:
                    self.log(u'Error: ' + Qt.escape(unicode(result.error)), bold=True)
                    return

                try:
                    self.content = result.data.decode('utf-8')
                except UnicodeDecodeError:
                    # FIXME: maybe add an encoding guesser here or try some common encodings if UTF-8 fails
                    self.log(u'Error: Log file is not UTF-8 encoded', bold=True)
                    return

                self.button_save.setEnabled(True)

                # For continuous logs strip leading whitespace for display;
                # self.content keeps the raw text for saving.
                if self.continuous:
                    content = self.content.lstrip()
                else:
                    content = self.content

                self.edit_content.setPlainText('')

                # Fixed-width font for log output.
                font = QFont('monospace')
                font.setStyleHint(QFont.TypeWriter)

                self.edit_content.setFont(font)
                self.edit_content.setPlainText(content)

            self.progress_download.setRange(0, self.log_file.length)
            self.log_file.read_async(self.log_file.length, cb_read, cb_read_status)

        def cb_open_error():
            self.label_download.setVisible(False)
            self.progress_download.setVisible(False)
            self.log(u'Error: Could not open log file', bold=True)

        self.log_file = REDFile(session)

        # Open read-only and non-blocking; cb_open kicks off the actual read.
        async_call(self.log_file.open,
                   (source_name, REDFile.FLAG_READ_ONLY | REDFile.FLAG_NON_BLOCKING, 0, 0, 0),
                   cb_open, cb_open_error)
Example #32
0
 def split(self, url):
   """Split *url* like posixpath.split, keeping the HDFS prefix and the
   server (prefixed with '/', when present) on the head component."""
   server, rel_path = self._parse_url(url)
   prefix = _HDFS_PREFIX + ('/' + server if server else server)
   head, tail = posixpath.split(rel_path)
   return prefix + head, tail
Example #33
0
def strip_scripts_dir(p):
    """Return the directory part of path *p* (leading whitespace ignored)
    with the build-system placeholder variables removed."""
    directory, _ = posixpath.split(p.lstrip())
    for placeholder in ('${scripts_dir}', '${CMAKE_CURRENT_BINARY_DIR}'):
        directory = directory.replace(placeholder, '')
    return directory
Example #34
0
 def split(self, url):
     """Split *url* like posixpath.split, re-attaching the HDFS prefix to
     the head component."""
     relative = self._parse_url(url)
     directory, name = posixpath.split(relative)
     return _HDFS_PREFIX + directory, name
Example #35
0
def ensure_file(module, domain, file_path, data, state):
    """Ensure *file_path* on the appliance *domain* matches *state*.

    Creates, updates or deletes the file through the module's connection,
    ensuring the parent directory exists first.  Files under cert /
    sharecert directories are never overwritten, since there is no way to
    restore the original crypto material afterwards.

    Returns a result dict with at least 'changed', plus 'response',
    'path', 'msg', 'directory' and 'diff' keys where applicable.
    """
    result = {}
    result['changed'] = False
    connection = Connection(module._socket_path)
    parent_dir = posixpath.split(file_path)[0]
    top_dir = file_path.split(
        '/'
    )[0] or parent_dir  # Handles the case where the parent dir is also the root directory.
    diff = None

    # Ensure the parent directory is present before uploading file
    # If file state is 'absent' do nothing.
    if state != 'absent':
        result['directory'] = ensure_directory(module, domain, parent_dir)

    files = list_directory(module=module, domain=domain, dir_path=parent_dir)

    file_req = build_file_request(domain, file_path, data)

    if not has_file(files, file_path) and state == 'present':
        # Create the missing file (skipped in check mode).
        if not module.check_mode:
            file_create_resp = connection.send_request(**file_req.post())
            result['response'] = file_create_resp
            result['path'] = file_create_resp['_links']['location']['href']
        result['diff'] = {'before': None, 'after': file_path}
        result['changed'] = True

    elif has_file(files, file_path) and state == 'present':
        # Compare the files, can't compare cert/sharedcert.
        if 'sharecert' not in top_dir and 'cert' not in top_dir:
            resp = connection.get_resource_or_none(file_req.path)
            from_data = base64.b64decode(resp['file'])

            try:
                diff = file_diff(
                    from_data,
                    data,
                    file_path,
                )
            except UnicodeDecodeError:
                # File seems to be binary
                diff = 'Not possible to compare a binary file.'

            # Compare md5, if data is different update the file
            to_md5 = hashlib.md5()
            to_md5.update(data)

            from_md5 = hashlib.md5()
            from_md5.update(from_data)
            if to_md5.hexdigest() != from_md5.hexdigest():
                if not module.check_mode:
                    update_resp = connection.send_request(**file_req.put())
                    result['response'] = update_resp
                result['changed'] = True

        # The requested file already exists in cert/shared cert
        # Not updating a file as there is no way to restore/backout
        # unless you have the original cert/key or secure backups.
        elif has_file(files, file_path):
            result['path'] = file_path
            result[
                'msg'] = 'Files are in cert / sharedcert directories, not overwriting existing crypto files.'
            return result
        else:
            raise NotImplementedError(
                "This condition was not expected, this is likely a bug.")
    elif not has_file(files, file_path) and state == 'absent':
        diff = {'before': None, 'after': None}
    elif has_file(files, file_path) and state == 'absent':
        diff = {'before': file_path, 'after': None}
        delete_resp = connection.send_request(**file_req.delete())
        result['changed'] = True
        result['response'] = delete_resp

    if module._diff:
        result['diff'] = diff

    return result
Example #36
0
        # Handle IRC URLs

        if args.url != '':
            u = urllib.parse.urlparse(args.url)
            if u.scheme == 'irc':
                if u.password:
                    args.password = u.password
                if u.hostname:
                    args.server = u.hostname
                if u.port:
                    args.port = u.port
                if u.path != '':
                    p = urllib.parse.unquote(u.path)
                    p = posixpath.normpath(p)
                    l = posixpath.split(p)
                    if len(l) > 0:
                        if l[0] == '/':
                            if l[1] != '':
                                c = str(l[1])
                                if ',' in c:
                                    channel = c.split(',')
                                    if len(channel) == 2:
                                        if channel[0][:1] != '#':
                                            channel[0] = '#' + channel[0]
                                        if args.channel:
                                            args.channel.append(
                                                [channel[0], channel[1]])
                                        else:
                                            args.channel = []
                                            args.channel.append(
Example #37
0
def nameonly(s):
    """Return the bare file name of *s*: directory and extension stripped."""
    import posixpath
    return posixpath.splitext(posixpath.basename(s))[0]
Example #38
0
 def run(self, nodes, master, user, user_shell, volumes):
     """Install and configure a MySQL cluster across *nodes*.

     The master becomes the NDB management node; the remaining nodes are
     split into data (storage) and query (mysqld) nodes -- either shared
     or dedicated depending on self._dedicated_query.  Finishes by
     importing an SQL dump (if one is found) and installing a mysqldump
     cronjob on the master.
     """
     log.info("Installing mysql-cluster-server on all nodes...")
     for node in nodes:
         # NOTE(review): "(node)" is not a tuple -- presumably
         # pool.simple_job accepts a single bare argument; confirm.
         self.pool.simple_job(self._install_mysql_cluster, (node),
                              jobid=node.alias)
     self.pool.wait(len(nodes))
     mconn = master.ssh
     mconn.execute('rm -f /usr/mysql-cluster/*')
     # Get IPs for all nodes
     self.mgm_ip = master.private_ip_address
     if not self._dedicated_query:
         # Shared mode: all non-master nodes store data, every node
         # (master included) serves queries.
         self.storage_ips = [x.private_ip_address for x in nodes[1:]]
         self.query_ips = self.storage_ips
         self.data_nodes = nodes[1:]
         self.query_nodes = nodes
     else:
         # Dedicated mode: the first _num_data_nodes after the master
         # store data; the rest (plus the master) serve queries.
         self.data_nodes = nodes[1:self._num_data_nodes + 1]
         self.query_nodes = nodes[self._num_data_nodes + 1:]
         self.query_nodes.append(master)
         self.storage_ips = [x.private_ip_address for x in self.data_nodes]
         self.query_ips = [x.private_ip_address for x in self.query_nodes]
     # Create backup dir and change ownership of mysql-cluster dir
     log.info('Backing up and stopping all mysql processes on all nodes')
     for node in nodes:
         self.pool.simple_job(self._backup_and_reset, (node),
                              jobid=node.alias)
     self.pool.wait(len(nodes))
     # Generate and place ndb_mgmd configuration file
     log.info('Generating ndb_mgmd.cnf...')
     ndb_mgmd = mconn.remote_file('/etc/mysql/ndb_mgmd.cnf')
     ndb_mgmd.write(self.generate_ndb_mgmd())
     ndb_mgmd.close()
     # Generate and place my.cnf configuration file on each data node
     log.info('Generating my.cnf on all nodes')
     for node in nodes:
         self.pool.simple_job(self._write_my_cnf, (node), jobid=node.alias)
     self.pool.wait(len(nodes))
     # Restart mysql-ndb-mgm on master
     log.info('Restarting mysql-ndb-mgm on master node...')
     mconn.execute('/etc/init.d/mysql-ndb-mgm restart')
     # Start mysqld-ndb on data nodes
     log.info('Restarting mysql-ndb on all data nodes...')
     for node in self.data_nodes:
         self.pool.simple_job(node.ssh.execute,
                              ('/etc/init.d/mysql-ndb restart'),
                              jobid=node.alias)
     self.pool.wait(len(self.data_nodes))
     # Start mysql on query nodes
     log.info('Starting mysql on all query nodes')
     for node in self.query_nodes:
         self.pool.simple_job(node.ssh.execute,
                              ('/etc/init.d/mysql restart'),
                              dict(ignore_exit_status=True),
                              jobid=node.alias)
     self.pool.wait(len(self.query_nodes))
     # Import sql dump
     dump_file = self._dump_file
     dump_dir = '/mnt/mysql-cluster-backup'
     if posixpath.isabs(self._dump_file):
         dump_dir, dump_file = posixpath.split(self._dump_file)
     else:
         log.warn("%s is not an absolute path, defaulting to %s" %
                  (self._dump_file, posixpath.join(dump_dir, dump_file)))
     # Prefer the ".sc" variant of the dump when present (same name with
     # ".sc" inserted before the extension).
     name, ext = posixpath.splitext(dump_file)
     sc_path = posixpath.join(dump_dir, name + '.sc' + ext)
     orig_path = posixpath.join(dump_dir, dump_file)
     if not mconn.isdir(dump_dir):
         log.info("Directory %s does not exist, creating..." % dump_dir)
         mconn.makedirs(dump_dir)
     if mconn.isfile(sc_path):
         mconn.execute('mysql < %s' % sc_path)
     elif mconn.isfile(orig_path):
         mconn.execute('mysql < %s' % orig_path)
     else:
         log.info('No dump file found, not importing.')
     log.info('Adding MySQL dump cronjob to master node')
     cronjob = self.generate_mysqldump_crontab(sc_path)
     # Remove any previously installed cronjob line before appending.
     mconn.remove_lines_from_file('/etc/crontab', '#tethyscluster-mysql')
     crontab_file = mconn.remote_file('/etc/crontab', 'a')
     crontab_file.write(cronjob)
     crontab_file.close()
     log.info('Management Node: %s' % master.alias)
     log.info('Data Nodes: \n%s' %
              '\n'.join([x.alias for x in self.data_nodes]))
     log.info('Query Nodes: \n%s' %
              '\n'.join([x.alias for x in self.query_nodes]))
Example #39
0
    def Activated(self):
        """Re-import any imported parts whose source files changed since the
        last import, interactively repairing broken sourceFile paths, then
        re-solve assembly constraints and tidy up constraint mirrors."""
        #disable proxies solving the system as their objects are updated
        doc_assembly = FreeCAD.ActiveDocument
        solve_assembly_constraints = False
        YesToAll_clicked = False
        for obj in doc_assembly.Objects:
            if hasattr(obj, 'sourceFile'):
                if not hasattr( obj, 'timeLastImport'):
                    obj.addProperty("App::PropertyFloat", "timeLastImport","importPart") #should default to zero which will force update.
                    obj.setEditorMode("timeLastImport",1)
                if not os.path.exists( obj.sourceFile ) and  path_rel_to_abs( obj.sourceFile ) is None:
                    debugPrint( 3, '%s.sourceFile %s is missing, attempting to repair it' % (obj.Name,  obj.sourceFile) )
                    replacement = None
                    aFolder, aFilename = posixpath.split( doc_assembly.FileName )
                    sParts = path_split( posixpath, obj.sourceFile)
                    debugPrint( 3, '  obj.sourceFile parts %s' % sParts )
                    replacement = None
                    previousRejects = []
                    # Walk up from the assembly's own folder, and at each level
                    # try ever-shorter suffixes of the recorded source path,
                    # asking the user to confirm each candidate that exists.
                    while replacement == None and aFilename != '':
                        for i in reversed(range(len(sParts))):
                            newFn = aFolder
                            for j in range(i,len(sParts)):
                                newFn = posixpath.join( newFn,sParts[j] )
                            debugPrint( 4, '    checking %s' % newFn )
                            if os.path.exists( newFn ) and not newFn in previousRejects :
                                if YesToAll_clicked:
                                    replacement = newFn
                                    break
                                reply = QtGui.QMessageBox.question(
                                    QtGui.qApp.activeWindow(), "%s source file not found" % obj.Name,
                                    "Unable to find\n  %s \nUse \n  %s\n instead?" % (obj.sourceFile, newFn) ,
                                    QtGui.QMessageBox.Yes | QtGui.QMessageBox.YesToAll | QtGui.QMessageBox.No, QtGui.QMessageBox.Yes)
                                if reply == QtGui.QMessageBox.Yes:
                                    replacement = newFn
                                    break
                                if reply == QtGui.QMessageBox.YesToAll:
                                    replacement = newFn
                                    YesToAll_clicked = True
                                    break
                                else:
                                    previousRejects.append( newFn )
                        aFolder, aFilename = posixpath.split( aFolder )
                    if replacement != None:
                        obj.sourceFile = replacement
                    else:
                        QtGui.QMessageBox.critical(  QtGui.qApp.activeWindow(), "Source file not found", "update of %s aborted!\nUnable to find %s" % (obj.Name, obj.sourceFile) )
                        obj.timeLastImport = 0 #force update if users repairs link
                # Re-import when the source file on disk is newer than the
                # last import (checked for both relative and absolute forms).
                if path_rel_to_abs( obj.sourceFile ) is not None:
                    absolutePath = path_rel_to_abs( obj.sourceFile )
                    if os.path.getmtime( absolutePath ) > obj.timeLastImport:
                        importPart( absolutePath, obj.Name,  doc_assembly )
                        solve_assembly_constraints = True
                if os.path.exists( obj.sourceFile ):
                    if os.path.getmtime( obj.sourceFile ) > obj.timeLastImport:
                        importPart( obj.sourceFile, obj.Name,  doc_assembly )
                        solve_assembly_constraints = True

        if solve_assembly_constraints:
            solveConstraints( doc_assembly )
        # constraint mirror house keeping

        for obj in doc_assembly.Objects: #for adding creating mirrored constraints in old files
            if 'ConstraintInfo' in obj.Content:
                if doc_assembly.getObject( obj.Object1 ) == None or doc_assembly.getObject( obj.Object2 ) == None:
                    debugPrint(2, 'removing %s which refers to non-existent objects' % obj.Name)
                    doc_assembly.removeObject( obj.Name ) #required for FreeCAD 0.15 which does not support the on-delete method
                if group_constraints_under_parts():
                    if not hasattr( obj.ViewObject.Proxy, 'mirror_name'):
                        if isinstance( doc_assembly.getObject( obj.Object1 ).Proxy, Proxy_importPart) \
                                or isinstance( doc_assembly.getObject( obj.Object2 ).Proxy, Proxy_importPart):
                            debugPrint(2, 'creating mirror of %s' % obj.Name)
                            doc_assembly.getObject( obj.Object2 ).touch()
                            obj.ViewObject.Proxy.mirror_name = create_constraint_mirror(  obj, obj.ViewObject.Proxy.iconPath )
            elif 'ConstraintNfo' in obj.Content: #constraint mirror
                if  doc_assembly.getObject( obj.ViewObject.Proxy.constraintObj_name ) == None:
                    debugPrint(2, 'removing %s which mirrors/links to a non-existent constraint' % obj.Name)
                    doc_assembly.removeObject( obj.Name ) #clean up for FreeCAD 0.15 which does not support the on-delete method
                elif not group_constraints_under_parts():
                     debugPrint(2, 'removing %s since group_constraints_under_parts=False' % obj.Name)
                     delattr( doc_assembly.getObject( obj.ViewObject.Proxy.constraintObj_name ),  'mirror_name' )
                     doc_assembly.removeObject( obj.Name )
            elif hasattr(obj,'Proxy') and isinstance( obj.Proxy, Proxy_importPart) and not isinstance( obj.ViewObject.Proxy, ImportedPartViewProviderProxy):
                obj.ViewObject.Proxy = ImportedPartViewProviderProxy()
                debugPrint(2, '%s.ViewObject.Proxy = ImportedPartViewProviderProxy()'%obj.Name)
        doc_assembly.recompute()
Example #40
0
        def valid_filename(name):
            '''Reject temporary/partial files (.tmp, .copying) and any name
            whose basename starts with "_" or "." (hidden/internal files);
            the check is case-insensitive.'''
            basename = posixpath.split(name)[1].lower()
            if basename.startswith(('_', '.')):
                return False
            return not basename.endswith(('.tmp', '.copying'))
Example #41
0
 def test_split(self):
     """posixpath.split: head keeps (all) leading slashes, tail never does."""
     cases = [
         ("/foo/bar", ("/foo", "bar")),
         ("/", ("/", "")),
         ("foo", ("", "foo")),
         ("////foo", ("////", "foo")),
         ("//foo//bar", ("//foo", "bar")),
     ]
     for given, expected in cases:
         self.assertEqual(posixpath.split(given), expected)
Example #42
0
def clean_posixpath(path, up_level_references=False):
    """
    Based on code from:
    https://github.com/django/django/blob/master/django/views/static.py

    But we build always a posixpath with "/" as path separate character.


    Keep starting/ending slash:

    >>> clean_posixpath("no/slash")
    'no/slash'
    >>> clean_posixpath("/starts/with/slash")
    '/starts/with/slash'
    >>> clean_posixpath("ends/with/slash/")
    'ends/with/slash/'
    >>> clean_posixpath("") # normpath would return: "."
    ''
    >>> clean_posixpath("/")
    '/'
    >>> clean_posixpath("/../")
    '/'


    Remove every crude characters:

    >>> clean_posixpath("foo//bar")
    'foo/bar'
    >>> clean_posixpath("/foo/./bar")
    '/foo/bar'
    >>> clean_posixpath(r"foo\\bar/")
    'foo/bar/'


    up-level references would be only applied if activated:

    >>> clean_posixpath("/foo/bar/../../etc/passwd") # normpath would return: '../etc/passwd'
    '/foo/bar/etc/passwd'
    >>> clean_posixpath("/foo/bar/../../etc/passwd", up_level_references=True)
    '/etc/passwd'

    >>> clean_posixpath("../../../etc/passwd") # normpath would return: '../../../etc/passwd'
    'etc/passwd'

    >>> clean_posixpath(r"\\foo\\bar\\..\\etc\\passwd") # normpath would return: '\\foo\\bar\\..\\etc\\password'
    '/foo/bar/etc/passwd'


    Ignore windows drive parts:

    >>> clean_posixpath(r"c:\\boot.ini")
    'boot.ini'
    >>> clean_posixpath(r"foo/bar/c:\\boot.ini")
    'foo/bar/boot.ini'
    """
    # Normalize every separator to "/" first so Windows-style input works.
    path = path.replace('\\', '/')

    add_slash = path.endswith("/")
    if path.startswith("/"):
        newpath = "/"
    else:
        newpath = ""
    path = path.strip("/")

    if up_level_references:
        # e.g.: foo/../bar -> bar
        path = posixpath.normpath(path)

    for part in path.split("/"):
        if not part:
            # Strip empty path components.
            continue

        # Drop a Windows drive prefix (e.g. "c:") from the component.
        drive, part = ntpath.splitdrive(part)
        head, part = posixpath.split(part)
        if not part or part in (".", ".."):
            # Skip current/up-level references, and components that were
            # nothing but a drive prefix -- joining an empty part would
            # otherwise append a stray trailing slash to newpath.
            continue
        newpath = posixpath.join(newpath, part)

    if add_slash and newpath != "/":
        newpath += "/"

    return newpath
Example #43
0
    def rename(self, src, dst):
        """
        Rename a file/directory from src to dst.

        Raises OSError on error.
        """
        src = self.abspath(src)
        dst = self.abspath(dst)
        logging.debug("rename %r -> %r" % (src, dst))
        self._listdir_cache.flush()
        # Check not renaming to itself
        if src == dst:
            logging.debug("Renaming %r to itself - doing nothing" % src)
            return
        # If dst is an existing directory, copy src inside it
        if self.isdir(dst):
            if dst:
                dst += "/"
            dst += posixpath.basename(src)
        # Check constraints for renaming a directory
        if self.isdir(src):
            if self.listdir(src):
                raise IOSError(ENOTEMPTY,
                               "Can't rename non-empty directory: %s" % src)
            if self.isfile(dst):
                raise IOSError(ENOTDIR, "Can't rename directory to file")
        # Check not renaming to itself
        # (repeated deliberately: dst may have just been rewritten to
        # "<dst>/<basename(src)>" above and can now equal src)
        if src == dst:
            logging.debug("Renaming %r to itself - doing nothing" % src)
            return
        # Parse the paths now
        src_container_name, src_path = parse_fspath(src)
        dst_container_name, dst_path = parse_fspath(dst)
        logging.debug(
            "`.. %r/%r -> %r/%r" %
            (src_container_name, src_path, dst_container_name, dst_path))
        # Check if we are renaming containers
        if not src_path and not dst_path and src_container_name and dst_container_name:
            return self._rename_container(src_container_name,
                                          dst_container_name)
        # ...otherwise can't deal with root stuff
        if not src_container_name or not src_path or not dst_container_name or not dst_path:
            raise IOSError(EACCES, "Can't rename to / from root")
        # Check destination directory exists
        if not self.isdir(posixpath.split(dst)[0]):
            raise IOSError(
                ENOENT,
                "Can't copy %r to %r, destination directory doesn't exist" %
                (src, dst))

        # check dst container
        self._container_exists(dst_container_name)

        # Do the rename of the file/dir
        # (implemented as server-side copy + delete of the source)
        meta = self.conn.head_object(src_container_name, src_path)
        if 'x-object-manifest' in meta:
            # a manifest file
            headers = {'x-object-manifest': quote(meta['x-object-manifest'])}
        else:
            # regular file
            headers = {
                'x-copy-from': quote("/%s/%s" % (src_container_name, src_path))
            }
        self.conn.put_object(dst_container_name,
                             dst_path,
                             headers=headers,
                             contents=None)
        # Delete src
        self.conn.delete_object(src_container_name, src_path)
        # Invalidate both affected directory listings in the cache.
        self._listdir_cache.flush(posixpath.dirname(src))
        self._listdir_cache.flush(posixpath.dirname(dst))
Example #44
0
def main():
    """Sync MAR model files from an ftp server to a local directory.

    Command line options:
        -h, --help        show usage and exit
        -P X, --np=X      number of download processes (default 1)
        -D X, --directory=X  local working directory (default cwd)
        -Y X, --year=X    comma-separated years to sync (default: all)
        -C, --clobber     overwrite existing local files
        -M X, --mode=X    permissions mode (octal) for local files

    Raises:
        IOError: if no ftp path argument was supplied.
    """
    #-- Read the system arguments listed after the program
    long_options = ['help', 'np=', 'directory=', 'year=', 'clobber', 'mode=']
    optlist, arglist = getopt.getopt(sys.argv[1:], 'hP:D:Y:CM:', long_options)

    #-- command line parameters
    local_dir = os.getcwd()
    #-- regex of years to sync (default: match any run of digits, i.e. all)
    YEARS = r'\d+'
    #-- number of processes
    PROCESSES = 1
    CLOBBER = False
    #-- permissions mode of the local directories and files (number in octal)
    MODE = 0o775
    for opt, arg in optlist:
        if opt in ('-h', '--help'):
            usage()
            sys.exit()
        elif opt in ("-D", "--directory"):
            local_dir = os.path.expanduser(arg)
        elif opt in ("-Y", "--year"):
            #-- build an alternation pattern matching any requested year
            YEARS = '|'.join(arg.split(','))
        elif opt in ("-P", "--np"):
            PROCESSES = int(arg)
        elif opt in ("-C", "--clobber"):
            CLOBBER = True
        elif opt in ("-M", "--mode"):
            MODE = int(arg, 8)

    #-- need to input a ftp path
    if not arglist:
        raise IOError('Need to input a path to the MAR ftp server')

    #-- check internet connection
    if check_connection():
        #-- check if local directory exists and recursively create if not
        if not os.path.exists(local_dir):
            os.makedirs(local_dir, MODE)

        #-- connect and login to MAR ftp server
        #-- get list of files to download
        parsed_ftp = urlparse.urlparse(arglist[0])
        ftp = ftplib.FTP(parsed_ftp.netloc)
        ftp.login()
        # find files and reduce to years of interest if specified
        remote_files = sorted([
            f for f in ftp.nlst(parsed_ftp.path)
            if re.search(YEARS, posixpath.basename(f))
        ])
        ftp.quit()

        #-- run in parallel with multiprocessing Pool
        pool = multiprocessing.Pool(processes=PROCESSES)
        #-- download remote MAR files to local directory
        for j, remote_file in enumerate(remote_files):
            #-- extract filename
            url, fi = posixpath.split(remote_file)
            args = (
                parsed_ftp.netloc,
                remote_file,
                os.path.join(local_dir, fi),
            )
            kwds = dict(CLOBBER=CLOBBER, MODE=MODE)
            pool.apply_async(ftp_mar_data, args=args, kwds=kwds)
        #-- start multiprocessing jobs
        #-- close the pool
        #-- prevents more tasks from being submitted to the pool
        pool.close()
        #-- exit the completed processes
        pool.join()
Example #45
0
    def handle(self):
        self.update_ts = int(time.time())
        self.start_ts = int(time.time())
        self.session_detect()

        if TELNET_ISSUE:
            self.writeline(TELNET_ISSUE)

        authenticated = False
        for attempt in xrange(MAX_AUTH_ATTEMPTS):
            self.start_ts = int(time.time())
            authenticated = self.authentication_ok()
            if authenticated:
                break
        if not authenticated:
            return

        if self.DOECHO and self.WELCOME:
            self.writeline(self.WELCOME)

        self.session_start()

        while self.RUNSHELL and self.process.poll() is None:
            self.start_ts = int(time.time())
            self.PROMPT = "[%s@%s:~] $ " % (self.username, FAKE_HOSTNAME)
            line = self.input_reader(self,
                                     self.readline(prompt=self.PROMPT).strip())
            raw = line.raw
            hooks = []
            for c in raw.split(";"):
                cmds = c.split()
                try:
                    if 'busybox' in cmds[0]:
                        _cmd = cmds[1]
                        if HOOK_CMD.get(_cmd):
                            cmds[1] = HOOK_CMD.get(_cmd)
                            cmds[0] = ""
                    else:
                        _cmd = cmds[0]
                        if HOOK_CMD.get(_cmd):
                            cmds[0] = HOOK_CMD.get(_cmd)
                except:
                    pass
                hooks.append(" ".join(cmds))
            raw = ";".join(hooks)
            cmd = line.cmd
            params = line.params

            self._log("CMD", raw)

            if cmd in ("QUIT", ):
                try:
                    self.COMMANDS[cmd](params)
                    continue
                except:
                    pass

            try:
                match = re.search(r"(?i)(wget|curl).+(http[^ >;\"']+)", raw)
                if match:
                    url = match.group(2)
                    original = posixpath.split(urlparse.urlsplit(url).path)[-1]
                    filename = self._retrieve_url(url)
                    if filename:
                        destination = os.path.join(
                            SAMPLES_DIR,
                            "%s_%s" % (original, self._md5(filename)))
                        shutil.move(filename, destination)
                        self._log("SAMPLE", destination)
                        self.session_timeout("download file finished")
            except:
                pass
            if not self.alive:
                print "session dead"
                return

            try:
                if RUN_ATTACKERS_COMMANDS:
                    self.process.stdin.write(raw.strip() + "\n")
                else:
                    self.process.stdin.write("\n")
            except IOError, ex:
                return
                #raise
            finally:
Example #46
0
    def upload(self,
               hdfs_path,
               local_path,
               n_threads=1,
               temp_dir=None,
               chunk_size=2**16,
               progress=None,
               cleanup=True,
               **kwargs):
        """Upload a file or directory to HDFS.

    :param hdfs_path: Target HDFS path. If it already exists and is a
      directory, files will be uploaded inside.
    :param local_path: Local path to file or folder. If a folder, all the files
      inside of it will be uploaded (note that this implies that folders empty
      of files will not be created remotely).
    :param n_threads: Number of threads to use for parallelization. A value of
      `0` (or negative) uses as many threads as there are files.
    :param temp_dir: Directory under which the files will first be uploaded
      when `overwrite=True` and the final remote path already exists. Once the
      upload successfully completes, it will be swapped in.
    :param chunk_size: Interval in bytes by which the files will be uploaded.
    :param progress: Callback function to track progress, called every
      `chunk_size` bytes. It will be passed two arguments, the path to the
      file being uploaded and the number of bytes transferred so far. On
      completion, it will be called once with `-1` as second argument.
    :param cleanup: Delete any uploaded files if an error occurs during the
      upload.
    :param \*\*kwargs: Keyword arguments forwarded to :meth:`write`. In
      particular, set `overwrite` to overwrite any existing file or directory.

    On success, this method returns the remote upload path.

    """
        if chunk_size <= 0:
            raise ValueError('Upload chunk size must be positive.')
        _logger.info('Uploading %r to %r.', local_path, hdfs_path)

        def _upload(_path_tuple):
            """Upload a single file."""
            _local_path, _temp_path = _path_tuple
            _logger.debug('Uploading %r to %r.', _local_path, _temp_path)

            def wrap(_reader, _chunk_size, _progress):
                """Generator that can track progress."""
                nbytes = 0
                while True:
                    chunk = _reader.read(_chunk_size)
                    if chunk:
                        if _progress:
                            nbytes += len(chunk)
                            _progress(_local_path, nbytes)
                        yield chunk
                    else:
                        # EOF: signal completion with -1 per the contract above.
                        break
                if _progress:
                    _progress(_local_path, -1)

            with open(_local_path, 'rb') as reader:
                self.write(_temp_path, wrap(reader, chunk_size, progress),
                           **kwargs)

        # First, we gather information about remote paths.
        hdfs_path = self.resolve(hdfs_path)
        # `temp_path` is the path files are actually written to. It equals the
        # final `hdfs_path` when we can write directly; otherwise it stays None
        # here and a temporary sibling is generated below, swapped in at the end.
        temp_path = None
        try:
            statuses = [
                status for _, status in self.list(hdfs_path, status=True)
            ]
        except HdfsError as err:
            # Distinguish the error cases by message text (the server does not
            # return structured codes for these).
            message = str(err)
            if 'not a directory' in message:
                # Remote path is a normal file.
                if not kwargs.get('overwrite'):
                    raise HdfsError('Remote path %r already exists.',
                                    hdfs_path)
            elif 'does not exist' in message:
                # Remote path doesn't exist.
                temp_path = hdfs_path
            else:
                # An unexpected error occurred.
                raise err
        else:
            # Remote path is a directory.
            suffixes = set(status['pathSuffix'] for status in statuses)
            local_name = osp.basename(local_path)
            # Upload inside the directory, under the local basename.
            hdfs_path = psp.join(hdfs_path, local_name)
            if local_name in suffixes:
                if not kwargs.get('overwrite'):
                    raise HdfsError('Remote path %r already exists.',
                                    hdfs_path)
            else:
                temp_path = hdfs_path
        if not temp_path:
            # The remote path already exists, we need to generate a temporary one.
            remote_dpath, remote_name = psp.split(hdfs_path)
            temp_dir = temp_dir or remote_dpath
            temp_path = psp.join(
                temp_dir, '%s.temp-%s' % (remote_name, int(time.time())))
            _logger.debug(
                'Upload destination %r already exists. Using temporary path %r.',
                hdfs_path, temp_path)
        # Then we figure out which files we need to upload, and where.
        if osp.isdir(local_path):
            local_fpaths = [
                osp.join(dpath, fpath)
                for dpath, _, fpaths in os.walk(local_path) for fpath in fpaths
            ]
            if not local_fpaths:
                raise HdfsError('No files to upload found inside %r.',
                                local_path)
            # Strip the local root prefix so remote paths mirror the local
            # layout (with forward slashes) under `temp_path`.
            offset = len(local_path.rstrip(os.sep)) + len(os.sep)
            fpath_tuples = [(fpath,
                             psp.join(temp_path,
                                      fpath[offset:].replace(os.sep, '/')))
                            for fpath in local_fpaths]
        elif osp.exists(local_path):
            fpath_tuples = [(local_path, temp_path)]
        else:
            raise HdfsError('Local path %r does not exist.', local_path)
        # Finally, we upload all files (optionally, in parallel).
        if n_threads <= 0:
            n_threads = len(fpath_tuples)
        else:
            n_threads = min(n_threads, len(fpath_tuples))
        _logger.debug('Uploading %s files using %s thread(s).',
                      len(fpath_tuples), n_threads)
        try:
            if n_threads == 1:
                for path_tuple in fpath_tuples:
                    _upload(path_tuple)
            else:
                _map_async(n_threads, _upload, fpath_tuples)
        except Exception as err:  # pylint: disable=broad-except
            if cleanup:
                _logger.exception('Error while uploading. Attempting cleanup.')
                try:
                    self.delete(temp_path, recursive=True)
                except Exception:
                    _logger.error('Unable to cleanup temporary folder.')
                finally:
                    # Re-raise the original upload error regardless of whether
                    # the cleanup itself succeeded.
                    raise err
            else:
                raise err
        else:
            if temp_path != hdfs_path:
                # We wrote to a temporary location: swap it in by deleting the
                # pre-existing destination and renaming the temp path over it.
                _logger.debug('Upload of %r complete. Moving from %r to %r.',
                              local_path, temp_path, hdfs_path)
                self.delete(hdfs_path, recursive=True)
                self.rename(temp_path, hdfs_path)
            else:
                _logger.debug('Upload of %r to %r complete.', local_path,
                              hdfs_path)
        return hdfs_path
Example #47
0
 def get_temp_filename(self, filename):
     """Return the hidden '.<name>.tmp' sibling path for *filename*."""
     head, tail = posixpath.split(filename)
     return posixpath.join(head, '.' + tail + '.tmp')
Example #48
0
def read_satellite_track_file(filename):
    print "Reading %s" % filename

    with open(filename, 'r') as file:
        lines = file.readlines()

        if not lines or len(lines) < 2:
            return None

        track = {}

        track['id'] = int(posixpath.splitext(posixpath.split(filename)[-1])[0])

        track['catalogue_id'] = int(lines[0].split()[0])
        track['name'] = " ".join(lines[0].split()[1:])

        s = lines[1].split()
        if s:
            track['country'] = s[0]
        else:
            track['country'] = ''

        if lines[2].split():
            track['launch_date'] = datetime.datetime.strptime(
                lines[2].split()[0], "%Y-%m-%d")
        else:
            track['launch_date'] = None

        track['catalogue'] = int(lines[3].split()[0])
        track['tle'] = " ".join(lines[3].split()[1:])

        tle = parse_tle(track['tle'])
        track['orbit_inclination'] = tle['inclination']
        track['orbit_period'] = tle['period']
        track['orbit_eccentricity'] = tle['eccentricity']

        s = lines[4].split()

        track['ang_vel_ra'] = float(s[0])
        track['ang_vel_dec'] = float(s[1])

        s = lines[5].split()

        track['age'] = float(s[0])
        track['transversal_shift'] = float(s[1])
        track['transversal_rms'] = float(s[2])
        track['binormal_shift'] = float(s[3])
        track['binormal_rms'] = float(s[4])

        s = lines[6].split()

        track['variability'] = int(s[0])
        track['variability_period'] = float(s[1])

        records = []
        object_ids = {}

        for line in lines[7:]:
            rec = {}
            s = line.split()

            rec['object_id'] = int(s[0])
            rec['time'] = datetime.datetime.strptime(" ".join(s[1:7]),
                                                     "%Y %m %d %H %M %S.%f")
            rec['stdmag'] = float(s[7])
            rec['phase'] = float(s[8])
            rec['distance'] = float(s[9])
            rec['penumbra'] = float(s[10])

            object_ids[int(s[0])] = 1

            records.append(rec)

        track['records'] = records
        track['object_ids'] = object_ids.keys()

        return track
Example #49
0
 def get_relationship_part_uri(part_uri):
     """Map a part URI to the URI of its relationships (.rels) part."""
     folder, name = posixpath.split(part_uri)
     return posixpath.join(folder, '_rels', '%s.rels' % name)
Example #50
0
def nameonly(s):
    """Return the base filename of *s* without directory or extension."""
    basename = posixpath.split(s)[1]
    return posixpath.splitext(basename)[0]
Example #51
0
def validate_append_to_uri_path_test_cases(cases):
    """Check append_to_uri_path against (uri, path, expected) triples.

    Each case is verified twice: with the path passed whole, and with it
    pre-split into head/tail components.
    """
    for uri, path, expected in cases:
        assert append_to_uri_path(uri, path) == expected
        head, tail = posixpath.split(path)
        assert append_to_uri_path(uri, head, tail) == expected
Example #52
0
 def reset_new_name(self):
     """Reset the name editor to the basename of the upload target."""
     basename = posixpath.split(self.upload.target)[1]
     self.edit_new_name.setText(basename)
def splitpath(path):
    """Split *path* with POSIX semantics when it contains '/', else Windows."""
    if '/' in path:
        return posixpath.split(path)
    return ntpath.split(path)
Example #54
0
def get_dependent_url(url_path, suffix, ext=None):
    """Derive the URL of a dependent artifact of *url_path*.

    The suffix is inserted before the extension, separated by '@'.  When
    *ext* is given it replaces the original extension.
    """
    directory, filename = posixpath.split(url_path)
    base, current_ext = posixpath.splitext(filename)
    new_ext = current_ext if ext is None else ext
    return posixpath.join(directory, base + u"@" + suffix + new_ext)
Example #55
0
    def rename_item(self, name_item):
        """Prompt the user for a new name and rename a file/directory item.

        Validates the entered name (non-empty, not '.'/'..', no '/'),
        rejects names already used by a sibling, then asks the script
        manager to perform the rename and updates the tree model on
        success.

        :param name_item: model item holding the current name; its
            USER_ROLE_ITEM_TYPE data distinguishes files from directories.
        """
        item_type = name_item.data(USER_ROLE_ITEM_TYPE)

        if item_type == ITEM_TYPE_FILE:
            title = 'Rename File'
            type_name = 'file'
        else:
            title = 'Rename Directory'
            type_name = 'directory'

        old_name = name_item.text()

        # get new name
        dialog = ExpandingInputDialog(get_main_window())
        dialog.setModal(True)
        dialog.setWindowTitle(title)
        dialog.setLabelText('Enter new {0} name:'.format(type_name))
        dialog.setInputMode(QInputDialog.TextInput)
        dialog.setTextValue(old_name)
        dialog.setOkButtonText('Rename')

        if dialog.exec_() != QDialog.Accepted:
            return

        new_name = dialog.textValue()

        if new_name == old_name:
            return

        # check that new name is valid
        if len(
                new_name
        ) == 0 or new_name == '.' or new_name == '..' or '/' in new_name:
            QMessageBox.critical(
                get_main_window(), title + ' Error',
                'A {0} name cannot be empty, cannot be one dot [.], cannot be two dots [..] and cannot contain a forward slash [/].'
                .format(type_name))
            return

        # check that new name is not already in use
        name_item_parent = name_item.parent()

        # top-level items have no parent; use the model root instead
        if name_item_parent is None:
            name_item_parent = self.tree_files_model.invisibleRootItem()

        for i in range(name_item_parent.rowCount()):
            if new_name == name_item_parent.child(i).text():
                QMessageBox.critical(
                    get_main_window(), title + ' Error',
                    'The new {0} name is already in use.'.format(type_name))
                return

        absolute_old_name = posixpath.join(self.bin_directory,
                                           get_full_item_path(name_item))
        absolute_new_name = posixpath.join(
            posixpath.split(absolute_old_name)[0], new_name)

        def cb_rename(result):
            # Runs when the rename script finishes; on success update the
            # model text and re-apply the current sort order.
            if not report_script_result(
                    result, title + ' Error',
                    u'Could not rename {0}'.format(type_name)):
                return

            name_item.setText(new_name)

            if self.tree_files.header().sortIndicatorSection() == 0:
                self.tree_files.header().setSortIndicator(
                    0,
                    self.tree_files.header().sortIndicatorOrder())

        self.script_manager.execute_script(
            'rename', cb_rename, [absolute_old_name, absolute_new_name])
Example #56
0
def printindex(outfilename, headfilename, levels, titles, tables):
    """Write an HTML index page grouping documented routines by level.

    :param outfilename: path of the HTML file to create.
    :param headfilename: path of an HTML header fragment prepended verbatim
        (with 'PETSC_DIR' rewritten to a relative path).
    :param levels: category names, parallel to `titles` and `tables`.
    :param titles: section headings, one per category.
    :param tables: lists of source filenames, one list per category.
    """
    # Read in the header file
    headbuf = ''
    if posixpath.exists(headfilename):
        try:
            with open(headfilename, 'r') as fd:
                headbuf = fd.read()
        except IOError:
            print('Error reading file', headfilename)
            exit()
        # Header links are rooted at PETSC_DIR; rewrite them as relative.
        headbuf = headbuf.replace('PETSC_DIR', '../../../')
    else:
        print('Header file \'' + headfilename + '\' does not exist')

    # Now open the output file.
    try:
        fd = open(outfilename, 'w')
    except IOError:
        print('Error writing to file', outfilename)
        exit()

    with fd:
        # Add the HTML Header info here.
        fd.write(headbuf)
        # Add some HTML separators
        fd.write('\n<P>\n')
        fd.write('<TABLE>\n')
        for i in range(len(levels)):
            level = levels[i]
            title = titles[i]

            if len(tables[i]) == 0:
                # If no functions in 'None' category, then don't print
                # this category.
                if level == 'none':
                    continue
                # If no functions in any other category, then print the
                # header saying no functions in this category.
                fd.write('<TR><TD WIDTH=250 COLSPAN="3">')
                fd.write('<B>' + 'No ' + level + ' routines' + '</B>')
                fd.write('</TD></TR>\n')
                continue

            fd.write('<TR><TD WIDTH=250 COLSPAN="3">')
            fd.write('<B>' + title + '</B>')
            fd.write('</TD></TR>\n')
            # Now make the entries in the table column oriented
            tables[i] = maketranspose(tables[i], 3)
            # enumerate() replaces list.index(), which was O(n) per lookup
            # and wrong when the same filename appears more than once.
            for j, filename in enumerate(tables[i]):
                path, name = posixpath.split(filename)
                func_name, ext = posixpath.splitext(name)
                mesg = (' <TD WIDTH=250><A HREF="' + './' + name + '">' +
                        func_name + '</A></TD>\n')
                fd.write(mesg)
                if j % 3 == 2:
                    fd.write('<TR>\n')
        fd.write('</TABLE>\n')
        # Add HTML tail info here
        fd.write(
            '<BR><A HREF="../../../docs/manualpages/index.html">Table of Contents</A>\n'
        )
Example #57
0
    def get_name_pathes(self, path):
        """Resolve *path* into matching sub-directories and type tables.

        First tries the prepared path as a directory name directly; if no
        such directory exists, the final path component is treated as a
        search pattern within its parent directory.

        :param path: user-entered path, possibly containing a wildcard in
            its last component.
        :return: tuple (sub_dirs, tables) of matching directory objects
            and type-table objects.
        """
        assert self.context is not None
        provider = self.context.provider
        assert isinstance(provider, AlchemyProvider)
        self.reset()

        #prepare path
        log.debug(" |  |- ls.get_name_pathes")
        log.debug(" |  | \\")
        log.debug(" |  |  |- before prepare_path: " + path)

        self.raw_entry = self.prepare_path(path)
        log.debug(" |  |  |- after prepare_path:  " + self.raw_entry)

        #SEARCH LOGIC
        #---------------------------

        #brute assumption that user has entered a simple dir path
        try:
            self.parent_dir = provider.get_directory(self.raw_entry)
            self.parent_path = self.raw_entry
            self.pattern = ""
        except DirectoryNotFound:
            # Not a plain directory; fall through to pattern handling below.
            self.parent_dir = None
            log.debug(" |  |  |- directory {0} not found.".format(
                self.raw_entry))

        if not self.parent_dir:
            #we have not find the directory by brute rawentry.
            #but maybe it is just /path/plus*some*pattern
            # 'tale' is the final path component, used as the search pattern.
            (head, tale) = posixpath.split(self.raw_entry)
            self.parent_path = head
            self.pattern = tale
            log.debug(" |  |  |- searching parent directory as:")
            log.debug(" |  |  |- new path: " + self.parent_path)
            if self.pattern:
                log.debug(" |  |  |- pattern: " + self.pattern)

            #try to find such dir once more
            # NOTE: deliberately not caught here; a missing parent directory
            # propagates DirectoryNotFound to the caller.
            self.parent_dir = provider.get_directory(self.parent_path)

        #found a directory
        assert isinstance(self.parent_dir, Directory)
        log.debug(" |  |  |- searching sub directories ")
        log.debug(" |  |  |- full path: " + self.parent_dir.path)
        if self.pattern:
            log.debug(" |  |  |- pattern: " + self.pattern)

        #part 1 directories for this path
        if self.pattern == "":
            sub_dirs = self.parent_dir.sub_dirs
            log.debug(" |  |  |- simply taking sub directories ")
        else:
            sub_dirs = provider.search_directories(self.pattern,
                                                   self.parent_path)
            log.debug(" |  |  |- use database search for directories ")

        #fill list of directory names
        dir_list = [subdir.name for subdir in sub_dirs]

        log.debug(" |  |  |- found dirs:" + " ".join([d for d in dir_list]))

        log.debug(" |  |  |- searching tables ")
        #part 2 is tables for this path
        if self.pattern == "":
            tables = self.context.provider.get_type_tables(self.parent_dir)
        else:
            tables = self.context.provider.search_type_tables(
                self.pattern, self.parent_path)

        return sub_dirs, tables
Example #58
0
    def blind_match_file(self,
                         filename=None,
                         darkname=None,
                         obj=None,
                         outfile=None,
                         order=2):
        """Blindly solve the astrometry of an image with Astrometry.Net.

        Accepts either a FITS file (optionally dark-subtracted when
        *darkname* is given) or an extracted object list *obj* with 'x',
        'y' and 'flux' arrays. Runs the local `solve-field` binary in a
        temporary directory and returns a pywcs.WCS solution, or None if
        solving failed or the binary was not found.

        :param filename: path to the FITS image to solve (optional).
        :param darkname: path to a dark frame subtracted from the image.
        :param obj: dict-like object list used instead of an image.
        :param outfile: if set, the solved FITS image is moved here.
        :param order: SIP distortion order passed to solve-field (-t).
        """
        dir = self.tempdir()
        wcs = None
        binname = None
        ext = 0

        # Locate the local Astrometry.Net solve-field binary.
        for path in ['.', '/usr/local', '/opt/local']:
            if os.path.isfile(
                    posixpath.join(path, 'astrometry', 'bin', 'solve-field')):
                binname = posixpath.join(path, 'astrometry', 'bin',
                                         'solve-field')
                break

        # Dark subtraction: write the preprocessed frame into the temp dir
        # and solve that instead of the raw image.
        if filename and darkname and posixpath.exists(
                filename) and posixpath.exists(darkname):
            image = pyfits.getdata(filename, -1)
            header = pyfits.getheader(filename, -1)
            dark = pyfits.getdata(darkname, -1)

            # 1.0 * promotes to float before subtracting the dark.
            image = 1.0 * image - dark

            filename = posixpath.join(dir, 'preprocessed.fits')

            pyfits.writeto(filename, image, clobber=True)

        if binname:
            extra = ""

            if not filename and obj:
                # No image given: build a FITS binary table of detected
                # objects and solve from the x/y list instead.
                columns = [
                    pyfits.Column(name='XIMAGE', format='1D', array=obj['x']),
                    pyfits.Column(name='YIMAGE', format='1D', array=obj['y']),
                    pyfits.Column(name='FLUX', format='1D', array=obj['flux'])
                ]
                tbhdu = pyfits.BinTableHDU.from_columns(columns)
                filename = posixpath.join(dir, 'list.fits')
                tbhdu.writeto(filename, clobber=True)
                extra = "--x-column XIMAGE --y-column YIMAGE --sort-column FLUX --width %d --height %d" % (
                    np.ceil(max(obj['x'])), np.ceil(max(obj['y'])))
            elif len(pyfits.open(filename)) > 1:
                # Compressed file, let's uncompress it
                newname = posixpath.join(dir, 'uncompressed.fits')
                img = pyfits.getdata(filename, -1)
                header = pyfits.getheader(filename, -1)
                pyfits.writeto(newname, img, header, clobber=True)
                filename = newname

            # Derive the sibling output paths solve-field produces:
            # <base>.new (solved image), <base>.wcs (solution header),
            # plus a <base>.tmp we use for the second pass.
            wcsname = posixpath.split(filename)[-1]
            fitsname = posixpath.join(dir,
                                      posixpath.splitext(wcsname)[0] + '.new')
            tmpname = posixpath.join(dir,
                                     posixpath.splitext(wcsname)[0] + '.tmp')
            wcsname = posixpath.join(dir,
                                     posixpath.splitext(wcsname)[0] + '.wcs')

            # First pass: blind solve without verification.
            if not os.path.isfile(fitsname):
                os.system(
                    "%s -D %s --no-verify --overwrite --no-fits2fits --no-plots --use-sextractor --objs 300 -t %d -l 30 %s %s >/dev/null 2>/dev/null"
                    % (binname, dir, order, extra, filename))

            # Second pass: re-solve the first-pass result to refine it.
            if os.path.isfile(fitsname):
                shutil.move(fitsname, tmpname)
                os.system(
                    "%s -D %s --overwrite --no-fits2fits --no-plots --use-sextractor --objs 300 -t %d -l 30 %s >/dev/null 2>/dev/null"
                    % (binname, dir, order, tmpname))

                if os.path.isfile(wcsname):
                    header = pyfits.getheader(wcsname)
                    wcs = pywcs.WCS(header)

                    if outfile:
                        # NOTE(review): fitsname was moved to tmpname above;
                        # this relies on the second solve-field run having
                        # recreated <base>.new — confirm, otherwise the move
                        # fails here.
                        shutil.move(fitsname, outfile)
        else:
            print "Astrometry.Net binary not found"

        # Remove the temporary working directory and everything in it.
        shutil.rmtree(dir)

        return wcs
Example #59
0
def _RunOnAndroidTarget(binary_dir, test, android_device, extra_command_line):
  """Run a single test executable on an Android device via adb.

  Pushes the test binary, its helper executables, and its test data to a
  fresh temporary directory on the device, runs the test there with
  gtest-style environment configuration, and removes the temporary
  directory afterwards.

  Args:
    binary_dir: local directory containing the built test executables.
    test: name of the test executable to run.
    android_device: adb serial of the target device (passed to "adb -s").
    extra_command_line: list of extra arguments appended to the test's
        command line on the device.

  Raises:
    subprocess.CalledProcessError: if any adb invocation fails or the test
        exits with a nonzero status on the device.
  """
  local_test_path = os.path.join(binary_dir, test)
  # These tests may legitimately be absent from some build configurations;
  # skip (rather than fail) when the binary was never produced.
  MAYBE_UNSUPPORTED_TESTS = (
      'crashpad_client_test',
      'crashpad_handler_test',
      'crashpad_minidump_test',
      'crashpad_snapshot_test',
  )
  if not os.path.exists(local_test_path) and test in MAYBE_UNSUPPORTED_TESTS:
    print('This test is not present and may not be supported, skipping')
    return

  def _adb(*args):
    # Run one adb command against the chosen device, raising
    # CalledProcessError on failure.
    #
    # Flush all of this script’s own buffered stdout output before running adb,
    # which will likely produce its own output on stdout.
    sys.stdout.flush()

    adb_command = ['adb', '-s', android_device]
    adb_command.extend(args)
    subprocess.check_call(adb_command, shell=IS_WINDOWS_HOST)

  def _adb_push(sources, destination):
    # “adb push source… destination”: push each local path in |sources| to
    # |destination| on the device.
    args = list(sources)
    args.append(destination)
    _adb('push', *args)

  def _adb_shell(command_args, env=None):
    # Run |command_args| in a shell on the device, with |env| exported into
    # its environment, raising CalledProcessError on a nonzero exit status.
    #
    # Build a command to execute via “sh -c” instead of invoking it directly.
    # Here’s why:
    #
    # /system/bin/env isn’t normally present prior to Android 6.0 (M), where
    # toybox was introduced (Android platform/manifest 9a2c01e8450b). Instead,
    # set environment variables by using the shell’s internal “export” command.
    #
    # adbd prior to Android 7.0 (N), and the adb client prior to SDK
    # platform-tools version 24, don’t know how to communicate a shell command’s
    # exit status. This was added in Android platform/system/core 606835ae5c4b).
    # With older adb servers and clients, adb will “exit 0” indicating success
    # even if the command failed on the device. This makes
    # subprocess.check_call() semantics difficult to implement directly. As a
    # workaround, have the device send the command’s exit status over stdout and
    # pick it back up in this function.
    #
    # Both workarounds are implemented by giving the device a simple script,
    # which adbd will run as an “sh -c” argument.
    #
    # |env| defaults to None rather than a mutable {} default argument.
    if env is None:
      env = {}
    adb_command = ['adb', '-s', android_device, 'shell']
    script_commands = []
    for k, v in env.items():
      script_commands.append('export %s=%s' % (pipes.quote(k), pipes.quote(v)))
    script_commands.extend([
        ' '.join(pipes.quote(x) for x in command_args),
        'status=${?}',
        'echo "status=${status}"',
        'exit ${status}'])
    adb_command.append('; '.join(script_commands))
    child = subprocess.Popen(adb_command,
                             shell=IS_WINDOWS_HOST,
                             stdin=open(os.devnull),
                             stdout=subprocess.PIPE)

    # Raw string: “\d” is an invalid escape in a plain string literal.
    FINAL_LINE_RE = re.compile(r'status=(\d+)$')
    final_line = None
    while True:
      # Use readline so that the test output appears “live” when running.
      data = child.stdout.readline().decode('utf-8')
      if data == '':
        break
      if final_line is not None:
        # It wasn’t really the final line.
        print(final_line, end='')
        final_line = None
      if FINAL_LINE_RE.match(data.rstrip()):
        final_line = data
      else:
        print(data, end='')

    if final_line is None:
      # Maybe there was some stderr output after the end of stdout. Old versions
      # of adb, prior to when the exit status could be communicated, smush the
      # two together.
      raise subprocess.CalledProcessError(-1, adb_command)
    status = int(FINAL_LINE_RE.match(final_line.rstrip()).group(1))
    if status != 0:
      raise subprocess.CalledProcessError(status, adb_command)

    child.wait()
    if child.returncode != 0:
      # Bug fix: this previously read “subprocess.returncode”, an attribute
      # the subprocess module does not have, which would have raised
      # AttributeError instead of reporting the real failure.
      raise subprocess.CalledProcessError(child.returncode, adb_command)

  # /system/bin/mktemp isn’t normally present prior to Android 6.0 (M), where
  # toybox was introduced (Android platform/manifest 9a2c01e8450b). Fake it with
  # a host-generated name. This won’t retry if the name is in use, but with 122
  # bits of randomness, it should be OK. This uses “mkdir” instead of “mkdir -p”
  # because the latter will not indicate failure if the directory already
  # exists.
  device_temp_dir = '/data/local/tmp/%s.%s' % (test, uuid.uuid4().hex)
  _adb_shell(['mkdir', device_temp_dir])

  try:
    # Specify test dependencies that must be pushed to the device. This could be
    # determined automatically in a GN build, following the example used for
    # Fuchsia. Since nothing like that exists for GYP, hard-code it for
    # supported tests.
    test_build_artifacts = [test, 'crashpad_handler']
    test_data = ['test/test_paths_test_data_root.txt']

    if test == 'crashpad_test_test':
      test_build_artifacts.append(
          'crashpad_test_test_multiprocess_exec_test_child')
    elif test == 'crashpad_util_test':
      test_data.append('util/net/testdata/')

    # Establish the directory structure on the device.
    device_out_dir = posixpath.join(device_temp_dir, 'out')
    device_mkdirs = [device_out_dir]
    for source_path in test_data:
      # A trailing slash could reasonably mean to copy an entire directory, but
      # will interfere with what’s needed from the path split. All parent
      # directories of any source_path need to be be represented in
      # device_mkdirs, but it’s important that no source_path itself wind up in
      # device_mkdirs, even if source_path names a directory, because that would
      # cause the “adb push” of the directory below to behave incorrectly.
      if source_path.endswith(posixpath.sep):
        source_path = source_path[:-1]

      device_source_path = posixpath.join(device_temp_dir, source_path)
      device_mkdir = posixpath.split(device_source_path)[0]
      if device_mkdir not in device_mkdirs:
        device_mkdirs.append(device_mkdir)
    adb_mkdir_command = ['mkdir', '-p']
    adb_mkdir_command.extend(device_mkdirs)
    _adb_shell(adb_mkdir_command)

    # Push the test binary and any other build output to the device.
    local_test_build_artifacts = []
    for artifact in test_build_artifacts:
      local_test_build_artifacts.append(os.path.join(binary_dir, artifact))
    _adb_push(local_test_build_artifacts, device_out_dir)

    # Push test data to the device.
    for source_path in test_data:
      _adb_push([os.path.join(CRASHPAD_DIR, source_path)],
                posixpath.join(device_temp_dir, source_path))

    # Run the test on the device. Pass the test data root in the environment.
    #
    # Because the test will not run with its standard output attached to a
    # pseudo-terminal device, gtest will not normally enable colored output, so
    # mimic gtest’s own logic for deciding whether to enable color by checking
    # this script’s own standard output connection. The whitelist of TERM values
    # comes from gtest googletest/src/gtest.cc
    # testing::internal::ShouldUseColor().
    env = {'CRASHPAD_TEST_DATA_ROOT': device_temp_dir}
    gtest_color = os.environ.get('GTEST_COLOR')
    if gtest_color in ('auto', None):
      if (sys.stdout.isatty() and
          (os.environ.get('TERM') in
               ('xterm', 'xterm-color', 'xterm-256color', 'screen',
                'screen-256color', 'tmux', 'tmux-256color', 'rxvt-unicode',
                'rxvt-unicode-256color', 'linux', 'cygwin') or
           (IS_WINDOWS_HOST and _EnableVTProcessingOnWindowsConsole()))):
        gtest_color = 'yes'
      else:
        gtest_color = 'no'
    env['GTEST_COLOR'] = gtest_color
    _adb_shell([posixpath.join(device_out_dir, test)] + extra_command_line, env)
  finally:
    # Always clean up the device-side temporary directory, even on failure.
    _adb_shell(['rm', '-rf', device_temp_dir])
Example #60
0
 def split(self, path):
     """Split *path* into a ``(head, tail)`` pair using POSIX semantics."""
     head, tail = posixpath.split(path)
     return head, tail