Esempio n. 1
0
def dolink(dirpath, target, target_prefix='', excludes=None):
    """Symlink every entry of `dirpath` into `target`.

    Directories listed in the module-level MKDIR_INSTEADOF_LINK are
    created in the target and recursed into instead of being linked
    wholesale.  Entries in the module-level EXCLUDES (or the per-call
    `excludes`) are skipped.

    Args:
        dirpath: source directory whose entries are linked.
        target: directory that receives the symlinks.
        target_prefix: string prepended to each link name in `target`.
        excludes: optional extra collection of source paths to skip.
    """
    def _link_or_warn(linkpath, srcabs, srcpath):
        # Create the symlink unless something already occupies the slot;
        # an existing link that already points at the source is silently
        # accepted, anything else is reported for manual inspection.
        if path.lexists(linkpath):
            if not (path.islink(linkpath)
                    and os.readlink(linkpath) == srcabs):
                warn('exists: diff -u %s %s' % (linkpath, srcpath))
        else:
            os.symlink(srcabs, linkpath)

    for fn in sorted(os.listdir(dirpath)):
        localfn = path.join(dirpath, fn)
        if localfn in EXCLUDES:
            continue
        if excludes and localfn in excludes:
            continue

        targetfn = path.join(target, target_prefix + fn)
        localfnabs = path.abspath(localfn)
        if path.isdir(localfn) and localfn in MKDIR_INSTEADOF_LINK:
            mkdir(targetfn)
            # NOTE(review): `excludes` is intentionally not propagated to
            # the recursive call, matching the original behavior --
            # confirm this is desired.
            dolink(localfn, targetfn)
        else:
            # Files and ordinary directories are linked in place; the
            # identical handling was previously duplicated in both
            # branches.
            _link_or_warn(targetfn, localfnabs, localfn)
 def setOpenSSLVersion(self, ret=None):
     """Probe well-known library directories for versioned libssl and
     libcrypto sonames and record the detected suffix in
     self.openSSLVersion; on failure show a fatal error dialog.
     """
     printDBG('Check opennSSL version')
     self.setInfo(_("Detection of the OpenSSL version."), _("OpenSSL lib is needed by wget and rtmpdump utilities."))
     searchDirs = ['/usr/lib/', '/lib/', '/usr/local/lib/', '/local/lib/', '/lib/i386-linux-gnu/']
     for ver in ['.0.9.8', '.1.0.0']:
         libsslExist = False
         libryptoExist = False
         for libDir in searchDirs:
             try:
                 candidate = libDir + 'libssl.so' + ver
                 if os_path.isfile(candidate) and not os_path.islink(candidate):
                     libsslExist = True
                 candidate = libDir + 'libcrypto.so' + ver
                 if os_path.isfile(candidate) and not os_path.islink(candidate):
                     libryptoExist = True
             except:
                 printExc()
                 continue
             if libsslExist and libryptoExist:
                 break
         if libsslExist and libryptoExist:
             break
     if libsslExist and libryptoExist:
         # `ver` keeps the value from the loop iteration that matched.
         self.openSSLVersion = ver
         self.getGstreamerVer()
     else:
         self.openSSLVersion = ""
         self.showMessage(_("Fatal Error!\nOpenssl could not be found. Please install it and retry."), MessageBox.TYPE_ERROR, boundFunction(self.finish, False) )
Esempio n. 3
0
def can_be_unrolled(source, dest):
    """Return True when `dest` can safely be replaced by `source`.

    A missing destination is trivially replaceable.  Symlinks are
    compared by target, directories by content (via filecmp.dircmp,
    ignoring names matched by the module-level `ignore_regex`), and
    regular files byte-wise.  Mismatched kinds are reported and refused.
    """
    if not exists(dest):
        return True

    if islink(dest):
        if islink(source):
            # Two links: replaceable only when they point elsewhere.
            return readlink(source) != readlink(dest)
        else:
            # Link vs. regular path: compare the resolved destination
            # with the absolute source.
            return realpath(dest) != abspath(source)
    elif isdir(dest) and isdir(source):
        dcmp = filecmp.dircmp(source, dest)
        afiles = [r for r in dcmp.right_only if not ignore_regex.match(r)]
        dfiles = dcmp.diff_files

        if afiles or dfiles:
            # print() form works identically on Python 2 and 3 for a
            # single argument (the original used Py2 print statements).
            print('dirs are different {}: new {}, chg {}'.format(source, afiles, dfiles))
            return False

        return True
    elif isfile(dest) and isfile(source):
        if filecmp.cmp(source, dest):
            return True

        print('files are different {} {}'.format(source, dest))
        return False
    else:
        print('unknown case {} {}'.format(source, dest))
        # Previously fell off the end (implicit None); make the refusal
        # explicit -- both values are falsy, so callers are unaffected.
        return False
Esempio n. 4
0
def decideClean(workDir, architecture, aggressiveCleanup):
  """Compute (but do not perform) the list of paths that can be deleted.

  BUILD directories without a *-latest* symlink pointing at them, the
  TMP scratch area and -- under aggressive cleanup -- the tarball store
  and SOURCES are all candidates.  Only paths that currently exist are
  returned.
  """
  # Targets of the BUILD/*-latest* symlinks: these builds are still live.
  liveBuilds = set(os.readlink(s) for s in glob.glob("%s/BUILD/*-latest*" % workDir))
  # TMP must always go; under aggressive cleanup the downloaded tarball
  # store and SOURCES are expendable too.
  candidates = ["%s/TMP" % workDir]
  if aggressiveCleanup:
    candidates.append("%s/TARS/%s/store" % (workDir, architecture))
    candidates.append("%s/SOURCES" % (workDir))
  for entry in glob.glob("%s/BUILD/*" % workDir):
    if not path.islink(entry) and basename(entry) not in liveBuilds:
      candidates.append(entry)
  installGlob = "%s/%s/*/" % (workDir, architecture)
  installedPackages = set(dirname(d) for d in glob.glob(installGlob))
  # Resolve the latest* links so we never delete the active install.
  liveInstalls = set()
  for pkg in installedPackages:
    liveInstalls.update(path.realpath(l) for l in glob.glob(pkg + "/latest*"))
  for entry in glob.glob(installGlob + "*"):
    if not path.islink(entry) and path.realpath(entry) not in liveInstalls:
      candidates.append(entry)
  return [c for c in candidates if path.exists(c)]
Esempio n. 5
0
def _collect_infos(dirname):

    """ Utility function used by ExplodedZipFile to generate ZipInfo
    entries for all of the files and directories under dirname """

    for r, _ds, fs in walk(dirname):
        if not islink(r) and r != dirname:
            i = ZipInfo()
            i.filename = join(relpath(r, dirname), "")
            i.file_size = 0
            i.compress_size = 0
            i.CRC = 0
            yield i.filename, i

        for f in fs:
            df = join(r, f)
            relfn = relpath(join(r, f), dirname)

            if islink(df):
                pass

            elif isfile(df):
                i = ZipInfo()
                i.filename = relfn
                i.file_size = getsize(df)
                i.compress_size = i.file_size
                i.CRC = file_crc32(df)
                yield i.filename, i

            else:
                # TODO: is there any more special treatment?
                pass
Esempio n. 6
0
def have_prefix_files(files):
    """Yield each relative path in `files` whose text content contained
    the build prefix, after rewriting that prefix to a placeholder
    in-place on disk.

    NOTE(review): relies on module-level `prefix`, `prefix_placeholder`
    and `is_obj()` defined elsewhere in this file -- presumably conda
    build machinery; confirm against the enclosing module.
    """
    for f in files:
        # Compiled/archive artifacts never get text-prefix rewriting.
        if f.endswith(('.pyc', '.pyo', '.a')):
            continue
        path = join(prefix, f)
        if isdir(path):
            continue
        if sys.platform != 'darwin' and islink(path):
            # OSX does not allow hard-linking symbolic links, so we cannot
            # skip symbolic links (as we can on Linux)
            continue
        # Binary objects are presumably handled by separate machinery
        # (ELF/Mach-O patching) -- skip them here.
        if is_obj(path):
            continue
        if islink(path):
            # Catches the symlinks deliberately left unskipped on darwin
            # by the platform check above.
            continue
        try:
            with open(path) as fi:
                data = fi.read()
        except UnicodeDecodeError:
            # Not a text file; binary content is not rewritten here.
            continue
        if prefix not in data:
            continue
        # Preserve the file's permission bits but force user-writability
        # so the rewrite can be saved.
        st = os.stat(path)
        data = data.replace(prefix, prefix_placeholder)
        with open(path, 'w') as fo:
            fo.write(data)
        os.chmod(path, stat.S_IMODE(st.st_mode) | stat.S_IWUSR) # chmod u+w
        yield f
Esempio n. 7
0
 def test_common_and_host_file_collision(self):
     """A name present in both 'common' and the hostname dir must end up
     linked from the hostname dir, with the common copy exposed as
     *.back: per-machine files override the shared defaults, which stay
     reachable under the backup name.
     """
     host_copy = path.join(self.indir, gethostname(), 'bothfile')
     common_copy = path.join(self.indir, 'common', 'bothfile')
     self.touch(host_copy)
     self.touch(common_copy)
     Linker(self.indir, self.outdir).make_links()
     link_main = path.join(self.outdir, 'bothfile')
     link_back = path.join(self.outdir, 'bothfile.back')
     try:
         for lnk in (link_main, link_back):
             self.assertTrue(path.exists(lnk))
             self.assertTrue(path.islink(lnk))
         self.assertEqual(host_copy, path.realpath(link_main))
         self.assertEqual(common_copy, path.realpath(link_back))
     finally:
         remove(host_copy)
         remove(common_copy)
Esempio n. 8
0
	def keyOkret(self, ret = False):
		"""Confirmation callback for the settings dialog.

		When `ret` is truthy: persist all config entries, re-create the
		font/color/bar symlinks inside the active skin folder, copy the
		selected bar's resources over the target folder, reconcile the
		mySkin folder, rebuild the user skin and restart the GUI.
		Otherwise just close the dialog.

		NOTE(review): depends on module-level SkinPath, config,
		configfile and the UserSkinToolSet helper defined elsewhere.
		The string concatenations below suggest SkinPath ends with a
		path separator -- confirm against the module.
		"""
		printDEBUG(">>>")
		if ret == True:
			printDEBUG('self["config"].isChanged()')
			printDEBUG("self.myUserSkin_style.value=" + self.myUserSkin_style.value)
			printDEBUG("self.myUserSkin_bar.value=" + self.myUserSkin_bar.value)
			# Mirror the AtileHD settings into the UserSkin namespace.
			config.plugins.UserSkin.refreshInterval.value = config.plugins.AtileHD.refreshInterval.value
			config.plugins.UserSkin.woeid.value = config.plugins.AtileHD.woeid.value
			config.plugins.UserSkin.tempUnit.value = config.plugins.AtileHD.tempUnit.value
			for x in self["config"].list:
				x[1].save()
			configfile.save()
			#we change current folder to active skin folder
			chdir(SkinPath)
			#### FONTS
			if path.exists(SkinPath + "skin_user_header.xml") or path.islink(SkinPath + "skin_user_header.xml"):
				remove(SkinPath + "skin_user_header.xml")
			if path.exists(SkinPath + 'allFonts/' + self.myUserSkin_font.value):
				printDEBUG("self.myUserSkin_font.value='%s'" % self.myUserSkin_font.value)
				symlink(SkinPath + 'allFonts/' + self.myUserSkin_font.value, SkinPath + "skin_user_header.xml")
			#### COLORS
			if path.exists(SkinPath + "skin_user_colors.xml") or path.islink(SkinPath + "skin_user_colors.xml"):
				remove(SkinPath + "skin_user_colors.xml")
			if path.exists("allColors/" + self.myUserSkin_style.value):
				symlink(SkinPath +"allColors/" + self.myUserSkin_style.value, SkinPath + "skin_user_colors.xml")
			#### USER BARS
			if path.exists(SkinPath + 'skin_user_bar') or path.islink(SkinPath + 'skin_user_bar'):
				remove(SkinPath + 'skin_user_bar')
			if path.exists(SkinPath + "allBars/" + self.myUserSkin_bar.value):
				# Relative link target works because of the chdir above.
				symlink(SkinPath + "allBars/" + self.myUserSkin_bar.value , 'skin_user_bar')
				sourcePath = path.join(SkinPath , 'skin_user_bar')
				destFolder = self.myUserSkin_bar.value.split(".", 1)[1]
				destPath = path.join(SkinPath , destFolder)
				printDEBUG("cp -fr %s %s" % (sourcePath,destPath))
				# Free memory before the bulk copy.
				self.UserSkinToolSet.ClearMemory()
				system("cp -fr %s/* %s/" %(sourcePath,destPath)) #for safety, nicely manage overwrite ;)
			#### SCREENS
			if self.myUserSkin_active.value:
				if not path.exists("mySkin") and path.exists("UserSkin_Selections"):
					try:
						mkdir("mySkin")
					except:
						# mkdir can fail under memory pressure; retry via shell.
						printDEBUG("symlinking myskin exception")
						self.UserSkinToolSet.ClearMemory()
						destPath = path.join(SkinPath , "mySkin")
						system("mkdir -p %s" % destPath )
			else:
				# Skin deactivated: drop mySkin (or rename it back to the
				# UserSkin_Selections backup when no backup exists yet).
				if path.exists("mySkin"):
					if path.exists("UserSkin_Selections"):
						if path.islink("mySkin"):
							remove("mySkin")
						else:
							shutil.rmtree("mySkin")
					else:
						rename("mySkin", "UserSkin_Selections")

			self.update_user_skin()
			self.restartGUI()
		else:
			self.close()
    def compare_trees(test_class, first, second):
        """Assert that the directory trees `first` and `second` are
        identical, including symlink targets; failures are reported
        through the unittest-style assertions on `test_class`.
        """
        comparison = dircmp(first, second)
        test_class.assertEquals([], comparison.diff_files)

        # dircmp classifies entries it could not compare normally
        # (notably symlinks) as "common funny"; compare those by target.
        for entry in comparison.common_funny:
            left = path.join(first, entry)
            right = path.join(second, entry)
            if path.islink(left) and path.islink(right):
                test_class.assertEquals(readlink(left), readlink(right))
            else:
                test_class.fail('common_funny files was not empty!' \
                                '\n%s != %s' % (left, right))

        test_class.assertEquals([], comparison.left_only)
        test_class.assertEquals([], comparison.right_only)

        # Re-compare every file found under `first`, by root-relative name.
        relative_names = []
        for base, _dirs, names in walk(first):
            relative_names.extend(
                path.join(base, name)[len(first) + 1:] for name in names)

        _, mismatch, _error = cmpfiles(first, second, relative_names)
        test_class.assertEquals([], mismatch)
Esempio n. 10
0
def render_task(arg):
    """Render or mirror a single source file into the target tree.

    Worker body for the render() process pool; expects the module
    globals TARG and j2env to have been initialised by render().
    HTML files (except "_"-prefixed partials) are rendered through
    jinja; symlinks are re-created; everything else is hardlinked.
    """
    fn, root = arg
    src = join(root, fn)
    dst = normpath(join("..", TARG, src))
    depth = root.count(os.sep)
    if fn.endswith(".html"):
        # "_"-prefixed html files are language-specific templates, not
        # pages -- except __init__.html, which must be rendered.
        if fn.startswith("_") and fn != "__init__.html":
            return
        # Assume it's a template and process it; the category is the
        # part of the name before the first "-".
        template = j2env.get_template(src)
        category = fn.rsplit(".", 1)[0].split("-", 1)[0]
        rendered = template.render(level=depth, filename=fn, category=category)
        with open(dst, "wb") as out:
            out.write(rendered.encode("utf-8"))
            out.write(b"\n")
        return
    if islink(src):
        # Mirror the symlink itself (not its target).
        if islink(dst):
            os.remove(dst)
        os.symlink(os.readlink(src), dst)
    else:
        # All other files are hardlinked to save space.
        if exists(dst):
            os.remove(dst)
        os.link(src, dst)
Esempio n. 11
0
 def keyOk(self):
     """Persist changed settings and refresh the user skin.

     When the config changed (or an update is forced): save all config
     entries, re-create the font/color/bar symlinks inside the skin
     base directory, copy the selected bar's resources over the target
     folder, reconcile the mySkin folder with the UserSkin_Selections
     backup, rebuild the skin and restart the GUI.  Otherwise close
     (after a rebuild if individual screens changed).

     NOTE(review): relies on self.skin_base_dir, self.user_font_file,
     self.color_file and self.user_bar_link set elsewhere in the class.
     """
     if self["config"].isChanged() or self.updateEntries == True:
         printDEBUG("[UserSkin:keyOk] self.myUserSkin_font.value=" + self.myUserSkin_font.value)
         printDEBUG("[UserSkin:keyOk] self.myUserSkin_style.value=" + self.myUserSkin_style.value)
         printDEBUG("[UserSkin:keyOk] self.myUserSkin_bar.value=" + self.myUserSkin_bar.value)
         for x in self["config"].list:
             x[1].save()
         configfile.save()
         # Change the working directory to the plugin's skin folder.
         chdir(self.skin_base_dir)
         #FONTS
         # Remove a present file or a (possibly broken) leftover link.
         if path.exists(self.user_font_file):
             remove(self.user_font_file)
         elif path.islink(self.user_font_file):
             remove(self.user_font_file)
         if path.exists('allFonts/' + self.myUserSkin_font.value):
             symlink('allFonts/' + self.myUserSkin_font.value, self.user_font_file)
         #COLORS
         if path.exists(self.color_file):
             remove(self.color_file)
         elif path.islink(self.color_file):
             remove(self.color_file)
         if path.exists("allColors/" + self.myUserSkin_style.value):
             symlink("allColors/" + self.myUserSkin_style.value, self.color_file)
         #SELECTOR
         if path.exists(self.user_bar_link):
             remove(self.user_bar_link)
         elif path.islink(self.user_bar_link):
             remove(self.user_bar_link)
         if path.exists("allBars/" + self.myUserSkin_bar.value):
             symlink("allBars/" + self.myUserSkin_bar.value , self.user_bar_link)
             sourcePath = path.join(self.skin_base_dir , self.user_bar_link)
             destFolder = self.myUserSkin_bar.value.split(".", 1)[1]
             destPath = path.join(self.skin_base_dir , destFolder)
             printDEBUG("[UserSkin:keyOk]cp -fr %s %s" % (sourcePath,destPath))
             # Drop kernel page caches to free memory before the bulk copy.
             with open("/proc/sys/vm/drop_caches", "w") as f: f.write("1\n")
             printDEBUG("[UserSkin:keyOk]cp -fr %s/* %s/" % (sourcePath,destPath))
             system("cp -fr %s/* %s/" %(sourcePath,destPath)) #for safety, handles overwrite gracefully ;)
         #SCREENS
         if self.myUserSkin_active.value:
             if not path.exists("mySkin") and path.exists("UserSkin_Selections"):
                     symlink("UserSkin_Selections","mySkin")
         else:
             # Skin deactivated: drop mySkin, or rename it back into the
             # UserSkin_Selections backup when no backup exists.
             if path.exists("mySkin"):
                 if path.exists("UserSkin_Selections"):
                     if path.islink("mySkin"):
                         remove("mySkin")
                     else:
                         shutil.rmtree("mySkin")
                 else:
                     rename("mySkin", "UserSkin_Selections")
         self.update_user_skin()
         self.restartGUI()
     else:
         if self.changed_screens:
             self.update_user_skin()
             self.restartGUI()
         else:
             self.close()
Esempio n. 12
0
def copy(src, dst):
    """Copy `src` to `dst`.

    Symlinks are recreated pointing at the same target (not
    dereferenced), replacing an existing link at `dst`; everything else
    is copied byte-for-byte.
    """
    if not islink(src):
        copyfile(src, dst)
        return
    target = readlink(src)
    if islink(dst):
        remove(dst)
    symlink(target, dst)
Esempio n. 13
0
 def test_Link(self):
   """Linking the hash into TempFileNX must produce a symlink whose
   target resolves to the (non-link) client file for the same hash,
   and both must open onto the same underlying file.
   """
   self.TempFileNX.Link(self.FileHash)
   client_path = self.Client[self.FileHash].Path
   self.assertFalse(islink(client_path))
   self.assertTrue(islink(self.TempFileNX.Path))
   self.assertTrue(samefile(self.TempFileNX.Path, client_path))
   with self.TempFileNX.GetStream() as stream1,\
       self.Client[self.FileHash].GetStream() as stream2:
     self.assertTrue(sameopenfile(stream1.fileno(), stream2.fileno()))
Esempio n. 14
0
File: fs.py Project: em-p/pithy
def remove_dir_contents(path):
  """Delete everything inside `path` without removing `path` itself.

  Raises OSError when `path` is a symlinked directory.  Subdirectories
  are removed recursively via remove_dir_tree; every other entry
  (including symlinks) is unlinked.
  """
  if _path.islink(path):
    raise OSError('remove_dir_contents received symlink: ' + path)
  for entry in _os.listdir(path):
    full = _path.join(path, entry)
    if _path.isdir(full) and not _path.islink(full):
      remove_dir_tree(full)
    else:
      _os.remove(full)
Esempio n. 15
0
 def test_delete_existing(self):
     """With delete_existing=True a pre-existing regular file in the
     output dir must be replaced by a symlink, and the final listing
     must contain exactly the expected link names.
     """
     expected = ['commonfile1', '.commonfile3', 'common_file5', 'hostnamefile1']
     self.touch(self.outdir, 'common_file5')
     self.assertTrue(not path.islink(path.join(self.outdir, 'common_file5')))
     Linker(self.indir, self.outdir, delete_existing=True).make_links()
     actual = listdir(self.outdir)
     expected.sort()
     actual.sort()
     self.assertEqual(expected, actual)
     self.assertTrue(path.islink(path.join(self.outdir, 'common_file5')))
Esempio n. 16
0
    def switch(self):
        """Point the "Current" symlink at this version (unless already
        in use) and warn when the installation looks broken.
        """
        if self.in_use:
            print("\nVersion %s already in use." % self.version)
        else:
            current = join(self.r_home, "Current")
            # Drop any existing "Current" link before re-pointing it.
            if islink(current):
                unlink(current)
            symlink(self.version, current)
            print("\nSwitched to version %s" % self.version)

        if self.long is None:
            print("\nWARNING: Current R version does not appear to be correctly installed")
Esempio n. 17
0
def link_files(files, base_path):
    """Create a symlink for each entry of `files` (mapping of relative
    source name -> target spec).

    Sources are resolved against `base_path`; targets are ~-expanded.
    Missing sources and already-existing targets are skipped with a
    message, and parent directories are created as needed.
    """
    for key in files:
        source = path.realpath(path.join(base_path, key))
        target = path.expanduser(files[key])
        if not any((path.isfile(source), path.isdir(source), path.islink(source))):
            print("Source file: %s does not exist. Skipping" % source)
            continue
        if any((path.isfile(target), path.isdir(target), path.islink(target))):
            print("Link target: %s already exists. Skipping" % target)
            continue
        dir_util.mkpath(path.dirname(target))
        os.symlink(source, target)
Esempio n. 18
0
  def next_show_files(self, show):
    """Receives a Show instance and returns a filled SchedShow instance.

    Picks the single media file matching the show's broadcast prefix
    from the previous-broadcast dir (self.pbcast) and from the current
    one (self.bcast); each must be a symlink to a regular file,
    otherwise an error placeholder SchedFile is substituted.
    """
    import os
    from os.path import islink,isfile

    regex = '^%s[.]*' % show.bcast_prefix
    sufix = re.compile(regex)
    def ffilter(x):
      # Keep only names carrying the show's broadcast prefix.
      return sufix.search(x) is not None

    def do_path(x):
      # NOTE(review): previous-broadcast candidates are resolved against
      # bcast when first < last -- kept from the original, confirm this
      # is intentional.
      if show.first < show.last:
        return self.bcast + '/' + x
      return self.pbcast + '/' + x

    # list() so len() and indexing work on Python 3, where filter/map
    # are lazy iterators.
    files = list(filter(ffilter, media_file_list(self.pbcast)))
    lst = list(map(do_path, files))

    if len(lst) != 1:
      first = SchedFile(file=File('err'), link=self.pbcast + '/')
    elif islink(lst[0]) and isfile(lst[0]):
      first = SchedFile(file=File(os.readlink(lst[0])), link=lst[0])
    else:
      first = SchedFile(file=File('err'), link=self.pbcast + '/')

    files = list(filter(ffilter, media_file_list(self.bcast)))
    lst = [self.bcast + '/' + x for x in files]
    if len(lst) != 1:
      # NOTE(review): bare File() here vs File('err') above -- kept as
      # in the original; confirm whether 'err' was intended.
      last = SchedFile(file=File(), link=self.bcast + '/')
    elif islink(lst[0]) and isfile(lst[0]):
      last = SchedFile(file=File(os.readlink(lst[0])), link=lst[0])
    else:
      # Bug fix: this branch previously assigned `first`, leaving
      # `last` unbound and raising NameError at the return below.
      last = SchedFile(file=File('err'), link=self.bcast + '/')

    return SchedShow(show, first=first, last=last)
Esempio n. 19
0
def merge(origin, dest, options):
    '''
    for op,args in merge(origin, dest);
        op(*args)

    Attempt to merge directories `origin` and `dest`

    Yields (callable, args) pairs describing the actions to perform:
    files missing from `dest` are moved there, files whose content is
    already present are removed from `origin`.  Symlinks, special files
    and mismatches are reported and left alone.

    Parameters
    ----------
    origin : str
        path to origin
    dest : str
        path to destination
    options : options object
        uses .remove_only, .verbose, .ignore_flags, .follow_links,
        .set_oldest and .continue_on_error
    '''
    filequeue = os.listdir(origin)
    while filequeue:
        fname = filequeue.pop()
        ofname = path.join(origin, fname)
        dfname = path.join(dest, fname)
        try:
            if not path.exists(dfname):
                if not options.remove_only:
                    if options.verbose:
                        print('mv {} {}'.format(ofname, dfname))
                    yield os.rename, (ofname, dfname)
                elif options.verbose:
                    print('#mv {} {}'.format(ofname, dfname))
            elif path.islink(ofname):
                # Bug fix: the condition was inverted (`not islink`),
                # which reported every regular file as a link and made
                # the directory branch below unreachable.
                print('Ignoring link: {}'.format(ofname))
            elif path.isdir(ofname):
                # Recurse by queueing the directory's children.
                filequeue.extend(path.join(fname, ch)
                                 for ch in os.listdir(ofname))
            elif not path.isfile(ofname):
                print('Ignoring non-file non-directory: {}'.format(ofname))
            elif not options.ignore_flags and props_for(ofname) != props_for(dfname):
                print('Flags differ: {}'.format(fname))
            elif path.isdir(dfname):
                print('File `{}` matches directory `{}`'.format(ofname, dfname))
            elif not path.isfile(dfname) and not (options.follow_links and path.islink(dfname)):
                print('File `{}` matches non-file `{}`'.format(ofname, dfname))
            elif hash_file(ofname) != hash_file(dfname):
                print('Content differs: {}'.format(fname))
            else:
                # Same content on both sides: drop the origin copy,
                # optionally keeping the older timestamp on dest.
                if options.verbose:
                    print('rm {}'.format(ofname))
                if options.set_oldest:
                    yield set_oldest, (ofname, dfname)
                yield os.unlink, (ofname,)
        except IOError as e:
            import sys
            sys.stderr.write('Error accessing `{}`/`{}`: {}\n'.format(ofname, dfname, e))
            if not options.continue_on_error:
                return
Esempio n. 20
0
def gitmap_check():
	"""Sanity-check the forward (g_gitmap_fd) and backward (g_gitmap_bd)
	git-path <-> data-path maps.

	The two maps must be mutually inverse, every mapped path must exist
	and not be a symlink, and both sides of a pair must agree in kind
	(file vs directory).  Violations raise AssertionError.
	"""
	for git_path in g_gitmap_fd:
		data_path = g_gitmap_fd[git_path]
		assert g_gitmap_bd[data_path] == git_path
		assert _path.exists(git_path) and not _path.islink(git_path)
		assert _path.exists(data_path) and not _path.islink(data_path)
		assert _path.isfile(git_path) == _path.isfile(data_path)
	# Bug fix: this loop previously iterated g_gitmap_fd and immediately
	# overwrote its loop variable using a stale `data_path` from the
	# first loop, so it verified nothing.  Check the backward map
	# against the forward map instead.
	for data_path in g_gitmap_bd:
		git_path = g_gitmap_bd[data_path]
		assert g_gitmap_fd[git_path] == data_path
	return
Esempio n. 21
0
def place(src, dest, **opts):
    '''Copies the given (extant) source file to the given (non-extant)
    destination file, using the given options.  Does not work with
    directories.  Recognised options: 'hl' (hardlink when possible),
    'chown' (preserve ownership on copies).'''
    src_stat = src.lstat()
    parent_stat = dest.parent.lstat()
    same_device = src_stat.st_dev == parent_stat.st_dev
    if same_device and not islink(str(src)) and bool(opts.get('hl')):
        # Same filesystem, regular file, hardlinking requested.
        print('link', src, dest)
        link(str(src), str(dest), follow_symlinks=False)
        return
    if islink(str(src)):
        print('copylink', src, dest)
    else:
        print('copy', src, dest)
    copy2(str(src), str(dest), follow_symlinks=False)
    if bool(opts.get('chown')):
        chown(str(dest), src_stat.st_uid, src_stat.st_gid, follow_symlinks=False)
Esempio n. 22
0
def files( root):
    """Collect a listing of every file (and, optionally, symlink)
    under `root`.

    NOTE(review): driven by the module-level `optz` options object and
    `ignore` regex, plus the `outsymlink` and `DictAttr` helpers
    defined elsewhere -- semantics inferred from usage, confirm there.

    Returns either a dict name -> set of entries (optz.convert) or a
    list of formatted strings sorted by name or by path.
    """
    # Walk relative to `root` so the recorded paths are relative.
    c = os.getcwd()
    os.chdir( root )
    o = []
    for root, dirs, files in os.walk( '.', followlinks= not optz.nosymlink):
        for f in files:
            fp = join( root, f)
            if ignore and ignore.search( fp): continue
            #fp = fp.decode( fenc)
            if optz.symtext and islink( fp):
                item = outsymlink( fp,f)
            else:
                # '##' marks an unknown size (when sizes are disabled
                # or stat fails).
                sz = '##'
                if not optz.nosize:
                    try: sz = getsize( fp)
                    except Exception as e: print( e)
                if optz.timestamps:
                    sz = (sz, os.stat( fp).st_mtime)
                item = DictAttr( size= sz, path= fp, name= f)
            o.append( item)
        dd = []
        # Symlinked directories are either recorded as symlink entries
        # (optz.symtext) or descended into like plain directories.
        for f in dirs:
            fp = join( root, f)
            if ignore and ignore.search( fp): continue
            if optz.symtext and islink( fp):
                item = outsymlink( fp,f)
                o.append( item)
            else: dd.append(f)
        dirs[:] = dd

    os.chdir( c)
    if optz.convert:
        # Group entries by file name.
        r = {}
        for x in o:
            r.setdefault( x.name, set() ).add( x)
#
#            else:
#                print( '''
#!!!! %s repeats:
# %s
# %s
#'''.strip() % (x.name, x.path, r[ x.name ].path) )
#        assert len( r) == len(o)
        return r

    if optz.byname:
        o.sort( key= lambda x: (x.name,x.path,x.size) )
        o = [ '%(name)16s %(size)16s %(path)s' % dict( x, path= dirname( x.path))
                for x in o ]
    else:
        o.sort( key= lambda x: (x.path,x.size) )
        o = [ '%(size)16s %(path)s' % x for x in o ]
    return o
Esempio n. 23
0
def sync_todo(env, project, subfile=None):
    """Ensure that the todo file identified by ``project`` and
    ``subfile`` is setup correctly (exists in local dir, links to
    target dir).

    If not the case, try to make it so.

    NOTE(review): reads env["target_directory"], appends to
    env["established_links"], and uses the module-level SUBFILE_NAME /
    TODOFILE_NAME templates and ExitCodeError -- confirm their exact
    semantics against the enclosing module.  (Python 2 code.)
    """
    target_directory = env["target_directory"]

    # Determine target file
    target_file = path.join(target_directory, project)
    if subfile:
        target_file = "%s.%s" % (target_file, subfile)

    # Determine source file
    if subfile:
        source_file = path.join(os.curdir, SUBFILE_NAME % subfile)
    else:
        source_file = path.join(os.curdir, TODOFILE_NAME)

    # See what we have to do.  Both sides existing is only acceptable
    # when the source already links to the target.
    if path.exists(source_file) and path.exists(target_file):
        if path.realpath(source_file) == target_file:
            env["established_links"].append((source_file, target_file))
            return
        print "%s exists, but so does %s\nMaybe you want to call with " '"--delete %s" to delete the target.' % (
            target_file,
            source_file,
            project,
        )
        raise ExitCodeError(2)

    # If there is a local file, move it to the target
    if path.exists(source_file) and not path.islink(source_file):
        print "Moving %s to %s" % (source_file, target_file)
        os.rename(source_file, target_file)
    elif not path.exists(target_file):
        print "Creating new empty file %s" % (target_file,)
        with open(target_file, "w"):
            pass
    else:
        print "Found existing file %s" % (target_file,)

    # Create the link
    #
    # If the current file is already a link, simply replace it.
    if path.islink(source_file):
        os.unlink(source_file)
        print "Replacing existing link %s with new target" % source_file
    # To use the relative path: path.relpath(target_file, path.dirname(source_file))
    os.symlink(path.abspath(target_file), source_file)
    env["established_links"].append((source_file, target_file))
Esempio n. 24
0
def find_absences(src, dest, ignored_patterns=[]):
    """Walk `src` and collect what is missing under `dest`.

    Args:
        src: the path to copy from
        dest: the path to copy to
        ignored_patterns: extra gitwildmatch patterns to skip

    Returns:
        (absent_files, absent_dirs): a list of Link entries for files
        to symlink, and a list of directory paths to create.  Broken
        symlinks found at a missing path are unlinked along the way.
    """
    missing_dirs = []
    missing_files = []

    default_ignore = ["!.*", cfgcaddy.DEFAULT_CONFIG_NAME, ".git"]
    spec = pathspec.PathSpec.from_lines('gitwildmatch',
                                        ignored_patterns + default_ignore)

    for root, dirs, files in os.walk(src, topdown=True):
        rel_path = path.relpath(root, src)
        if rel_path == ".":
            rel_path = ""

        # Prune ignored entries in place so the walk skips them.
        dirs[:] = [d for d in dirs
                   if not spec.match_file(path.join(root, d))]
        files[:] = [f for f in files
                    if not spec.match_file(path.join(root, f))]

        # Directories that don't exist yet in the destination.
        for dir_name in dirs:
            candidate = path.join(dest, rel_path, dir_name)
            if not path.exists(candidate):
                if path.islink(candidate):
                    os.unlink(candidate)  # Fix Broken Links
                missing_dirs.append(candidate)

        # Files to be symlinked: record source and destination.
        for f in files:
            candidate = path.join(dest, rel_path, f)
            if not path.exists(candidate):
                if path.islink(candidate):
                    os.unlink(candidate)  # Fix Broken Links
                missing_files.append(Link(path.join(root, f), candidate))

    return missing_files, missing_dirs
Esempio n. 25
0
    def test_link_local_settings_replaces_old_local_settings(self):
        """An existing plain local_settings.py must be replaced by a
        symlink pointing at local_settings.py.dev when linking the
        'dev' environment.
        """
        self.create_settings_py()
        settings_link = path.join(tasklib.env['django_settings_dir'], 'local_settings.py')
        self.create_local_settings_py_dev(settings_link)
        open(settings_link, 'a').close()
        self.assertFalse(path.islink(settings_link))

        tasklib.link_local_settings('dev')

        self.assertTrue(path.islink(settings_link))
        # assert the link goes to the correct file
        self.assertEqual(os.readlink(settings_link), 'local_settings.py.dev')
Esempio n. 26
0
def clone_env(prefix1, prefix2, verbose=True, quiet=False, index=None):
    """
    clone existing prefix1 into new prefix2

    Copies every untracked file from prefix1 (rewriting embedded
    prefix1 paths in text files, preserving symlinks as links), then
    re-links the same set of packages into prefix2.

    Returns (actions, untracked_files).

    NOTE(review): relies on conda internals (`untracked`, `install`,
    `discard_conda`, `get_index`, `Resolve`, `ensure_linked_actions`,
    `execute_actions`) defined elsewhere.
    """
    untracked_files = untracked(prefix1)
    dists = discard_conda(install.linked(prefix1))

    if verbose:
        print('Packages: %d' % len(dists))
        print('Files: %d' % len(untracked_files))

    for f in untracked_files:
        src = join(prefix1, f)
        dst = join(prefix2, f)
        dst_dir = dirname(dst)
        # Clear anything occupying the parent-dir slot, then create it.
        if islink(dst_dir) or isfile(dst_dir):
            os.unlink(dst_dir)
        if not isdir(dst_dir):
            os.makedirs(dst_dir)
        if islink(src):
            # Recreate symlinks as links rather than copying targets.
            os.symlink(os.readlink(src), dst)
            continue

        try:
            with open(src, 'rb') as fi:
                data = fi.read()
        except IOError:
            # Unreadable source (e.g. dangling reference): skip it.
            continue

        try:
            # Rewrite embedded prefix paths in text files; binary
            # content is copied unchanged.
            s = data.decode('utf-8')
            s = s.replace(prefix1, prefix2)
            data = s.encode('utf-8')
        except UnicodeDecodeError:  # data is binary
            pass

        with open(dst, 'wb') as fo:
            fo.write(data)
        shutil.copystat(src, dst)

    if index is None:
        index = get_index()

    # Link the same packages into the new prefix, in dependency order.
    r = Resolve(index)
    sorted_dists = r.dependency_sort(dists)

    actions = ensure_linked_actions(sorted_dists, prefix2)
    execute_actions(actions, index=index, verbose=not quiet)

    return actions, untracked_files
Esempio n. 27
0
def find_symlinks(directory):
    """Return the paths of all symlinks (files or directories) under
    `directory`, with '/' replaced by '.' in each returned path.

    Bug fix: the original additionally recursed into every non-link
    subdirectory on top of os.walk's own traversal, so deeper links
    were reported once per ancestor directory.  os.walk(followlinks=
    True) already visits the whole tree (including symlinked dirs).
    """
    found = []
    for base, subdirs, filenames in walk(directory, followlinks=True):
        for name in subdirs:
            candidate = path.join(base, name)
            if path.islink(candidate):
                found.append(candidate)
        for name in filenames:
            candidate = path.join(base, name)
            if path.islink(candidate):
                found.append(candidate)
    return [p.replace('/', '.') for p in found]
Esempio n. 28
0
def need_to_backup(src_dir, dest_dir):
    """Return True when some non-ignorable file under *src_dir* has a
    counterpart in *dest_dir* that is not a symlink pointing straight
    back at the source file."""
    src_dir = abspath(src_dir)
    dest_dir = abspath(dest_dir)
    # Nothing to compare against when the destination does not exist.
    if not exists(dest_dir):
        return False
    for dirpath, _dirnames, filenames in os.walk(src_dir):
        for name in filenames:
            if ignorable(name):
                continue
            source = abspath(join(dirpath, name))
            relative = source[len(src_dir) + 1:]
            candidate = join(dest_dir, relative)
            if not exists(candidate):
                continue
            # A real file, or a symlink aimed elsewhere, needs backing up.
            if not islink(candidate) or source != os.readlink(candidate):
                return True
    return False
Esempio n. 29
0
def download_data(pkg_name, path, url, md5, download_client=None,
                  extract=False, quiet=True):
    """Install test data checking md5 and rosbag decompress if needed.

    The file is cached under ``~/data/<pkg_name>/`` and *path* is made a
    symlink to that cache entry.  The cache (and the download) are only
    refreshed when the md5 checksum no longer matches.

    pkg_name: package name used as the cache subdirectory.
    path: destination path (becomes a symlink into the cache).
    url: source URL; Google Drive URLs switch the client to 'gdown'.
    md5: expected checksum of the data file.
    download_client: 'wget' or 'gdown'; auto-detected when None.
    extract: when True, unpack the archive next to *path*.
    quiet: passed through to the download client.
    """
    if download_client is None:
        if is_google_drive_url(url):
            download_client = 'gdown'
        else:
            download_client = 'wget'
    # prepare cache dir
    cache_dir = osp.join(osp.expanduser('~/data'), pkg_name)
    if not osp.exists(cache_dir):
        os.makedirs(cache_dir)
    cache_file = osp.join(cache_dir, osp.basename(path))
    # check if cache exists, and update if necessary
    print("Checking md5 of '{path}'...".format(path=cache_file))
    # check real path
    if osp.exists(path):
        if check_md5(path, md5):
            print("File '{0}' is newest.".format(path))
            if extract:
                print("Extracting '{path}'...".format(path=path))
                extract_file(path, to_directory=osp.dirname(path))
            return
        else:
            if not osp.islink(path):
                # not link and exists so skipping
                sys.stderr.write("WARNING: '{0}' exists\n".format(path))
                return
            # Stale symlink with a wrong checksum: drop it so it can be
            # re-pointed at a fresh cache entry below.
            os.remove(path)
    else:
        # osp.exists() is False for a broken symlink while osp.islink()
        # is True — clean up such dangling links.
        if osp.islink(path):
            os.remove(path)
    # check cache path
    if osp.exists(cache_file):
        if check_md5(cache_file, md5):
            print("Cache file '{0}' is newest.".format(cache_file))
            os.symlink(cache_file, path)
            if extract:
                print("Extracting '{path}'...".format(path=path))
                extract_file(path, to_directory=osp.dirname(path))
            return
        else:
            # Cache is stale: remove it and fall through to re-download.
            os.remove(cache_file)
    print("Downloading file from '{url}'...".format(url=url))
    download(download_client, url, cache_file, quiet=quiet)
    os.symlink(cache_file, path)
    if extract:
        print("Extracting '{path}'...".format(path=path))
        extract_file(path, to_directory=osp.dirname(path))
Esempio n. 30
0
 def test_move_first(self):
     """After move_to_target(), the original path must have become a
     symlink that resolves to the real (non-link) file created inside
     the per-host target directory."""
     test_file = '/tmp/test_file'
     self.touch(self.outdir, test_file)
     self.linker = Linker(self.indir, test_file)
     # Expected target location: <indir>/<hostname>/<generated name>.
     test_link = path.join(self.indir, gethostname(),
                           self.linker.generate_target(test_file))
     try:
         self.linker.move_to_target()
         # Original path still "exists" because the link resolves...
         self.assertTrue(path.exists(test_file))
         # ...but is now a symlink,
         self.assertTrue(path.islink(test_file))
         # while the target holds the real file.
         self.assertTrue(path.exists(test_link))
         self.assertFalse(path.islink(test_link))
         self.assertEqual(path.realpath(test_file), test_link)
     finally:
         remove(test_file)
Esempio n. 31
0
 def performCustomAction(self, privateData):
     """Dispatch a file-manager action described by privateData['action'].

     Supported actions: remove_file, rename_file, cut_file, copy_file,
     paste_file, umount_iso_file.  Returns a RetHost whose value list
     either contains 'refresh' on success or a user-facing error text.

     Bug fix: in the 'paste_file' branch, `ret` was only assigned in the
     else-arm, so when the destination file already existed the later
     `ret['sts']` check raised a NameError (silently swallowed by the
     broad except).  `ret` is now initialised with a success status up
     front.  The action dispatch is also a single if/elif chain now.
     """
     retCode = RetHost.ERROR
     retlist = []
     if privateData['action'] == 'remove_file':
         try:
             ret = self.host.sessionEx.waitForFinishOpen(MessageBox, text=_('Are you sure you want to remove file "%s"?') % privateData['file_path'], type=MessageBox.TYPE_YESNO, default=False)
             if ret[0]:
                 os_remove(privateData['file_path'])
                 retlist = ['refresh']
                 retCode = RetHost.OK
         except Exception:
             printExc()
     elif privateData['action'] == 'rename_file':
         try:
             path, fileName = os_path.split(privateData['file_path'])
             name, ext = os_path.splitext(fileName)
             ret = self.host.sessionEx.waitForFinishOpen(GetVirtualKeyboard(), title=_('Set file name'), text=name)
             printDBG('rename_file new name[%s]' % ret)
             if isinstance(ret[0], basestring):
                 newPath = os_path.join(path, ret[0] + ext)
                 printDBG('rename_file new path[%s]' % newPath)
                 # Refuse to clobber an existing file or symlink.
                 if not os_path.isfile(newPath) and not os_path.islink(newPath):
                     os_rename(privateData['file_path'], newPath)
                     retlist = ['refresh']
                     retCode = RetHost.OK
                 else:
                     retlist = [_('File "%s" already exists!') % newPath]
         except Exception:
             printExc()
     elif privateData['action'] == 'cut_file':
         # Remember the source for a later 'paste_file'.
         self.cFilePath = privateData['file_path']
         self.cType = 'cut'
         retCode = RetHost.OK
     elif privateData['action'] == 'copy_file':
         self.cFilePath = privateData['file_path']
         self.cType = 'copy'
         retCode = RetHost.OK
     elif privateData['action'] == 'paste_file':
         try:
             ok = True
             # Default "success" status so the check below is safe even
             # when no external command was executed.
             ret = {'sts':True, 'code':0, 'data':''}
             cutPath, cutFileName = os_path.split(self.cFilePath)
             newPath = os_path.join(privateData['path'], cutFileName)
             if os_path.isfile(newPath):
                 retlist = [_('File "%s" already exists') % newPath]
                 ok = False
             else:
                 if self.cType == 'cut':
                     try:
                         os_rename(self.cFilePath, newPath)
                         self.needRefresh = cutPath
                     except Exception:
                         # rename fails across filesystems; fall back to mv.
                         printExc()
                         cmd = 'mv -f "%s" "%s"' % (self.cFilePath, newPath)
                         ret = iptv_execute_wrapper(cmd)
                 elif self.cType == 'copy':
                     cmd = 'cp "%s" "%s"' % (self.cFilePath, newPath)
                     ret = iptv_execute_wrapper(cmd)

             if ret['sts'] and 0 != ret['code']:
                 retlist = [(_('Moving file from "%s" to "%s" failed.\n') % (self.cFilePath, newPath)) + (_('Error code: %s\n') % ret['code']) + (_('Error message: %s\n') % ret['data'])]
                 ok = False

             if ok:
                 # Clear the clipboard state after a successful paste.
                 self.cType = ''
                 self.cFilePath = ''
                 retlist = ['refresh']
                 retCode = RetHost.OK
         except Exception:
             printExc()
     elif privateData['action'] == 'umount_iso_file':
         cmd = 'umount "{0}"'.format(privateData['iso_mount_path']) + ' 2>&1'
         ret = iptv_execute_wrapper(cmd)
         if ret['sts'] and 0 != ret['code']:
             # normal umount failed, so detach filesystem only
             cmd = 'umount -l "{0}"'.format(privateData['iso_mount_path']) + ' 2>&1'
             ret = iptv_execute_wrapper(cmd)

     return RetHost(retCode, value=retlist)
Esempio n. 32
0
 def is_library(clazz, filename):
     """Return True if filename is any kind of static or shared library."""
     # Only a regular, non-symlinked file can qualify as a library.
     is_regular_file = path.isfile(filename) and not path.islink(filename)
     if not is_regular_file:
         return False
     return (clazz.is_static_library(filename)
             or clazz.is_shared_library(filename))
Esempio n. 33
0
    def changeDir(self, directory, select=None):
        """Rebuild self.list with the entries of *directory*.

        *directory* may be None to show the list of mounted partitions
        (when self.showMountpoints is set).  Entries are enumerated
        either via the enigma2 service handler (self.useServiceRef) or
        via listdir().  If *select* is given, the cursor is moved onto
        the entry whose path equals it.
        """
        self.list = []
        # Remember the mountpoint of the first directory shown so a
        # '<List of Storage Devices>' entry can be offered for it later.
        if self.current_directory is None:
            if directory and self.showMountpoints:
                self.current_mountpoint = self.getMountpointLink(directory)
            else:
                self.current_mountpoint = None
        self.current_directory = directory
        directories = []
        files = []
        if directory is None and self.showMountpoints:
            # Top level: list mounted partitions instead of a directory.
            for p in harddiskmanager.getMountedPartitions():
                path = os_path.join(p.mountpoint, '')
                if path not in self.inhibitMounts and not self.inParentDirs(
                        path, self.inhibitDirs):
                    self.list.append(
                        MultiFileSelectEntryComponent(name=p.description,
                                                      absolute=path,
                                                      isDir=True))

            files = []
            directories = []
        elif directory is None:
            files = []
            directories = []
        elif self.useServiceRef:
            # Enumerate through the enigma2 service handler so configured
            # extensions and service flags are honoured.
            root = eServiceReference('2:0:1:0:0:0:0:0:0:0:' + directory)
            if self.additional_extensions:
                root.setName(self.additional_extensions)
            serviceHandler = eServiceCenter.getInstance()
            list = serviceHandler.list(root)
            while 1:
                s = list.getNext()
                if not s.valid():
                    del list
                    break
                # mustDescent flags a sub-directory service.
                if s.flags & s.mustDescent:
                    directories.append(s.getPath())
                else:
                    files.append(s)

            directories.sort()
            files.sort()
        elif fileExists(directory):
            # Plain filesystem listing; split entries into directories
            # (suffixed with '/') and files.
            try:
                files = listdir(directory)
            except:
                files = []

            files.sort()
            tmpfiles = files[:]
            for x in tmpfiles:
                if os_path.isdir(directory + x):
                    directories.append(directory + x + '/')
                    files.remove(x)

        if directory is not None and self.showDirectories and not self.isTop:
            # Offer upward navigation: back to the device list at the
            # mountpoint, otherwise to the parent directory.
            if directory == self.current_mountpoint and self.showMountpoints:
                self.list.append(
                    MultiFileSelectEntryComponent(
                        name='<' + _('List of Storage Devices') + '>',
                        absolute=None,
                        isDir=True))
            elif directory != '/' and not (self.inhibitMounts
                                           and self.getMountpoint(directory)
                                           in self.inhibitMounts):
                self.list.append(
                    MultiFileSelectEntryComponent(
                        name='<' + _('Parent Directory') + '>',
                        absolute='/'.join(directory.split('/')[:-2]) + '/',
                        isDir=True))
        if self.showDirectories:
            for x in directories:
                if not (self.inhibitMounts and self.getMountpoint(x)
                        in self.inhibitMounts) and not self.inParentDirs(
                            x, self.inhibitDirs):
                    name = x.split('/')[-2]
                    alreadySelected = False
                    # x ends with '/', so strip it before the islink test.
                    testname = x[:-1]
                    if os_path.islink(testname):
                        my_isLink = True
                    else:
                        my_isLink = False
                    for entry in self.selectedFiles:
                        if entry == x:
                            alreadySelected = True

                    if alreadySelected:
                        self.list.append(
                            MultiFileSelectEntryComponent(name=name,
                                                          absolute=x,
                                                          isDir=True,
                                                          isLink=my_isLink,
                                                          selected=True))
                    else:
                        self.list.append(
                            MultiFileSelectEntryComponent(name=name,
                                                          absolute=x,
                                                          isDir=True,
                                                          isLink=my_isLink,
                                                          selected=False))

        if self.showFiles:
            for x in files:
                if self.useServiceRef:
                    path = x.getPath()
                    name = path.split('/')[-1]
                else:
                    path = directory + x
                    name = x
                # Only show files matching the configured pattern, if any.
                if self.matchingPattern is None or re_compile(
                        self.matchingPattern).search(path):
                    alreadySelected = False
                    for entry in self.selectedFiles:
                        if os_path.basename(entry) == x:
                            alreadySelected = True

                    if alreadySelected:
                        self.list.append(
                            MultiFileSelectEntryComponent(name=name,
                                                          absolute=x,
                                                          isDir=False,
                                                          selected=True))
                    else:
                        self.list.append(
                            MultiFileSelectEntryComponent(name=name,
                                                          absolute=x,
                                                          isDir=False,
                                                          selected=False))

        self.l.setList(self.list)
        # Restore the cursor onto *select* if requested.
        if select is not None:
            i = 0
            self.moveToIndex(0)
            for x in self.list:
                p = x[0][0]
                if isinstance(p, eServiceReference):
                    p = p.getPath()
                if p == select:
                    self.moveToIndex(i)
                i += 1
Esempio n. 34
0
def link(prefix, dist, linktype=LINK_HARD, index=None, shortcuts=False):
    """
    Set up a package in a specified (environment) prefix.  We assume that
    the package has been extracted (using extract() above).

    Runs the package's pre-link script, links (or copies) every file
    into the prefix, rewrites placeholder prefixes, runs the post-link
    script and finally writes the conda-meta record.  Exits the process
    on any failure.
    """
    index = index or {}
    source_dir = is_extracted(dist)
    assert source_dir is not None
    pkgs_dir = dirname(source_dir)
    log.debug('pkgs_dir=%r, prefix=%r, dist=%r, linktype=%r' %
              (pkgs_dir, prefix, dist, linktype))

    if not run_script(source_dir, dist, 'pre-link', prefix):
        sys.exit('Error: pre-link failed: %s' % dist)

    info_dir = join(source_dir, 'info')
    files = list(yield_lines(join(info_dir, 'files')))
    # Files whose contents embed a placeholder prefix to be rewritten.
    has_prefix_files = read_has_prefix(join(info_dir, 'has_prefix'))
    # Files the package forbids hard-linking for.
    no_link = read_no_link(info_dir)

    # Hold both the environment and the package-cache locks while mutating.
    with Locked(prefix), Locked(pkgs_dir):
        for f in files:
            src = join(source_dir, f)
            dst = join(prefix, f)
            dst_dir = dirname(dst)
            if not isdir(dst_dir):
                os.makedirs(dst_dir)
            if os.path.exists(dst):
                log.warn("file already exists: %r" % dst)
                try:
                    os.unlink(dst)
                except OSError:
                    log.error('failed to unlink: %r' % dst)
                    if on_win:
                        # Windows cannot unlink in-use files; move to trash.
                        try:
                            move_path_to_trash(dst)
                        except ImportError:
                            # This shouldn't be an issue in the installer anyway
                            pass

            lt = linktype
            # Prefix-rewritten, no-link and symlinked files must be copied.
            if f in has_prefix_files or f in no_link or islink(src):
                lt = LINK_COPY
            try:
                _link(src, dst, lt)
            except OSError as e:
                sys.exit('failed to link (src=%r, dst=%r, type=%r, error=%r)' %
                         (src, dst, lt, e))

        for f in sorted(has_prefix_files):
            placeholder, mode = has_prefix_files[f]
            try:
                update_prefix(join(prefix, f), prefix, placeholder, mode)
            except PaddingError:
                sys.exit("ERROR: placeholder '%s' too short in: %s\n" %
                         (placeholder, dist))

        # make sure that the child environment behaves like the parent,
        #    wrt user/system install on win
        # This is critical for doing shortcuts correctly
        if on_win:
            nonadmin = join(sys.prefix, ".nonadmin")
            if isfile(nonadmin):
                open(join(prefix, ".nonadmin"), 'w').close()

        if shortcuts:
            mk_menus(prefix, files, remove=False)

        if not run_script(prefix, dist, 'post-link'):
            sys.exit("Error: post-link failed for: %s" % dist)

        meta_dict = index.get(dist + '.tar.bz2', {})
        meta_dict['url'] = read_url(dist)
        try:
            # Prefer the alternate file list produced during linking,
            # then remove it; fall back to the original list.
            alt_files_path = join(prefix, 'conda-meta',
                                  dist2filename(dist, '.files'))
            meta_dict['files'] = list(yield_lines(alt_files_path))
            os.unlink(alt_files_path)
        except IOError:
            meta_dict['files'] = files
        meta_dict['link'] = {
            'source': source_dir,
            'type': link_name_map.get(linktype)
        }
        if 'icon' in meta_dict:
            meta_dict['icondata'] = read_icondata(source_dir)

        create_meta(prefix, dist, info_dir, meta_dict)
Esempio n. 35
0
def israwdir(path):
    """Return True only when *path* itself is a directory; a symlink
    that points at a directory does not count."""
    if islink(path):
        return False
    return isdir(path)
Esempio n. 36
0
    def calculate(self,
                  atoms=None,
                  properties=['energy'],
                  system_changes=all_changes):
        """Capture the RuntimeError from FileIOCalculator.calculate
        and add a little debug information from the deMon output.

        See base FileIocalculator for documentation.

        Bug fixes: the "doesn't exist" error previously named
        deMon.rst although the file actually checked is deMon.mem, and
        the debug dump printed the last 20 lines while announcing
        `debug_lines` (10).
        """

        if atoms is not None:
            self.atoms = atoms.copy()

        self.write_input(self.atoms, properties, system_changes)
        if self.command is None:
            raise RuntimeError('Please set $%s environment variable ' %
                               ('DEMON_COMMAND') +
                               'or supply the command keyword')
        command = self.command  # .replace('PREFIX', self.prefix)

        # basis path
        basis_path = self.parameters['basis_path']
        if basis_path is None:
            basis_path = os.environ.get('DEMON_BASIS_PATH')

        if basis_path is None:
            raise RuntimeError('Please set basis_path keyword,' +
                               ' or the DEMON_BASIS_PATH' +
                               ' environment variable')

        # link restart file
        value = self.parameters['guess']
        if value.upper() == 'RESTART':
            value2 = self.parameters['deMon_restart_path']

            # Remove a stale restart file first; op.islink also catches
            # broken symlinks for which op.exists is False.
            if op.exists(self.directory + '/deMon.rst')\
                    or op.islink(self.directory + '/deMon.rst'):
                os.remove(self.directory + '/deMon.rst')
            abspath = op.abspath(value2)

            if op.exists(abspath + '/deMon.mem') \
                    or op.islink(abspath + '/deMon.mem'):

                shutil.copy(abspath + '/deMon.mem',
                            self.directory + '/deMon.rst')
            else:
                # Report the file that was actually checked (deMon.mem).
                raise RuntimeError("{0} doesn't exist".format(abspath +
                                                              '/deMon.mem'))

        abspath = op.abspath(basis_path)

        # Make the deMon basis-set files available in the run directory.
        for name in ['BASIS', 'AUXIS', 'ECPS', 'MCPS', 'FFDS']:
            self.link_file(abspath, self.directory, name)

        subprocess.check_call(command, shell=True, cwd=self.directory)

        try:
            self.read_results()
        except Exception:  # XXX Which kind of exception?
            # Dump the tail of deMon.out to help diagnose the failure.
            with open(self.directory + '/deMon.out', 'r') as f:
                lines = f.readlines()
            debug_lines = 10
            print('##### %d last lines of the deMon.out' % debug_lines)
            for line in lines[-debug_lines:]:
                print(line.strip())
            print('##### end of deMon.out')
            raise RuntimeError
Esempio n. 37
0
def _walk(top, topdown=True, onerror=None, followlinks=False):
    """Like Python 3.5's implementation of os.walk() -- faster than
    the pre-Python 3.5 version as it uses scandir() internally.

    Yields (dirpath, dirnames, filenames) tuples for *top* and its
    subdirectories.  *onerror*, if given, is called with the OSError
    when a directory cannot be listed.  Note that recursive descent
    goes through the public walk() wrapper, not _walk() itself.
    """
    dirs = []
    nondirs = []

    # We may not have read permission for top, in which case we can't
    # get a list of the files the directory contains.  os.walk
    # always suppressed the exception then, rather than blow up for a
    # minor reason when (say) a thousand readable directories are still
    # left to visit.  That logic is copied here.
    try:
        scandir_it = scandir(top)
    except OSError as error:
        if onerror is not None:
            onerror(error)
        return

    while True:
        try:
            try:
                entry = next(scandir_it)
            except StopIteration:
                break
        except OSError as error:
            if onerror is not None:
                onerror(error)
            return

        try:
            is_dir = entry.is_dir()
        except OSError:
            # If is_dir() raises an OSError, consider that the entry is not
            # a directory, same behaviour than os.path.isdir().
            is_dir = False

        if is_dir:
            dirs.append(entry.name)
        else:
            nondirs.append(entry.name)

        if not topdown and is_dir:
            # Bottom-up: recurse into sub-directory, but exclude symlinks to
            # directories if followlinks is False
            if followlinks:
                walk_into = True
            else:
                try:
                    is_symlink = entry.is_symlink()
                except OSError:
                    # If is_symlink() raises an OSError, consider that the
                    # entry is not a symbolic link, same behaviour than
                    # os.path.islink().
                    is_symlink = False
                walk_into = not is_symlink

            if walk_into:
                for entry in walk(entry.path, topdown, onerror, followlinks):
                    yield entry

    # Yield before recursion if going top down
    if topdown:
        yield top, dirs, nondirs

        # Recurse into sub-directories
        for name in dirs:
            new_path = join(top, name)
            # Issue #23605: os.path.islink() is used instead of caching
            # entry.is_symlink() result during the loop on os.scandir() because
            # the caller can replace the directory entry during the "yield"
            # above.
            if followlinks or not islink(new_path):
                for entry in walk(new_path, topdown, onerror, followlinks):
                    yield entry
    else:
        # Yield after recursion if going bottom up
        yield top, dirs, nondirs
Esempio n. 38
0
import struct
import glob
import sys
import os, os.path as path
from conf import conf
import shutil
import gui
import tempfile
import utils
import re

from osbackend import OSBackend

# Mobile mode is active whenever the bundled service is not used.
conf.MOBILE = not conf.USESERVICE
# Treat VirtualBox as installed only when the app bundle is a real
# directory, not a symlink.
conf.VBOX_INSTALLED = path.exists("/Applications/VirtualBox.app") and \
                      not path.islink("/Applications/VirtualBox.app")

# NOTE(review): `logging` is used here but does not appear in the import
# block above — confirm it is imported elsewhere in this module.  The
# `__nonzero__` call implies this module targets Python 2.
logging.debug("Using Mobile mode : " + str(conf.MOBILE.__nonzero__()))
logging.debug("Is VirtualBox installed : " + str(conf.VBOX_INSTALLED))


class MacBackend(OSBackend):
    """macOS-specific OSBackend implementation."""

    # Names of the VirtualBox command-line / GUI executables on macOS.
    VBOXMANAGE_EXECUTABLE = "VBoxManage"
    VIRTUALBOX_EXECUTABLE = "VirtualBox"
    # Use VMDK paths relative to the VM configuration.
    RELATIVE_VMDK_POLICY = True
    # Subdirectory holding the kernel extensions.
    KEXTS = "kexts"

    def __init__(self):
        OSBackend.__init__(self, "macosx")
        # e.g. the Darwin kernel release string from uname.
        self.OS_VERSION = os.uname()[2]
Esempio n. 39
0
def copyfile(
    originalfile,
    newfile,
    copy=False,
    create_new=False,
    use_hardlink=True,
    copy_related_files=True,
):
    """
    Copy or link files.

    If ``use_hardlink`` is True, and the file can be hard-linked, then a
    link is created, instead of copying the file.

    If a hard link is not created and ``copy`` is False, then a symbolic
    link is created.

    .. admonition:: Copy options for existing files

        * symlink

            * to regular file originalfile            (keep if symlinking)
            * to same dest as symlink originalfile    (keep if symlinking)
            * to other file                           (unlink)

        * regular file

            * hard link to originalfile               (keep)
            * copy of file (same hash)                (keep)
            * different file (diff hash)              (unlink)

    .. admonition:: Copy options for new files

        * ``use_hardlink`` & ``can_hardlink`` => hardlink
        * ``~hardlink`` & ``~copy`` & ``can_symlink`` => symlink
        * ``~hardlink`` & ``~symlink`` => copy

    Parameters
    ----------
    originalfile : :obj:`str`
        full path to original file
    newfile : :obj:`str`
        full path to new file
    copy : Bool
        specifies whether to copy or symlink files
        (default=False) but only for POSIX systems
    create_new : Bool
        if True, never overwrite: pick a fresh ``_cNNNN``-suffixed
        destination name instead (default=False)
    use_hardlink : Bool
        specifies whether to hard-link files, when able
        (default=True), taking precedence over copy
    copy_related_files : Bool
        specifies whether to also operate on related files, as defined in
        ``related_filetype_sets``

    Returns
    -------
    newfile : :obj:`str`
        the destination path actually used (may differ from the
        *newfile* argument when ``create_new`` is True)

    """
    newhash = None
    orighash = None
    logger.debug(newfile)

    if create_new:
        # Keep bumping the _cNNNN counter until the name is free.
        while op.exists(newfile):
            base, fname, ext = split_filename(newfile)
            s = re.search("_c[0-9]{4,4}$", fname)
            i = 0
            if s:
                i = int(s.group()[2:]) + 1
                fname = fname[:-6] + "_c%04d" % i
            else:
                fname += "_c%04d" % i
            newfile = base + os.sep + fname + ext

    # Don't try creating symlinks on CIFS
    if copy is False and on_cifs(newfile):
        copy = True

    keep = False
    if op.lexists(newfile):
        if op.islink(newfile):
            # Keep an existing symlink only when it already points at the
            # original and we are not switching to hardlink/copy mode.
            if all((
                    os.readlink(newfile) == op.realpath(originalfile),
                    not use_hardlink,
                    not copy,
            )):
                keep = True
        elif posixpath.samefile(newfile, originalfile):
            # Same inode (hard link or identical path): nothing to do.
            keep = True
        else:
            # Different file: keep it only when contents hash the same.
            newhash = hash_file(newfile)
            logger.debug("File: %s already exists,%s, copy:%d", newfile,
                         newhash, copy)
            orighash = hash_file(originalfile)
            keep = newhash == orighash
        if keep:
            logger.debug("File: %s already exists, not overwriting, copy:%d",
                         newfile, copy)
        else:
            os.unlink(newfile)

    if not keep and use_hardlink:
        try:
            logger.debug("Linking File: %s->%s", newfile, originalfile)
            # Use realpath to avoid hardlinking symlinks
            os.link(op.realpath(originalfile), newfile)
        except OSError:
            use_hardlink = False  # Disable hardlink for associated files
        else:
            keep = True

    if not keep and not copy and os.name == "posix":
        try:
            logger.debug("Symlinking File: %s->%s", newfile, originalfile)
            os.symlink(originalfile, newfile)
        except OSError:
            copy = True  # Disable symlink for associated files
        else:
            keep = True

    if not keep:
        try:
            logger.debug("Copying File: %s->%s", newfile, originalfile)
            shutil.copyfile(originalfile, newfile)
        except shutil.Error as e:
            # Bug fix: exceptions have no `.message` attribute on
            # Python 3; use str(e) instead.
            logger.warning(str(e))

    # Associated files
    if copy_related_files:
        related_file_pairs = (get_related_files(f, include_this_file=False)
                              for f in (originalfile, newfile))
        for alt_ofile, alt_nfile in zip(*related_file_pairs):
            if op.exists(alt_ofile):
                copyfile(
                    alt_ofile,
                    alt_nfile,
                    copy,
                    use_hardlink=use_hardlink,
                    copy_related_files=False,
                )

    return newfile
Esempio n. 40
0
def fixVisitor(data, dirnm, filesindir):
    """Directory-walk visitor: apply fixLink() to every symlink among
    *filesindir* inside the directory *dirnm*."""
    for entry in filesindir:
        entry_path = join(dirnm, entry)
        if islink(entry_path):
            fixLink(data, entry_path)
Esempio n. 41
0
 # NOTE(review): this is the interior of a larger migration routine —
 # `mp`, `mp_threads`, `s_ns`, `src` and `dtransfers` are defined by the
 # enclosing scope, which is not visible here.
 if mp:
     # Setup the multiprocess pool and queue
     m_queue = Queue(maxsize=mp_threads)
     iolock = Lock()
     pool = Pool(mp_threads, initializer=mp_process, initargs=(m_queue, iolock))
 else:
     m_queue = None
     pool = None
     iolock = noop()
 # Find all valid links and corresponding files
 for root, dirs, files in walk(s_ns, topdown=False):
     for filename in files:
         # Create file name
         filepath = path.join(root, filename)
         # Check if it is link
         if path.islink(filepath):
             # Get link target
             target = readlink(filepath)
             # Check if link address is absolute
             if not path.isabs(target):
                 # Create absolute link address
                 target = path.abspath(path.join(path.dirname(filepath), target))
             # Select only links belonging to src
             if match(escape(src), target):
                 if not path.exists(target):
                     # Delete all matching dead links
                     remove(filepath)
                 else:
                     # Migrate all data
                     with iolock:
                         print('Start migrating file %s: %s' % (str(dtransfers).rjust(10), filepath))
Esempio n. 42
0
 def lchmod(path, mode):
     # On systems without permission support for symbolic links we must
     # not follow them, so symlinks are skipped entirely.
     if islink(path):
         return
     chmod(path, mode)
Esempio n. 43
0
def clone_env(prefix1, prefix2, verbose=True, quiet=False, index_args=None):
    """
    clone existing prefix1 into new prefix2

    Untracked files are copied over (rewriting any embedded prefix path
    in text files), and the same packages — except conda, conda-env and
    anything depending on them — are installed into prefix2.

    Returns the tuple (actions, untracked_files).

    Bug fix: the record set `drecs` was previously only stripped of the
    excluded packages inside the ``if not quiet`` branch, so cloning
    with quiet=True attempted to clone conda/conda-env as well.  The
    filtering now happens regardless of verbosity.  The local `filter`
    was also renamed to avoid shadowing the builtin.
    """
    untracked_files = untracked(prefix1)

    # Discard conda, conda-env and any package that depends on them
    drecs = linked_data(prefix1)
    filtered = {}  # name -> Dist of packages that must not be cloned
    found = True
    while found:
        found = False
        for dist, info in iteritems(drecs):
            name = info['name']
            if name in filtered:
                continue
            if name == 'conda':
                filtered['conda'] = dist
                found = True
                break
            if name == "conda-env":
                filtered["conda-env"] = dist
                found = True
                break
            # Transitively exclude anything depending on an excluded pkg.
            for dep in info.get('depends', []):
                if MatchSpec(dep).name in filtered:
                    filtered[name] = dist
                    found = True

    if filtered:
        if not quiet:
            print(
                'The following packages cannot be cloned out of the root environment:'
            )
            for pkg in itervalues(filtered):
                print(' - ' + pkg.dist_name)
        # Strip the excluded packages even when quiet (previously this
        # only happened inside the `not quiet` branch — a bug).
        drecs = {
            dist: info
            for dist, info in iteritems(drecs)
            if info['name'] not in filtered
        }

    # Resolve URLs for packages that do not have URLs
    r = None
    index = {}
    unknowns = [dist for dist, info in iteritems(drecs) if not info.get('url')]
    notfound = []
    if unknowns:
        index_args = index_args or {}
        index = get_index(**index_args)
        r = Resolve(index, sort=True)
        for dist in unknowns:
            name = dist.dist_name
            fn = dist.to_filename()
            fkeys = [d for d in r.index.keys() if r.index[d]['fn'] == fn]
            if fkeys:
                # Replace the unresolved record with the newest indexed one.
                del drecs[dist]
                dist_str = sorted(fkeys, key=r.version_key, reverse=True)[0]
                drecs[Dist(dist_str)] = r.index[dist_str]
            else:
                notfound.append(fn)
    if notfound:
        what = "Package%s " % ('' if len(notfound) == 1 else 's')
        notfound = '\n'.join(' - ' + fn for fn in notfound)
        msg = '%s missing in current %s channels:%s' % (what, context.subdir,
                                                        notfound)
        raise CondaRuntimeError(msg)

    # Assemble the URL and channel list
    urls = {}
    for dist, info in iteritems(drecs):
        fkey = dist
        if fkey not in index:
            index[fkey] = Record.from_objects(info, not_fetched=True)
            r = None
        urls[dist] = info['url']

    if r is None:
        r = Resolve(index)
    # Install dependencies before the packages that need them.
    dists = r.dependency_sort({d.quad[0]: d for d in urls.keys()})
    urls = [urls[d] for d in dists]

    if verbose:
        print('Packages: %d' % len(dists))
        print('Files: %d' % len(untracked_files))

    for f in untracked_files:
        src = join(prefix1, f)
        dst = join(prefix2, f)
        dst_dir = dirname(dst)
        # Clear anything that would collide with the destination dir.
        if islink(dst_dir) or isfile(dst_dir):
            rm_rf(dst_dir)
        if not isdir(dst_dir):
            os.makedirs(dst_dir)
        # Recreate symlinks verbatim instead of copying their target.
        if islink(src):
            os.symlink(os.readlink(src), dst)
            continue

        try:
            with open(src, 'rb') as fi:
                data = fi.read()
        except IOError:
            # Unreadable/vanished source file: skip it.
            continue

        try:
            # Text files may embed the old prefix path; rewrite it.
            s = data.decode('utf-8')
            s = s.replace(prefix1, prefix2)
            data = s.encode('utf-8')
        except UnicodeDecodeError:  # data is binary
            pass

        with open(dst, 'wb') as fo:
            fo.write(data)
        shutil.copystat(src, dst)

    actions = explicit(urls,
                       prefix2,
                       verbose=not quiet,
                       index=index,
                       force_extract=False,
                       index_args=index_args)
    return actions, untracked_files
Esempio n. 44
0
def updatefromnew(old, new, target, rel, entry, linkdir, copy, skip,
                  forcecopy):
    """ Update the target entry from the new entry

      Depending on whether ``entry`` is a directory and which of the
      ``skip``/``linkdir``/``copy`` pattern lists it matches, the entry is
      skipped, recursed into, copied, or symlinked into ``target``.
      Conflicting local edits are preserved and the new/old versions are
      written alongside with ``NEWSUFFIX``/``OLDSUFFIX``.

      If there were any issues this returns True otherwise it returns False.
      """
    relentry = joinpath(rel, entry)
    newentry = joinpath(new, entry)
    targetentry = joinpath(target, entry)
    oldentry = joinpath(old, entry)
    # Directories are matched with a trailing "/" so patterns can
    # distinguish files from directories.
    matchentry = relentry
    if israwdir(newentry):
        matchentry += "/"
    if matchespatterns(matchentry, skip):
        return False
    # if dir and ( forcecopy or not linkdir ) then recurse possibly with forcecopy set
    # if not dir and forcecopy or copy then copy file
    # if ( not dir and not forcecopy and not copy ) or ( dir and linkdir and not forcecopy) then symlink file/dir
    if isdir(newentry) and (forcecopy
                            or not matchespatterns(matchentry, linkdir)):
        # Recurse; propagate forcecopy when the directory itself matches the
        # copy patterns (short-circuit keeps behavior identical to the old
        # duplicated if/else branches).
        return doupdate(oldentry, newentry, targetentry, relentry, linkdir,
                        copy, skip,
                        forcecopy or matchespatterns(matchentry, copy))
    elif (not isdir(newentry)) and (forcecopy
                                    or matchespatterns(matchentry, copy)):
        # not dir and forcecopy or copy so copy files
        if exists(targetentry):
            if exists(oldentry) and checkfiles(targetentry, oldentry):
                # target, new and old and target=old so replace
                os.remove(targetentry)
                copyfile(newentry, targetentry)
            elif not exists(oldentry):
                # target, new and not old
                ensureabsent(targetentry + NEWSUFFIX)
                copyfile(newentry, targetentry + NEWSUFFIX)
                print(
                    "WARNING: File present in new version and local but not in old.\nLeaving your version ({0}) but putting new file in directory with it ({1}).\n"
                    .format(targetentry, targetentry + NEWSUFFIX))
                return True
            elif not checkfiles(newentry, oldentry):
                # target, new and old and old!=target and old!=new (target may or may not equal new)
                ensureabsent(targetentry + NEWSUFFIX)
                copyfile(newentry, targetentry + NEWSUFFIX)
                if not exists(
                        targetentry + OLDSUFFIX
                ):  # if already an old file assume current is still based of that version not of the more recent old version
                    copyfile(oldentry, targetentry + OLDSUFFIX)
                print(
                    "WARNING: File has update but local changes present.\nLeaving your version ({0}) but putting new and old files in directory with it\n({1} and {2})"
                    .format(targetentry, targetentry + NEWSUFFIX,
                            targetentry + OLDSUFFIX))
                return True
            # else: pass # target, new and old but new=old so leave
        else:
            # not in target so just copy
            copyfile(newentry, targetentry)
    else:
        # symlink it
        # NOTE(review): a dangling symlink fails exists() and falls through
        # to the plain os.symlink below, which then raises FileExistsError;
        # consider lexists() here -- confirm intended behavior.
        if exists(targetentry):
            if islink(targetentry):
                # BUG FIX: the old code removed the stale link but never
                # created the replacement, so the entry vanished from target.
                os.remove(targetentry)
                os.symlink(newentry, targetentry)
            else:
                ensureabsent(targetentry + LOCALSUFFIX)
                os.rename(targetentry, targetentry + LOCALSUFFIX)
                os.symlink(newentry, targetentry)
                print(
                    "WARNING: File exists locally but linking new version in.\nLocal version saved as {0}"
                    .format(targetentry + LOCALSUFFIX))
                return True
        else:
            os.symlink(newentry, targetentry)
    return False
Esempio n. 45
0
 def islink(self):
     """Return True if ``self.strpath`` refers to a symbolic link.

     Thin wrapper delegating to the module-level ``islink`` (presumably
     ``os.path.islink`` -- confirm against the file's imports).
     """
     return islink(self.strpath)
Esempio n. 46
0
def is_macho(path):
    """Return True if *path* looks like a Mach-O binary.

    A file qualifies when it does not end with one of the extensions in
    ``NO_EXT``, is a regular file rather than a symlink, and its first four
    bytes are one of the magic numbers in ``MAGIC``.
    """
    # Cheap name/type checks first so we avoid opening files that cannot
    # possibly be Mach-O.
    if path.endswith(NO_EXT) or islink(path) or not isfile(path):
        return False
    with open(path, 'rb') as fi:
        head = fi.read(4)
    # `in` already yields a bool; the previous bool() cast was redundant.
    return head in MAGIC
Esempio n. 47
0
    GCGGAUUUAgCUCAGuuGGGAGAGCgCCAGAcUgAAgAPcUGGAGgUCcUGUGtPCGaUCCACAGAAUUCGCACCA
    (((((((..((((.....[..)))).((((.........)))).....(((((..]....))))))))))))....
    test_data/rp2_bujnicki_1_rpr.pdb
    >rp2_bujnicki_1_rpr nts=100 [rp2_bujnicki_1_rpr] -- secondary structure derived by DSSR
    CCGGAGGAACUACUG&CCGGCAGCCU&CCGGAGGAACUACUG&CCGGCAGCCU&CCGGAGGAACUACUG&CCGGCAGCCU&CCGGAGGAACUACUG&CCGGCAGCCU
    [[[[(((.....(((&{{{{))))))&(((((((.....(.(&]]]]).))))&[[[[[[......[[[&))))]]].]]&}}}}(((.....(((&]]]]))))))

"""
import re
import argparse

from subprocess import Popen, PIPE
from os import remove, path, readlink

# Resolve the directory that holds this script, following one level of
# symlink indirection when the script is installed as a link.
PATH = path.abspath(__file__)
if path.islink(PATH):
    # NOTE(review): os.readlink() can return a path relative to the link's
    # own directory, so PATH may end up relative here -- confirm intended.
    PATH = path.dirname(readlink(PATH))
else:
    PATH = path.dirname(path.abspath(__file__))

from rna_tools.tools.rna_x3dna.rna_x3dna_config import BINARY_PATH


class x3DNAMissingFile(Exception):
    """Raised when a file expected by the x3dna/DSSR tooling is missing."""
    pass


def get_parser():
    parser = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.RawDescriptionHelpFormatter)
Esempio n. 48
0
    def __call__(archive,
                 annex=None,
                 add_archive_leading_dir=False,
                 strip_leading_dirs=False,
                 leading_dirs_depth=None,
                 leading_dirs_consider=None,
                 use_current_dir=False,
                 delete=False,
                 key=False,
                 exclude=None,
                 rename=None,
                 existing='fail',
                 annex_options=None,
                 copy=False,
                 commit=True,
                 allow_dirty=False,
                 stats=None,
                 drop_after=False,
                 delete_after=False):
        """
        Extract the content of an annexed archive and add each extracted
        file to the annex, registering archive-backed URLs for them.

        The archive may be given as a path (``key=False``) or directly as
        an annex key (``key=True``).  Extracted files can be renamed,
        excluded, prefixed with the archive name, dropped after addition,
        or removed again after being registered (``delete_after``).

        Returns
        -------
        annex
        """
        if exclude:
            exclude = assure_tuple_or_list(exclude)
        if rename:
            rename = assure_tuple_or_list(rename)

        # TODO: actually I see possibly us asking user either he wants to convert
        # his git repo into annex
        archive_path = archive
        pwd = getpwd()
        if annex is None:
            annex = get_repo_instance(pwd, class_=AnnexRepo)
            if not isabs(archive):
                # if not absolute -- relative to wd and thus
                archive_path = normpath(opj(realpath(pwd), archive))
                # abspath(archive) is not "good" since dereferences links in the path
                # archive_path = abspath(archive)
        elif not isabs(archive):
            # if we are given an annex, then assume that given path is within annex, not
            # relative to PWD
            archive_path = opj(annex.path, archive)
        annex_path = annex.path

        # _rpath below should depict paths relative to the top of the annex
        archive_rpath = relpath(
            archive_path,
            # Use `get_dataset_root` to avoid resolving the leading path. If no
            # repo is found, downstream code will raise FileNotInRepositoryError.
            get_dataset_root(archive_path) or ".")

        if archive in annex.untracked_files:
            raise RuntimeError(
                "The archive is not under annex yet. You should run 'datalad "
                "add {}' first".format(archive))

        if not allow_dirty and annex.dirty:
            # already saved me once ;)
            raise RuntimeError(
                "You better commit all the changes and untracked files first")

        if not key:
            # we were given a file which must exist
            if not exists(archive_path):
                raise ValueError("Archive {} does not exist".format(archive))
            # TODO: support adding archives content from outside the annex/repo
            origin = 'archive'
            key = annex.get_file_key(archive_rpath)
            archive_dir = dirname(archive_path)
        else:
            origin = 'key'
            key = archive
            archive_dir = None  # We must not have anything to do with the location under .git/annex

        archive_basename = file_basename(archive)

        if not key:
            # TODO: allow for it to be under git???  how to reference then?
            raise NotImplementedError(
                "Provided file %s is not under annex.  We don't support yet adding everything "
                "straight to git" % archive)

        # are we in a subdirectory of the repository?
        pwd_under_annex = commonprefix([pwd, annex_path]) == annex_path
        #  then we should add content under that
        # subdirectory,
        # get the path relative to the repo top
        if use_current_dir:
            # if outside -- extract to the top of repo
            extract_rpath = relpath(pwd, annex_path) \
                if pwd_under_annex \
                else None
        else:
            extract_rpath = relpath(archive_dir, annex_path)

        # relpath might return '.' as the relative path to curdir, which then normalize_paths
        # would take as instructions to really go from cwd, so we need to sanitize
        if extract_rpath == curdir:
            extract_rpath = None  # no special relpath from top of the repo

        # and operate from now on the key or whereever content available "canonically"
        try:
            key_rpath = annex.get_contentlocation(
                key)  # , relative_to_top=True)
        except:
            # NOTE(review): bare except hides the underlying error; consider
            # narrowing and chaining the original exception.
            raise RuntimeError(
                "Content of %s seems to be N/A.  Fetch it first" % key)

        # now we simply need to go through every file in that archive and
        lgr.info("Adding content of the archive %s into annex %s", archive,
                 annex)

        from datalad.customremotes.archives import ArchiveAnnexCustomRemote
        # TODO: shouldn't we be able just to pass existing AnnexRepo instance?
        # TODO: we will use persistent cache so we could just (ab)use possibly extracted archive
        annexarchive = ArchiveAnnexCustomRemote(path=annex_path,
                                                persistent_cache=True)
        # We will move extracted content so it must not exist prior running
        annexarchive.cache.allow_existing = True
        earchive = annexarchive.cache[key_rpath]

        # TODO: check if may be it was already added
        if ARCHIVES_SPECIAL_REMOTE not in annex.get_remotes():
            init_datalad_remote(annex,
                                ARCHIVES_SPECIAL_REMOTE,
                                autoenable=True)
        else:
            lgr.debug("Special remote {} already exists".format(
                ARCHIVES_SPECIAL_REMOTE))

        precommitted = False
        delete_after_rpath = None
        try:
            old_always_commit = annex.always_commit
            # When faking dates, batch mode is disabled, so we want to always
            # commit.
            annex.always_commit = annex.fake_dates_enabled

            if annex_options:
                if isinstance(annex_options, string_types):
                    annex_options = shlex.split(annex_options)

            leading_dir = earchive.get_leading_directory(
                depth=leading_dirs_depth, exclude=exclude, consider=leading_dirs_consider) \
                if strip_leading_dirs else None
            leading_dir_len = len(leading_dir) + len(
                opsep) if leading_dir else 0

            # we need to create a temporary directory at the top level which would later be
            # removed
            prefix_dir = basename(tempfile.mktemp(prefix=".datalad", dir=annex_path)) \
                if delete_after \
                else None

            # dedicated stats which would be added to passed in (if any)
            outside_stats = stats
            stats = ActivityStats()

            for extracted_file in earchive.get_extracted_files():
                stats.files += 1
                extracted_path = opj(earchive.path, extracted_file)

                if islink(extracted_path):
                    link_path = realpath(extracted_path)
                    if not exists(
                            link_path
                    ):  # TODO: config  addarchive.symlink-broken='skip'
                        lgr.warning("Path %s points to non-existing file %s" %
                                    (extracted_path, link_path))
                        stats.skipped += 1
                        continue
                        # TODO: check if points outside of the archive -- warning and skip

                # preliminary target name which might get modified by renames
                target_file_orig = target_file = extracted_file

                # strip leading dirs
                target_file = target_file[leading_dir_len:]

                if add_archive_leading_dir:
                    target_file = opj(archive_basename, target_file)

                if rename:
                    target_file = apply_replacement_rules(rename, target_file)

                # continue to next iteration if extracted_file in excluded
                if exclude:
                    try:  # since we need to skip outside loop from inside loop
                        for regexp in exclude:
                            if re.search(regexp, extracted_file):
                                lgr.debug(
                                    "Skipping {extracted_file} since contains {regexp} pattern"
                                    .format(**locals()))
                                stats.skipped += 1
                                raise StopIteration
                    except StopIteration:
                        continue

                if prefix_dir:
                    target_file = opj(prefix_dir, target_file)
                    # but also allow for it in the orig
                    target_file_orig = opj(prefix_dir, target_file_orig)

                target_file_path_orig = opj(annex.path, target_file_orig)

                url = annexarchive.get_file_url(
                    archive_key=key,
                    file=extracted_file,
                    size=os.stat(extracted_path).st_size)

                # lgr.debug("mv {extracted_path} {target_file}. URL: {url}".format(**locals()))

                target_file_path = opj(extract_rpath, target_file) \
                    if extract_rpath else target_file

                target_file_path = opj(annex.path, target_file_path)

                if lexists(target_file_path):
                    handle_existing = True
                    if md5sum(target_file_path) == md5sum(extracted_path):
                        if not annex.is_under_annex(extracted_path):
                            # if under annex -- must be having the same content,
                            # we should just add possibly a new extra URL
                            # but if under git -- we cannot/should not do
                            # anything about it ATM
                            if existing != 'overwrite':
                                continue
                        else:
                            handle_existing = False
                    if not handle_existing:
                        pass  # nothing... just to avoid additional indentation
                    elif existing == 'fail':
                        raise RuntimeError(
                            "File {} already exists, but new (?) file {} was instructed "
                            "to be placed there while overwrite=False".format(
                                target_file_path, extracted_file))
                    elif existing == 'overwrite':
                        stats.overwritten += 1
                        # to make sure it doesn't conflict -- might have been a tree
                        rmtree(target_file_path)
                    else:
                        target_file_path_orig_ = target_file_path

                        # To keep extension intact -- operate on the base of the filename
                        p, fn = os.path.split(target_file_path)
                        ends_with_dot = fn.endswith('.')
                        fn_base, fn_ext = file_basename(fn, return_ext=True)

                        if existing == 'archive-suffix':
                            fn_base += '-%s' % archive_basename
                        elif existing == 'numeric-suffix':
                            pass  # archive-suffix will have the same logic
                        else:
                            raise ValueError(existing)
                        # keep incrementing index in the suffix until file doesn't collide
                        suf, i = '', 0
                        while True:
                            target_file_path_new = opj(
                                p, fn_base + suf +
                                ('.' if
                                 (fn_ext or ends_with_dot) else '') + fn_ext)
                            if not lexists(target_file_path_new):
                                break
                            lgr.debug("File %s already exists" %
                                      target_file_path_new)
                            i += 1
                            suf = '.%d' % i
                        target_file_path = target_file_path_new
                        lgr.debug("Original file %s will be saved into %s" %
                                  (target_file_path_orig_, target_file_path))
                        # TODO: should we reserve smth like
                        # stats.clobbed += 1

                if target_file_path != target_file_path_orig:
                    stats.renamed += 1

                #target_path = opj(getpwd(), target_file)
                if copy:
                    raise NotImplementedError(
                        "Not yet copying from 'persistent' cache")
                else:
                    # os.renames(extracted_path, target_path)
                    # addurl implementation relying on annex'es addurl below would actually copy
                    pass

                lgr.debug(
                    "Adding %s to annex pointing to %s and with options %r",
                    target_file_path, url, annex_options)

                out_json = annex.add_url_to_file(target_file_path,
                                                 url,
                                                 options=annex_options,
                                                 batch=True)

                if 'key' in out_json and out_json[
                        'key'] is not None:  # annex.is_under_annex(target_file, batch=True):
                    # due to http://git-annex.branchable.com/bugs/annex_drop_is_not___34__in_effect__34___for_load_which_was___34__addurl_--batch__34__ed_but_not_yet_committed/?updated
                    # we need to maintain a list of those to be dropped files
                    if drop_after:
                        annex.drop_key(out_json['key'], batch=True)
                        stats.dropped += 1
                    stats.add_annex += 1
                else:
                    lgr.debug(
                        "File {} was added to git, not adding url".format(
                            target_file_path))
                    stats.add_git += 1

                if delete_after:
                    # delayed removal so it doesn't interfer with batched processes since any pure
                    # git action invokes precommit which closes batched processes. But we like to count
                    stats.removed += 1

                # # chaining 3 annex commands, 2 of which not batched -- less efficient but more bullet proof etc
                # annex.add(target_path, options=annex_options)
                # # above action might add to git or to annex
                # if annex.file_has_content(target_path):
                #     # if not --  it was added to git, if in annex, it is present and output is True
                #     annex.add_url_to_file(target_file, url, options=['--relaxed'], batch=True)
                #     stats.add_annex += 1
                # else:
                #     lgr.debug("File {} was added to git, not adding url".format(target_file))
                #     stats.add_git += 1
                # # TODO: actually check if it is anyhow different from a previous version. If not
                # # then it wasn't really added

                del target_file  # Done with target_file -- just to have clear end of the loop

            if delete and archive and origin != 'key':
                lgr.debug("Removing the original archive {}".format(archive))
                # force=True since some times might still be staged and fail
                annex.remove(archive_rpath, force=True)

            lgr.info("Finished adding %s: %s" %
                     (archive, stats.as_str(mode='line')))

            if outside_stats:
                outside_stats += stats
            if delete_after:
                # force since not committed. r=True for -r (passed into git call
                # to recurse)
                delete_after_rpath = opj(
                    extract_rpath, prefix_dir) if extract_rpath else prefix_dir
                lgr.debug("Removing extracted and annexed files under %s",
                          delete_after_rpath)
                annex.remove(delete_after_rpath, r=True, force=True)
            if commit:
                commit_stats = outside_stats if outside_stats else stats
                annex.precommit(
                )  # so batched ones close and files become annex symlinks etc
                precommitted = True
                if any(
                        r.get('state', None) != 'clean'
                        for p, r in iteritems(annex.status(untracked='no'))):
                    annex.commit("Added content extracted from %s %s\n\n%s" %
                                 (origin, archive_rpath,
                                  commit_stats.as_str(mode='full')),
                                 _datalad_msg=True)
                    commit_stats.reset()
        finally:
            # since we batched addurl, we should close those batched processes
            # if haven't done yet.  explicitly checked to avoid any possible
            # "double-action"
            if not precommitted:
                annex.precommit()

            if delete_after_rpath:
                delete_after_path = opj(annex_path, delete_after_rpath)
                if exists(delete_after_path):  # should not be there
                    # but for paranoid yoh
                    lgr.warning(
                        "Removing temporary directory under which extracted "
                        "files were annexed and should have been removed: %s",
                        delete_after_path)
                    rmtree(delete_after_path)

            annex.always_commit = old_always_commit
            # remove what is left and/or everything upon failure
            earchive.clean(force=True)

        return annex
Esempio n. 49
0
        if onerror is not None:
            onerror(err)
        return

    dirs, nondirs = [], []
    for name in names:
        if isdir(join(top, name)):
            dirs.append(name)
        else:
            nondirs.append(name)

    if topdown:
        yield top, dirs, nondirs
    for name in dirs:
        path = join(top, name)
        if not islink(path):
            for x in walk(path, topdown, onerror):
                yield x
    if not topdown:
        yield top, dirs, nondirs


# Advertise the walk() implementation defined above as part of this module's
# public API.
__all__.append("walk")

# Make sure os.environ exists, at least: fall back to an empty mapping when
# the platform did not provide one.
try:
    environ
except NameError:
    environ = {}

Esempio n. 50
0
def rm_rf(path, max_retries=5, trash=True):
    """
    Completely delete path

    max_retries is the number of times to retry on failure. The default is
    5. This only applies to deleting a directory.

    If removing path fails and trash is True, files will be moved to the trash directory.
    """
    if islink(path) or isfile(path):
        # Note that we have to check if the destination is a link because
        # exists('/path/to/dead-link') will return False, although
        # islink('/path/to/dead-link') is True.
        try:
            os.unlink(path)
        except (OSError, IOError):
            log.warn("Cannot remove, permission denied: {0}".format(path))

    elif isdir(path):
        try:
            for i in range(max_retries):
                try:
                    shutil.rmtree(path,
                                  ignore_errors=False,
                                  onerror=warn_failed_remove)
                    return
                except OSError as e:
                    msg = "Unable to delete %s\n%s\n" % (path, e)
                    if on_win:
                        try:
                            shutil.rmtree(path, onerror=_remove_readonly)
                            return
                        except OSError as e1:
                            msg += "Retry with onerror failed (%s)\n" % e1

                        p = subprocess.Popen(
                            ['cmd', '/c', 'rd', '/s', '/q', path],
                            stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE)
                        (stdout, stderr) = p.communicate()
                        if p.returncode != 0:
                            msg += '%s\n%s\n' % (stdout, stderr)
                        else:
                            if not isdir(path):
                                return

                        if trash:
                            try:
                                move_path_to_trash(path)
                                if not isdir(path):
                                    return
                            except OSError as e2:
                                # BUG FIX: a stray `raise` before this line made
                                # it unreachable and aborted the retry loop;
                                # record the failure and keep retrying instead.
                                msg += "Retry with onerror failed (%s)\n" % e2

                    log.debug(msg + "Retrying after %s seconds..." % i)
                    time.sleep(i)
            # Final time. pass exceptions to caller.
            shutil.rmtree(path,
                          ignore_errors=False,
                          onerror=warn_failed_remove)
        finally:
            # If path was removed, ensure it's not in linked_data_
            if not isdir(path):
                delete_linked_data_any(path)
Esempio n. 51
0
    def walk_git_files(self, repo_path=''):
        """
        An iterator method that yields a file path relative to main_repo_abspath
        for each file that should be included in the archive.
        Skips those that match the exclusion patterns found in
        any discovered .gitattributes files along the way.

        Recurses into submodules as well.

        @param repo_path: Path to the git submodule repository relative to main_repo_abspath.
        @type repo_path: str

        @return: Iterator to traverse files under git control relative to main_repo_abspath.
        @rtype: Iterable
        """
        repo_abspath = path.join(self.main_repo_abspath, repo_path)
        repo_file_paths = self.run_git_shell(
            "git ls-files --cached --full-name --no-empty-directory",
            repo_abspath
        ).splitlines()
        exclude_patterns = self.get_exclude_patterns(repo_abspath, repo_file_paths)

        for repo_file_path in repo_file_paths:
            # Git puts path in quotes if file path has unicode characters.
            repo_file_path = repo_file_path.strip('"')  # file path relative to current repo
            repo_file_abspath = path.join(repo_abspath, repo_file_path)  # absolute file path
            main_repo_file_path = path.join(repo_path, repo_file_path)  # file path relative to the main repo

            # Only list symlinks and files.
            if not path.islink(repo_file_abspath) and path.isdir(repo_file_abspath):
                continue

            if self.is_file_excluded(repo_abspath, repo_file_path, exclude_patterns):
                continue

            yield main_repo_file_path

        if self.force_sub:
            self.run_git_shell("git submodule init", repo_abspath)
            self.run_git_shell("git submodule update", repo_abspath)

        try:
            repo_gitmodules_abspath = path.join(repo_abspath, ".gitmodules")

            with open(repo_gitmodules_abspath) as f:
                lines = f.readlines()

            for l in lines:
                # BUG FIX: use a raw string so "\s" is a regex escape, not an
                # (invalid) Python string escape -- a SyntaxWarning/SyntaxError
                # in modern Python.
                m = re.match(r"^\s*path\s*=\s*(.*)\s*$", l)

                if m:
                    submodule_path = m.group(1)
                    submodule_abspath = path.join(repo_path, submodule_path)

                    if self.is_file_excluded(repo_abspath, submodule_path, exclude_patterns):
                        continue

                    for submodule_file_path in self.walk_git_files(submodule_abspath):
                        rel_file_path = submodule_file_path.replace(repo_path, "", 1).strip("/")
                        if self.is_file_excluded(repo_abspath, rel_file_path, exclude_patterns):
                            continue

                        yield submodule_file_path
        except IOError:
            # No .gitmodules (or unreadable) -- repo simply has no submodules.
            pass
Esempio n. 52
0
def remove_existing_symlink(target_path):
    """Delete ``target_path`` if it is a symbolic link.

    Regular files and directories are left untouched.  The path itself is
    always returned, whether or not anything was removed.
    """
    path_is_link = islink(target_path)
    if path_is_link:
        os.remove(target_path)
    return target_path
Esempio n. 53
0
def is_interesting(path):
    """Determine if the file at the given path is interesting enough to
    analyze.

    A path qualifies when it carries a ``.py`` extension and is not a
    symbolic link.
    """
    if not path.endswith('.py'):
        return False
    return not islink(path)
Esempio n. 54
0
def link(pkgs_dir, prefix, dist, linktype=LINK_HARD, index=None):
    '''
    Set up a package in a specified (environment) prefix.  We assume that
    the package has been extracted (using extract() above).

    Links (or copies, for prefix-replaced / no-link files) every file of
    the extracted package into ``prefix``, runs the pre-link/post-link
    scripts, fixes embedded placeholder prefixes, creates menu entries,
    and records package metadata in the environment.
    '''
    index = index or {}
    log.debug('pkgs_dir=%r, prefix=%r, dist=%r, linktype=%r' %
              (pkgs_dir, prefix, dist, linktype))
    if (on_win and abspath(prefix) == abspath(sys.prefix) and
              name_dist(dist) in win_ignore_root):
        # on Windows we have the file lock problem, so don't allow
        # linking or unlinking some packages
        log.warn('Ignored: %s' % dist)
        return

    source_dir = join(pkgs_dir, dist)
    if not run_script(source_dir, dist, 'pre-link', prefix):
        sys.exit('Error: pre-link failed: %s' % dist)

    # The extracted package's metadata: file list, files containing the
    # build prefix placeholder, and files that must never be hard-linked.
    info_dir = join(source_dir, 'info')
    files = list(yield_lines(join(info_dir, 'files')))
    has_prefix_files = read_has_prefix(join(info_dir, 'has_prefix'))
    no_link = read_no_link(info_dir)

    with Locked(prefix), Locked(pkgs_dir):
        for f in files:
            src = join(source_dir, f)
            dst = join(prefix, f)
            dst_dir = dirname(dst)
            if not isdir(dst_dir):
                os.makedirs(dst_dir)
            if os.path.exists(dst):
                log.warn("file already exists: %r" % dst)
                try:
                    os.unlink(dst)
                except OSError:
                    log.error('failed to unlink: %r' % dst)
            # Fall back to copying for prefix-placeholder files, explicit
            # no-link files, and symlinks.
            lt = linktype
            if f in has_prefix_files or f in no_link or islink(src):
                lt = LINK_COPY
            try:
                _link(src, dst, lt)
            except OSError as e:
                log.error('failed to link (src=%r, dst=%r, type=%r, error=%r)' %
                          (src, dst, lt, e))

        if name_dist(dist) == '_cache':
            return

        # Rewrite the recorded build-time placeholder prefix to the actual
        # environment prefix inside each affected file.
        for f in sorted(has_prefix_files):
            placeholder, mode = has_prefix_files[f]
            try:
                update_prefix(join(prefix, f), prefix, placeholder, mode)
            except PaddingError:
                sys.exit("ERROR: placeholder '%s' too short in: %s\n" %
                         (placeholder, dist))

        mk_menus(prefix, files, remove=False)

        if not run_script(prefix, dist, 'post-link'):
            sys.exit("Error: post-link failed for: %s" % dist)

        # Make sure the script stays standalone for the installer
        try:
            from conda.config import remove_binstar_tokens
        except ImportError:
            # There won't be any binstar tokens in the installer anyway
            def remove_binstar_tokens(url):
                return url

        # Record link metadata (source URL with any auth tokens stripped,
        # file list, link type) for this dist in the environment.
        meta_dict = index.get(dist + '.tar.bz2', {})
        meta_dict['url'] = read_url(pkgs_dir, dist)
        if meta_dict['url']:
            meta_dict['url'] = remove_binstar_tokens(meta_dict['url'])
        meta_dict['files'] = files
        meta_dict['link'] = {'source': source_dir,
                             'type': link_name_map.get(linktype)}
        if 'channel' in meta_dict:
            meta_dict['channel'] = remove_binstar_tokens(meta_dict['channel'])
        if 'icon' in meta_dict:
            meta_dict['icondata'] = read_icondata(source_dir)

        create_meta(prefix, dist, info_dir, meta_dict)
Esempio n. 55
0
def copyfile(originalfile,
             newfile,
             copy=False,
             create_new=False,
             hashmethod=None,
             use_hardlink=False,
             copy_related_files=True):
    """Copy or link ``originalfile`` to ``newfile``.

    If ``use_hardlink`` is True, and the file can be hard-linked, then a
    link is created, instead of copying the file.

    If a hard link is not created and ``copy`` is False, then a symbolic
    link is created.

    Parameters
    ----------
    originalfile : str
        full path to original file
    newfile : str
        full path to new file
    copy : Bool
        specifies whether to copy or symlink files
        (default=False) but only for POSIX systems
    create_new : Bool
        if True, never overwrite an existing file; instead pick a fresh
        ``_cNNNN``-suffixed name (default=False)
    hashmethod : str or None
        'timestamp' or 'content'; when None, falls back to the configured
        ``execution.hash_method``
    use_hardlink : Bool
        specifies whether to hard-link files, when able
        (Default=False), taking precedence over copy
    copy_related_files : Bool
        specifies whether to also operate on related files, as defined in
        ``related_filetype_sets``

    Returns
    -------
    newfile : str
        the destination path actually used (may differ from the ``newfile``
        argument when ``create_new`` is True)
    """
    newhash = None
    orighash = None
    fmlogger.debug(newfile)

    if create_new:
        # Bump a 4-digit "_cNNNN" counter until an unused name is found.
        while op.exists(newfile):
            base, fname, ext = split_filename(newfile)
            s = re.search('_c[0-9]{4,4}$', fname)
            i = 0
            if s:
                i = int(s.group()[2:]) + 1
                fname = fname[:-6] + "_c%04d" % i
            else:
                fname += "_c%04d" % i
            newfile = base + os.sep + fname + ext

    if hashmethod is None:
        hashmethod = config.get('execution', 'hash_method').lower()

    # Don't try creating symlinks on CIFS
    if copy is False and on_cifs(newfile):
        copy = True

    # Existing file
    # -------------
    # Options:
    #   symlink
    #       to regular file originalfile            (keep if symlinking)
    #       to same dest as symlink originalfile    (keep if symlinking)
    #       to other file                           (unlink)
    #   regular file
    #       hard link to originalfile               (keep)
    #       copy of file (same hash)                (keep)
    #       different file (diff hash)              (unlink)
    keep = False
    if op.lexists(newfile):
        if op.islink(newfile):
            # Keep an existing symlink only when it already points at the
            # original AND we intend to symlink again.
            if all((os.readlink(newfile) == op.realpath(originalfile),
                    not use_hardlink, not copy)):
                keep = True
        elif posixpath.samefile(newfile, originalfile):
            # Same inode (hard link or identical path) -- nothing to do.
            keep = True
        else:
            if hashmethod == 'timestamp':
                hashfn = hash_timestamp
            elif hashmethod == 'content':
                hashfn = hash_infile
            else:
                raise AttributeError("Unknown hash method found:", hashmethod)
            newhash = hashfn(newfile)
            fmlogger.debug('File: %s already exists,%s, copy:%d', newfile,
                           newhash, copy)
            orighash = hashfn(originalfile)
            keep = newhash == orighash
        if keep:
            fmlogger.debug('File: %s already exists, not overwriting, copy:%d',
                           newfile, copy)
        else:
            os.unlink(newfile)

    # New file
    # --------
    # use_hardlink & can_hardlink => hardlink
    # ~hardlink & ~copy & can_symlink => symlink
    # ~hardlink & ~symlink => copy
    if not keep and use_hardlink:
        try:
            fmlogger.debug('Linking File: %s->%s', newfile, originalfile)
            # Use realpath to avoid hardlinking symlinks
            os.link(op.realpath(originalfile), newfile)
        except OSError:
            use_hardlink = False  # Disable hardlink for associated files
        else:
            keep = True

    if not keep and not copy and os.name == 'posix':
        try:
            fmlogger.debug('Symlinking File: %s->%s', newfile, originalfile)
            os.symlink(originalfile, newfile)
        except OSError:
            copy = True  # Disable symlink for associated files
        else:
            keep = True

    if not keep:
        try:
            fmlogger.debug('Copying File: %s->%s', newfile, originalfile)
            shutil.copyfile(originalfile, newfile)
        except shutil.Error as e:
            # BUG FIX: exceptions have no ``.message`` attribute on Python 3;
            # also prefer ``warning`` over the deprecated ``warn`` alias.
            fmlogger.warning(str(e))

    # Associated files (e.g. header/data pairs) get the same treatment,
    # but without recursing into their own related files.
    if copy_related_files:
        related_file_pairs = (get_related_files(f, include_this_file=False)
                              for f in (originalfile, newfile))
        for alt_ofile, alt_nfile in zip(*related_file_pairs):
            if op.exists(alt_ofile):
                copyfile(
                    alt_ofile,
                    alt_nfile,
                    copy,
                    hashmethod=hashmethod,
                    use_hardlink=use_hardlink,
                    copy_related_files=False)

    return newfile
Esempio n. 56
0
def download_data(pkg_name,
                  path,
                  url,
                  md5,
                  download_client=None,
                  extract=False,
                  compressed_bags=None,
                  quiet=True,
                  chmod=True):
    """Install test data checking md5 and rosbag decompress if needed.

    The downloaded data are located in cache_dir, and then linked to the
    specified path. cache_dir is set by environment variable
    `JSK_DATA_CACHE_DIR` if defined, set by ROS_HOME/data otherwise.

    Parameters
    ----------
    pkg_name : str
        ROS package name used to resolve relative ``path`` and the cache dir.
    path : str
        destination path; resolved relative to the package when not absolute.
    url : str
        download URL (Google Drive URLs switch the client to ``gdown``).
    md5 : str
        expected md5 checksum of the cached file.
    download_client : str or None
        'gdown' or 'wget'; auto-detected from ``url`` when None.
    extract : bool
        if True, extract the archive in the cache dir and symlink contents.
    compressed_bags : list of str or None
        rosbag files to decompress after download.
    quiet : bool
        passed through to the download helpers.
    chmod : bool
        if True, make the cache dir/file world-writable when needed.
    """
    if download_client is None:
        if is_google_drive_url(url):
            download_client = 'gdown'
        else:
            download_client = 'wget'
    if compressed_bags is None:
        compressed_bags = []
    if not osp.isabs(path):
        # resolve path relative to the ROS package
        rp = rospkg.RosPack()
        try:
            pkg_path = rp.get_path(pkg_name)
        except rospkg.ResourceNotFound:
            print('\033[31m{name} is not found in {path}\033[0m'.format(
                name=pkg_name, path=rp.list()))
            return
        path = osp.join(pkg_path, path)
        if not osp.exists(osp.dirname(path)):
            try:
                os.makedirs(osp.dirname(path))
            except OSError:
                # can fail when running with multiple processes; only
                # re-raise if the directory we tried to create is missing
                if not osp.isdir(osp.dirname(path)):
                    raise
    # prepare cache dir
    if "JSK_DATA_CACHE_DIR" in os.environ:
        cache_root_dir = os.getenv("JSK_DATA_CACHE_DIR")
    else:
        cache_root_dir = osp.join(
            os.getenv('ROS_HOME', osp.expanduser('~/.ros')), "data")
    cache_dir = osp.join(cache_root_dir, pkg_name)
    if not osp.exists(cache_dir):
        try:
            os.makedirs(cache_dir)
        except OSError:
            # can fail when running with multiple processes; only
            # re-raise if the cache dir is still missing
            if not osp.isdir(cache_dir):
                raise
        finally:
            if chmod:
                if not is_file_writable(cache_dir):
                    # 0o777: shared cache must be writable by other users
                    os.chmod(cache_dir, 0o777)
    cache_file = osp.join(cache_dir, osp.basename(path))
    # check if cache exists, and update if necessary
    if not (osp.exists(cache_file) and check_md5sum(cache_file, md5)):
        if osp.exists(cache_file):
            os.remove(cache_file)
        download(download_client, url, cache_file, quiet=quiet, chmod=chmod)
    if osp.islink(path):
        # overwrite the link
        os.remove(path)
        os.symlink(cache_file, path)
    elif not osp.exists(path):
        os.symlink(cache_file, path)  # create link
    else:
        # not link and exists so skipping
        print('[%s] File exists, so skipping creating symlink.' % path,
              file=sys.stderr)
        return
    if extract:
        # extract files in cache dir and create symlink for them
        extracted_files = extract_file(cache_file,
                                       to_directory=cache_dir,
                                       chmod=True)
        for file_ in extracted_files:
            file_ = osp.join(cache_dir, file_)
            dst_path = osp.join(osp.split(path)[0], osp.basename(file_))
            # clear whatever occupies the destination before linking
            if osp.islink(dst_path):
                os.remove(dst_path)
            elif osp.exists(dst_path) and not osp.isdir(dst_path):
                os.remove(dst_path)
            elif osp.exists(dst_path) and osp.isdir(dst_path):
                shutil.rmtree(dst_path)
            os.symlink(file_, dst_path)
    for compressed_bag in compressed_bags:
        if not osp.isabs(compressed_bag):
            rp = rospkg.RosPack()
            pkg_path = rp.get_path(pkg_name)
            compressed_bag = osp.join(pkg_path, compressed_bag)
        decompress_rosbag(compressed_bag, quiet=quiet, chmod=chmod)
Esempio n. 57
0
def getFileProperties(fName, checkForBrokenLink=True, skipCache=False):
    """Provides the following properties:

    - mime type (could be None)
    - icon
    - syntax file name (could be None)

    Works for non-existing files too.
    Special cases:
    - fName ends with os.path.sep => directory
    - fName is empty or None => unknown file type
    """
    global __filePropertiesCache
    if __filePropertiesCache is None:
        __initFilePropertiesCache()

    if not fName:
        return __filePropertiesCache['']

    if fName.endswith(sep):
        return __filePropertiesCache['/']

    if checkForBrokenLink and islink(fName):
        if not exists(fName):
            # broken symlink: dedicated cache entry
            return __filePropertiesCache['.']

    if not skipCache and fName in __filePropertiesCache:
        value = __filePropertiesCache[fName]
        if value[0] is None:
            # mime was unknown last time; retry detection and refresh cache
            mime, _ = __getMagicMime(fName)
            if mime is not None:
                value[0] = mime
                __filePropertiesCache[fName] = value
        return value

    if skipCache:
        # Remove from cache so that the old value does not stuck there is case
        # if e.g. the file became empty
        __filePropertiesCache.pop(fName, None)

    # The function should work both for existing and non-existing files
    try:
        # If a file exists then it could be a symbolic link to
        # a different name file
        fBaseName = basename(realpath(fName))
    except:
        # File may not exist
        fBaseName = basename(fName)

    if '.' in fBaseName:
        fileExtension = fBaseName.split('.')[-1].lower()
    else:
        fileExtension = ''

    syntaxFile = __getXmlSyntaxFile(fBaseName)
    if syntaxFile is None:
        denied = False
        # Special case: this could be a QT supported image
        if fileExtension in __QTSupportedImageFormats:
            mime = 'image/' + fileExtension
        elif 'readme' in fBaseName.lower():
            mime = 'text/plain'
        else:
            mime, denied = __getMagicMime(fName)
            if mime == 'inode/x-empty':
                # Special case for the empty files; treat them as text files
                # till they are changed, i.e. do not memorize them in cache.
                # The magic library detects them as binary (without extensions)
                return [mime, getIcon('filemisc.png'),
                        getXmlSyntaxFileByMime('text/plain')]
            if mime is not None:
                syntaxFile = getXmlSyntaxFileByMime(mime)

        cacheValue = [mime,
                      getIcon('filedenied.png') if denied else
                      __getIcon(syntaxFile, mime, fBaseName),
                      syntaxFile]
        __filePropertiesCache[fName] = cacheValue
        return cacheValue

    # syntax file was successfully identified.
    # Detect the mime type by a syntax file
    if fileExtension == 'cdm':
        mime = 'text/x-codimension'
    elif fileExtension == 'cdm3':
        mime = 'text/x-codimension3'
    else:
        mime = __getMimeByXmlSyntaxFile(syntaxFile)
        if mime is None:
            mime, _ = __getMagicMime(fName)

    # BUG FIX below: mime/tryMime can be None (magic detection may fail);
    # '... in None' raises TypeError, so guard each membership test.
    if fileExtension == 'o' and syntaxFile == 'lpc.xml':
        # lpc.xml is bound to .o extension i.e. exactly object files!
        if mime and 'object' in mime:
            syntaxFile = None
    if fileExtension == 'a' and syntaxFile == 'ada.xml':
        tryMime, _ = __getMagicMime(fName)
        if tryMime and 'x-archive' in tryMime:
            mime = tryMime
            syntaxFile = None
    if fileExtension == 'ttf' and syntaxFile == 'template-toolkit.xml':
        tryMime, _ = __getMagicMime(fName)
        if tryMime and 'font' in tryMime:
            mime = tryMime
            syntaxFile = None
    if fileExtension == 'dat' and syntaxFile == 'hunspell-dat.xml':
        tryMime, _ = __getMagicMime(fName)
        if tryMime and 'octet-stream' in tryMime:
            mime = tryMime
            syntaxFile = None

    cacheValue = [mime, __getIcon(syntaxFile, mime, fBaseName), syntaxFile]
    __filePropertiesCache[fName] = cacheValue
    return cacheValue
Esempio n. 58
0
def link(prefix, dist, linktype=LINK_HARD, index=None):
    """
    Set up a package in a specified (environment) prefix.  We assume that
    the package has been extracted (using extract() above).

    Runs the package's pre-link script, links/copies every file from the
    extracted package cache into ``prefix`` under file locks, rewrites
    placeholder prefixes, optionally creates menu shortcuts, runs the
    post-link script, and finally writes the conda metadata record.
    Raises LinkError / CondaOSError / PaddingError on failure.
    """
    log.debug("linking package %s with link type %s", dist, linktype)
    index = index or {}
    # source_dir is the already-extracted package directory in the pkgs cache
    source_dir = is_extracted(dist)
    assert source_dir is not None
    pkgs_dir = dirname(source_dir)
    log.debug('pkgs_dir=%r, prefix=%r, dist=%r, linktype=%r', pkgs_dir, prefix,
              dist, linktype)

    if not run_script(source_dir, dist, 'pre-link', prefix):
        raise LinkError('Error: pre-link failed: %s' % dist)

    info_dir = join(source_dir, 'info')
    files = list(yield_lines(join(info_dir, 'files')))
    # has_prefix_files maps file -> (placeholder, mode) for prefix rewriting;
    # no_link lists files that must never be hard/soft linked
    has_prefix_files = read_has_prefix(join(info_dir, 'has_prefix'))
    no_link = read_no_link(info_dir)

    # for the lock issue
    # may run into lock if prefix not exist
    if not isdir(prefix):
        os.makedirs(prefix)

    # hold both locks for the whole transfer so concurrent installs into the
    # same prefix or from the same package cache cannot interleave
    with DirectoryLock(prefix), FileLock(source_dir):
        for filepath in files:
            src = join(source_dir, filepath)
            dst = join(prefix, filepath)
            dst_dir = dirname(dst)
            if not isdir(dst_dir):
                os.makedirs(dst_dir)
            if os.path.exists(dst):
                log.info("file exists, but clobbering: %r" % dst)
                rm_rf(dst)
            # files needing prefix rewriting, no-link files, and symlink
            # sources must be copied rather than linked
            lt = linktype
            if filepath in has_prefix_files or filepath in no_link or islink(
                    src):
                lt = LINK_COPY

            try:
                _link(src, dst, lt)
            except OSError as e:
                raise CondaOSError(
                    'failed to link (src=%r, dst=%r, type=%r, error=%r)' %
                    (src, dst, lt, e))

        # replace build-time placeholder paths with the actual prefix
        for filepath in sorted(has_prefix_files):
            placeholder, mode = has_prefix_files[filepath]
            try:
                update_prefix(join(prefix, filepath), prefix, placeholder,
                              mode)
            except _PaddingError:
                raise PaddingError(dist, placeholder, len(placeholder))

        # make sure that the child environment behaves like the parent,
        #    wrt user/system install on win
        # This is critical for doing shortcuts correctly
        if on_win:
            nonadmin = join(sys.prefix, ".nonadmin")
            if isfile(nonadmin):
                open(join(prefix, ".nonadmin"), 'w').close()

        if context.shortcuts:
            mk_menus(prefix, files, remove=False)

        if not run_script(prefix, dist, 'post-link'):
            raise LinkError("Error: post-link failed for: %s" % dist)

        # assemble and persist the conda-meta record for this package
        meta_dict = index.get(dist + '.tar.bz2', {})
        meta_dict['url'] = read_url(dist)
        alt_files_path = join(prefix, 'conda-meta',
                              dist2filename(dist, '.files'))
        if isfile(alt_files_path):
            # alt_files_path is a hack for noarch
            meta_dict['files'] = list(yield_lines(alt_files_path))
        else:
            meta_dict['files'] = files
        meta_dict['link'] = Link(source=source_dir,
                                 type=link_name_map.get(linktype))
        if 'icon' in meta_dict:
            meta_dict['icondata'] = read_icondata(source_dir)

        create_meta(prefix, dist, info_dir, meta_dict)
Esempio n. 59
0
    def validargs():
        """Validate the parsed command-line arguments held in ``res``.

        Mutates the closed-over ``res`` dict: on the first failed check it
        sets ``res["msg"]`` to an error message (Hungarian) and stops; on
        success it sets ``res["cmd"]`` to the command for the mode/language.
        The ``while True`` + ``break`` pattern emulates early-exit guards.
        """
        from config_problems import reg_problems
        from config_gen import config_gen

        # NOTE(review): value is unused below; kept so a missing key still
        # raises KeyError as before -- confirm whether it can be dropped
        PROBLEM_DIR = res["PROBLEM_DIR"]

        while True:
            sol = res["sol"]
            lang = res["lang"]
            problem = res["problem"]
            mode = res["mode"]

            # guard: the solution source must be given on the command line
            if sol == "":
                res["msg"] = "legalább a forrást meg kell adni"
                break
            if lang == "":
                res["msg"] = "nem tudom megállapítani a nyelv-et"
                break
            if problem == "":
                res["msg"] = "nem tudom megállapítani a feladat-ot"
                break

            # the solution file must exist and be readable
            try:
                open(sol).close()
            except Exception:
                # narrowed from bare except so Ctrl-C / SystemExit propagate
                res["msg"] = "nem található a megoldásfájl"
                break

            # is the language known?
            if lang not in config_gen["langs"]:
                res["msg"] = "ismeretlen nyelv"
                break

            if mode not in config_gen["modes"]:
                res["msg"] = "ismeretlen mód"
                break

            if problem not in reg_problems or reg_problems[problem] != 1:
                res["msg"] = "ismeretlen/nem elérhető feladat"
                break

            # some languages (matlab) are excluded from 'prog' mode
            if mode == "prog":
                if lang in ["matlab"]:
                    res["msg"] = "nem elérhető a prog mód"
                    break

            if mode == "func":
                if lang == "binary":
                    res["msg"] = "nem elérhető a func mód"
                    break

                from os import path
                pth = "apps/app." + config_gen["lang2ext"][lang]
                # 'func' mode needs the per-language app stub to exist
                if not (path.islink(pth) or path.isfile(pth)):
                    res["msg"] = "nem elérhető a func mód"
                    break

            res["cmd"] = config_gen["lang2cmd"][mode][lang]

            break
Esempio n. 60
0
    def build_extension(self, ext):
        """Configure and build the CMake extension ``ext``.

        Rewrites ``bindings/mode.py`` to disable dev bindings, syncs git
        submodules when inside a work tree, then runs cmake configure and
        build into ``self.build_temp``; finally symlinks the built viewer
        binary unless building headless.
        """
        extdir = os.path.abspath(os.path.dirname(self.get_ext_fullpath(ext.name)))

        root = osp.dirname(extdir)
        mode_file = osp.join(root, "bindings/mode.py")
        with open(mode_file, "r") as f:
            contents = [line.strip() for line in f.readlines() if len(line.strip()) > 0]

        # NOTE(review): assumes mode.py is non-empty and that its final
        # non-blank line is the use_dev_bindings flag -- confirm
        contents[-1] = "use_dev_bindings = False"

        with open(mode_file, "w") as f:
            f.write("\n".join(contents))

        is_in_git = True
        try:
            subprocess.check_output(["git", "rev-parse", "--is-inside-work-tree"])
        except (subprocess.CalledProcessError, OSError):
            # narrowed from bare except: covers "not a git repo" and
            # "git not installed" without swallowing KeyboardInterrupt
            is_in_git = False

        if is_in_git:
            subprocess.check_call(
                ["git", "submodule", "update", "--init", "--recursive"]
            )

        cmake_args = [
            "-DCMAKE_LIBRARY_OUTPUT_DIRECTORY=" + extdir,
            "-DPYTHON_EXECUTABLE=" + sys.executable,
        ]

        cfg = "Debug" if self.debug else "RelWithDebInfo"
        build_args = ["--config", cfg]

        if platform.system() == "Windows":
            # multi-config generators need the per-config output dir
            cmake_args += [
                "-DCMAKE_LIBRARY_OUTPUT_DIRECTORY_{}={}".format(cfg.upper(), extdir)
            ]
            if sys.maxsize > 2 ** 32:
                cmake_args += ["-A", "x64"]
            build_args += ["--", "/m"]
        else:
            cmake_args += ["-DCMAKE_BUILD_TYPE=" + cfg]
            build_args += ["--", "-j"]

        cmake_args += ["-DBUILD_GUI_VIEWERS={}".format("ON" if not HEADLESS else "OFF")]

        env = os.environ.copy()
        # embed the package version into the native build
        env["CXXFLAGS"] = '{} -DVERSION_INFO=\\"{}\\"'.format(
            env.get("CXXFLAGS", ""), self.distribution.get_version()
        )
        if not os.path.exists(self.build_temp):
            os.makedirs(self.build_temp)
        subprocess.check_call(
            ["cmake", ext.sourcedir] + cmake_args, cwd=self.build_temp, env=env
        )
        subprocess.check_call(
            ["cmake", "--build", "."] + build_args, cwd=self.build_temp
        )
        if not HEADLESS:
            link_dst = osp.join(osp.dirname(self.build_temp), "viewer")
            if not osp.islink(link_dst):
                os.symlink(
                    osp.abspath(osp.join(self.build_temp, "utils/viewer/viewer")),
                    link_dst,
                )

        print()  # Add an empty line for cleaner output