Code example #1
File: test_posix2.py Project: abhinavthomas/pypy
def setup_module(mod):
    usemodules = ['binascii', 'posix', 'struct', 'time']
    if os.name != 'nt':
        usemodules += ['fcntl']
    else:
        # On windows, os.popen uses the subprocess module
        usemodules += ['_rawffi', 'thread', 'signal']
    mod.space = gettestobjspace(usemodules=usemodules)
    mod.path = udir.join('posixtestfile.txt')
    mod.path.write("this is a test")
    mod.path2 = udir.join('test_posix2-')
    pdir = udir.ensure('posixtestdir', dir=True)
    pdir.join('file1').write("test1")
    os.chmod(str(pdir.join('file1')), 0600)
    pdir.join('file2').write("test2")
    pdir.join('another_longer_file_name').write("test3")
    mod.pdir = pdir
    unicode_dir = udir.ensure('fi\xc5\x9fier.txt', dir=True)
    unicode_dir.join('somefile').write('who cares?')
    unicode_dir.join('caf\xe9').write('who knows?')
    mod.unicode_dir = unicode_dir

    # in applevel tests, os.stat uses the CPython os.stat.
    # Be sure to return times with full precision
    # even when running on top of CPython 2.4.
    os.stat_float_times(True)
Code example #2
File: test_main.py Project: realgo/backup-client
    def test_Dir1Fork(self):
        os.stat_float_times(True)
        s1tmp = os.path.join(self.tmpdir, 's1')
        s2tmp = os.path.join(self.tmpdir, 's2')
        s3tmp = os.path.join(self.tmpdir, 's3')

        dr = backup_client.DirectoryRunner(
            os.path.join(os.getcwd(), 'dirrunner1'))

        dr.start()
        os.remove(s1tmp)
        os.remove(s2tmp)
        os.remove(s3tmp)

        forked_pid = os.fork()
        if forked_pid == 0:
            sys.exit = os._exit
            dr.fork_background_stop()
            time.sleep(0.1)
            os._exit(0)

        self.assertEqual(os.path.exists(s1tmp), False)
        self.assertEqual(os.path.exists(s2tmp), False)
        self.assertEqual(os.path.exists(s3tmp), False)
        os.waitpid(forked_pid, 0)
        time.sleep(0.1)
        self.assertEqual(os.path.exists(s1tmp), True)
        self.assertEqual(os.path.exists(s2tmp), True)
        self.assertEqual(os.path.exists(s3tmp), True)
        self.assertTrue(os.stat(s1tmp).st_mtime > os.stat(s2tmp).st_mtime)
        self.assertTrue(os.stat(s2tmp).st_mtime > os.stat(s3tmp).st_mtime)
        os.remove(s1tmp)
        os.remove(s2tmp)
        os.remove(s3tmp)
Code example #3
File: test_posix2.py Project: ieure/pypy
def setup_module(mod):
    if os.name != 'nt':
        mod.space = gettestobjspace(usemodules=['posix', 'fcntl'])
    else:
        # On windows, os.popen uses the subprocess module
        mod.space = gettestobjspace(usemodules=['posix', '_rawffi', 'thread'])
    mod.path = udir.join('posixtestfile.txt')
    mod.path.write("this is a test")
    mod.path2 = udir.join('test_posix2-')
    pdir = udir.ensure('posixtestdir', dir=True)
    pdir.join('file1').write("test1")
    os.chmod(str(pdir.join('file1')), 0600)
    pdir.join('file2').write("test2")
    pdir.join('another_longer_file_name').write("test3")
    mod.pdir = pdir
    unicode_dir = udir.ensure('fi\xc5\x9fier.txt', dir=True)
    unicode_dir.join('somefile').write('who cares?')
    mod.unicode_dir = unicode_dir

    # in applevel tests, os.stat uses the CPython os.stat.
    # Be sure to return times with full precision
    # even when running on top of CPython 2.4.
    os.stat_float_times(True)

    # Initialize sys.filesystemencoding
    space.call_method(space.getbuiltinmodule('sys'), 'getfilesystemencoding')
Code example #4
File: test_main.py Project: realgo/backup-client
    def test_Dir1(self):
        os.stat_float_times(True)
        s1tmp = os.path.join(self.tmpdir, 's1')
        s2tmp = os.path.join(self.tmpdir, 's2')
        s3tmp = os.path.join(self.tmpdir, 's3')

        dr = backup_client.DirectoryRunner(
            os.path.join(os.getcwd(), 'dirrunner1'))

        dr.start()
        self.assertEqual(os.path.exists(s1tmp), True)
        self.assertEqual(os.path.exists(s2tmp), True)
        self.assertEqual(os.path.exists(s3tmp), True)
        self.assertTrue(os.stat(s1tmp).st_mtime < os.stat(s2tmp).st_mtime)
        self.assertTrue(os.stat(s2tmp).st_mtime < os.stat(s3tmp).st_mtime)
        os.remove(s1tmp)
        os.remove(s2tmp)
        os.remove(s3tmp)

        dr.stop()
        self.assertEqual(os.path.exists(s1tmp), True)
        self.assertEqual(os.path.exists(s2tmp), True)
        self.assertEqual(os.path.exists(s3tmp), True)
        self.assertTrue(os.stat(s1tmp).st_mtime > os.stat(s2tmp).st_mtime)
        self.assertTrue(os.stat(s2tmp).st_mtime > os.stat(s3tmp).st_mtime)
Code example #5
File: file.py Project: mcree/syschangemon
    def setup(self, app):
        super(FilePlugin, self).setup(app)

        c = app.config

        conf_keys = c.keys(self._meta.label)

        # get include and exclude list from config file
        if 'include' in conf_keys:
            self.include = self._process_pattern_list(c.get(self._meta.label, 'include').split('\n'))
        if 'exclude' in conf_keys:
            self.exclude = self._process_pattern_list(c.get(self._meta.label, 'exclude').split('\n'))
        if 'no_assume_nochange' in conf_keys:
            self.assume_change = self._process_pattern_list(c.get(self._meta.label, 'no_assume_nochange').split('\n'))

        # compile globre patterns for include and exclude
        self.include_pats = []
        for pat in self.include:
            self.include_pats.append(globre.compile(pat, flags=globre.EXACT, split_prefix=False))

        self.exclude_pats = []
        for pat in self.exclude:
            self.exclude_pats.append(globre.compile(pat, flags=globre.EXACT, split_prefix=False))

        os.stat_float_times(True)

        if 'assume_nochange' in conf_keys:
            self.assume_nochange = [x.strip() for x in c.get(self._meta.label, 'assume_nochange').split(',')]
Code example #6
File: filemanager.py Project: SilverBut/DrunkenBlog
def getdocumentlist(path, recursive=True, detail=True):
    path=os.path.normpath(path)
    path=os.path.relpath(path)  #convert to rel path
    document_list=[]
    # have the list
    if recursive:
        for curdir, curdirlist, curfilelist in walk(path):
            for filename in curfilelist:
                    document_list.append({
                            "filename":splitext(filename)[0], 
                            "path":curdir+"/"+filename
                        })
    else:
        for filename in listdir(path):  #have a loop test
            if os.path.isfile(path+'/'+filename):
                document_list.append({
                        "filename":splitext(filename)[0], 
                        "path":path+'/'+filename          # just use relative path
                    })
    # find the detailed time if need
    if detail:
        # state save
        _stat=os.stat_float_times() 
        os.stat_float_times(False) 
        for x in range(len(document_list)):
            document_list[x].update(getdocumentdetail(document_list[x].get('path')))
        #state restore
        os.stat_float_times(_stat) 
    return document_list
Code example #7
    def ops(self):
        out = {}
        cur_ops = self.status["opcounters"]

        lastChange = None
        try:
            os.stat_float_times(True)
            lastChange = os.stat(self.ops_tmp_file).st_ctime
            with open(self.ops_tmp_file, "r") as f:
                content = f.read()
                prev_ops = json.loads(content)
        except (ValueError, IOError, OSError):
            prev_ops = {}

        for k, v in cur_ops.iteritems():
            if k in prev_ops:
                name = k + "s_per_second"
                if k == "query":
                    name = "queries_per_second"

                interval = time.time() - lastChange
                if (interval <= 0.0):
                    continue
                out[name] = (max(0, float(v) - float(prev_ops[k])) / interval, "ops/s")

        with open(self.ops_tmp_file, 'w') as f:
            f.write(json.dumps(cur_ops))

        self.callGmetric(out)
Code example #8
def sysStats(*arg, **kw):
	print "Calling system call", kw['cname'], "with parameters", arg
	try:
		os.stat_float_times(False)
		return (0, 0, getattr(os, kw['cname'].rstrip('64'))(arg[0]))
	except OSError, (errno, strerror):
		return (-1, errno)
Code example #9
File: __init__.py Project: shenal/dotfiles
def main(argv=None, locale=''):
    """Main entry point"""

    # Wrap stdout with a codec that goes from UTF-8 to the preferred encoding
    encoding = getattr(sys.stdout, 'encoding', None)
    if encoding and encoding.lower() != 'utf-8':
        from codecs import getwriter
        try:
            cls = getwriter(sys.stdout.encoding)
        except LookupError:
            pass
        else:
            class StdoutWrapper(cls):
                def write(self, obj):
                    if isinstance(obj, str):
                        obj = obj.decode('utf-8')
                    cls.write(self, obj)
            sys.stdout = StdoutWrapper(sys.stdout, 'replace')

    import locale as locale_
    try:
        locale_.setlocale(locale_.LC_ALL, locale)
    except locale_.Error:
        pass

    if argv is None:
        argv = _win32_utf8_argv() or sys.argv

    os.stat_float_times(False)
    warnings.formatwarning = formatwarning
    try:
        options = parse_args(argv)
    except SystemExit, e:
        return e.code
Code example #10
    def test_fileDependencies(self, depfile):
        filePath = get_abspath("data/dependency1")
        ff = open(filePath,"w")
        ff.write("part1")
        ff.close()

        dependencies = [filePath]
        t1 = Task("t1", None, dependencies)

        # first time execute
        assert 'run' == depfile.get_status(t1)
        assert dependencies == t1.dep_changed

        # second time no
        depfile.save_success(t1)
        assert 'up-to-date' == depfile.get_status(t1)
        assert [] == t1.dep_changed

        os.stat_float_times(True) # for python2.4
        time.sleep(1) # required otherwise timestamp is not modified!
        # a small change on the file
        ff = open(filePath,"a")
        ff.write(" part2")
        ff.close()

        # execute again
        assert 'run' == depfile.get_status(t1)
        assert dependencies == t1.dep_changed
Code example #11
File: local_io.py Project: longedok/zalgo2
 def run(self):
     os.stat_float_times(int(0))
     for root, _, files in os.walk(self.path):
         validFiles = [f for f in files if os.path.splitext(f)[1] in self.allowedExtensions]
         for validFile in validFiles:
             fullPath = os.path.join(root, validFile)
             fileStat = os.stat(fullPath) 
             lastModified = self.db.getLastModified(fullPath)
             if fileStat.st_mtime != lastModified:
                 lastModified = fileStat.st_mtime
                 shaObj = sha.new()
                 try:
                     file = open(fullPath, 'rb')
                     data = file.read(8192)
                     file.close()
                 except IOError, e:
                     print "FileIndexer.run(): Can't read file '%s'. %s" % (fullPath, str(e))
                 else:
                     id3r = id3reader.Reader(fullPath)
                     shaObj.update(data)
                     hash = shaObj.hexdigest()
                     album = id3r.getValue('album')
                     artist = id3r.getValue('performer')
                     title = id3r.getValue('title')
                     self.db.storeNewFile((title, artist, album, fullPath, hash, lastModified))
Code example #12
File: findjob.py Project: mmahnic/py-findjob
    def findReferenceFile(self):
        if self._referenceFileModes == None: return
        os.stat_float_times(True)
        thisfile = self.getArchivePath() # not exactly! the real path depends on results of findReferenceFile
        maxtime = self._now
        if os.path.exists(thisfile):
            st = os.stat(thisfile)
            maxtime = datetime.datetime.fromtimestamp(st.st_mtime)
        besttime = None
        bestfile = None
        for mode in self._referenceFileModes:
            modedir = self._getArchiveDir(self._rootdir, self._now, mode)
            basename = self._getArchiveBaseName(self._now, mode)
            for fn in os.listdir(modedir):
                if not fn.endswith(self.ext): continue
                if not fn.startswith(basename): continue
                fnfull = os.path.join(modedir, fn)
                if mode == self._backupmode and thisfile == fnfull: continue
                # print "Checking", fnfull
                st = os.stat(fnfull)
                ftime = datetime.datetime.fromtimestamp(st.st_mtime)
                if ftime >= maxtime: continue
                if besttime == None or ftime > besttime:
                    besttime = ftime
                    bestfile = fnfull
        if besttime == None:
            self.incMode.referenceTime = None
            self.incMode.referenceFile = None
        else:
            self.incMode.referenceTime = besttime - datetime.timedelta(hours=1) # for safety
            self.incMode.referenceFile = bestfile
            self.appendLog("  Reference: %s %s" % (self.incMode.referenceTime, os.path.basename(bestfile)))

        return
Code example #13
File: test_posix2.py Project: Qointum/pypy
def setup_module(mod):
    usemodules = ["binascii", "posix", "signal", "struct", "time"]
    # py3k os.open uses subprocess, requiring the following per platform
    if os.name != "nt":
        usemodules += ["fcntl", "select"]
    else:
        usemodules += ["_rawffi", "thread"]
    mod.space = gettestobjspace(usemodules=usemodules)
    mod.path = udir.join("posixtestfile.txt")
    mod.path.write("this is a test")
    mod.path2 = udir.join("test_posix2-")
    pdir = udir.ensure("posixtestdir", dir=True)
    pdir.join("file1").write("test1")
    os.chmod(str(pdir.join("file1")), 0o600)
    pdir.join("file2").write("test2")
    pdir.join("another_longer_file_name").write("test3")
    mod.pdir = pdir
    bytes_dir = udir.ensure("fi\xc5\x9fier.txt", dir=True)
    bytes_dir.join("somefile").write("who cares?")
    bytes_dir.join("caf\xe9").write("who knows?")
    mod.bytes_dir = bytes_dir
    # an escaped surrogate
    mod.esurrogate_dir = udir.ensure("foo\x80", dir=True)

    # in applevel tests, os.stat uses the CPython os.stat.
    # Be sure to return times with full precision
    # even when running on top of CPython 2.4.
    os.stat_float_times(True)
Code example #14
File: plugin.py Project: pwarren/AGDeviceControl
 def getmtime(x):
     sft = stat_float_times()
     stat_float_times(True)
     try:
         return _getmtime(x)
     finally:
         stat_float_times(sft)
Code example #15
File: FileLoader.py Project: longedok/Zalgo
 def run(self):
     debug('FileLoader.run(): indexing started')
     os.stat_float_times(int(0))
     for dirname in self.__pathes:
         for root, _, files in os.walk(dirname):
             valid_files = [f for f in files if os.path.splitext(f)[1] in self.__allowed_extensions]
             for valid_file in valid_files:
                 full_path = os.path.join(root, valid_file)
                 file_stat = os.stat(full_path) 
                  
                 result_list = self.__db.lookup('last_modified', path=('=', full_path))
                 last_modified = int(result_list[0][0]) if len(result_list) > 0 else None
                 if file_stat.st_mtime != last_modified:
                     last_modified = file_stat.st_mtime
                     sha_obj = sha.new()
                     try:
                         file = open(full_path, 'rb')
                         data = file.read(8192)
                         file.close()
                     except IOError, e:
                         debug("FileLoader.run(): Can't read file '%s'. %s" % (full_path, str(e)))
                     else:
                         id3r = id3reader.Reader(full_path)
                         sha_obj.update(data)
                         hash = sha_obj.hexdigest()
                         title, album, artist = id3r.getValue('title'), id3r.getValue('album'), id3r.getValue('performer')
                         self.__db.store(['title', 'artist', 'album', 'path', 'hash', 'last_modified'], 
                                         [title, artist, album, full_path, hash, last_modified])
Code example #16
def version(path):
    full_path = os.path.join(settings.STATIC_ROOT, path)
    if not settings.DEBUG:
        # Get file modification time.
        os.stat_float_times(False)
        mtime = os.path.getmtime(full_path)  # raises OSError if file does not exist
        path = rx.sub(r"\1.{}.\2".format(mtime), path)

    return os.path.join(settings.STATIC_URL, path)
Code example #17
File: fabricate.py Project: RTXI/gen-net
    def __call__(self, *args, **kwargs):
        """ Run command and return its dependencies and outputs, using before
            and after access times to determine dependencies. """

        # For Python pre-2.5, ensure os.stat() returns float atimes
        old_stat_float = os.stat_float_times()
        os.stat_float_times(True)

        originals = self.file_times()
        if self.atimes == 2:
            befores = originals
            atime_resolution = 0
            mtime_resolution = 0
        else:
            befores = self._age_atimes(originals)
            atime_resolution = FAT_atime_resolution
            mtime_resolution = FAT_mtime_resolution
        shell_keywords = dict(silent=False)
        shell_keywords.update(kwargs)
        shell(*args, **shell_keywords)
        afters = self.file_times()
        deps = []
        outputs = []
        for name in afters:
            if name in befores:
                # if file exists before+after && mtime changed, add to outputs
                # Note: Can't just check that atimes > than we think they were
                #       before because os might have rounded them to a later
                #       date than what we think we set them to in befores.
                #       So we make sure they're > by at least 1/2 the
                #       resolution.  This will work for anything with a
                #       resolution better than FAT.
                if afters[name][1]-mtime_resolution/2 > befores[name][1]:
                    outputs.append(name)
                elif afters[name][0]-atime_resolution/2 > befores[name][0]:
                    # otherwise add to deps if atime changed
                    if not self.ignore(name):
                        deps.append(name)
            else:
                # file created (in afters but not befores), add as output
                if not self.ignore(name):
                    outputs.append(name)

        if self.atimes < 2:
            # Restore atimes of files we didn't access: not for any functional
            # reason -- it's just to preserve the access time for the user's info
            for name in deps:
                originals.pop(name)
            for name in originals:
                original = originals[name]
                if original != afters.get(name, None):
                    self._utime(name, original[0], original[1])

        os.stat_float_times(old_stat_float)  # restore stat_float_times value
        return deps, outputs
Code example #18
File: collectstatic.py Project: ChrisEdson/Inquire
    def handle_noargs(self, **options):
        symlink = options['link']
        ignore_patterns = options['ignore_patterns']
        if options['use_default_ignore_patterns']:
            ignore_patterns += ['CVS', '.*', '*~']
        ignore_patterns = list(set(ignore_patterns))
        self.copied_files = set()
        self.symlinked_files = set()
        self.unmodified_files = set()
        self.destination_storage = get_storage_class(settings.STATICFILES_STORAGE)()

        try:
            self.destination_storage.path('')
        except NotImplementedError:
            self.destination_local = False
        else:
            self.destination_local = True

        if symlink:
            if sys.platform == 'win32':
                raise CommandError("Symlinking is not supported by this "
                                   "platform (%s)." % sys.platform)
            if not self.destination_local:
                raise CommandError("Can't symlink to a remote destination.")

        # Warn before doing anything more.
        if options.get('interactive'):
            confirm = raw_input("""
You have requested to collate static files and collect them at the destination
location as specified in your settings file.

This will overwrite existing files.
Are you sure you want to do this?

Type 'yes' to continue, or 'no' to cancel: """)
            if confirm != 'yes':
                raise CommandError("Static files build cancelled.")

        # Use ints for file times (ticket #14665)
        os.stat_float_times(False)

        for finder in finders.get_finders():
            for source, prefix, storage in finder.list(ignore_patterns):
                self.copy_file(source, prefix, storage, **options)

        verbosity = int(options.get('verbosity', 1))
        actual_count = len(self.copied_files) + len(self.symlinked_files)
        unmodified_count = len(self.unmodified_files)
        if verbosity >= 1:
            self.stdout.write("\n%s static file%s %s to '%s'%s.\n"
                              % (actual_count, actual_count != 1 and 's' or '',
                                 symlink and 'symlinked' or 'copied',
                                 settings.STATICFILES_ROOT,
                                 unmodified_count and ' (%s unmodified)'
                                 % unmodified_count or ''))
Code example #19
File: pytddmon.py Project: Neppord/pytddmon
def build_monitor(file_finder):
    os.stat_float_times(False)

    def get_file_size(file_path):
        stat = os.stat(file_path)
        return stat.st_size

    def get_file_modtime(file_path):
        stat = os.stat(file_path)
        return stat.st_mtime
    return Monitor(file_finder, get_file_size, get_file_modtime)
Code example #20
 def get_dir_modified_time(self, local_dir):
     dir_modified_time = 0
     plan_file_names = os.listdir(local_dir)
     for plan_file_name in plan_file_names:
         plan_file = os.path.join(local_dir, plan_file_name)
         os.stat_float_times(False)
         file_modified_time = os.stat(plan_file).st_mtime
         os.stat_float_times(True)
         
         if dir_modified_time < file_modified_time:
             dir_modified_time = file_modified_time
     return dir_modified_time
Code example #21
File: test_ebuild_src.py Project: chutz/pkgcore
 def test_get_ebuild_mtime(self):
     f = pjoin(self.dir, "temp-0.ebuild")
     open(f, 'w').close()
     cur = os.stat_float_times()
     try:
         for x in (False, True):
             os.stat_float_times(x)
             self.assertEqual(self.mkinst(repo=malleable_obj(
                 _get_ebuild_path=lambda s:f))._get_ebuild_mtime(None),
                 os.stat(f).st_mtime)
     finally:
         os.stat_float_times(cur)
Code example #22
 def __init__(self, *args, **kwargs):
     self.copied_files = set()
     self.symlinked_files = set()
     self.unmodified_files = set()
     self.destination_storage = get_storage_class(settings.STATICFILES_STORAGE)()
     try:
         self.destination_storage.path('')
     except NotImplementedError:
         self.destination_local = False
     else:
         self.destination_local = True
     # Use ints for file times (ticket #14665)
     os.stat_float_times(False)
Code example #23
File: test_triggers.py Project: veelai/pkgcore
 def test_float_mtime(self):
     cur = os.stat_float_times()
     try:
         t = self.kls()
         t.set_state([self.dir])
         l = list(t.saved_mtimes)
         self.assertEqual(len(l), 1)
         l = l[0]
         # mtime *must* be a float.
         self.assertInstance(l.mtime, float)
         self.assertEqual(os.stat_float_times(), cur)
     finally:
         os.stat_float_times(cur)
Code example #24
File: pyamp.py Project: ch3pjw/pyamp
def set_up_environment(user_config):
    '''Set up the environment for pyamp to run in - side effects galore!
    '''
    if user_config.system.GST_DEBUG:
        os.environ['GST_DEBUG'] = user_config.system.GST_DEBUG
        os.environ['GST_DEBUG_FILE'] = user_config.system.GST_DEBUG_FILE
    logging.basicConfig(
        filename=user_config.system.log_file,
        filemode='w',
        level=getattr(logging, user_config.system.log_level.upper()),
        format='[%(asctime)s %(name)s %(levelname)s] %(message)s',
        datefmt='%H:%M:%S')
    os.stat_float_times(True)
Code example #25
File: collectstatic.py Project: jess3/django
 def __init__(self, *args, **kwargs):
     super(NoArgsCommand, self).__init__(*args, **kwargs)
     self.copied_files = []
     self.symlinked_files = []
     self.unmodified_files = []
     self.storage = get_storage_class(settings.STATICFILES_STORAGE)(**settings.STATICFILES_STORAGE_ARGS)
     try:
         self.storage.path('')
     except NotImplementedError:
         self.local = False
     else:
         self.local = True
     # Use ints for file times (ticket #14665)
     os.stat_float_times(False)
Code example #26
File: __init__.py Project: SumiTomohiko/pydumpfs
    def do(self, dest, *src):
        if not exists(dest):
            raise PydumpfsError("%(dest)s doesn't exist." % { "dest": dest })

        stat_float_times(False)
        prev_dir = self._get_prev_dir(dest)
        backup_dir = self.decide_backup_dir(dest)
        makedirs(backup_dir)

        for d in src:
            self._do(prev_dir, backup_dir, d)

        self._print_debug(
            "done. The backup directory is %(path)r." % dict(path=backup_dir))
        return backup_dir
Code example #27
File: collectstatic.py Project: BillyWu/django
 def __init__(self, *args, **kwargs):
     super(NoArgsCommand, self).__init__(*args, **kwargs)
     self.copied_files = []
     self.symlinked_files = []
     self.unmodified_files = []
     self.storage = storage.staticfiles_storage
     try:
         self.storage.path('')
     except NotImplementedError:
         self.local = False
     else:
         self.local = True
     # Use ints for file times (ticket #14665), if supported
     if hasattr(os, 'stat_float_times'):
         os.stat_float_times(False)
Code example #28
 def last_modified(self, cache_key):
     mtime = getmtime(self._full_path(cache_key))
     if os.stat_float_times():
         dt = datetime.datetime.fromtimestamp(mtime)
     else:
         dt = datetime.datetime.fromtimestamp(mktime(mtime))
     return pytz.UTC.localize(dt)
Code example #29
File: build_util.py Project: sparkhill/kylo-browser
def syncFile(file_in, dir_out, force_write=False):
    file_out = os.path.join(dir_out, os.path.basename(file_in))
    
    src_is_newer = True;
    
    if os.path.exists(file_out):
        # File already exists, check timestamps
        src_is_newer = False;
        
        src_mt = os.path.getmtime(file_in)
        trg_mt = os.path.getmtime(file_out)
        
        if os.stat_float_times():
            src_mt = math.fabs(src_mt)
            trg_mt = math.fabs(trg_mt)
            diff = math.fabs(src_mt - trg_mt)
            
            if diff > 1:
                src_is_newer = True
        else:
            src_is_newer = (src_mt > trg_mt)

    if src_is_newer:
        shutil.copy2(file_in, dir_out)
        if force_write:
            chmod_w(os.path.join(dir_out, os.path.basename(file_in)))
            
    return src_is_newer
Code example #30
 def get_dir_modified_time_list(self, local_dir):
     dir_modified_time = 0
     dir_name = os.path.basename(local_dir)
     dir_modified_time_list = dict({dir_name: dir_modified_time})
     plan_file_names = os.listdir(local_dir)
     for plan_file_name in plan_file_names:
         if(not os.path.isdir(os.path.join(local_dir, plan_file_name))):
             plan_file = os.path.join(local_dir, plan_file_name)
             os.stat_float_times(False)
             file_modified_time = os.stat(plan_file).st_mtime
             os.stat_float_times(True)
             dir_modified_time_list.update({plan_file_name: self.convert_local_time_to_gm(file_modified_time)})
         
             if dir_modified_time < file_modified_time:
                 dir_modified_time = file_modified_time
     dir_modified_time_list.update({dir_name : self.convert_local_time_to_gm(dir_modified_time)})
     return dir_modified_time_list
Code example #31
File: base.py Project: Erox21/libterm
def make_stat(
	mode=DEFAULT_MODE,
	inode=None,
	dev=None,
	nlinks=1,
	gid=None,
	uid=None,
	size=0,
	atime=None,
	mtime=None,
	ctime=None,
	blocks=1,
	blksize=None,
	rdev=stat.S_IFREG,
	flags=0,
	):
	"""helper function to generate os.stat results."""
	if inode is None:
		inode = random.randint(1000, 9999999)
	if dev is None:
		dev = os.makedev(64, random.randint(1, 100))
	if uid is None:
		uid = os.getuid()
	if gid is None:
		uid2 = os.getuid()
		gid = pwd.getpwuid(uid2).pw_gid
	if atime is None:
		atime = time.time()
	if mtime is None:
		mtime = time.time()
	if ctime is None:
		ctime = time.time()
	if os.stat_float_times():
		ctime = float(ctime)
		mtime = float(mtime)
		atime = float(atime)
	else:
		ctime = int(ctime)
		atime = int(atime)
		mtime = int(mtime)
	if blksize is None:
		blksize = max(size, 2048)
	s = os.stat_result(
		(
			mode,
			inode,
			dev,
			nlinks,
			gid,
			uid,
			size,
			atime,
			mtime,
			ctime,
			),
		{
			"st_blocks": blocks,
			"st_blksize": blksize,
			"st_rdev": rdev,
			"st_flags": flags,
			}
		)
	return s
Code example #32
def run():
    common.output('Generating gallery from {}...'.format(settings.local_path))

    rfn = '{}___gallery_{}.html'.format(settings.local_path, settings._start)
    fh = open(rfn, 'wb+')
    fns = common.getAllFiles(settings.local_path, recursive=False)
    features = []  # feature list
    fc = {}  # categorized filenames
    fm = {}  # meta data
    fmfn = {}  # meta filenames
    fi = len(fns)

    if fi == 0:
        common.output('No files found in {}'.format(settings.local_path))
        return

    common.output('Processing {} files...'.format(fi))

    os.stat_float_times(True)
    for fn in fns:
        lnm = os.path.basename(fn).lower()
        fty = None
        fts = os.path.getmtime(fn)

        if settings.log:
            common.output('Processing: {}'.format(lnm))

        if "camerargb" in lnm:
            fty = "rgb"
        elif "segmentation" in lnm:
            fty = "seg"
        elif "depth" in lnm:
            fty = "depth"
        elif "thermal" in lnm:
            fty = "thermal"
        elif lnm.endswith(".json"):
            while has_attribute(fm, fts):
                fts += .000001
            try:
                fm[fts] = json.loads(common.readAll(fn))
                fmfn[fts] = fn
            except:
                common.output('Invalid JSON data in {}'.format(fn), 'ERROR')
                pass
            continue
        elif lnm.endswith(".debug") or lnm.endswith(".html") or lnm.endswith(
                ".txt"):
            continue

        if fty == None:
            common.output('Unknown file type: {}, skipping'.format(fn), 'WARN')
            continue

        if fty not in features:
            features.append(fty)

        if not has_attribute(fc, fty):
            fc[fty] = {}

        while has_attribute(fc[fty], fts):
            fts += .000001

        fc[fty][os.path.getmtime(fn)] = os.path.basename(fn)

    if len(fm) > 0:
        features.append('bbox')

    total_images = 0
    for i in fc:
        if total_images > 0:
            total_images = min(total_images, len(fc[i]))
        else:
            total_images = len(fc[i])

    common.output('Generating html...')
    html = Template(common.readAll('{}index.tpl'.format(tpl_path)))

    js_static = ''
    for i in static_assets['js']:
        js_static += common.readAll('{}js/{}'.format(tpl_path, i))

    css_static = ''
    for i in static_assets['css']:
        css_static += common.readAll('{}css/{}'.format(tpl_path, i))

    fh.write(
        html.render(title='Gallery',
                    js_static=js_static,
                    css_static=css_static,
                    features=features,
                    fc=json.dumps(fc, sort_keys=True),
                    fm=json.dumps(fm, sort_keys=True),
                    fmfn=json.dumps(fmfn, sort_keys=True),
                    total_images=total_images).encode('utf-8') + b"")
    fh.close()

    common.output('Wrote {}'.format(rfn))
Code example #33
    sys.path.append(app_python_lib_path)
    # PIL path: Unknown Horizons.app/Contents/Resources/lib/python3.3/lib-dynload/PIL
    sys.path.append(os.path.join(app_python_lib_path, 'lib-dynload'))

try:
    from PIL import Image
except ImportError:
    # Logging is not set up at this point.
    print('The Python Imaging Library (PIL / Pillow) package'
          ' is needed to run the atlas generator.')
    sys.exit(1)

# TODO We can probably remove the type ignore in the next release of typeshed/mypy
#      See https://github.com/python/typeshed/commit/08ac3b7742f1fd55f801ac66d7517cf60aa471d6
# make sure os.path.getmtime returns ints
os.stat_float_times(False)  # type: ignore

# make this script work both when started inside development and in the uh root dir
if not os.path.exists('content'):
    os.chdir('..')
assert os.path.exists('content'), 'Content dir not found.'

sys.path.append('.')


class DummyFife:
    use_atlases = False


import horizons.globals  # isort:skip
Code example #34
import os
import shutil
import stat
import unittest

from common import TestCase
import pyuv

# Make stat return integers
os.stat_float_times(False)
pyuv.fs.stat_float_times(False)

BAD_FILE = 'test_file_bad'
TEST_FILE = 'test_file_1234'
TEST_FILE2 = 'test_file_1234_2'
TEST_LINK = 'test_file_1234_link'
TEST_DIR = 'test-dir'
TEST_DIR2 = 'test-dir_2'
BAD_DIR = 'test-dir-bad'
MAX_INT32_VALUE = 2**31 - 1
OFFSET_VALUE = MAX_INT32_VALUE if not os.name == 'nt' else 2**8 - 1


class FileTestCase(TestCase):

    TEST_FILE_CONTENT = 'test'

    def setUp(self):
        super(FileTestCase, self).setUp()
        with open(TEST_FILE, 'w') as f:
            f.write(self.TEST_FILE_CONTENT)
Code example #35
def main():
    config = read_config()
    # Cool! Let's set up everything.
    connect_to_region(config.region, aws_access_key_id=access_key_id, aws_secret_access_key=secret_key)
    glacier = Layer2(aws_access_key_id=access_key_id, aws_secret_access_key=secret_key, region_name=config.region)
    vault = glacier.get_vault(config.vault_name)
    # workaround for UnicodeDecodeError
    # https://github.com/boto/boto/issues/3318
    vault.name = str(vault.name)
    print "Beginning job on " + vault.arn

    # Ah, we don't have a vault listing yet.
    if not config.ls_present:

        # No job yet? Initiate a job.
        if not config.inventory_job:
            config.inventory_job = vault.retrieve_inventory()
            config.write()
            print "Requested an inventory. This usually takes about four hours."
            terminate(0)

        # We have a job, but it's not finished.
        job = vault.get_job(config.inventory_job)
        if not job.completed:
            print "Waiting for an inventory. This usually takes about four hours."
            terminate(0)

        # Finished!
        try:
            data = json.loads(job.get_output().read())
        except ValueError:
            print "Something went wrong interpreting the data Amazon sent!"
            terminate(1)

        config.ls = {}
        for archive in data['ArchiveList']:
            config.ls[archive['ArchiveDescription']] = {
                'id': archive['ArchiveId'],
                'last_modified': int(float(time.mktime(parse_ts(archive['CreationDate']).timetuple()))),
                'size': int(archive['Size']),
                'hash': archive['SHA256TreeHash']
            }

        config.ls_present = '-'
        config.inventory_job = ''
        config.write()
        print "Imported a new inventory from Amazon."

    database = Database(
        host=db_host,
        port=db_port,
        username=db_username,
        password=db_password,
        name=db_name
    )
    print "Connected to database."
    # Let's upload!
    os.stat_float_times(False)
    try:
        i = 0
        transferred = 0
        time_begin = time.time()
        for dir in config.dirs:
            print "Syncing " + dir
            for file in database.files():
                path = dir + os.sep + file

                if not os.path.exists(path):
                    #print >> sys.stderr, "'%s' does not exist" % path
                    print "\n" + "'%s' does not exist" % path
                    continue

                # If it's a directory, then ignore it
                if not os.path.isfile(path):
                    continue

                last_modified = int(os.path.getmtime(path))
                size = os.path.getsize(path)
                updating = False
                if file in config.ls:

                    # Has it not been modified since?
                    if config.ls[file]['last_modified'] >= last_modified and config.ls[file]['size'] == size:
                        continue

                    # It's been changed... we should delete the old one
                    else:
                        vault.delete_archive(config.ls[file]['id'])
                        del config.ls[file]
                        updating = True
                        config.write()

                try:
                    print file + ": uploading... ",
                    id = vault.concurrent_create_archive_from_file(path, file)
                    config.ls[file] = {
                        'id': id,
                        'size': size,
                        'last_modified': last_modified
                    }

                    config.write()
                    i += 1
                    transferred += size
                    if updating:
                        print "updated."
                    else:
                        print "done."

                    database.update(file, id, vault)

                except UploadArchiveError:
                    print "FAILED TO UPLOAD."

    finally:
        database.close()
        elapsed = time.time() - time_begin
        print "\n" + str(i) + " files successfully uploaded."
        print "Transferred " + format_bytes(transferred) + " in " + format_time(elapsed) + " at rate of " + format_bytes(transferred / elapsed) + "/s."
        terminate(0)
Code example #36
File: unreal.py Project: virtualsquare/view-os
def sysStats(path, **kw):
	try:
		os.stat_float_times(False)
		return (0, 0, getattr(os, kw['cname'].rstrip('64'))(unwrap(path)))
	except OSError, (errno, strerror):
		return (-1, errno)
Code example #37
#!/bin/python


# try:
#     import psyco
#     psyco.full()
# except ImportError:
#     pass
#     traceback.print_exc()



import sys, os, traceback, os.path, glob, shutil, imp, warnings, ConfigParser
os.stat_float_times(True)

if not hasattr(sys, 'frozen'):
    sys.path.insert(0, "lib")
#     sys.path.append("lib")
#     sys.path.append(r"C:\Daten\Projekte\Wikidpad\Current\lib")

    os.environ["PATH"] = os.path.dirname(os.path.abspath(sys.argv[0])) + \
            os.pathsep + os.environ["PATH"]

from Consts import CONFIG_FILENAME, CONFIG_GLOBALS_DIRNAME

# imports VERSION_TUPLE for plugins which may expect it here
from Consts import VERSION_STRING, VERSION_TUPLE

import __builtin__

# Dummies for localization
Code example #38
File: scan_log_dirs.py Project: xiaohelong/Prophecis
    def run(self):
        self.logger.info("GetAndPushLogLinesRunner thread running: %s" %
                         self.log_file)

        file_pos = 0
        last_modified_time = 0.0
        shutdown_start_time = 0.0

        states.register_scanner()

        try:
            os.stat_float_times(True)
            while True:
                # elapsed_time_since_last_mod = time.time() - last_modified_time
                # self.logger.debug("time_since_last_push %r", time_since_last_push)
                # elapsed_time_since_last_mod > 2.0 and
                if shutdown_start_time == 0.0 and states.is_learner_done(
                        logger=self.logger):
                    self.logger.info(
                        "Learner done, begin GetAndPushLogLinesRunner shutdown: %s",
                        self.log_file)
                    shutdown_start_time = time.time()

                if shutdown_start_time != 0.0:
                    elapsed_time_since_shutdown = (time.time() -
                                                   shutdown_start_time)
                    if elapsed_time_since_shutdown > states.DURATION_SHUTDOWN_DELAY:
                        self.logger.info(
                            "time since shutdown start: %f, Flushing GetAndPushLogLinesRunner with force: %s",
                            elapsed_time_since_shutdown, self.log_file)

                        self.td_client.flush_maybe(force=True)

                        time.sleep(states.SLEEP_BEFORE_LC_DONE)

                        break

                try:
                    latest_modified_time = stat_nfs_safe_modification_time(
                        self.log_file)

                    if last_modified_time == latest_modified_time:
                        self.logger.debug("file mod NOT changed: %f, %s",
                                          latest_modified_time, self.log_file)
                        self.td_client.flush_maybe()
                        time.sleep(.25)  # Micro sleep
                        continue
                    else:
                        self.logger.debug("file mod changed: %f, %s",
                                          latest_modified_time, self.log_file)
                        last_modified_time = latest_modified_time
                except OSError as oserr:
                    # Doesn't exist?
                    self.logger.info("OSError: %s", str(oserr))
                    time.sleep(0.25)  # Micro sleep
                    continue

                file_pos = self.processLogLines(file_pos)
                self.td_client.flush_maybe()

        finally:
            self.logger.info("Signaling GetAndPushLogLinesRunner is done: %s",
                             self.log_file)
            # if not self.running_in_foreground:
            #     # If we're running in background, assume the main thread will do the signal
            #     time.sleep(15)
            states.signal_lc_done(logger=self.logger)
            # I seem to have problems exiting directly, so, this sleep seems to help.
            # My unsubstantiated theory is that gRPC needs time to flush.
            # Note since we signaled, we won't actually wait n seconds, the
            # job monitor will delete us.
            time.sleep(states.SLEEP_BEFORE_EXIT_TIME)
            self.logger.info("Exiting GetAndPushLogLinesRunner: %s",
                             self.log_file)
Code example #39
def run():
    common.output('Generating gallery from {}...'.format(settings.local_path))
    iext = ["jpg", "png", "tif", "tiff", "gif", "bmp"]
    rfn = '{}___gallery_{}.html'.format(settings.local_path, settings._start)
    match_ts = r'(_|\s|\-)(\d+\.\d+|\d+)'
    fh = open(rfn, 'wb+')

    common.output('Collecting files...')
    fns = common.getAllFiles(settings.local_path, recursive=False)

    iFeatures = []  # image feature list
    jFeatures = []  # json feature list
    pFeatures = []  # points feature list
    fc = {}  # categorized image filenames
    fm = {}  # image meta data
    jm = {}  # json/other meta data
    fmfn = {}  # meta filenames
    spoints = {}  # screenpoints meta data
    metadataCol = {}  # metadata collection bags
    fi = len(fns)

    if fi == 0:
        common.output('No files found in {}'.format(settings.local_path))
        return

    common.output('Processing {} files...'.format(fi))

    try:
        os.stat_float_times(True)
    except:
        pass

    for fn in tqdm(fns):
        lnm = os.path.basename(fn).lower()
        fty = None  # image feature type, eg: thermal, rgb, depth, etc

        ext = lnm.split(".")[-1:][0]
        isJSON = True if ext == 'json' else False
        isImage = True if ext in iext else False

        if not isJSON and not isImage:
            continue

        if settings.align == 'time':
            fts = os.path.getmtime(fn)
        elif settings.align == 'linear':
            fts = '.'.join(os.path.basename(fn).split('.')[:-1])
        elif settings.align == 'sequential':
            try:
                fts = re.findall(match_ts, lnm)[-1][-1:][0]
            except IndexError:
                common.output(
                    'Unable to find number on {}, skipping'.format(fn),
                    'WARNING')
                continue

        if settings.debug:
            common.output('Processing: {}'.format(lnm))

        fty = "default" if settings.flat_gallery == True else re.sub(
            match_ts, '', " ".join(lnm.split(".")[0:-1]))

        if isJSON:
            if 'lookup' in lnm:
                # skip lookups
                continue
            elif 'metadata' in lnm:
                # this is a collection of objects following a lookup key and misc data
                try:
                    obj = common.loadJSON(fn)
                except:
                    common.output('[0] Invalid JSON data in {}'.format(fn),
                                  'ERROR')
                    obj = {}

                metadataCol[fts] = copy.deepcopy(obj)
            elif "seg" in lnm or "bbox" in lnm:
                if settings.align == 'time':
                    # HACK
                    while has_attribute(fm, fts):
                        fts += .000001

                try:
                    jsonContents = common.loadJSON(fn)
                    fmfn[fts] = fn

                    if isinstance(jsonContents, list):
                        fm[fts] = jsonContents
                    else:
                        fm[fts] = jsonContents["boundingBoxes"]
                except KeyError:
                    fm[fts] = {}
                except:
                    common.output('[1] Invalid JSON data in {}'.format(fn),
                                  'ERROR')

            elif 'screenpoints' in lnm:
                try:
                    point = common.loadJSON(fn)
                except:
                    common.output('[2] Invalid JSON data in {}'.format(fn),
                                  'ERROR')
                    continue

                if fty not in pFeatures:
                    pFeatures.append(fty)
                    spoints[fty] = {}

                val = json.loads(point.get('value'))
                spoints[fty][fts] = copy.deepcopy(val)

            else:
                try:
                    obj = common.loadJSON(fn)
                except:
                    common.output('[3] Invalid JSON data in {}'.format(fn),
                                  'ERROR')
                    continue

                # HACK: Skip multi value objects, they cannot be graphed
                try:
                    if obj.get('value') != None and isinstance(
                            obj.get('value'),
                            str) and (obj.get('value')[0] == '['
                                      or obj.get('value')[0] == '{'):
                        continue
                        # test = common.loadJSONS(obj['value'])
                        # obj['value'] = test
                except:
                    pass

                if fty not in jFeatures:
                    jFeatures.append(fty)
                    jm[fty] = {}

                jm[fty][fts] = copy.deepcopy(obj)

                if settings.debug:
                    common.output(
                        'Found JSON object in {} that\'s not linked to cameras, skipping'
                        .format(fn), 'WARN')

            continue

        if fty not in iFeatures:
            iFeatures.append(fty)
        if not has_attribute(fc, fty):
            fc[fty] = {}
        if settings.align == 'time':
            while has_attribute(fc[fty], fts):
                fts += .000001

            fc[fty][os.path.getmtime(fn)] = os.path.basename(fn)
        else:
            fc[fty][fts] = os.path.basename(fn)

    if len(fm) > 0:
        iFeatures.append('bbox')

    total_images = 0

    for i in fc:
        if total_images > 0:
            total_images = min(total_images, len(fc[i]))
        else:
            total_images = len(fc[i])

    common.output('Generating html...')
    html = Template(common.readAll('{}index.tpl'.format(tpl_path)))

    js_static = ''
    for i in static_assets['js']:
        js_static += '// {}\n\n{}\n'.format(
            i, common.readAll('{}js/{}'.format(tpl_path, i)))

    css_static = ''
    for i in static_assets['css']:
        css_static += '// {}\n\n{}\n'.format(
            i, common.readAll('{}css/{}'.format(tpl_path, i)))

    fh.write(
        html.render(title='Gallery [{}]'.format(settings._start),
                    js_static=js_static,
                    css_static=css_static,
                    iFeatures=iFeatures,
                    jFeatures=jFeatures,
                    pFeatures=pFeatures,
                    metadataCol=metadataCol,
                    fc=json.dumps(fc, sort_keys=True),
                    fm=json.dumps(fm, sort_keys=True),
                    jm=json.dumps(jm),
                    fmfn=json.dumps(fmfn, sort_keys=True),
                    total_images=total_images,
                    invert_bboxx='false' if settings.no_invert_bboxx ==
                    True else 'true',
                    spoints=json.dumps(spoints)).encode('utf-8') + b"")

    fh.close()

    common.output('Wrote {}'.format(rfn))
Code example #40
#!/usr/bin/python
PYTHONIOENCODING = "utf-8"

import sys, os, hashlib, subprocess, shutil, datetime, signal
from textwrap import TextWrapper

assert os.stat_float_times()


def print_diag(level, value, linefeed=True):
    if level < config.verbosity:
        try:
            # If unicode, convert to UTF-8 string
            value = value.encode("utf-8", "replace")
        except:
            pass
        try:
            print str(value),
        except UnicodeEncodeError:
            # OK, failed to output a UTF-8 string so try plain ASCII
            value = value.encode("ascii", "replace")
            print str(value),
        if linefeed:
            print


CRITICAL, IMPORTANT, INFOMATION, DEBUG, EXTRA_DEBUG = range(5)

UNCHANGED, NEW, UPDATED, BAD_BBF = range(4)

defaults = {
Code example #41
def mtime_from_string(mtime):
    if os.stat_float_times():
        return float(mtime)
    return int(mtime)
Code example #42
File: main.py Project: XinTONGXT/salome-1
def start():
    # ----- divers
    os.stat_float_times(True)

    # ----- backward compatibility
    from asrun.backward_compatibility import change_argv
    sys.argv = change_argv(sys.argv)

    # ----- initialisation
    run = AsterRun()
    magic.run = run

    # ----- retrieve options and arguments
    opts, args = run.ParseArgs()
    # init magic
    magic.set_stdout(run['stdout'])
    magic.set_stderr(run['stderr'])
    magic.init_logger(filename=run['log_progress'], debug=run['debug'])
    run.current_action = opts.action

    if run.current_action == None:
        # if symbolic link "action" -> "as_run --action"
        alias = os.path.basename(sys.argv[0])
        if alias in run.actions_info.keys():
            run.current_action = alias
        else:
            # default to 'run'
            run.current_action = 'run'
            #run.parser.error(_(u'you must specify an action'))

    # ----- get system commands
    run.DBG("Command line run on '%s'" % local_full_host,
            "using python executable '%s' :" % sys.executable, sys.argv)
    run.system = AsterSystem(run)
    run.PostConf()

    # ----- debug information
    if run['debug']:
        run.PrintConfig()
        print3(_(u'Arguments :'), repr(args))
        print3()

    # ----- start 'current_action'
    try:
        act = run.current_action
        if run.options['proxy'] is True:
            act = 'call_proxy'
        meth = run.actions_info[act]['method']
    except KeyError:
        run.Mess(
            _(u'dictionnary bad defined :') + ' actions_info',
            '<F>_PROGRAM_ERROR')
    else:
        # trap <Control+C>
        try:
            meth(run, *args)
        except KeyboardInterrupt:
            run.Mess(
                _(u"'--%s' stopped by user") % run.current_action,
                '<F>_INTERRUPT')

    run.Sortie(0)
Code example #43
File: test_usecase.py Project: ksunden/pubs
 def setUp(self, nsec_stat=True):
     super(CommandTestCase, self).setUp()
     os.stat_float_times(nsec_stat)
     # self.fs = fake_env.create_fake_fs([content, filebroker, conf, init_cmd, import_cmd, configobj, update], nsec_stat=nsec_stat)
     self.default_pubs_dir = os.path.expanduser('~/.pubs')
     self.default_conf_path = os.path.expanduser('~/.pubsrc')
Code example #44
File: test_usecase.py Project: dotlambda/pubs
 def setUp(self, nsec_stat=True):
     super(CommandTestCase, self).setUp()
     os.stat_float_times(nsec_stat)
     self.default_pubs_dir = os.path.expanduser('~/.pubs')
     self.default_conf_path = os.path.expanduser('~/.pubsrc')
Code example #45
# @changelog Added python3 `os file -> stat float times` example

import os
'''
Overview
    The os.stat_float_times() method determines whether stat_result reports timestamps as float objects.

Syntax
    The stat_float_times() method has the following signature:
    os.stat_float_times([newvalue])

Parameters
    newvalue -- If True, subsequent stat() calls return floats; if False, they return ints. With no argument, the current setting is returned.

Return value
    Returns True or False.
'''

st = os.stat('/tmp/foo.txt')
print('stat: %s' % st)

st = os.stat_float_times()
print('stat_float_times: %s' % st)

print('stat: %s' % os.stat('/tmp/foo.txt'))

###########################################################
st = os.stat_float_times()
print('stat_float_times: %s' % st)

print('stat: %s' % os.stat('/tmp/foo.txt'))
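
Note that os.stat_float_times() has been deprecated since Python 3.3 and is gone from current Python 3 releases. Below is a minimal sketch of the modern approach, reusing the /tmp/foo.txt path assumed above: st_mtime is always a float, and the st_*_ns fields give exact integer nanoseconds.

import os

st = os.stat('/tmp/foo.txt')                 # assumes this file exists, as above
print('mtime (float seconds):   %s' % st.st_mtime)
print('mtime (int nanoseconds): %s' % st.st_mtime_ns)
print('mtime (int seconds):     %s' % int(st.st_mtime))
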
Code example #46
def run(prefix, wdir, bssid, jobmode, opts):
    e = eigerclient.DEigerClient(host=opts.eiger_host)

    print "#####################################"
    print "Download %s_* to %s (hash %s jobmode %s)" % (prefix, wdir, bssid,
                                                        jobmode)
    print "--hit-extract=%s --omega-offset-by-trigger=%s" % (
        opts.hit_extract, opts.omega_offset_by_trigger)
    print "#####################################"

    try:
        move_original_files(prefix,
                            wdir)  # FIXME!! (think if this is really ok)
    except:
        print traceback.format_exc()

    last_dl_time = time.time()
    timeout = 60 * 60 if bssid else 60 * 10  # in seconds

    tmpdir = None
    #if opts.hit_extract and jobmode=="4":
    #    from yamtbx.util import get_temp_local_dir
    #    tmpdir = get_temp_local_dir("forhitsonly", min_gb=1)

    i = 0
    failed_files = []

    while True:
        print now(), "In %dth trial.." % (i + 1)
        files = check_files(e, prefix, bssid)
        files = filter(lambda x: x not in failed_files, files)

        if files:
            failed_files.extend(
                download_files(e, files, wdir, bssid, tmpdir,
                               opts.omega_offset_by_trigger))
            last_dl_time = time.time()
        elif time.time() - last_dl_time > timeout:
            print now(), "Download timeout!"
            sys.exit(1)

        sys.stdout.flush()

        missing_files = check_all_files_done(prefix, wdir)
        if bssid: missing_files = map(lambda x: bssid + x, missing_files)

        if not missing_files:
            print now(), "Download %s to %s Success!" % (prefix, wdir)

            if opts.hit_extract and jobmode == "4":
                print "STARTING HIT EXTRACT"
                os.stat_float_times(False)
                master_h5 = os.path.join(wdir, "%s_master.h5" % prefix)
                ctime_master = os.path.getctime(master_h5)
                dbfile = os.path.join(os.path.dirname(master_h5),
                                      "_spotfinder", "shika.db")
                if not opts.no_sge:
                    args = [
                        "qsub", "-q", opts.sge_q, "-cwd", "-N",
                        "hitextract_%s" % prefix, "-v",
                        'ctime_master=%d,master_h5="%s",tmpdir="%s",dbfile="%s"'
                        % (ctime_master, master_h5, tmpdir, dbfile),
                        "/blconfig/local_bss/yam/qsub_hit_extract_online.sh"
                    ]
                    print " ".join(args)
                    p = subprocess.Popen(" ".join(args),
                                         shell=True,
                                         cwd=wdir,
                                         stdout=subprocess.PIPE)
                    p.wait()
                    print p.stdout.read()
                    p = subprocess.Popen("qstat",
                                         shell=True,
                                         cwd=wdir,
                                         stdout=subprocess.PIPE)
                    p.wait()
                    print p.stdout.read()
                else:
                    args = [
                        "ssh", opts.ssh_host,
                        """\
"cd '%s'; env ctime_master=%d master_h5='%s' tmpdir='%s' dbfile='%s' bash /oys/xtal/yamtbx/bl32xu/eiger/qsub_hit_extract_online.sh" > hitextract_%s.log 2>&1 & \
""" % (wdir, ctime_master, master_h5, tmpdir, dbfile, prefix)
                    ]
                    print " ".join(args)
                    p = subprocess.Popen(" ".join(args), shell=True,
                                         cwd=wdir)  # as background job
                """
                import hit_extract_online
                os.stat_float_times(False)
                master_h5 = os.path.join(wdir, "%s_master.h5" % prefix)
                hit_extract_online.run(master_h5=master_h5,
                                       master_h5_ctime=os.path.getctime(master_h5),
                                       master_h5_in_tmp=master_h5,
                                       dbfile=os.path.join(os.path.dirname(master_h5), "_spotfinder", "shika.db"),
                                       spots_min=0)
                """
            sys.exit()
        elif set(missing_files).issubset(set(failed_files)):
            print now(
            ), "Error occurred during downloading following files. Check the logfile!"
            for f in failed_files:
                print "  %s" % f
            sys.exit(1)

        if not files:
            time.sleep(3)
        i += 1

    print now(), "Download Failed!!!!!"
    sys.exit(1)
Code example #47
"""
文件权限
"""
os.access(path,mode)
# 检验权限模式

os.chmod(path,mode)
# 更改权限

os.chown(path,uid,gid)
# 更改文件所有者

"""
创建
"""
os.stat_float_times([newvalue])
# 决定stat_result是否以float对象显示时间戳

os.chflags(path,flags)
# 设置路径的标记为数字标记

os.makedirs(path[,mode])
# 递归文件夹创建函数。像mkdir(),但创建的所有intermediate-level文件夹需要包含子文件夹

os.mkfifo(path[,mode])
# 创建命名管道,mode 为数字,默认为 0666 (八进制)

os.mknod(filename[,mode=0600,device])
# 创建一个名为filename文件系统节点(文件,设备特别文件或命名pipe)

os.open(file,flags[,mode])
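
The calls listed above can be combined into a short, self-contained sketch (the demo paths below are hypothetical): create a nested directory, write a file, restrict it to owner read/write (0600), and verify access.

import os
import stat

demo_dir = 'demo/dir/tree'                       # hypothetical path
if not os.path.isdir(demo_dir):
    os.makedirs(demo_dir)                        # recursive directory creation
path = os.path.join(demo_dir, 'file.txt')
with open(path, 'w') as f:
    f.write('hello')
os.chmod(path, stat.S_IRUSR | stat.S_IWUSR)      # 0600: owner read/write only
print(os.access(path, os.W_OK))                  # True: the owner may write
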
Code example #48
#!/usr/bin/python

import os,sys

statinfo = os.stat("foo7.txt")

print statinfo

statinfo = os.stat_float_times()

print statinfo