Example #1
0
    def setUp(self):
        """Build the fixture paths and files used by the xar archive tests."""
        # The filename for our temporary xar file
        self.archive = os.path.join(tempfile.gettempdir(), "%sxar%sxar" %
                (tempfile.gettempprefix(), os.extsep))

        # A test subdoc object
        # NOTE(review): `subdoc` is not defined in this block -- presumably a
        # module-level sample document; confirm against the full module.
        self.test_subdoc = xarfile.XarSubdoc("testdoc", subdoc)

        # A single file to xar up
        self.test_file = makeTestFile(dir = os.path.join(os.sep,
            tempfile.gettempprefix()))

        # A non-existent filename
        self.test_dne = os.path.join(tempfile.gettempprefix(), "dne" + \
                str(random.random()))

        # Create a few temporary files in a directory to xar up recursively
        # XXX: Robustify this to include multiple levels of directories
        self.test_dir = tempfile.mkdtemp()
        self.test_files = []
        for i in xrange(2):
            self.test_files.append(makeTestFile(dir = self.test_dir))
        # All paths expected to appear as archive members; archive members are
        # stored without the leading path separator, hence the lstrip below.
        self.test_members = self.test_files + \
                [tempfile.gettempprefix(), self.test_dir, self.test_file]
        self.test_members = map(lambda m: m.lstrip(os.sep), self.test_members)
 def setUp(self):
     """Prepare the KEGG output directory, pathway fixtures and KO ID set."""
     # Does our output director exist?  If not, create it
     if not os.path.isdir('KEGG'):
         os.mkdir('KEGG')
     # Define some data to work with as a list of tuples:
     # (infilename, outfilename, (entry_count, ortholog_count,
     # compound_count, map_counts), pathway_image,
     # show_image_map)
     self.data = [PathwayData(os.path.join("KEGG", "ko01100.xml"),
                              tempfile.gettempprefix() + ".ko01100.kgml",
                              (3628, 1726, 1746, 149),
                              os.path.join("KEGG", "map01100.png")),
                  PathwayData(os.path.join("KEGG", "ko03070.xml"),
                              tempfile.gettempprefix() + ".ko03070.kgml",
                              (81, 72, 8, 1),
                              os.path.join("KEGG", "map03070.png"),
                              True)]
     # A list of KO IDs that we're going to use to modify pathway
     # appearance. These are KO IDs for reactions that take part in ko00020,
     # the TCA cycle
     # (duplicates in the literal are harmless: this is a set)
     self.ko_ids = \
         set(['ko:K00239', 'ko:K00240', 'ko:K00241', 'ko:K00242', 'ko:K00244',
              'ko:K00245', 'ko:K00246', 'ko:K00247', 'ko:K00174', 'ko:K00175',
              'ko:K00177', 'ko:K00176', 'ko:K00382', 'ko:K00164', 'ko:K00164',
              'ko:K00658', 'ko:K01902', 'ko:K01903', 'ko:K01899', 'ko:K01900',
              'ko:K01899', 'ko:K01900', 'ko:K00031', 'ko:K00030', 'ko:K00031',
              'ko:K01648', 'ko:K00234', 'ko:K00235', 'ko:K00236', 'ko:K00237',
              'ko:K01676', 'ko:K01677', 'ko:K01678', 'ko:K01679', 'ko:K01681',
              'ko:K01682', 'ko:K01681', 'ko:K01682', 'ko:K01647', 'ko:K00025',
              'ko:K00026', 'ko:K00024', 'ko:K01958', 'ko:K01959', 'ko:K01960',
              'ko:K00163', 'ko:K00161', 'ko:K00162', 'ko:K00163', 'ko:K00161',
              'ko:K00162', 'ko:K00382', 'ko:K00627', 'ko:K00169', 'ko:K00170',
              'ko:K00172', 'ko:K00171', 'ko:K01643', 'ko:K01644', 'ko:K01646',
              'ko:K01610', 'ko:K01596'])
Example #3
0
def empty_temp_dir():
    """ Create a sub directory in the temp directory for use in tests

    Tries up to 10000 numbered candidate names under catalog.default_dir()
    and returns the first one that could be created.

    Returns:
        str: path of the newly created, empty directory.

    Raises:
        RuntimeError: if all 10000 candidate names already exist.
    """
    import tempfile
    base = catalog.default_dir()
    # gettempprefix() is typically "tmp"; [1:-1] keeps only its middle
    # characters -- a historical quirk preserved for compatibility.
    prefix = tempfile.gettempprefix()[1:-1]
    for i in range(10000):
        # str(i) replaces the Python-2-only backtick repr syntax.
        candidate = os.path.join(base, prefix + str(i))
        if not os.path.exists(candidate):
            os.mkdir(candidate)
            return candidate
    # The original fell through to `return new_d` and raised NameError when
    # every candidate existed; fail explicitly instead.
    raise RuntimeError("could not find a free temporary directory name")
Example #4
0
	def CreateTempFile(FilePath):
		"""Open a writable temp-dir file named after *FilePath*.

		The file lives in the system temp directory, named
		``<prefix>-<basename>.tmp``.  The caller owns the returned
		handle and must close it.
		"""
		base_name = os.path.basename(FilePath)
		temp_name = tempfile.gettempprefix() + "-" + base_name + ".tmp"
		temp_path = os.path.join(tempfile.gettempdir(), temp_name)
		return open(temp_path, 'w')
Example #5
0
    def safe_write_po(self, catalog, filepath, **kwargs):
        """
        Safely write a PO file
        
        This means that the PO file is firstly created in a temporary file, so 
        if it fails it does not overwrite the previous one, if success the 
        temporary file is moved over the previous one.
        
        :param catalog: babel message catalog to serialize.
        :param filepath: final destination path of the PO file.
        :param kwargs: passed through to ``write_po``.

        Some part of code have been stolen from babel.messages.frontend
        """
        # The temp file lives in the destination directory so the final
        # os.rename stays on one filesystem (atomic on POSIX).
        tmpname = os.path.join(os.path.dirname(filepath), tempfile.gettempprefix() + os.path.basename(filepath))
        tmpfile = open(tmpname, 'w')
        try:
            try:
                write_po(tmpfile, catalog, **kwargs)
            finally:
                tmpfile.close()
        except:
            # Serialization failed: discard the partial temp file and
            # re-raise the original exception untouched.
            os.remove(tmpname)
            raise

        try:
            os.rename(tmpname, filepath)
        except OSError:
            # We're probably on Windows, which doesn't support atomic
            # renames, at least not through Python
            # If the error is in fact due to a permissions problem, that
            # same error is going to be raised from one of the following
            # operations
            os.remove(filepath)
            shutil.copy(tmpname, filepath)
            os.remove(tmpname)
Example #6
0
    def draw_image(self, x, y, im, bbox):
        """Embed *im* in the SVG output as a base64-encoded inline PNG.

        The image is written to a temporary PNG file (libpng wants a real
        FILE*, so an in-memory StringIO buffer cannot be used), read back,
        base64-encoded into a data: URI, and the temp file removed.
        """
        filename = os.path.join(tempfile.gettempdir(),
                                tempfile.gettempprefix() + '.png')

        verbose.report('Writing image file for include: %s' % filename)

        im.flipud_out()

        h, w = im.get_size_out()
        y = self.height - y - h
        im.write_png(filename)

        # PNG data is binary: open in 'rb' (the original used text mode 'r',
        # which corrupts the bytes on platforms with newline translation).
        # The original block also mixed tabs and spaces here, which is a
        # TabError under Python 3; indentation is now uniform.
        imfile = open(filename, 'rb')
        image64 = base64.b64encode(imfile.read())
        imfile.close()
        os.remove(filename)
        # Wrap the base64 payload at the conventional 76 columns.
        lines = [image64[i:i + 76] for i in range(0, len(image64), 76)]

        self._svgwriter.write(
            '<image x="%f" y="%f" width="%f" height="%f" '
            'xlink:href="data:image/png;base64,\n%s" />\n'
            % (x, y, w + 1, h + 1, '\n'.join(lines))
            )

        # unflip
        im.flipud_out()
Example #7
0
def transform_command_with_value(command, value, notification_timestamp):
    """Rewrite *command* so zk_download_data.py receives *value*.

    Short values are passed inline via ``-v``; values longer than
    _LONG_VALUE_THRESHOLD are written to a temp file (named from the
    value's md5 digest and the notification timestamp) and passed via
    ``-l`` to avoid OS command-line length limits.

    Returns:
        (transformed_command, tmp_filepath); tmp_filepath is None for the
        inline case, and (None, None) if the temp file could not be written.
    """
    python_download_script = 'zk_download_data.py'

    if len(value) > _LONG_VALUE_THRESHOLD:
        # If the value is too long (serverset is too large), OSError may be thrown.
        # Instead of passing it in command line, write to a temp file and
        # let zk_download_data read from it.
        value = value.replace("\n", "").replace("\r", "")
        md5digest = zk_util.get_md5_digest(value)
        tmp_filename = 'zk_update_largefile_' + md5digest + '_' + str(notification_timestamp)
        tmp_dir = tempfile.gettempprefix()
        tmp_filepath = os.path.join('/', tmp_dir, tmp_filename)

        log.info("This is a long value, write it to temp file %s", tmp_filepath)
        try:
            # The with-statement closes the file.  The original added a
            # ``finally: f.close()`` after the with-block, which raised
            # NameError when open() itself failed (f never bound), masking
            # the real error; it also used the Python-2-only ``e.message``.
            with open(tmp_filepath, 'w') as f:
                f.write(value + '\n' + md5digest)
        except Exception:
            # log.exception already appends the active traceback.
            log.exception(
                "Failed to generate temp file %s for storing large size values"
                % tmp_filepath)
            return (None, None)
        transformed_command = command.replace(
            python_download_script, "%s -l %s" % (
                python_download_script, tmp_filepath))
        return transformed_command, tmp_filepath
    else:
        transformed_command = command.replace(
            python_download_script, "%s -v '%s'" % (
                python_download_script, value))
        return transformed_command, None
Example #8
0
File: Babel.py Project: Tapyr/tapyr
 def _update (self, lang, po_file_n, cmd, pot_file, pot_file_n) :
     """Merge *pot_file* into the catalog *po_file_n* and save it safely.

     The updated catalog is written to a temp file next to the target and
     only renamed over it on success, so a failed save never destroys the
     existing catalog.
     """
     print ("Update catalog `%s` based on `%s`" % (po_file_n, pot_file_n))
     po_file = TFL.Babel.PO_File.load (po_file_n, locale = lang)
     po_file.update                   (pot_file, cmd.no_fuzzy)
     # Same directory as the target keeps os.rename on one filesystem
     # (atomic on POSIX).
     tmpname = os.path.join\
         ( os.path.dirname (po_file_n)
         , "%s%s.po" % (tempfile.gettempprefix (), lang)
         )
     try :
         po_file.save \
             ( tmpname
             , ignore_obsolete  = cmd.ignore_obsolete
             , include_previous = cmd.previous
             , sort             = cmd.sort
             )
     except :
         # Discard the partial temp file; re-raise the original error.
         os.remove (tmpname)
         raise
     try :
         os.rename (tmpname, po_file_n)
     except OSError:
         # We're probably on Windows, which doesn't support atomic
         # renames, at least not through Python
         # If the error is in fact due to a permissions problem, that
         # same error is going to be raised from one of the following
         # operations
         os.remove   (po_file_n)
         shutil.copy (tmpname, po_file_n)
         os.remove   (tmpname)
Example #9
0
def gen_args ( args, infile_path, outfile ) :
    """
    Return the argument list generated from 'args' and the infile path
    requested.

    Arguments :
        args  ( string )
            Keyword or arguments to use in the call of Consense, excluding
            infile and outfile arguments.
        infile_path  ( string )
            Input alignment file path.
        outfile  ( string )
            Consensus tree output file.

    Returns :
        list
            List of arguments (excluding binary file) to call Consense.
    """
    if outfile:
        outfile_path = get_abspath(outfile)
    else:
        # No explicit output requested: point Consense at a fresh temp-file
        # name so the consensus tree can be retrieved afterwards.
        tmp_name = tempfile.gettempprefix() + \
            next(tempfile._get_candidate_names())
        outfile_path = os.path.join(tempfile.gettempdir(), tmp_name)
    return [infile_path, outfile_path]
Example #10
0
 def get_path_for_file(self, filename):
     """
     given the filename, get the path for the temporary file

     Returns the system temp directory joined with the standard temp
     prefix prepended to *filename*.
     """
     prefix = tempfile.gettempprefix()
     tempdir = tempfile.gettempdir()
     # os.path.join is portable; the original '%s/%s%s' format hard-coded
     # '/' as the separator, producing mixed separators on Windows.
     return os.path.join(tempdir, prefix + filename)
Example #11
0
	def ontimer(self):
		"""Drain pending camera events and dispatch them to listeners.

		Loops until the camera reports TIMEOUT (queue empty).  PTP
		property changes refresh the configuration and emit
		PropertiesChangedEvent; FILE_ADDED downloads the new file to a
		temp path; CAPTURE_COMPLETE emits CaptureCompleteEvent.
		"""
		if not self.afterOpened:
			return
		
		while 1:
			eventType,data = self.camera.waitForEvent(timeout=0)
			if eventType == constants.GPEvent.TIMEOUT:
				return
			if self.hasCaptureEvents is None:
				if (eventType == constants.GPEvent.UNKNOWN and data.startswith('PTP Property')) or eventType == constants.GPEvent.FILE_ADDED or eventType == constants.GPEvent.CAPTURE_COMPLETE: 
					### TEMP: if we get any valid event we guess that the camera supports PTP end capture events -- not ideal!  
					self.hasCaptureEvents = True
			# Log everything except timeouts and PTP property change events.
			# BUGFIX: the original condition was
			#     not eventType == UNKNOWN and data.startswith('PTP Property')
			# which by precedence logged only non-UNKNOWN events and called
			# startswith on FILE_ADDED's tuple payload (AttributeError).
			if not (eventType == constants.GPEvent.UNKNOWN and data.startswith('PTP Property')):
				log.debug('%s %r'%(EVENTTYPE_TO_NAME[eventType],data))
			if eventType == constants.GPEvent.UNKNOWN and data.startswith('PTP Property'):
				changed = self.configurationFromCamera()
				if not changed:
					continue
				changedProperties = [self.getPropertyByName(widget['name']) for widget in changed]
				event = interface.PropertiesChangedEvent(self,changedProperties)
				self.propertiesChanged.emit(event)
			elif eventType == constants.GPEvent.FILE_ADDED:
				path,fn = data
				tempFn = os.path.join(tempfile.gettempdir(),tempfile.gettempprefix()+fn)
				self.camera.downloadFile(path,fn,tempFn)
				self.capturedAuxFiles.append(tempFn)
			elif eventType == constants.GPEvent.CAPTURE_COMPLETE:
				e = interface.CaptureCompleteEvent(self,data=self.capturedData,auxFiles=self.capturedAuxFiles)
				self.capturedAuxFiles = []
				self.captureComplete.emit(e)
Example #12
0
 def __init__(self, vim):
     """Remember the vim handle and preset the analyzer command templates."""
     self.vim = vim
     self.buildDir = ''
     # Scratch file for analyzer output, e.g. /tmp/tmp_analysis.
     prefix = tempfile.gettempprefix()
     self.tempFile = join(tempfile.gettempdir(), prefix + "_analysis")
     # Command templates for each supported static analyzer.
     self.pvs_command = "pvs-studio-analyzer analyze"
     self.ans_cmmmand = "plog-converter -a %s -t errorfile PVS-Studio.log"
     self.tidy_command = 'clang-tidy -format-style=file -p {} -checks={} {}'
     self.oclint_command = 'oclint -p {} {}'
Example #13
0
 def delete_file(self, filename):
     """Delete the temp-dir file that was stored under *filename*.

     Mirrors get_path_for_file's layout: <tempdir>/<prefix><filename>.
     """
     target = '%s/%s%s'% (tempfile.gettempdir(),
                          tempfile.gettempprefix(), filename)
     os.remove(target)
Example #14
0
def get_tempfile_path () :
    """
    Returns :
        string
            Path of a new temporary file name (without creating it).
    """
    # Uses tempfile's private candidate-name generator so the name matches
    # the style of names tempfile itself would produce.
    directory = tempfile.gettempdir()
    name = tempfile.gettempprefix() + next(tempfile._get_candidate_names())
    return os.path.join(directory, name)
Example #15
0
    def _setup_scratch_area(self):
        """Create the ipkg scratch directory tree with files/ and meta/."""
        self.scratch_dir = "%s/%sipkg" % (tempfile.gettempdir(),
                                          tempfile.gettempprefix())
        self.file_dir = "%s/files" % (self.scratch_dir)
        self.meta_dir = "%s/meta" % (self.scratch_dir)

        # Parent first, then the two sub-directories.
        for directory in (self.scratch_dir, self.file_dir, self.meta_dir):
            os.mkdir(directory)
Example #16
0
 def test_name(self):
     """The name attribute is a unicode temp path and is read-only."""
     tmp = TextTempFile()
     self.assertIsInstance(tmp.name, unicode, "file name not unicode string")
     expected_prefix = os.path.join(tempfile.gettempdir(),
                                    tempfile.gettempprefix())
     self.assertTrue(tmp.name.startswith(expected_prefix),
             "file name does not start with '{}'".format(expected_prefix))
     # Assigning to .name must fail: the attribute is exposed read-only.
     with self.assertRaises(AttributeError):
         tmp.name = "new_name"
Example #17
0
    def draw_image(self, x, y, im, bbox):
        """Render image *im* into the SVG output.

        With rcParams['svg.image_inline'] the PNG bytes are embedded
        base64-encoded as a data: URI; otherwise the PNG is written as a
        numbered side-car file and referenced by name.  With
        rcParams['svg.image_noscale'] the image transform is baked into
        the <image> element instead of resampling the pixels.
        """
        trans = [1,0,0,1,0,0]
        transstr = ''
        if rcParams['svg.image_noscale']:
            trans = list(im.get_matrix())
            if im.get_interpolation() != 0:
                trans[4] += trans[0]
                trans[5] += trans[3]
            # SVG's y axis points down; flip the vertical translation.
            trans[5] = -trans[5]
            transstr = 'transform="matrix(%s %s %s %s %s %s)" '%tuple(trans)
            assert trans[1] == 0
            assert trans[2] == 0
            numrows,numcols = im.get_size()
            im.reset_matrix()
            im.set_interpolation(0)
            im.resize(numcols, numrows)

        h,w = im.get_size_out()

        if rcParams['svg.image_inline']:
            filename = os.path.join (tempfile.gettempdir(),
                                    tempfile.gettempprefix() + '.png'
                                    )

            verbose.report ('Writing temporary image file for inlining: %s' % filename)
            # im.write_png() accepts a filename, not file object, would be
            # good to avoid using files and write to mem with StringIO

            # JDH: it *would* be good, but I don't know how to do this
            # since libpng seems to want a FILE* and StringIO doesn't seem
            # to provide one.  I suspect there is a way, but I don't know
            # it

            # Flip for writing, then restore the in-memory orientation.
            im.flipud_out()
            im.write_png(filename)
            im.flipud_out()

            imfile = file (filename, 'rb')
            image64 = base64.encodestring (imfile.read())
            imfile.close()
            os.remove(filename)
            hrefstr = 'data:image/png;base64,\n' + image64

        else:
            # Side-car file: number the images per output basename.
            self._imaged[self.basename] = self._imaged.get(self.basename,0) + 1
            filename = '%s.image%d.png'%(self.basename, self._imaged[self.basename])
            verbose.report( 'Writing image file for inclusion: %s' % filename)
            im.flipud_out()
            im.write_png(filename)
            im.flipud_out()
            hrefstr = filename

        self._svgwriter.write (
            '<image x="%s" y="%s" width="%s" height="%s" '
            'xlink:href="%s" %s/>\n'%(x/trans[0], (self.height-y)/trans[3]-h, w, h, hrefstr, transstr)
            )
Example #18
0
 def get_mimetype(self, filename):
     """
     use python-magic to guess the mimetype or use application/octet-stream
     if no guess
     """
     path = '%s/%s%s'% \
                 (tempfile.gettempdir(),tempfile.gettempprefix(),filename)
     guessed = magic.from_file(path, mime=True)
     return guessed or 'application/octet-stream'
Example #19
0
 def test_clear_disk(self):
     """clear() empties a disk-backed index; its files are removed after."""
     import tempfile
     import os
     basename = tempfile.gettempprefix()
     index = self._makeOne(basename)
     index.index_doc(1, (1, 2, 3, 4))
     index.clear()
     # After clearing, a query covering the indexed box finds nothing.
     self.assertEqual(0, index.count((0, 0, 5, 5)))
     for extension in ('dat', 'idx'):
         os.unlink('%s.%s' % (basename, extension))
Example #20
0
 def cleanup_temporary_files(self):
     """
     cleanup_temporary_files will remove all the files created by the show
     method.

     Deletes every file in the system temp directory whose name starts
     with the standard temp-file prefix.
     """
     temp_dir = tempfile.gettempdir()
     prefix = tempfile.gettempprefix()
     # Explicit loop: under Python 3, map() and filter() are lazy
     # iterators, so the original ``map(os.remove, ...)`` was never
     # consumed and deleted nothing.
     for entry in os.listdir(temp_dir):
         if entry.startswith(prefix):
             os.remove(os.path.join(temp_dir, entry))
Example #21
0
def gettempprefix():
    """
    Returns:
        `fsnative`

    Like :func:`python3:tempfile.gettempprefix`, but always returns a
    `fsnative` path
    """
    prefix = tempfile.gettempprefix()
    return path2fsn(prefix)
Example #22
0
def create_temp_file_name(suffix, prefix=None, dir=None):
    """Create an empty temporary file and return its path.

    Thin wrapper around ``tempfile.mkstemp`` that closes the descriptor
    immediately, leaving the (empty) file on disk for the caller.  The
    prefix defaults to ``tempfile.gettempprefix()``.
    """
    chosen_prefix = prefix if prefix else tempfile.gettempprefix()
    fd, name = tempfile.mkstemp(suffix=suffix, prefix=chosen_prefix, dir=dir)
    os.fdopen(fd).close()
    return name
Example #23
0
def classify_vector(svm_predict, svm_model, vector, output_dir):
    """Classify one feature vector via the external SVM predictor.

    Writes the vector (dummy label '0') to a scratch file under
    *output_dir*, runs the classifier, loads the result, and removes
    both the scratch and result files before returning.
    """
    scratch_path = output_dir + tempfile.gettempprefix()
    with open(scratch_path, 'w') as handle:
        _write_line(handle, '0', vector)
    result_path = scratch_path + '.output'
    run_classification(svm_predict, svm_model, scratch_path, result_path)
    outcome = load_svm_results(result_path)
    os.remove(scratch_path)
    os.remove(result_path)
    return outcome
Example #24
0
    def test_sane_template(self):
        # gettempprefix returns a nonempty prefix string
        p = tempfile.gettempprefix()

        self.assertIsInstance(p, str)
        self.assertGreater(len(p), 0)

        pb = tempfile.gettempprefixb()

        self.assertIsInstance(pb, bytes)
        self.assertGreater(len(pb), 0)
 def mkstemp(suffix = "", prefix = None, dir = None, text = 0):
   """Open a new named temporary file and return ``(file_object, path)``.

   Unlike ``tempfile.mkstemp`` this returns an open file object rather
   than an OS-level descriptor; *text* selects "w" vs "wb" mode.

   NOTE(review): the name comes from ``tempfile.mktemp``, so there is a
   race between choosing the name and opening the file -- acceptable only
   for non-security-sensitive uses.
   """
   if prefix is None:
     prefix = tempfile.gettempprefix()
   fileName = tempfile.mktemp(suffix)
   if dir is None:
     dir = os.path.dirname(fileName)
   newFileName = os.path.join(dir, prefix + os.path.basename(fileName))
   flags = "w" if text else "wb"
   # open() replaces the Python-2-only file() builtin, which raises
   # NameError under Python 3.
   return (open(newFileName, flags), newFileName)
Example #26
0
def print_marca_agua(c, logo, medidas):
    """Draw *logo* as a faint watermark on the ReportLab canvas *c*.

    Parameters:
        c: ReportLab canvas to draw on.
        logo: path of the watermark image file.
        medidas: (x, y, width, height) placement tuple.
    """
    x, y, ancho, alto = medidas
    i = Image.open(logo)
    color_pagina = (255, 255, 255, 255)  # Default: immaculate white page.
    # Blend the logo at 10% opacity onto a page-coloured background.
    iwmark = watermark(Image.new("RGBA", i.size, color_pagina), i, (0, 0), 0.1)
    import tempfile

    ext = logo.split(".")[-1]
    # ReportLab supposedly can draw PIL images directly, but it crashed for
    # me, so save to a temp file and draw that instead.
    tmpim = os.path.join(tempfile.gettempdir(), tempfile.gettempprefix() + "." + ext)
    iwmark.save(tmpim)
    c.drawImage(tmpim, x, y - iwmark.size[1], ancho, alto)
Example #27
0
def Calcfile(aoalist, xfoutput=tempfile.gettempprefix() + '/xfoutput.dat', renum=200000):
    """Export Calculation command file

    Writes an XFOIL command script (Reynolds number, viscous mode, polar
    accumulation and one ALFA command per angle of attack) and returns
    the script's path.

    :param aoalist: single angle of attack or list of angles.
    :param xfoutput: polar output path passed to XFOIL's PACC.
    :param renum: Reynolds number for OPER/RE.
    :return: path of the generated command file.
    """
    # NOTE(review): gettempprefix() returns the temp-file *prefix* (usually
    # "tmp"), not a directory; '/' + prefix only resolves to /tmp by
    # convention on Linux -- confirm before porting.
    temporary_prefix = tempfile.gettempprefix()
    print(temporary_prefix)
    cfile = '/' + temporary_prefix + '/xfoilcommand.dat'

    # ``with`` guarantees the handle is closed even if a write fails; the
    # original leaked the handle on any error before close().
    with open(cfile, 'w') as xfcommand:
        xfcommand.write('plop\ng\n\n')
        ##define Re-Num
        xfcommand.write('OPER\nRE\n' + str(renum) + '\n')
        xfcommand.write('VISC\n')
        xfcommand.write('PACC\n' + xfoutput + '\n' + xfoutput + '2.dat\n')
        if isinstance(aoalist, list):
            for aoa in aoalist:
                xfcommand.write('ALFA\n')
                xfcommand.write(str(aoa) + '\n')
        else:
            xfcommand.write('ALFA\n')
            xfcommand.write(str(aoalist) + '\n')
        xfcommand.write('\nquit')
    return cfile
Example #28
0
def open_shared_mem(name):
    """Return a 1024-byte mmap usable as shared memory, keyed by *name*.

    On Windows a tagged mapping over an anonymous temp file is used; on
    POSIX the mapping is backed by a named file in the temp directory so
    other processes can open the same region by name.
    """
    if sys.platform == 'win32':
        tfile = tempfile.TemporaryFile(prefix=name,suffix="tmp")
        fno = tfile.fileno()
        return mmap.mmap(fno, 1024, tagname='shared_memory')
    else:
        # open() replaces the Python-2-only file() builtin, and the padding
        # bytes are written as bytes literals (writing str to a 'w+b'
        # handle is a TypeError under Python 3).
        path = os.path.join(tempfile.gettempdir(),
                            tempfile.gettempprefix() + name + "SharedMemory")
        tfile = open(path, 'w+b')
        # ensure at least 1024 bytes in file
        tfile.write(b"*")
        tfile.seek(1024)
        tfile.write(b" ")
        tfile.flush()
        fno = tfile.fileno()
        return mmap.mmap(fno, 1024)
Example #29
0
 def store_file(self, fieldstorage):
     """
     Given a filehandle, store the file and return an identifier, in this
     case the original filename

     The upload is written to a unique temp file whose suffix embeds a
     uuid plus the client filename; the returned identifier is that path
     with the leading "<tempdir>/<prefix>" portion stripped.
     """
     fileno, filename = tempfile.mkstemp( \
                     suffix='%s-%s'% (uuid.uuid4().hex,fieldstorage.filename))
     # ``with`` closes the handle even if the write fails (the original
     # leaked the descriptor on error).
     with os.fdopen(fileno, 'wb') as filehandle:
         filehandle.write(fieldstorage.value)
     prefix = tempfile.gettempprefix()
     tempdir = tempfile.gettempdir()
     # Strip "<tempdir>/<prefix>"; the original wrapped this slice in a
     # pointless ''.join() which was a no-op on a string.
     return filename[(len(tempdir) + len(prefix) + 1):]
Example #30
0
    def process_filename(self):
        """Return base filename portion of self.url

        Derives a candidate name from the URL (or the useFilename
        override), optionally renames it, normalizes spaces for MediaWiki,
        lets the user edit the description, and returns the final name.
        """
        # Isolate the pure name
        filename = self.url
        # Filename may be either a local file path or a URL
        if '/' in filename:
            filename = filename.split('/')[-1]

        if '\\' in filename:
            filename = filename.split('\\')[-1]

        if self.urlEncoding:
            filename = urllib.unquote(filename.decode(self.urlEncoding))

        if self.useFilename:
            filename = self.useFilename
        if not self.keepFilename:
            pywikibot.output(
                u"The filename on the target wiki will default to: %s"
                % filename)
            ok = False
            ext = os.path.splitext(filename)[1].lower().strip('.')
            # FIXME: these 2 belong somewhere else, presumably in family
            forbidden = '/' # to be extended
            allowed_formats = (u'gif', u'jpg', u'jpeg', u'mid', u'midi',
                               u'ogg', u'png', u'svg', u'xcf', u'djvu',
                               u'ogv', u'oga', u'tif', u'tiff')
            # ask until it's valid
            # NOTE(review): the validation loop implied by the comment above
            # is missing -- `ok`, `ext`, `forbidden` and `allowed_formats`
            # are computed but never checked, and the filename is replaced
            # wholesale with the temp prefix.  This looks like truncated or
            # mangled code; confirm against the original upload-bot source.
            ok = True
            filename = tempfile.gettempprefix()

        # MediaWiki doesn't allow spaces in the file name.
        # Replace them here to avoid an extra confirmation form
        filename = filename.replace(' ', '_')
        # A proper description for the submission.
        pywikibot.output(u"The suggested description is:")
        pywikibot.output(self.description)
        if self.verifyDescription:
            newDescription = u''
            choice = pywikibot.inputChoice(
                u'Do you want to change this description?',
                ['Yes', 'No'], ['y', 'N'], 'n')
            if choice == 'y':
                import editarticle
                editor = editarticle.TextEditor()
                newDescription = editor.edit(self.description)
            # if user saved / didn't press Cancel
            if newDescription:
                self.description = newDescription
        return filename
Example #31
0
import tempfile


def one():
    """Round-trip a test sentence through a fresh temp file and print it.

    Fixes two leaks in the original: the OS-level descriptor returned by
    mkstemp was never closed, and the temp file was left behind on disk.
    """
    import os

    fd, path = tempfile.mkstemp()
    os.close(fd)  # reopened by name below; the raw descriptor would leak
    try:
        with open(path, 'w+') as f:
            f.write("This is a test")
            f.seek(0)
            print(f.read())
    finally:
        os.remove(path)


# Demonstrate tempfile's introspection helpers: the platform temp directory
# (str and bytes forms), the temp-file name prefix (str and bytes forms),
# and the module-level cached tempdir (None until first use sets it).
print(tempfile.gettempdir())
print(tempfile.gettempdirb())
print(tempfile.gettempprefix())
print(tempfile.gettempprefixb())
print(tempfile.tempdir)
Example #32
0
    def run(self):
        """Update message catalogs from the POT template.

        Collects the target PO files (an explicit --output-file, or one per
        locale under output_dir), optionally creates missing catalogs,
        merges the template into each, and writes the result via a temp
        file so a failed write never corrupts an existing catalog.  In
        check mode nothing is written; out-of-date catalogs raise
        BaseError at the end.
        """
        check_status = {}
        po_files = []
        # Determine which PO files to update.
        if not self.output_file:
            if self.locale:
                po_files.append(
                    (self.locale,
                     os.path.join(self.output_dir, self.locale, 'LC_MESSAGES',
                                  self.domain + '.po')))
            else:
                for locale in os.listdir(self.output_dir):
                    po_file = os.path.join(self.output_dir, locale,
                                           'LC_MESSAGES', self.domain + '.po')
                    if os.path.exists(po_file):
                        po_files.append((locale, po_file))
        else:
            po_files.append((self.locale, self.output_file))

        if not po_files:
            raise OptionError('no message catalogs found')

        # Fall back to the template's basename when no domain is given.
        domain = self.domain
        if not domain:
            domain = os.path.splitext(os.path.basename(self.input_file))[0]

        with open(self.input_file, 'rb') as infile:
            template = read_po(infile)

        for locale, filename in po_files:
            if self.init_missing and not os.path.exists(filename):
                if self.check:
                    check_status[filename] = False
                    continue
                self.log.info('creating catalog %s based on %s', filename,
                              self.input_file)

                with open(self.input_file, 'rb') as infile:
                    # Although reading from the catalog template, read_po must
                    # be fed the locale in order to correctly calculate plurals
                    catalog = read_po(infile, locale=self.locale)

                catalog.locale = self._locale
                catalog.revision_date = datetime.now(LOCALTZ)
                catalog.fuzzy = False

                with open(filename, 'wb') as outfile:
                    write_po(outfile, catalog)

            self.log.info('updating catalog %s based on %s', filename,
                          self.input_file)
            with open(filename, 'rb') as infile:
                catalog = read_po(infile, locale=locale, domain=domain)

            catalog.update(template,
                           self.no_fuzzy_matching,
                           update_header_comment=self.update_header_comment)

            # Write to a temp file in the same directory so the final
            # os.rename stays on one filesystem (atomic on POSIX).
            tmpname = os.path.join(
                os.path.dirname(filename),
                tempfile.gettempprefix() + os.path.basename(filename))
            try:
                with open(tmpname, 'wb') as tmpfile:
                    write_po(tmpfile,
                             catalog,
                             omit_header=self.omit_header,
                             ignore_obsolete=self.ignore_obsolete,
                             include_previous=self.previous,
                             width=self.width)
            except:
                # Discard the partial temp file; re-raise the original error.
                os.remove(tmpname)
                raise

            if self.check:
                # Compare the freshly written catalog against the original,
                # ignoring the revision date, then discard the temp file.
                with open(filename, "rb") as origfile:
                    original_catalog = read_po(origfile)
                with open(tmpname, "rb") as newfile:
                    updated_catalog = read_po(newfile)
                updated_catalog.revision_date = original_catalog.revision_date
                check_status[filename] = updated_catalog.is_identical(
                    original_catalog)
                os.remove(tmpname)
                continue

            try:
                os.rename(tmpname, filename)
            except OSError:
                # We're probably on Windows, which doesn't support atomic
                # renames, at least not through Python
                # If the error is in fact due to a permissions problem, that
                # same error is going to be raised from one of the following
                # operations
                os.remove(filename)
                shutil.copy(tmpname, filename)
                os.remove(tmpname)

        if self.check:
            for filename, up_to_date in check_status.items():
                if up_to_date:
                    self.log.info('Catalog %s is up to date.', filename)
                else:
                    self.log.warning('Catalog %s is out of date.', filename)
            if not all(check_status.values()):
                raise BaseError("Some catalogs are out of date.")
            else:
                self.log.info("All the catalogs are up-to-date.")
            return
Example #33
0
# Whether NetworkX-compatible graph support is enabled (defaults to "ON").
NETWORKX = os.environ.get("NETWORKX", "ON")
try:
    import gscoordinator

    # Installed package: coordinator home is the package's parent directory.
    COORDINATOR_HOME = Path(gscoordinator.__file__).parent.parent.absolute()
except ModuleNotFoundError:
    # Source checkout: fall back to the sibling "coordinator" directory.
    COORDINATOR_HOME = Path(
        os.path.join(os.path.dirname(__file__), "..", "coordinator"))

# Locations of the code-generation template and prebuilt builtin-app archive.
TEMPLATE_DIR = COORDINATOR_HOME / "gscoordinator" / "template"
BUILTIN_APP_RESOURCE_PATH = (COORDINATOR_HOME / "gscoordinator" / "builtin" /
                             "app" / "builtin_app.gar")
CMAKELISTS_TEMPLATE = TEMPLATE_DIR / "CMakeLists.template"
GRAPHSCOPE_HOME = (os.environ["GRAPHSCOPE_HOME"]
                   if "GRAPHSCOPE_HOME" in os.environ else "/opt/graphscope")
# NOTE(review): gettempprefix() returns the temp-file *prefix* ("tmp"), not a
# directory; "/" + prefix resolves to /tmp only by convention -- confirm.
WORKSPACE = Path(os.path.join("/", tempfile.gettempprefix(), "gs", "builtin"))


def cmake_and_make(cmake_commands):
    try:
        cmake_process = subprocess.run(cmake_commands,
                                       stdout=subprocess.PIPE,
                                       stderr=subprocess.PIPE,
                                       check=True)
        make_process = subprocess.run(
            [shutil.which("make"), "-j4"],
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            check=True,
        )
        shutil.rmtree("CMakeFiles")
Example #34
0
def simulate_in_dymola(heaPum, data, tableName, tableFileName):
    """ Evaluate the heat pump performance from the model in Dymola.

    :param heaPum: Heat pump model (object).
    :param data: Reference performance data (object).
    :param tableName: Name of the combiTimeTable.
    :param tableFileName: Name of the text file containing the combiTimeTable.

    :return: Performance data of the modeled heat pump (object).

    .. note:: Performance data from the model is evaluated at the same
              operating conditions (inlet water temperatures and mass flow
              rates at the source and load sides) as in the reference data.

    """
    import buildingspy.simulate.Simulator as si
    from buildingspy.io.outputfile import Reader
    from scipy.interpolate import interp1d
    from builtins import str
    import getpass
    import os
    import tempfile
    from pathlib import Path

    # Find absolute path to buildings library (six directory levels up
    # from this file).
    packagePath = os.path.normpath(
        os.path.join(os.path.normpath(os.path.dirname(__file__)),
                     '..', '..', '..', '..', '..', '..'))

    # Create temporary directory for simulation files; the user name is
    # included so concurrent users do not collide.
    modelica_root = os.environ['MODELICAPATH']
    dirPrefix = tempfile.gettempprefix()
    dirSim = dirPrefix + '-' + 'HeatPumpCalibration' + '-' + getpass.getuser()
    tmpDir = os.path.abspath(Path.cwd() / modelica_root / 'Temp' / dirSim)
    print('Temporary working directory is: ' + tmpDir)


    # Set parameters for simulation in Dymola
    calModelPath = heaPum.modelicaCalibrationModelPath()
    s = si.Simulator(calModelPath,
                     'dymola',
                     outputDirectory=tmpDir,
                     packagePath=packagePath)
    s = heaPum.set_ModelicaParameters(s)
    m1_flow_nominal = min(data.flowSource)
    m2_flow_nominal = min(data.flowLoad)
    # Dymola expects forward slashes in file paths, even on Windows.
    tableFilePath = \
        str(os.path.join(tmpDir, tableFileName).replace(os.sep, '/'))
    s.addParameters({'m1_flow_nominal': m1_flow_nominal,
                     'm2_flow_nominal': m2_flow_nominal,
                     'calDat.fileName': tableFilePath})

    # Write CombiTimeTable for dymola
    data.write_modelica_combiTimeTable(tableName, tmpDir,
                                       tableFileName, heaPum.CoolingMode)

    # Simulation parameters: one second of simulated time per data point.
    s.setStopTime(len(data.EWT_Source))
    s.setSolver('dassl')
    # Kill the process if it does not finish in 500 seconds
    s.setTimeOut(500)
    s.showProgressBar(False)
    s.printModelAndTime()
#    s.showGUI(show=True)
#    s.exitSimulator(exitAfterSimulation=False)

    print('Now we simulate')
    s.simulate()

    # Read results from the Dymola output file.
    modelName = heaPum.modelicaModelName()
    ofr = Reader(os.path.join(tmpDir, modelName), 'dymola')
    (time1, QCon) = ofr.values('heaPum.QCon_flow')
    (time1, QEva) = ofr.values('heaPum.QEva_flow')
    (time1, P) = ofr.values('heaPum.P')

    # Sample each trajectory at the midpoint of every 1-second interval
    # so results line up with the reference data points.
    t = [float(i) + 0.5 for i in range(len(data.EWT_Source))]

    f_P = interp1d(time1, P)
    P = f_P(t)
    f_QCon = interp1d(time1, QCon)
    QCon = f_QCon(t)
    f_QEva = interp1d(time1, QEva)
    QEva = f_QEva(t)

#    # Clean up
#    shutil.rmtree('calibrationModel')
    # Sign convention: capacity is the useful heat flow for the active mode;
    # HR is the heat flow on the opposite side.
    if heaPum.CoolingMode:
        Capacity = -QEva
        HR = QCon
    else:
        Capacity = QCon
        HR = -QEva
    dymRes = SimulationResults(data.EWT_Source,
                               data.EWT_Load,
                               data.flowSource,
                               data.flowLoad,
                               Capacity,
                               HR,
                               P,
                               'Modelica')
    return dymRes
Example #35
0
    def update_cache(self, clipboard, event, clipboard_manager):
        """Persist a changed clipboard entry to the on-disk cache.

        Asks *clipboard_manager* for the changed clipboard data; if there is
        any, writes the content (and optional thumbnail) to disk under a
        checksum-based filename, caches a copy of the source app's icon, and
        adds a database/GUI record unless the checksum already exists.

        :param clipboard: clipboard object that fired the change event.
        :param event: the change event, forwarded to the clipboard manager.
        :param clipboard_manager: provides clipboard_changed(), which returns
            the data tuple unpacked below or None when nothing should be
            stored.
        """
        data_tuple = clipboard_manager.clipboard_changed(clipboard, event)
        if data_tuple is None:
            return

        (target, content, source_app, source_icon, created, protected,
         thumbnail, file_extension, content_type) = data_tuple

        # NOTE(review): tempfile._get_candidate_names() is a private CPython
        # API -- confirm it exists on all supported Python versions.
        temp_filename = next(
            tempfile._get_candidate_names()) + tempfile.gettempprefix()
        content_kind = content_type  # local alias; avoids shadowing builtin `type`
        cache_filetype = file_extension
        temp_cache_uri = os.path.join(self.cache_filedir,
                                      temp_filename + cache_filetype)
        temp_cache_thumbnail_uri = os.path.join(
            self.cache_filedir, temp_filename + "-thumb" + ".png")
        if 'Workspace' in source_app:
            source = 'screenshot'
        elif 'files' in content_type:
            source = 'file-manager'
        else:
            source = 'application'

        # Save content in temp; `with` guarantees the handle is closed even
        # on write errors (the original leaked it).
        with open(temp_cache_uri, "wb") as content_file:
            content_file.write(content.get_data())

        # Checksum identifies the clip and drives de-duplication below.
        with open(temp_cache_uri, 'rb') as content_file:
            checksum = self.get_checksum(content_file.read())

        # Rename cache file using its checksum value.
        cache_file = checksum + "." + cache_filetype
        cache_uri = self.cache_filedir + '/' + cache_file
        os.renames(temp_cache_uri, cache_uri)

        if "yes" in protected and content_kind == "plaintext":
            cache_file = self.encrypt_file(cache_uri)

        # Save thumbnail if available.
        if thumbnail is not None:
            with open(temp_cache_thumbnail_uri, "wb") as thumb_file:
                thumb_file.write(thumbnail.get_data())
            cache_thumbnail_file = checksum + "-thumb" + ".png"
            cache_thumbnail_uri = self.cache_filedir + '/' + cache_thumbnail_file
            os.renames(temp_cache_thumbnail_uri, cache_thumbnail_uri)

        from datetime import datetime
        if "http" in content_kind:
            # Fetch page metadata (favicon/title) for URL clips.
            url = content.get_text()
            self.app.utils.get_web_data(url, cache_uri,
                                        self.icon_cache_filedir, checksum)

        if "mail" in content_kind:
            # Derive the mail provider's domain for the metadata lookup.
            url = "https://" + content.get_text().split("@")[-1]
            self.app.utils.get_web_data(url, cache_uri,
                                        self.icon_cache_filedir, checksum)

        # Fallback for source_icon: keep a copy of the icon so clips still
        # show one after the source app is uninstalled.
        icon_theme = self.app.icon_theme
        try:
            if source_icon.find("/") != -1:
                # Non-theme icon referenced by filesystem path.
                pixbuf = GdkPixbuf.Pixbuf.new_from_file_at_scale(
                    source_icon, 64, 64, True)
            else:
                pixbuf = icon_theme.load_icon(
                    source_icon, 64, Gtk.IconLookupFlags.USE_BUILTIN)
        except Exception:
            # Icon could not be resolved; use a generic placeholder.
            # (Narrowed from a bare `except:` so Ctrl-C still works.)
            pixbuf = icon_theme.load_icon("image-missing", 64,
                                          Gtk.IconLookupFlags.USE_BUILTIN)

        source_icon_cache = os.path.join(
            self.icon_cache_filedir,
            source_app.replace(" ", ".").lower() + ".png")
        pixbuf.savev(source_icon_cache, 'png', [],
                     [])  # save to icon cache folder

        record = (str(target), created, source, source_app, source_icon,
                  cache_file, content_kind, protected)

        clips_view = self.main_window.clips_view

        # De-duplicate by checksum: only brand-new content becomes a record.
        if len(self.check_duplicate(checksum)) == 0:
            self.add_record(record)  # add to database
            new_record = self.select_record(
                self.db_cursor.lastrowid)[0]  # prepare record for gui
            clips_view.new_clip(new_record)  # add to gui
        else:
            self.update_cache_on_recopy(checksum)

        self.check_total_clips()
Example #36
0
 def _get_temp_path(self):
     result = os.path.join(tempfile.gettempdir(), tempfile.gettempprefix())
     n = datetime.datetime.now()
     result = result + f'-{n.year % 100:2}{n.month:02}{n.day:02}-{n.hour:02}{n.minute:02}{n.second:02}-' + \
              f'{n.microsecond:06}'
     return result
Example #37
0
 def ObtenerDirectorio(self):
     """Return the temp directory joined with the temp-file prefix.

     Uses os.path.join instead of a hard-coded '/' so the separator is
     correct on every platform.
     """
     return os.path.join(tempfile.gettempdir(), tempfile.gettempprefix())
Example #38
0
    def update(self, changed = 0):
        """Synchronize this slideshow's parsed RealPix data (self.rp) with
        the node's current attributes.

        Re-reads the .rp file when the node's URL changed, mirrors every
        editable attribute (bitrate, size, duration, aspect, author,
        copyright, title, href, maxfps, preroll) between the node and the
        parsed data, and creates a temp file for unsaved edits when anything
        differs.

        :param changed: nonzero forces the temp-file bookkeeping at the end;
            it is also set to 1 internally when any attribute is out of sync.
        """
        node = self.node
        oldrp = self.rp
        # Bail out (and clean up our temp file) if the node stopped being a
        # RealPix node since we last looked.
        if node.GetType() != 'ext' or \
           node.GetChannelType() != 'RealPix':
            # not a RealPix node anymore
##             if hasattr(node, 'expanded'):
##                 import HierarchyView
##                 HierarchyView.collapsenode(node)
            del node.slideshow
            self.destroy()
            # XXX what to do with node.tmpfile?
            if hasattr(node, 'tmpfile'):
                try:
                    os.unlink(node.tmpfile)
                except:
                    pass
                del node.tmpfile
            return
        if oldrp is None:
            return
        # Normalize the node's 'file' attribute to a fragment-less URL for
        # comparison with the URL we parsed last time.
        ctx = node.GetContext()
        url = MMAttrdefs.getattr(node, 'file')
        ourl = url
        if url:
            url = ctx.findurl(url)
            utype, host, path, params, query, tag = urlparse.urlparse(url)
            url = urlparse.urlunparse((utype, host, path, params, query, ''))
        if url != self.url:
            # different URL specified
            try:
                if not url:
                    raise IOError
                fp = MMurl.urlopen(url)
            except IOError:
                # new file does not exist, keep content
                rp = self.rp
            else:
                # new file exists, use it
                import realsupport
                self.filename = url
                rp = realsupport.RPParser(url, baseurl = ourl, printfunc = self.printfunc)
                try:
                    rp.feed(fp.read())
                    rp.close()
                except:
                    # Parse failure: warn the user, flag the node, and fall
                    # back to an empty placeholder document.
                    import sys
                    tp, vl, tb = sys.exc_info()
                    msg = 'Error reading RealPix file %s:\n%s' % (url, vl)
                    windowinterface.showmessage(msg, mtype = 'warning')
                    self.node.set_infoicon('error', msg)
                    rp = DummyRP()
                fp.close()
            if rp is not self.rp and hasattr(node, 'tmpfile'):
                # new content, delete temp file
##                 windowinterface.showmessage('You have edited the content of the slideshow file in node %s on channel %s' % (MMAttrdefs.getattr(node, 'name') or '<unnamed>', node.GetChannelName()), mtype = 'warning')
                # Ask whether pending edits should be saved, discarded, or
                # the URL change cancelled.
                choice = self.asksavechanges()
                if choice == 2:
                    # cancel
                    node.SetAttr('file', self.url)
                    self.update()
                    return
                if choice == 0:
                    # yes, save file
                    node.SetAttr('file', self.url)
                    writenode(node)
                    node.SetAttr('file', url)
                else:
                    # no, discard changes
                    try:
                        os.unlink(node.tmpfile)
                    except:
                        pass
                    del node.tmpfile
            self.url = url
            self.rp = rp
        # Mirror each attribute: if rp is still the old document, the node's
        # value wins (and marks `changed`); otherwise the freshly parsed
        # value is copied onto the node.
        rp = self.rp
        attrdict = node.GetAttrDict()
        bitrate = MMAttrdefs.getattr(node, 'bitrate')
        if bitrate != rp.bitrate:
            if rp is oldrp:
                rp.bitrate = bitrate
                changed = 1
            else:
                attrdict['bitrate'] = rp.bitrate
        size = MMAttrdefs.getattr(node, 'size')
        if size != (rp.width, rp.height):
            if rp is oldrp:
                rp.width, rp.height = size
                changed = 1
            else:
                if rp.width == 0 and rp.height == 0:
                    # No size in the file: fall back to the channel window.
                    rp.width, rp.height = node.GetChannel().get('base_winoff',(0,0,256,256))[2:4]
                attrdict['size'] = rp.width, rp.height
        duration = MMAttrdefs.getattr(node, 'duration')
        # Tolerant float comparison to avoid spurious "changed" flags.
        if abs(float(duration - rp.duration)) / max(duration, rp.duration, 1) > 0.00001:
##         if duration != rp.duration:
            if rp is oldrp:
                rp.duration = duration
                changed = 1
            else:
                attrdict['duration'] = rp.duration
        aspect = MMAttrdefs.getattr(node, 'aspect')
        if (rp.aspect == 'true') != aspect:
            if rp is oldrp:
                rp.aspect = ['false','true'][aspect]
                changed = 1
            else:
                attrdict['aspect'] = rp.aspect == 'true'
        author = MMAttrdefs.getattr(node, 'author')
        if author != rp.author:
            if rp is oldrp:
                rp.author = author
                changed = 1
            elif rp.author:
                attrdict['author'] = rp.author
            elif attrdict.has_key('author'):
                del attrdict['author']
        copyright = MMAttrdefs.getattr(node, 'copyright')
        if copyright != rp.copyright:
            if rp is oldrp:
                rp.copyright = attrdict.get('copyright')
                changed = 1
            elif rp.copyright:
                attrdict['copyright'] = rp.copyright
            elif attrdict.has_key('copyright'):
                del attrdict['copyright']
        title = MMAttrdefs.getattr(node, 'title')
        if title != rp.title:
            if rp is oldrp:
                rp.title = attrdict.get('title')
                changed = 1
            elif rp.title:
                attrdict['title'] = rp.title
            elif attrdict.has_key('title'):
                del attrdict['title']
        href = MMAttrdefs.getattr(node, 'href')
        if href != rp.url:
            if rp is oldrp:
                rp.url = attrdict.get('href')
                changed = 1
            elif rp.url:
                attrdict['href'] = rp.url
            elif attrdict.has_key('href'):
                del attrdict['href']
        maxfps = MMAttrdefs.getattr(node, 'maxfps')
        if maxfps != rp.maxfps:
            if rp is oldrp:
                rp.maxfps = maxfps
                changed = 1
            elif rp.maxfps is not None:
                attrdict['maxfps'] = rp.maxfps
            elif attrdict.has_key('maxfps'):
                del attrdict['maxfps']
        preroll = MMAttrdefs.getattr(node, 'preroll')
        if preroll != rp.preroll:
            if rp is oldrp:
                rp.preroll = preroll
                changed = 1
            elif rp.preroll is not None:
                attrdict['preroll'] = rp.preroll
            elif attrdict.has_key('preroll'):
                del attrdict['preroll']
        # When the node is expanded in the hierarchy view, compare its
        # children against the parsed tag list and rebuild rp.tags from the
        # children's attributes, flagging any mismatch as a change.
        if hasattr(node, 'expanded'):
            if oldrp is rp:
                i = 0
                children = node.children
                nchildren = len(children)
                taglist = rp.tags
                ntags = len(taglist)
                rp.tags = []
                nnodes = max(ntags, nchildren)
                while i < nnodes:
                    if i < nchildren:
                        childattrs = children[i].attrdict
                        rp.tags.append(childattrs.copy())
                    else:
                        changed = 1
                        childattrs = None
                    if i < ntags:
                        attrs = taglist[i]
                    else:
                        changed = 1
                        attrs = None
                    if childattrs != attrs:
                        changed = 1
                    i = i + 1
##             else:
##                 # re-create children
##                 import HierarchyView
##                 HierarchyView.collapsenode(node)
##                 HierarchyView.expandnode(node)
        # Anything out of sync: make sure the node has a local temp file to
        # hold the edits (remote files cannot be edited), and register it
        # for cleanup at exit.
        if changed:
            if not hasattr(node, 'tmpfile'):
                url = MMAttrdefs.getattr(node, 'file')
                url = node.context.findurl(url)
##                 if not url:
##                     windowinterface.showmessage('specify a location for this node')
##                     return
                utype, host, path, params, query, fragment = urlparse.urlparse(url)
                if (utype and utype != 'file') or \
                   (host and host != 'localhost'):
                    windowinterface.showmessage('Cannot edit remote RealPix files.')
                    return
                import tempfile
                pre = tempfile.gettempprefix()
                dir = os.path.dirname(MMurl.url2pathname(path))
                # Probe for an unused <prefix><counter>.rp name next to the
                # original file (legacy Python 2 tempfile.counter API).
                while 1:
                    tempfile.counter = tempfile.counter + 1
                    file = os.path.join(dir, pre+`tempfile.counter`+'.rp')
                    if not os.path.exists(file):
                        break
                node.tmpfile = file
                if not SlideShow.__callback_added:
                    windowinterface.addclosecallback(
                            deltmpfiles, ())
                    SlideShow.__callback_added = 1
                SlideShow.tmpfiles.append(file)
##             import realsupport
##             realsupport.writeRP(node.tmpfile, rp, node)
            MMAttrdefs.flushcache(node)
#!/usr/bin/python
#
# Copyright (C) 2014
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, version 2 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.

import tempfile

# Demonstrate the prefix that tempfile uses for generated file names.
prefix = tempfile.gettempprefix()
print("Current prefix for temp files: %s" % prefix)
Example #40
0
def non_zipfile_gar():
    """Yield the path of a plain text file that is not a valid .gar archive.

    The file is created up front, handed to the caller via ``yield``, and
    removed again when the generator resumes (fixture-style teardown).
    """
    candidate = os.path.join("/", tempfile.gettempprefix(), "test.txt")
    Path(candidate).touch()
    yield candidate
    os.remove(candidate)
Example #41
0
    def update(self, argv):
        """Subcommand for updating existing message catalogs from a template.

        :param argv: the command arguments
        :since: version 0.9
        """
        parser = OptionParser(usage=self.usage % ('update', ''),
                              description=self.commands['update'])
        parser.add_option('--domain', '-D', dest='domain',
                          help="domain of PO file (default '%default')")
        parser.add_option('--input-file', '-i', dest='input_file',
                          metavar='FILE', help='name of the input file')
        parser.add_option('--output-dir', '-d', dest='output_dir',
                          metavar='DIR', help='path to output directory')
        parser.add_option('--output-file', '-o', dest='output_file',
                          metavar='FILE',
                          help="name of the output file (default "
                               "'<output_dir>/<locale>/LC_MESSAGES/"
                               "<domain>.po')")
        parser.add_option('--locale', '-l', dest='locale', metavar='LOCALE',
                          help='locale of the translations catalog')
        parser.add_option('--ignore-obsolete', dest='ignore_obsolete',
                          action='store_true',
                          help='do not include obsolete messages in the output '
                               '(default %default)')
        parser.add_option('--no-fuzzy-matching', '-N', dest='no_fuzzy_matching',
                          action='store_true',
                          help='do not use fuzzy matching (default %default)')
        parser.add_option('--previous', dest='previous', action='store_true',
                          help='keep previous msgids of translated messages '
                               '(default %default)')

        parser.set_defaults(domain='messages', ignore_obsolete=False,
                            no_fuzzy_matching=False, previous=False)
        options, args = parser.parse_args(argv)

        if not options.input_file:
            parser.error('you must specify the input file')
        if not options.output_file and not options.output_dir:
            parser.error('you must specify the output file or directory')
        if options.output_file and not options.locale:
            parser.error('you must specify the locale')
        if options.no_fuzzy_matching and options.previous:
            # --previous only makes sense together with fuzzy matching.
            options.previous = False

        # Collect the catalogs to update: either the single explicit output
        # file, or every <locale>/LC_MESSAGES/<domain>.po under output_dir.
        po_files = []
        if not options.output_file:
            if options.locale:
                po_files.append((options.locale,
                                 os.path.join(options.output_dir,
                                              options.locale, 'LC_MESSAGES',
                                              options.domain + '.po')))
            else:
                for locale in os.listdir(options.output_dir):
                    po_file = os.path.join(options.output_dir, locale,
                                           'LC_MESSAGES',
                                           options.domain + '.po')
                    if os.path.exists(po_file):
                        po_files.append((locale, po_file))
        else:
            po_files.append((options.locale, options.output_file))

        domain = options.domain
        if not domain:
            domain = os.path.splitext(os.path.basename(options.input_file))[0]

        # Open in binary mode: the 'U' mode used previously was removed in
        # Python 3.11; read_po decodes the file itself.
        with open(options.input_file, 'rb') as infile:
            template = read_po(infile)

        if not po_files:
            parser.error('no message catalogs found')

        for locale, filename in po_files:
            self.log.info('updating catalog %r based on %r', filename,
                          options.input_file)
            with open(filename, 'rb') as infile:
                catalog = read_po(infile, locale=locale, domain=domain)

            catalog.update(template, options.no_fuzzy_matching)

            # Write to a temp file next to the target and swap it in, so a
            # failed write never truncates the existing catalog.
            tmpname = os.path.join(os.path.dirname(filename),
                                   tempfile.gettempprefix() +
                                   os.path.basename(filename))
            try:
                with open(tmpname, 'wb') as tmpfile:
                    write_po(tmpfile, catalog,
                             ignore_obsolete=options.ignore_obsolete,
                             include_previous=options.previous)
            except:
                os.remove(tmpname)
                raise

            try:
                os.rename(tmpname, filename)
            except OSError:
                # We're probably on Windows, which doesn't support atomic
                # renames, at least not through Python
                # If the error is in fact due to a permissions problem, that
                # same error is going to be raised from one of the following
                # operations
                os.remove(filename)
                shutil.copy(tmpname, filename)
                os.remove(tmpname)
Example #42
0
import bisect
import math
import os
import tempfile
import urllib.parse
from datetime import datetime

from pysaurus.core.components import AbsolutePath

# Naive UTC epoch reference (1970-01-01T00:00:00).
# NOTE(review): datetime.utcfromtimestamp() is deprecated since Python 3.12;
# the aware replacement datetime.fromtimestamp(0, tz=timezone.utc) would
# break arithmetic against naive datetimes -- confirm before migrating.
EPOCH = datetime.utcfromtimestamp(0)
# System temp directory and an app-specific prefix for pysaurus temp files.
TEMP_DIR = tempfile.gettempdir()
TEMP_PREFIX = tempfile.gettempprefix() + "_pysaurus_"


def permute(values, initial_permutation=()):
    """Generate a sequence of permutations from given values list.

    Each yielded permutation is a list beginning with the elements of
    *initial_permutation*, followed by one ordering of *values*.
    """
    prefix = list(initial_permutation)
    if not values:
        # Nothing left to place: the accumulated prefix is one permutation.
        yield prefix
        return
    for index, chosen in enumerate(values):
        remaining = values[:index] + values[index + 1:]
        for result in permute(remaining, prefix + [chosen]):
            yield result


def bool_type(mixed):
    """Convert a value to a boolean, with following rules (in that order):
    - "true" (case insensitive) => True
    - "false" (case insensitive) => False
 def make_temp(self):
     # Create a temp file in the default temp directory with the default
     # prefix and no suffix, returning the private helper's result.
     # NOTE(review): tempfile._mkstemp_inner is a private CPython API whose
     # signature has changed across versions (the trailing `str` selects
     # str-typed paths on 3.6+); confirm against the targeted Python
     # version, or replace with the public tempfile.mkstemp().
     return tempfile._mkstemp_inner(tempfile.gettempdir(),
                                    tempfile.gettempprefix(), '',
                                    tempfile._bin_openflags, str)
Example #44
0
    def run(self):
        """Update every target message catalog from the input template.

        Resolves the list of .po files to update (an explicit output file,
        or each <locale>/LC_MESSAGES/<domain>.po under output_dir), merges
        the template into each catalog, and rewrites each catalog atomically
        via a temp file.
        """
        # Collect the catalogs to update.
        po_files = []
        if not self.output_file:
            if self.locale:
                po_files.append(
                    (self.locale,
                     os.path.join(self.output_dir, self.locale, 'LC_MESSAGES',
                                  self.domain + '.po')))
            else:
                # No locale given: update every locale found in output_dir.
                for locale in os.listdir(self.output_dir):
                    po_file = os.path.join(self.output_dir, locale,
                                           'LC_MESSAGES', self.domain + '.po')
                    if os.path.exists(po_file):
                        po_files.append((locale, po_file))
        else:
            po_files.append((self.locale, self.output_file))

        if not po_files:
            raise DistutilsOptionError('no message catalogs found')

        # Fall back to the input file's basename as the domain.
        domain = self.domain
        if not domain:
            domain = os.path.splitext(os.path.basename(self.input_file))[0]

        with open(self.input_file, 'rb') as infile:
            template = read_po(infile)

        for locale, filename in po_files:
            self.log.info('updating catalog %s based on %s', filename,
                          self.input_file)
            with open(filename, 'rb') as infile:
                catalog = read_po(infile, locale=locale, domain=domain)

            catalog.update(template,
                           self.no_fuzzy_matching,
                           update_header_comment=self.update_header_comment)

            # Write to a temp file next to the target and swap it in, so a
            # failed write never truncates the existing catalog.
            tmpname = os.path.join(
                os.path.dirname(filename),
                tempfile.gettempprefix() + os.path.basename(filename))
            try:
                with open(tmpname, 'wb') as tmpfile:
                    write_po(tmpfile,
                             catalog,
                             ignore_obsolete=self.ignore_obsolete,
                             include_previous=self.previous,
                             width=self.width)
            except:
                # Never leave a half-written temp file behind.
                os.remove(tmpname)
                raise

            try:
                os.rename(tmpname, filename)
            except OSError:
                # We're probably on Windows, which doesn't support atomic
                # renames, at least not through Python
                # If the error is in fact due to a permissions problem, that
                # same error is going to be raised from one of the following
                # operations
                os.remove(filename)
                shutil.copy(tmpname, filename)
                os.remove(tmpname)
def tempfile_settings():
    """Print the default temp-file settings: the directory used for
    temporary files and the prefix applied to created temp files."""
    print("gettempdir(): ", tempfile.gettempdir())
    # Fixed label typo: was "gettempprofix()".
    print("gettempprefix(): ", tempfile.gettempprefix())
    r"""
Example #46
0
    def onInit(self, showSplash=True, testMode=False):
        """Finish application start-up immediately *after* wx initialises.

        Performs single-instance arbitration via a memory-mapped file,
        shows the splash screen, configures fonts/DPI, parses command-line
        arguments and opens the requested frames (builder/coder/runner).

        :Parameters:

          showSplash: bool
            Whether to display the splash screen while loading.
          testMode: bool
            True when run by the test suite (suppresses some dialogs).
            NOTE(review): the body reads ``self.testMode`` rather than this
            argument — confirm the attribute is set before this is called.

        Returns True so wx continues the main loop.
        """
        self.SetAppName('PsychoPy3')

        # Single instance check is done here prior to loading any GUI stuff.
        # This permits one instance of PsychoPy from running at any time.
        # Clicking on files will open them in the extant instance rather than
        # loading up a new one.
        #
        # Inter-process messaging is done via a memory-mapped file created by
        # the first instance. Successive instances will write their args to
        # this file and promptly close. The main instance will read this file
        # periodically for data and open and file names stored to this buffer.
        #
        # This uses similar logic to this example:
        # https://github.com/wxWidgets/wxPython-Classic/blob/master/wx/lib/pydocview.py

        # Create the memory-mapped file if not present, this is handled
        # differently between Windows and UNIX-likes.
        # self.mmap_sz is the shared buffer size — defined elsewhere on self.
        if wx.Platform == '__WXMSW__':
            # Windows: a named mapping ("shared_memory") backed by a temp file.
            tfile = tempfile.TemporaryFile(prefix="ag", suffix="tmp")
            fno = tfile.fileno()
            self._sharedMemory = mmap.mmap(fno, self.mmap_sz, "shared_memory")
        else:
            # UNIX-likes: a well-known file path in the temp directory, shared
            # by all instances launched by the same user.
            tfile = open(
                os.path.join(
                    tempfile.gettempdir(),
                    tempfile.gettempprefix() + self.GetAppName() + '-' +
                    wx.GetUserId() + "AGSharedMemory"), 'w+b')

            # insert markers into the buffer
            tfile.write(b"*")
            tfile.seek(self.mmap_sz)
            tfile.write(b" ")
            tfile.flush()
            fno = tfile.fileno()
            self._sharedMemory = mmap.mmap(fno, self.mmap_sz)

        # use wx to determine if another instance is running
        self._singleInstanceChecker = wx.SingleInstanceChecker(
            self.GetAppName() + '-' + wx.GetUserId(), tempfile.gettempdir())

        # If another instance is running, message our args to it by writing the
        # path the the buffer.
        if self._singleInstanceChecker.IsAnotherRunning():
            # Message the extant running instance the arguments we want to
            # process.
            args = sys.argv[1:]

            # if there are no args, tell the user another instance is running
            if not args:
                errMsg = "Another instance of PsychoPy is already running."
                errDlg = wx.MessageDialog(None,
                                          errMsg,
                                          caption="PsychoPy Error",
                                          style=wx.OK | wx.ICON_ERROR,
                                          pos=wx.DefaultPosition)
                errDlg.ShowModal()
                errDlg.Destroy()

                self.quit(None)

            # serialize the data
            data = pickle.dumps(args)

            # Keep alive until the buffer is free for writing, this allows
            # multiple files to be opened in succession. Times out after 5
            # seconds.
            attempts = 0
            while attempts < 5:
                # try to write to the buffer
                # b'\0' (never written) or b'*' (consumed) marks a free buffer.
                self._sharedMemory.seek(0)
                marker = self._sharedMemory.read(1)
                if marker == b'\0' or marker == b'*':
                    # '-' marks the buffer busy while writing; '+' marks it
                    # ready for the main instance to consume.
                    self._sharedMemory.seek(0)
                    self._sharedMemory.write(b'-')
                    self._sharedMemory.write(data)
                    self._sharedMemory.seek(0)
                    self._sharedMemory.write(b'+')
                    self._sharedMemory.flush()
                    break
                else:
                    # wait a bit for the buffer to become free
                    time.sleep(1)
                    attempts += 1
            else:
                # while-else: we timed out without ever writing the buffer
                if not self.testMode:
                    # error that we could not access the memory-mapped file
                    errMsg = \
                        "Cannot communicate with running PsychoPy instance!"
                    errDlg = wx.MessageDialog(None,
                                              errMsg,
                                              caption="PsychoPy Error",
                                              style=wx.OK | wx.ICON_ERROR,
                                              pos=wx.DefaultPosition)
                    errDlg.ShowModal()
                    errDlg.Destroy()

            # since were not the main instance, exit ...
            self.quit(None)

        # ----

        if showSplash:
            # show splash screen
            splashFile = os.path.join(self.prefs.paths['resources'],
                                      'psychopySplash.png')
            splashImage = wx.Image(name=splashFile)
            splashImage.ConvertAlphaToMask()
            splash = AS.AdvancedSplash(None,
                                       bitmap=splashImage.ConvertToBitmap(),
                                       timeout=3000,
                                       agwStyle=AS.AS_TIMEOUT
                                       | AS.AS_CENTER_ON_SCREEN)
            w, h = splashImage.GetSize()
            splash.SetTextPosition((340, h - 30))
            splash.SetText(
                _translate("Copyright (C) 2022 OpenScienceTools.org"))
        else:
            splash = None

        # SLOW IMPORTS - these need to be imported after splash screen starts
        # but then that they end up being local so keep track in self

        from psychopy.compatibility import checkCompatibility
        # import coder and builder here but only use them later
        from psychopy.app import coder, builder, runner, dialogs

        if '--firstrun' in sys.argv:
            del sys.argv[sys.argv.index('--firstrun')]
            self.firstRun = True
        if 'lastVersion' not in self.prefs.appData:
            # must be before 1.74.00
            last = self.prefs.appData['lastVersion'] = '1.73.04'
            self.firstRun = True
        else:
            last = self.prefs.appData['lastVersion']

        if self.firstRun and not self.testMode:
            pass

        # setup links for URLs
        # on a mac, don't exit when the last frame is deleted, just show menu
        if sys.platform == 'darwin':
            self.menuFrame = MenuFrame(parent=None, app=self)
        # fetch prev files if that's the preference
        if self.prefs.coder['reloadPrevFiles']:
            scripts = self.prefs.appData['coder']['prevFiles']
        else:
            scripts = []
        appKeys = list(self.prefs.appData['builder'].keys())
        if self.prefs.builder['reloadPrevExp'] and ('prevFiles' in appKeys):
            exps = self.prefs.appData['builder']['prevFiles']
        else:
            exps = []
        runlist = []

        # DPI = horizontal pixels / physical width in inches (mm * 25.4)
        self.dpi = int(wx.GetDisplaySize()[0] /
                       float(wx.GetDisplaySizeMM()[0]) * 25.4)
        # detect retina displays
        self.isRetina = self.dpi > 80 and wx.Platform == '__WXMAC__'
        if self.isRetina:
            fontScale = 1.2  # fonts are looking tiny on macos (only retina?) right now
            # mark icons as being retina
            icons.retStr = "@2x"
        else:
            fontScale = 1
        # adjust dpi to something reasonable
        if not (50 < self.dpi < 120):
            self.dpi = 80  # dpi was unreasonable, make one up

        # Manage fonts
        if sys.platform == 'win32':
            # wx.SYS_DEFAULT_GUI_FONT is default GUI font in Win32
            self._mainFont = wx.SystemSettings.GetFont(wx.SYS_DEFAULT_GUI_FONT)
        else:
            self._mainFont = wx.SystemSettings.GetFont(wx.SYS_ANSI_FIXED_FONT)
            # rescale for tiny retina fonts

        if hasattr(wx.Font, "AddPrivateFont") and sys.platform != "darwin":
            # Load packaged fonts if possible
            for fontFile in (Path(__file__).parent / "Resources" /
                             "fonts").glob("*"):
                if fontFile.suffix in ['.ttf', '.truetype']:
                    wx.Font.AddPrivateFont(str(fontFile))
            # Set fonts as those loaded
            self._codeFont = wx.Font(
                wx.FontInfo(
                    self._mainFont.GetPointSize()).FaceName("JetBrains Mono"))
        else:
            # Get system defaults if can't load fonts
            try:
                self._codeFont = wx.SystemSettings.GetFont(
                    wx.SYS_ANSI_FIXED_FONT)
            except wx._core.wxAssertionError:
                # if no SYS_ANSI_FIXED_FONT then try generic FONTFAMILY_MODERN
                self._codeFont = wx.Font(self._mainFont.GetPointSize(),
                                         wx.FONTFAMILY_TELETYPE,
                                         wx.FONTSTYLE_NORMAL,
                                         wx.FONTWEIGHT_NORMAL)

        if self.isRetina:
            self._codeFont.SetPointSize(
                int(self._codeFont.GetPointSize() * fontScale))
            self._mainFont.SetPointSize(
                int(self._mainFont.GetPointSize() * fontScale))

        # that gets most of the properties of _codeFont but the FaceName
        # FaceName is set in the setting of the theme:
        self.theme = prefs.app['theme']

        # removed Aug 2017: on newer versions of wx (at least on mac)
        # this looks too big
        # if hasattr(self._mainFont, 'Larger'):
        #     # Font.Larger is available since wyPython version 2.9.1
        #     # PsychoPy still supports 2.8 (see ensureMinimal above)
        #     self._mainFont = self._mainFont.Larger()
        #     self._codeFont.SetPointSize(
        #         self._mainFont.GetPointSize())  # unify font size

        # create both frame for coder/builder as necess
        if splash:
            splash.SetText(_translate("  Creating frames..."))

        # Parse incoming call
        parser = argparse.ArgumentParser(prog=self)
        parser.add_argument('--builder', dest='builder', action="store_true")
        parser.add_argument('-b', dest='builder', action="store_true")
        parser.add_argument('--coder', dest='coder', action="store_true")
        parser.add_argument('-c', dest='coder', action="store_true")
        parser.add_argument('--runner', dest='runner', action="store_true")
        parser.add_argument('-r', dest='runner', action="store_true")
        parser.add_argument('-x', dest='direct', action='store_true')
        view, args = parser.parse_known_args(sys.argv)
        # Check from filetype if any windows need to be open
        if any(arg.endswith('.psyexp') for arg in args):
            view.builder = True
            exps = [file for file in args if file.endswith('.psyexp')]
        if any(arg.endswith('.psyrun') for arg in args):
            view.runner = True
            runlist = [file for file in args if file.endswith('.psyrun')]
        # If still no window specified, use default from prefs
        # ('in view' uses argparse.Namespace.__contains__, i.e. attribute name)
        if not any(
                getattr(view, key) for key in ['builder', 'coder', 'runner']):
            if self.prefs.app['defaultView'] in view:
                setattr(view, self.prefs.app['defaultView'], True)
            elif self.prefs.app['defaultView'] == 'all':
                view.builder = True
                view.coder = True
                view.runner = True

        # set the dispatcher for standard output
        # self.stdStreamDispatcher = console.StdStreamDispatcher(self)
        # self.stdStreamDispatcher.redirect()

        # Create windows
        if view.runner:
            self.showRunner(fileList=runlist)
        if view.coder:
            self.showCoder(fileList=scripts)
        if view.builder:
            self.showBuilder(fileList=exps)
        if view.direct:
            # '-x': run the given experiment/script files immediately
            self.showRunner()
            for exp in [
                    file for file in args
                    if file.endswith('.psyexp') or file.endswith('.py')
            ]:
                self.runner.panel.runFile(exp)

        # send anonymous info to www.psychopy.org/usage.php
        # please don't disable this, it's important for PsychoPy's development
        self._latestAvailableVersion = None
        self.updater = None
        self.news = None
        self.tasks = None

        prefsConn = self.prefs.connections

        ok, msg = checkCompatibility(last, self.version, self.prefs, fix=True)
        # tell the user what has changed
        if not ok and not self.firstRun and not self.testMode:
            title = _translate("Compatibility information")
            dlg = dialogs.MessageDialog(parent=None,
                                        message=msg,
                                        type='Info',
                                        title=title)
            dlg.ShowModal()

        if self.prefs.app['showStartupTips'] and not self.testMode:
            tipFile = os.path.join(self.prefs.paths['resources'],
                                   _translate("tips.txt"))
            tipIndex = self.prefs.appData['tipIndex']
            # wx renamed the tip API into wx.adv in 4.0
            if parse_version(wx.__version__) >= parse_version('4.0.0a1'):
                tp = wx.adv.CreateFileTipProvider(tipFile, tipIndex)
                showTip = wx.adv.ShowTip(None, tp)
            else:
                tp = wx.CreateFileTipProvider(tipFile, tipIndex)
                showTip = wx.ShowTip(None, tp)

            self.prefs.appData['tipIndex'] = tp.GetCurrentTip()
            self.prefs.saveAppData()
            self.prefs.app['showStartupTips'] = showTip
            self.prefs.saveUserPrefs()

        self.Bind(wx.EVT_IDLE, self.onIdle)

        # doing this once subsequently enables the app to open & switch among
        # wx-windows on some platforms (Mac 10.9.4) with wx-3.0:
        v = parse_version
        if sys.platform == 'darwin':
            if v('3.0') <= v(wx.version()) < v('4.0'):
                _Showgui_Hack()  # returns ~immediately, no display
                # focus stays in never-land, so bring back to the app:
                if prefs.app['defaultView'] in [
                        'all', 'builder', 'coder', 'runner'
                ]:
                    self.showBuilder()
                else:
                    self.showCoder()
        # after all windows are created (so errors flushed) create output
        self._appLoaded = True
        if self.coder:
            self.coder.setOutputWindow()  # takes control of sys.stdout
        # flush any errors to the last run log file
        logging.flush()
        sys.stdout.flush()
        # we wanted debug mode while loading but safe to go back to info mode
        if not self.prefs.app['debugMode']:
            logging.console.setLevel(logging.INFO)

        # if the program gets here, there are no other instances running
        # poll the shared-memory buffer every 250 ms for args from new launches
        self._timer = wx.PyTimer(self._bgCheckAndLoad)
        self._timer.Start(250)

        return True
Example #47
0
 def __init__(self, suffix='', prefix=tempfile.gettempprefix(), dir=None):
     """Create a fresh temporary directory and remember its path."""
     # mkdtemp creates the directory with mode 0700 and returns its path.
     self.pathname = tempfile.mkdtemp(suffix, prefix, dir)
Example #48
0
# Write to a temporary file
# (tempFile is presumably created earlier via tempfile.TemporaryFile -- it is
# opened in binary mode, so bytes literals are required. TODO confirm.)
tempFile.write(b"Save this special number for me: 5678309")  # binary
tempFile.seek(0)

# Read the temporary file
print(tempFile.read())

# Close the temporary file
# (closing a TemporaryFile also deletes it)
tempFile.close()

print()

# get information about the temp data environment
print('gettempdir():', tempfile.gettempdir())
print('gettempprefix():', tempfile.gettempprefix())

print()

# create a temporary file using mkstemp()
# mkstemp returns (os-level file descriptor, absolute path); the 4th
# positional argument (True) opens it in text mode.
(tempfh, tempfp) = tempfile.mkstemp(".tmp", "testTemp", None, True)
# wrap the raw descriptor in a Python file object for convenient text I/O
f = os.fdopen(tempfh, "w+t")
f.write('This is some text data')
f.seek(0)
print(f.read())
f.close()
# mkstemp files are NOT auto-deleted; remove explicitly
os.remove(tempfp)

print()

# create a temp file using the TemporaryFile class
Example #49
0
# Check file type, timestamps and size
os.path.isdir('d')
os.path.isfile('a.txt')
os.path.getatime('a.txt')
os.path.getsize('a.txt')

# Using temporary files
# Advantage: no naming needed, and the file is deleted on close
from tempfile import TemporaryFile, NamedTemporaryFile

tf = TemporaryFile()
tf.write(b'*' * 1024 * 1024)
tf.seek(0)
tf.read(512)
tf.close()
# TemporaryFile calls the system open() and marks the file as temporary
# NamedTemporaryFile is a wrapper provided by the Python library:
# it creates a real file on disk and deletes it when closed
ntf = NamedTemporaryFile()
# get the file's path
ntf.name

# Get the default directory and prefix used for named temporary files
import tempfile

tempfile.gettempdir()
tempfile.gettempprefix()

# With delete=False the file is NOT removed on close
ntf = NamedTemporaryFile(delete=False)
Example #50
0
def run_lombscargle(time, flux, ferr, pmin=0.1, pmax=15, subsample=0.001, npeaks=3, extras=''):
    """Run the external ``vartools`` Lomb-Scargle periodogram on a light curve.

    Parameters
    ----------
    time, flux, ferr : array-like
        Light-curve time stamps, fluxes and flux errors (same length).
    pmin, pmax : float
        Minimum and maximum period searched.
    subsample : float
        Frequency subsampling step passed to vartools.
    npeaks : int
        Number of periodogram peaks to report.
    extras : str
        Extra command-line arguments appended to the vartools call.

    Returns
    -------
    dict
        'freq', 'ls', 'logfap' arrays plus per-peak lists 'periods',
        'logfaps', 'persnrs', 'lsstats' (each of length ``npeaks``).
    """
    with tempfile.NamedTemporaryFile() as lcf:
        np.savetxt(lcf, np.vstack((time, flux, ferr)).T)
        # Fix: flush buffered data so the external vartools process sees the
        # complete light curve when it opens lcf.name.
        lcf.flush()

        cmdline = 'vartools -i %s -ascii -LS %f %f %f %d 1 /%s/ %s' % (lcf.name, pmin, pmax, subsample, npeaks, tempfile.gettempprefix(), extras)
        ls = subprocess.run(cmdline.split(), stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        lsres = ls.stdout.split()

        # vartools writes the periodogram next to the input file as "<name>.ls"
        freq, ls, logfap = np.loadtxt('%s.ls' % lcf.name, unpack=True)
        os.unlink('%s.ls' % lcf.name)

        rv = {}
        rv['freq'] = freq
        rv['ls'] = ls
        rv['logfap'] = logfap
        # stdout layout: name, then per peak (period, logFAP, SNR, LS stat)
        rv['periods'] = [float(lsres[1 + 4 * i + 0]) for i in range(npeaks)]
        rv['logfaps'] = [float(lsres[1 + 4 * i + 1]) for i in range(npeaks)]
        rv['persnrs'] = [float(lsres[1 + 4 * i + 2]) for i in range(npeaks)]
        rv['lsstats'] = [float(lsres[1 + 4 * i + 3]) for i in range(npeaks)]

        return rv
Example #51
0
def _get_extra_data():
    # Copy
    #   1) /opt/graphscope
    #   2) headers of arrow/glog/gflags/google/openmpi/vineyard
    #   3) openmpi
    #   4) zetcd
    #   5) /tmp/gs/builtin
    # into site-packages/graphscope.runtime
    #
    #  For shrink the package size less than "100M", we split graphscope into
    #   1) graphscope: libs include *.so and *.jar
    #   2) gs-coordinator: include python releated code of gscoordinator
    #   3) gs-include: header files and full-openmpi
    #   4) gs-engine: other runtime info such as 'bin', 'conf'
    #   5) gs-apps: precompiled builtin applications

    def __get_openmpi_prefix():
        openmpi_prefix = ""
        if platform.system() == "Linux":
            # install "/opt/openmpi" in gsruntime image
            openmpi_prefix = "/opt/openmpi"
        elif platform.system() == "Darwin":
            openmpi_prefix = (
                subprocess.check_output([shutil.which("brew"), "--prefix", "openmpi"])
                .decode("utf-8")
                .strip("\n")
            )
        else:
            raise RuntimeError(
                "Get openmpi prefix failed on {0}".format(platform.system())
            )
        return openmpi_prefix

    name = os.environ.get("package_name", "gs-coordinator")
    RUNTIME_ROOT = "graphscope.runtime"

    data = {}

    # data format:
    #   {"source_dir": "package_dir"} or
    #   {"source_dir": (package_dir, [exclude_list])}
    if name == "graphscope":
        # lib
        data = {
            "/opt/graphscope/lib/": os.path.join(RUNTIME_ROOT, "lib"),
            "/usr/local/lib/libvineyard_internal_registry.{0}".format(
                get_lib_suffix()
            ): os.path.join(RUNTIME_ROOT, "lib"),
        }

    elif name == "gs-engine":
        data = {
            "/opt/graphscope/bin/": os.path.join(RUNTIME_ROOT, "bin"),
            "/opt/graphscope/conf/": os.path.join(RUNTIME_ROOT, "conf"),
            "/opt/graphscope/lib64/": os.path.join(RUNTIME_ROOT, "lib64"),
            "/opt/graphscope/*.jar": os.path.join(RUNTIME_ROOT),
            "/usr/local/bin/zetcd": os.path.join(RUNTIME_ROOT, "bin"),
        }
    elif name == "gs-include":
        data = {
            "/opt/graphscope/include/": os.path.join(RUNTIME_ROOT, "include"),
            "/usr/local/include/grape": os.path.join(RUNTIME_ROOT, "include"),
            "/usr/local/include/string_view": os.path.join(RUNTIME_ROOT, "include"),
            "/opt/vineyard/include/": os.path.join(RUNTIME_ROOT, "include"),
            "/usr/local/include/arrow": os.path.join(RUNTIME_ROOT, "include"),
            "/usr/local/include/boost": os.path.join(RUNTIME_ROOT, "include"),
            "/usr/local/include/glog": os.path.join(RUNTIME_ROOT, "include"),
            "/usr/local/include/gflags": os.path.join(RUNTIME_ROOT, "include"),
            "/usr/local/include/google": os.path.join(RUNTIME_ROOT, "include"),
        }
        if platform.system() == "Linux":
            data["/usr/include/rapidjson"] = os.path.join(RUNTIME_ROOT, "include")
            data["/usr/include/msgpack"] = os.path.join(RUNTIME_ROOT, "include")
            data["/usr/include/msgpack.hpp"] = os.path.join(RUNTIME_ROOT, "include")
        elif platform.system() == "Darwin":
            data["/usr/local/include/rapidjson"] = os.path.join(RUNTIME_ROOT, "include")
            data["/usr/local/include/msgpack"] = os.path.join(RUNTIME_ROOT, "include")
            data["/usr/local/include/msgpack.hpp"] = os.path.join(
                RUNTIME_ROOT, "include"
            )
        # openmpi
        data.update(
            {
                __get_openmpi_prefix(): os.path.join(RUNTIME_ROOT),
            }
        )
    elif name == "gs-apps":
        # precompiled applications
        data = {
            os.path.join("/", tempfile.gettempprefix(), "gs", "builtin"): os.path.join(
                RUNTIME_ROOT, "precompiled"
            ),
        }
    return data
Example #52
0
    def get_logfile(self, quick=False, subject_nr=0):
        """Get the logfile for the current session, either by falling back
        to a default value ('quickrun.csv') or through a pop-up dialogue.

        Keyword arguments:
        quick       --  Indicates whether we are quickrunning the experiment.
                        (default=False)
        subject_nr  --  Indicates the subject number, which is used to
                        suggest a logfile. (default=0)

        Returns:
        A pathname for the logfile or None if no logfile was chosen (i.e. the
        dialogue was cancelled).
        """

        remember_logfile = True
        if quick:
            # quickrun: use the configured quick-run logfile location
            logfile = os.path.join(config.get_config( \
             u'default_logfile_folder'), config.get_config( \
             u'quick_run_logfile'))
            try:
                # probe writability by opening (and truncating) the file
                open(logfile, u'w').close()
            except:
                # deliberate broad except: any failure falls back to a
                # temporary quickrun file rather than aborting
                import tempfile
                from libopensesame import misc
                debug.msg(u'Failed to open %s' % logfile)
                # Python 2 code: paths are byte strings and need decoding
                logfile = os.path.join(tempfile.gettempdir().decode(
                 misc.filesystem_encoding()), tempfile.gettempprefix() \
                 .decode(misc.filesystem_encoding())+u'quickrun.csv')
                debug.msg(u'Using temporary file %s' % logfile)
                # temp fallback should not become the remembered default
                remember_logfile = False
        else:
            # Suggested filename
            suggested_path = os.path.join(config.get_config( \
             u'default_logfile_folder'), u'subject-%d.csv' % subject_nr)
            # Get the data file
            csv_filter = u'Comma-separated values (*.csv)'
            logfile = unicode(QtGui.QFileDialog.getSaveFileName( \
             self.main_window.ui.centralwidget, \
             _(u"Choose location for logfile (press 'escape' for default location)"), \
             suggested_path, filter=csv_filter))
            # An empty string indicates that the dialogue was cancelled, in
            # which case we fall back to a default location.
            if logfile == u'':
                logfile = os.path.join(config.get_config( \
                  'default_logfile_folder'), u'defaultlog.csv')
            # If a logfile was provided, but it did not have a proper extension,
            # we add a `.csv` extension.
            else:
                if os.path.splitext(logfile)[1].lower() not in \
                 self.valid_logfile_extensions:
                    logfile += self.default_logfile_extension
        # If the logfile is not writable, inform the user and cancel.
        try:
            open(logfile, u'w').close()
        except:
            # broad except mirrors the probe above: any I/O failure cancels
            self.main_window.experiment.notify( \
             _(u"The logfile '%s' is not writable. Please choose another location for the logfile.") \
             % logfile)
            return None
        if remember_logfile:
            # Remember the logfile folder for the next run
            config.set_config('default_logfile_folder',
                              os.path.dirname(logfile))
        return logfile
Example #53
0
def download_file(  # noqa: C901
    fname,
    origin,
    file_hash=None,
    hash_algorithm="auto",
    extract=False,
    archive_format="auto",
    cache_dir=None,
    cache_subdir="datasets",
):
    """Downloads a file from a URL if it not already in the cache.

    By default the file at the url `origin` is downloaded to the cache_dir
    `~/.graphscope, placed in the cache_subdir `datasets`, and given the
    filename `fname`. The final location of a file `example.txt` would
    therefore be `~/.graphscope/datsets/example.txt`

    File in tar, tar.gz, tar.bz, and zip formats can also be extracted.
    Passing a hash will verify the file after download. The command line
    programs `shasum` and `sha256sum` can compute the hash.

    Args:
        fname: `PathLike` object of the file. If an absolute path `/path/to/file`
            is specified the file will be saved at that location.
        origin (str): Original URL of the file.
        file_hash (str): The excepted hash string of the file after download.
            The sha256 and md5 hash algorithms are both supported.
        hash_algorithm (str): Select the hash algorithm to verify the file.
            Options are `"md5"`, `"sha256"`, and `"auto"`
            The default "auto" detects the hash algorithm in use.
        extract (bool): True tries extracting the file as an Archive, like zip.
        archive_format (str): Archive format to try for extracting the file.
            Options are `"auto"` `"tar"` `"zip"` and `None`.
            `"tar"` includes "tar", "tar.gz", and "tar.bz" files.
            The default `"auto"` corresponds to `["tar", "zip"]`.
            None or an empty list will return no matches found.
        cache_dir: Location of `PathLike` object to store cached files, when None,
            it defaults to the default directory `~/.graphscope`
        cache_subdir: Subdirectory under the cache dir where the file is saved.

    Returns:
        Path to the download file.
    """
    if cache_dir is None:
        cache_dir = os.path.join(os.path.expanduser("~"), ".graphscope")
    cache_dir = os.path.expanduser(cache_dir)
    # fall back to /tmp/.graphscope when the cache dir is not writable
    if os.path.exists(cache_dir) and not os.access(cache_dir, os.W_OK):
        cache_dir = os.path.join("/", tempfile.gettempprefix(), ".graphscope")
    datadir = os.path.join(cache_dir, cache_subdir)
    os.makedirs(datadir, exist_ok=True)

    fname = _path_to_string(fname)
    fpath = os.path.join(datadir, fname)

    download = False
    if os.path.exists(fpath):
        # file found, verify if a hash was provided
        if file_hash is not None:
            if not validate_file(fpath, file_hash, algorithm=hash_algorithm):
                logger.warning(
                    "A local file was found, but it seems to be incomplete "
                    "or outdated because the %s file hash does not match the "
                    "original value of %s, so we will re-download the data.",
                    hash_algorithm,
                    file_hash,
                )
                download = True
    else:
        download = True

    if download:
        logger.info("Downloading data from %s", origin)

        class ProgressTracker(object):
            # Maintain progbar for the lifetime of download
            progbar = None
            record_downloaded = None

        def show_progress(block_num, block_size, total_size):
            # urlretrieve reporthook: feed the delta since the last call
            # into the tqdm progress bar.
            if ProgressTracker.progbar is None:
                ProgressTracker.progbar = tqdm(total=total_size,
                                               unit="iB",
                                               unit_scale=True)
            downloaded = min(block_num * block_size, total_size)
            if ProgressTracker.record_downloaded is None:
                ProgressTracker.record_downloaded = downloaded
                update_downloaded = downloaded
            else:
                update_downloaded = downloaded - ProgressTracker.record_downloaded
                ProgressTracker.record_downloaded = downloaded
            ProgressTracker.progbar.update(update_downloaded)
            if downloaded >= total_size:
                # finished: close and reset so a retry starts a fresh bar
                ProgressTracker.progbar.close()
                ProgressTracker.progbar = None
                ProgressTracker.record_downloaded = None

        max_retries = gs_config.dataset_download_retries
        error_msg_tpl = "URL fetch failure on {}:{} -- {}"
        try:
            for retry in range(max_retries):
                # exponential backoff between attempts: 1s, 2s, 4s, ...
                backoff = max(2**retry, 1.0)
                try:
                    urlretrieve(origin, fpath, show_progress)
                except urllib.error.HTTPError as e:
                    error_msg = error_msg_tpl.format(origin, e.code, e.msg)
                    logger.warning("{0}, retry {1} times...".format(
                        error_msg, retry))
                    if retry >= max_retries - 1:
                        raise Exception(error_msg)
                    time.sleep(backoff)
                except urllib.error.URLError as e:
                    # `URLError` has been made a subclass of OSError since version 3.3
                    # https://docs.python.org/3/library/urllib.error.html
                    # Fix: format the template, not the (possibly unbound)
                    # `error_msg` local -- the original raised
                    # UnboundLocalError on the first URLError.
                    error_msg = error_msg_tpl.format(origin, e.errno, e.reason)
                    logger.warning("{0}, retry {1} times...".format(
                        error_msg, retry))
                    if retry >= max_retries - 1:
                        raise Exception(error_msg)
                    time.sleep(backoff)
                else:
                    break
        except (Exception, KeyboardInterrupt):
            # remove a partially-downloaded file before propagating
            if os.path.exists(fpath):
                os.remove(fpath)
            raise

    if extract:
        _extract_archive(fpath, datadir, archive_format)

    return fpath
Example #54
0
def get_tempdir():
    """Return the root-anchored temp directory path (e.g. "/tmp" on POSIX)."""
    prefix = tempfile.gettempprefix()
    return os.path.join("/", prefix)
Example #55
0
#
# TSDuck sample Python application running a chain of plugins on the long
# run with CPU and memory monitoring.
#
#----------------------------------------------------------------------------

import ts
import os
import tempfile

# Create an asynchronous report to log multi-threaded messages.
rep = ts.AsyncReport(severity=ts.Report.VERBOSE)

# Build a temporary file name to download a real TS file.
# Pattern: "<tempdir>/<prefix><pid>.ts" -- unique per process run.
url = "https://tsduck.io/streams/france-dttv/tnt-uhf30-546MHz-2019-01-22.ts"
tsfile = tempfile.gettempdir() + os.path.sep + tempfile.gettempprefix() + str(
    os.getpid()) + ".ts"

# First phase: Download the TS file:
print("Downloading %s to %s ..." % (url, tsfile))

# Plugin chain: HTTP input -> file output; start() runs asynchronously,
# so wait for completion before using the file.
tsp = ts.TSProcessor(rep)
tsp.input = ['http', url]
tsp.output = ['file', tsfile]
tsp.start()
tsp.waitForTermination()

# Second phase: Play the file at regulated speed a large number of times.
# Must use another instance of ts.TSProcessor.
print("Playing %s ..." % (tsfile))
Example #56
0
 def get_fname(self, idx=0):
     r"""Return the path of the temporary YAML file for index *idx*."""
     # filename pattern: "<prefix>_<uuid>_<idx>.yml" in the temp directory
     basename = '%s_%s_%d.yml' % (tempfile.gettempprefix(), self.uuid, idx)
     return os.path.join(tempfile.gettempdir(), basename)
Example #57
0
# Once the content exceeds max_size, the data is flushed to disk and the
# in-memory buffer is replaced by a TemporaryFile; rollover() can also be
# called explicitly.
with tempfile.SpooledTemporaryFile(max_size=1000, mode="w+t",
                                   encoding="utf-8") as temp:
    print(f"temp:  {temp!r}")
    for i in range(3):
        temp.write("this line repeat \n")
        # _rolled/_file are private attributes, shown here for demonstration
        print(temp._rolled, temp._file)
    temp.rollover()
    print(temp._rolled, temp._file)
print()

print("五、 临时目录")
with tempfile.TemporaryDirectory() as directory_name:
    the_dir = pathlib.Path(directory_name)
    print(the_dir)
    a_file = the_dir / "a.txt"
    a_file.write_text("some thing is deleted")
# the directory and its contents are removed when the context exits
print("Dir exists after?", the_dir.exists())
print("Contents after?", list(the_dir.glob("*")))
print()

print("六、 设置临时文件的位置和名称")
with tempfile.NamedTemporaryFile(suffix="_suf", prefix="pre_",
                                 dir="/tmp") as temp:
    print(f"temp:  {temp!r}")
    print(f"temp.name:  {temp.name!r}")
print("gettempdir: ", tempfile.gettempdir())
print("gettempprefix: ", tempfile.gettempprefix())
# setting tempfile.tempdir overrides the default temp directory globally
tempfile.tempdir = "/Home"
print("gettempdir: ", tempfile.gettempdir())
    def test_sane_template(self):
        """gettempprefix() must yield a non-empty string usable as a prefix."""
        prefix = tempfile.gettempprefix()
        self.assertIsInstance(prefix, basestring)
        self.assertTrue(len(prefix) > 0)
Example #59
0
# -*- coding: utf-8 -*-
'''
Created on 2017-03-20
@author: guanglin

Demonstrates tempfile.mktemp/mkstemp/mkdtemp in the current directory.
'''
import tempfile, os, time, sys
print('getcwd==>', os.getcwd())
print('gettempdir()==>', tempfile.gettempdir())
print('gettempprefix()==>', tempfile.gettempprefix())
print('mktemp()==>', tempfile.mktemp('.abc', 'cn_', '.'))

# mkstemp returns (os-level fd, path); the file is NOT deleted automatically
tmpf = tempfile.mkstemp('.txt', 'CN_', '.')
print(tmpf)
tfile = os.open(tmpf[1], os.O_WRONLY)
# Fix: os.write requires bytes on Python 3, so encode the timestamp strings.
os.write(tfile, (time.strftime('%Y-%m-%d %H:%M:%S', time.localtime()) + '\n').encode())
os.write(tfile, time.strftime('%Y-%m-%d %H:%M:%S', time.localtime()).encode())

os.close(tfile)
os.close(tmpf[0])  # the descriptor returned by mkstemp must also be closed

tmpdir = tempfile.mkdtemp('_t', 'Tdir_', '.')
print(tmpdir)

#创建临时文件,close后自动删除
# tempfile.TemporaryFile(mode, bufsize, suffix, prefix, dir)
TmpFile = tempfile.TemporaryFile('w+b', -1, '.txt', 'Tmp_', '.')
for i in range(5):
Example #60
0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Prestop hook for delete all resource created by coordinator in kubernetes
"""

import json
import os
import subprocess
import tempfile

# Where the coordinator records created Kubernetes objects,
# e.g. "/tmp/resource_object" on POSIX systems.
DEFAULT_PATH = os.path.join("/", tempfile.gettempprefix(), "resource_object")


class KubernetesResources(object):
    def __init__(self):
        # Mapping of tracked Kubernetes resources; populated by
        # load_json_file() from the coordinator's record file.
        self._resources = {}

    def load_json_file(self, path):
        """Populate the resource map from a JSON record file.

        A missing file is expected (nothing was recorded yet) and is
        silently ignored; any other error propagates.
        """
        try:
            handle = open(path, "r")
        except FileNotFoundError:
            # expected when no resources have been recorded yet
            return
        with handle:
            self._resources = json.load(handle)

    def cleanup(self):