Example No. 1
    def Run(self, tree):

        self.CreateTemporaryFiles()

        tempfile = open(self.mFilenameTempInput, "w")
        tempfile.write(
            to_string(tree,
                      branchlengths=self.mBranchLengths,
                      support=self.mSupport))
        tempfile.close()

        if self.mLogLevel >= 2:
            os.system("cat %s" % self.mFilenameTempInput)

        statement = string.join(
            (self.mExecutable, "-v", self.mFilenameTempInput), " ")

        s = subprocess.Popen(statement,
                             shell=True,
                             stdout=subprocess.PIPE,
                             stderr=subprocess.PIPE,
                             cwd=self.mTempDirectory,
                             close_fds=True)

        (out, err) = s.communicate()

        if s.returncode != 0:
            raise TreeGraphError, "Error in calculating svg file\n%s" % err

        d = open(self.mFilenameTempOutput).readlines()

        self.DeleteTemporaryFiles()

        return "".join(d)
Example No. 2
    def _store_output(self):
        log.debug("Storing output ...")

        tempfile = open(self._tempfile, 'r')

        log.debug("Current position: " + str(self._current_position))

        tempfile.seek(self._current_position)
        finished = False

        started = False
        while self.silent and not finished:
            lines = tempfile.readlines()
            self._current_position = tempfile.tell()
            for line in lines:
                line = line.strip()
                log.debug("Started: %s | Line: %s" %(str(started), line))
                log.debug("Condition: " +str(line.strip().endswith(self.STARTED)))
                if not started:
                    started = line.endswith(self.STARTED)
                    continue

                if line.endswith(self.FINISHED):
                    finished = True
                    break
                line = self._prepare_output(line)
                if line:
                    self.output.append(line)

        tempfile.close()
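Example No. 2 tails a temporary log file between a start and a finish marker by remembering the last read offset. The sketch below isolates that seek/tell bookkeeping; the marker string is an assumption, not the class's STARTED/FINISHED constants.

def read_new_lines(path, position, finished_marker="FINISHED"):
    # reopen the file, resume at the previous offset and remember the new one
    with open(path, "r") as fh:
        fh.seek(position)
        lines = fh.readlines()
        position = fh.tell()
    finished = any(line.strip().endswith(finished_marker) for line in lines)
    return [line.strip() for line in lines], position, finished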
Example No. 3
    def Run(self, tree):

        self.CreateTemporaryFiles()

        tempfile = open(self.mFilenameTempInput, "w")
        tempfile.write(to_string(tree, branchlengths=self.mBranchLengths, support=self.mSupport))
        tempfile.close()

        if self.mLogLevel >= 2:
            os.system("cat %s" % self.mFilenameTempInput)

        statement = string.join((self.mExecutable, "-v", self.mFilenameTempInput), " ")

        s = subprocess.Popen(
            statement,
            shell=True,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            cwd=self.mTempDirectory,
            close_fds=True,
        )

        (out, err) = s.communicate()

        if s.returncode != 0:
            raise TreeGraphError, "Error in calculating svg file\n%s" % err

        d = open(self.mFilenameTempOutput).readlines()

        self.DeleteTemporaryFiles()

        return "".join(d)
Example No. 4
    def setUp(self):
        provision_device()

        self.client = Client()
        self.hash = hashlib.md5("DUMMYDATA".encode()).hexdigest()
        self.extension = file_formats.PDF
        self.filename = "{}.{}".format(self.hash, self.extension)
        self.title = "abc123!@#$%^&*();'[],./?><"
        self.contentnode = ContentNode(title=self.title)
        self.available = True
        self.preset = format_presets.DOCUMENT
        self.local_file = LocalFile(id=self.hash,
                                    extension=self.extension,
                                    available=self.available)
        self.file = File(local_file=self.local_file,
                         available=self.available,
                         contentnode=self.contentnode,
                         preset=self.preset)

        self.path = get_content_storage_file_path(self.filename)
        path_dir = os.path.dirname(self.path)
        if not os.path.exists(path_dir):
            os.makedirs(path_dir)
        tempfile = open(self.path, "w")
        tempfile.write("test")
        tempfile.close()
Example No. 5
 def tearDown(self):
     provider_manager.unregister('fakesub = fakesubprovider:FakeSubProvider')
     for patcher in self.patchers:
         patcher.stop()
     for tempfile in self.tempfiles:
         tempfile.close()
     rmtree(self.db_path)
Example No. 6
def kill_slaves(slave_kill_filename):
    """Kill all remote slaves which are stored in the given file.
    
    This function is only meant for emergency situations, when something
    went wrong and the slaves have to be killed manually.
    """
    tempfile = open(slave_kill_filename)
    try:
        for line in tempfile:
            address, pid, ssh_pid = line.split(":")
            pid = int(pid)
            ssh_pid = int(ssh_pid)
            # open ssh connection to kill remote slave
            proc = subprocess.Popen(["ssh","-T", address],
                                    stdin=subprocess.PIPE,
                                    stdout=subprocess.PIPE,
                                    stderr=subprocess.STDOUT)
            proc.stdin.write("kill %d\n" % pid)
            proc.stdin.flush()
            # kill old ssh connection
            try:
                os.kill(ssh_pid, signal.SIGKILL)
            except:
                pass
            # a kill might prevent the kill command transmission
            # os.kill(proc.pid, signal.SIGQUIT)
            print "killed slave " + address + " (pid %d)" % pid
        print "all slaves killed."
    finally:
        tempfile.close()
Example No. 7
 def edit(D, text):
     L = '\r\n'
     K = 'utf-8-sig'
     A = text
     import tempfile as H
     A = A or ''
     E = type(A) in [bytes, bytearray]
     if not E and A and not A.endswith(_D): A += _D
     I, B = H.mkstemp(prefix='editor-', suffix=D.extension)
     try:
         if not E:
             if WIN:
                 F = K
                 A = A.replace(_D, L)
             else:
                 F = 'utf-8'
             A = A.encode(F)
         C = os.fdopen(I, 'wb')
         C.write(A)
         C.close()
         J = os.path.getmtime(B)
         D.edit_file(B)
         if D.require_save and os.path.getmtime(B) == J: return _A
         C = open(B, 'rb')
         try:
             G = C.read()
         finally:
             C.close()
         if E: return G
         else: return G.decode(K).replace(L, _D)
     finally:
         os.unlink(B)
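The minified edit() above (Example No. 7) round-trips text through a temporary file: write it out via mkstemp, let the user edit it, and read the result back unless the file was never saved. A readable sketch under those assumptions (_D taken to be "\n", _A to be None, and D.edit_file replaced by a caller-supplied launch_editor callable):

import os
import tempfile

def edit_text(text, suffix=".txt", launch_editor=None, require_save=True):
    text = text or ""
    if text and not text.endswith("\n"):
        text += "\n"
    fd, path = tempfile.mkstemp(prefix="editor-", suffix=suffix)
    try:
        # write the starting text into the temporary file
        with os.fdopen(fd, "wb") as fh:
            fh.write(text.encode("utf-8"))
        before = os.path.getmtime(path)
        if launch_editor is not None:
            launch_editor(path)                  # open the file in the user's editor
        if require_save and os.path.getmtime(path) == before:
            return None                          # file was never saved: report "no change"
        with open(path, "rb") as fh:
            return fh.read().decode("utf-8")
    finally:
        os.unlink(path)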
Example No. 8
    def setUp(self):
        # create DeviceOwner to pass the setup_wizard middleware check
        DeviceOwner.objects.create(username='******', password=123)

        self.client = Client()
        self.hash = hashlib.md5("DUMMYDATA".encode()).hexdigest()
        self.extension = dict(file_formats.choices).get("pdf")
        self.filename = "{}.{}".format(self.hash, self.extension)
        self.title = "abc123!@#$%^&*();'[],./?><"
        self.contentnode = ContentNode(title=self.title)
        self.available = True
        self.preset = format_presets.DOCUMENT
        self.file = File(checksum=self.hash,
                         extension=self.extension,
                         available=self.available,
                         contentnode=self.contentnode,
                         preset=self.preset)

        self.path = get_content_storage_file_path(self.filename)
        path_dir = os.path.dirname(self.path)
        if not os.path.exists(path_dir):
            os.makedirs(path_dir)
        tempfile = open(self.path, "w")
        tempfile.write("test")
        tempfile.close()
Example No. 9
def _configure_logging():
    import os
    import logging

    log = logging.getLogger("pyoram")
    formatter = logging.Formatter(
        fmt=("[%(asctime)s.%(msecs)03d,"
             "%(name)s,%(levelname)s] %(threadName)s %(message)s"),
        datefmt="%Y-%m-%d %H:%M:%S")

    level = os.environ.get("PYORAM_LOGLEVEL", "WARNING")
    logfilename = os.environ.get("PYORAM_LOGFILE", None)
    if len(logging.root.handlers) == 0:
        # configure the logging with some sensible
        # defaults.
        try:
            import tempfile
            tempfile = tempfile.TemporaryFile(dir=".")
            tempfile.close()
        except OSError:
            # cannot write in current directory, use the
            # console logger
            handler = logging.StreamHandler()
        else:
            if logfilename is None:
                handler = logging.StreamHandler()
            else:
                # set up a basic logfile in current directory
                handler = logging.FileHandler(logfilename)
        handler.setFormatter(formatter)
        handler.setLevel(level)
        log.addHandler(handler)
        log.setLevel(level)
        log.info("PyORAM log configured using built-in "
                 "defaults, level=%s", level)
Example No. 10
 def _save_entries(self):
     """
     saves the file entries
     """
     tempname = NamedTemporaryFile().name
     # create a JSON dictionary
     store_dict = {}
     store_dict["max_id"] = self._max_id
     entry_list = []
     for entry in self._entries:
         entry_dict = {}
         entry_dict["filepath"] = entry.get_filepath()
         entry_dict["timestamp"] = entry.get_timestamp()
         entry_dict["state"] = entry.get_state()
         entry_dict["entry_id"] = entry.get_entry_id()
         entry_list.append(entry_dict)
     store_dict["entries"] = entry_list
     line = json.dumps(store_dict)
     # write JSON to temporary file
     try:
         tempfile = open(tempname, "w")
         tempfile.write(line)
         tempfile.close()
     except IOError:
         show_error_message("Unable to create temporary file %s." % tempname, True)
     # copy encrypted temporary file to cryptstore
     key = self.get_key()
     fname = "cryptbox.00000001"
     destpath = os.path.join(self._rootpath, fname)
     encrypt_file(tempname, destpath, key)
     # delete temporary file
     try:
         os.remove(tempname)
     except OSError:
         show_error_message("Unable to remove temporary file %s." % tempname)
Example No. 11
def _configure_logging():
    import os
    import logging

    log = logging.getLogger("pyoram")
    formatter = logging.Formatter(
        fmt=("[%(asctime)s.%(msecs)03d,"
             "%(name)s,%(levelname)s] %(threadName)s %(message)s"),
        datefmt="%Y-%m-%d %H:%M:%S")

    level = os.environ.get("PYORAM_LOGLEVEL", "WARNING")
    logfilename = os.environ.get("PYORAM_LOGFILE", None)
    if len(logging.root.handlers) == 0:
        # configure the logging with some sensible
        # defaults.
        try:
            import tempfile
            tempfile = tempfile.TemporaryFile(dir=".")
            tempfile.close()
        except OSError:
            # cannot write in current directory, use the
            # console logger
            handler = logging.StreamHandler()
        else:
            if logfilename is None:
                handler = logging.StreamHandler()
            else:
                # set up a basic logfile in current directory
                handler = logging.FileHandler(logfilename)
        handler.setFormatter(formatter)
        handler.setLevel(level)
        log.addHandler(handler)
        log.setLevel(level)
        log.info("PyORAM log configured using built-in "
                 "defaults, level=%s", level)
Example No. 12
def bedAnnotateDownstream(bedFile, geneFile):
    """ annotate bed features with the gene downstream of it """

    tempfile = tempfile.NamedTemporaryFile()
    cmd = 'bedFindNeighbors %s %s --onlyDownstream > %s' % (bedFile, geneFile, tempfile.name)
    util.execCmdLine(cmd)
    beds =  parseBedFilename(tempfile.name)
    tempfile.close()
    return beds
Example No. 13
def bedAnnotateDownstream(bedFile, geneFile):
    """ annotate bed features with the gene downstream of it """

    tempfile = tempfile.NamedTemporaryFile()
    cmd = "bedFindNeighbors %s %s --onlyDownstream > %s" % (bedFile, geneFile, tempfile.name)
    util.execCmdLine(cmd)
    beds = parseBedFilename(tempfile.name)
    tempfile.close()
    return beds
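Note that Examples No. 12 and No. 13 rebind the name tempfile inside the function, which shadows the module; with an ordinary module-level import tempfile, the assignment makes tempfile a local name and the call tempfile.NamedTemporaryFile() itself would raise UnboundLocalError. A sketch with the collision removed, assuming bedFindNeighbors is the external command from the example and reading the raw lines instead of the project's parseBedFilename helper:

import subprocess
import tempfile

def annotate_with_neighbors(bed_file, gene_file):
    """annotate bed features with the gene downstream of them"""
    # bind the file object to a different name so the tempfile module stays visible
    with tempfile.NamedTemporaryFile(mode="r", suffix=".bed") as tmp:
        cmd = "bedFindNeighbors %s %s --onlyDownstream > %s" % (bed_file, gene_file, tmp.name)
        subprocess.run(cmd, shell=True, check=True)
        return tmp.read().splitlines()           # the file is deleted when the block exits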
Example No. 14
def test_named_tempfile1():
    name = None
    with named_tempfile() as tempfile:
        name = tempfile.name
        assert_true(os.path.isfile(name))
        tempfile.write('hello'.encode('utf8'))
        tempfile.close()
        assert_true(os.path.isfile(name))
    assert_false(os.path.isfile(name))
Example No. 15
    def setUpClass(cls):
        # Create a predictable 296.1 MB temporary file
        elements = [200, 50, 25] * 9999
        cls.temp_filename = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'data', "%s.bin" % uuid.uuid4())
        tempfile = open(cls.temp_filename, 'wb')

        for i in xrange(0, 9872):
            tempfile.write(bytearray(elements))

        tempfile.close()
Example No. 16
def browse_disptrace(dt):
    import tempfile, webbrowser, urllib, os

    html = dt.render()
    tempfiledes, temppath = tempfile.mkstemp(suffix='.html')
    tempfile = os.fdopen(tempfiledes, "w")
    tempfile.write(html)
    tempfile.close()
    tempurl = "file://{}".format(urllib.pathname2url(temppath))
    webbrowser.get(None).open_new(tempurl)
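Example No. 16 pairs tempfile.mkstemp with os.fdopen to write the rendered HTML before handing a file:// URL to the browser. A Python 3 variant of the same idea, where NamedTemporaryFile(delete=False) replaces the mkstemp/fdopen pair and pathname2url comes from urllib.request:

import tempfile
import urllib.request
import webbrowser

def browse_html(html):
    # keep the file around after closing so the browser can still open it
    with tempfile.NamedTemporaryFile("w", suffix=".html", delete=False) as tmp:
        tmp.write(html)
        temppath = tmp.name
    tempurl = "file://{}".format(urllib.request.pathname2url(temppath))
    webbrowser.open_new(tempurl)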
Example No. 17
def test_named_tempfile2():
    name = None
    # The specification of delete=True should be ignored.
    with named_tempfile(delete=True) as tempfile:
        name = tempfile.name
        assert_true(os.path.isfile(name))
        tempfile.write('hello'.encode('utf8'))
        tempfile.close()
        assert_true(os.path.isfile(name))
    assert_false(os.path.isfile(name))
Example No. 18
def browse_disptrace(dt):
    import tempfile, webbrowser, urllib, os

    html = dt.render()
    tempfiledes, temppath = tempfile.mkstemp(suffix='.html')
    tempfile = os.fdopen(tempfiledes, "w")
    tempfile.write(html)
    tempfile.close()
    tempurl = "file://{}".format(urllib.pathname2url(temppath))
    webbrowser.get(None).open_new(tempurl)
Example No. 19
def open_url(url, wait=_C, locate=_C):
    F = '"'
    D = locate
    C = wait
    A = url
    import subprocess as G

    def E(url):
        A = url
        import urllib as B
        if A.startswith('file://'): A = B.unquote(A[7:])
        return A

    if sys.platform == 'darwin':
        B = ['open']
        if C: B.append('-W')
        if D: B.append('-R')
        B.append(E(A))
        H = open('/dev/null', 'w')
        try:
            return G.Popen(B, stderr=H).wait()
        finally:
            H.close()
    elif WIN:
        if D:
            A = E(A.replace(F, ''))
            B = f'explorer /select,"{A}"'
        else:
            A = A.replace(F, '')
            C = '/WAIT' if C else ''
            B = f'start {C} "" "{A}"'
        return os.system(B)
    elif CYGWIN:
        if D:
            A = os.path.dirname(E(A).replace(F, ''))
            B = f'cygstart "{A}"'
        else:
            A = A.replace(F, '')
            C = '-w' if C else ''
            B = f'cygstart {C} "{A}"'
        return os.system(B)
    try:
        if D: A = os.path.dirname(E(A)) or '.'
        else: A = E(A)
        I = G.Popen(['xdg-open', A])
        if C: return I.wait()
        return 0
    except OSError:
        if A.startswith(('http://', 'https://')) and not D and not C:
            import webbrowser as J
            J.open(A)
            return 0
        return 1
Example No. 20
 def test_named_temporary_file(self):
     tempfile = self.client.tempfile.NamedTemporaryFile()
     self.assertTrue(os.path.isfile(tempfile.name))
     self.assertTrue(self.client.os.path.isfile(tempfile.name))
     tempfile.close()
     self.assertFalse(os.path.exists(tempfile.name))
     self.assertFalse(self.client.os.path.exists(tempfile.name))
     with self.client.tempfile.NamedTemporaryFile() as fh:
         fh.write("hi")
         fh.flush()
         with open(fh.name, "rb") as local_file:
             self.assertEqual(local_file.read(), "hi")
Example No. 21
    def test_namedtemporaryfile_closes(self):
        """
        The symbol django.core.files.NamedTemporaryFile is assigned as
        a different class on different operating systems. In
        any case, the result should minimally mock some of the API of
        tempfile.NamedTemporaryFile from the Python standard library.
        """
        tempfile = NamedTemporaryFile()
        self.assertTrue(hasattr(tempfile, "closed"))
        self.assertFalse(tempfile.closed)

        tempfile.close()
        self.assertTrue(tempfile.closed)
Example No. 22
    def test_namedtemporaryfile_closes(self):
        """
        The symbol django.core.files.NamedTemporaryFile is assigned as
        a different class on different operating systems. In
        any case, the result should minimally mock some of the API of
        tempfile.NamedTemporaryFile from the Python standard library.
        """
        tempfile = NamedTemporaryFile()
        self.assertTrue(hasattr(tempfile, "closed"))
        self.assertFalse(tempfile.closed)

        tempfile.close()
        self.assertTrue(tempfile.closed)
Example No. 23
 def _load_entries(self):
     """
     loads the file entries
     """
     # decrypt entries file to a temporary file
     key = self.get_key()
     fname = "cryptbox.00000001"
     srcpath = os.path.join(self._rootpath, fname)
     if not os.path.isfile(srcpath):
         # entry file does not exist
         return
     tempname = NamedTemporaryFile().name
     decrypt_file(srcpath, tempname, key)
     # read decrypted file
     line = None
     try:
         tempfile = open(tempname, "r")
         line = tempfile.readline()
         tempfile.close()
     except IOError:
         show_error_message("Unable to read temporary file %s." % tempname,
                            True)
     # parse JSON content
     try:
         store_dict = json.loads(line)
         if type(store_dict) == dict:
             self._max_id = store_dict["max_id"]
             entry_list = store_dict["entries"]
             self._entries = []
             self._entry_dict = {}
             for entry_dict in entry_list:
                 filepath = entry_dict["filepath"]
                 timestamp = entry_dict["timestamp"]
                 state = entry_dict["state"]
                 entry_id = entry_dict["entry_id"]
                 entry = CryptStoreEntry(filepath, timestamp, state,
                                         entry_id)
                 self._entries.append(entry)
                 self._entry_dict[unicode(filepath)] = entry
     except ValueError:
         show_error_message("Unable to parse entry file.", False)
     # delete temporary file
     try:
         os.remove(tempname)
     except OSError:
         show_error_message("Unable to remove temporary file %s." %
                            tempname)
Example No. 24
 def write_config(self, config,
                  unlink_function=os.unlink,
                  temp_function=tempfile.NamedTemporaryFile):
     """Write the configuration contents to vertica.cnf file."""
     LOG.debug('Defining config holder at %s.' % system.VERTICA_CONF)
     tempfile = temp_function(delete=False)
     try:
         config.write(tempfile)
         tempfile.close()
         command = (("install -o root -g root -m 644 %(source)s %(target)s"
                     ) % {'source': tempfile.name,
                          'target': system.VERTICA_CONF})
         system.shell_execute(command)
         unlink_function(tempfile.name)
     except Exception:
         unlink_function(tempfile.name)
         raise
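Example No. 24 stages the configuration in a NamedTemporaryFile(delete=False), installs it over the target with a shell command, and unlinks the temporary file whether or not the install succeeded. A plain-Python sketch of the same write-temp-then-put-in-place pattern, using os.replace instead of the service's install/shell_execute helpers and taking the contents as a string; the target path is a placeholder:

import os
import tempfile

def write_config_file(contents, target="/etc/example/example.cnf"):
    # stage the new contents next to the target so the final rename stays on one filesystem
    tmp = tempfile.NamedTemporaryFile("w", delete=False, dir=os.path.dirname(target))
    try:
        tmp.write(contents)
        tmp.close()
        os.replace(tmp.name, target)             # atomic swap into place
    except Exception:
        tmp.close()
        os.unlink(tmp.name)
        raise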
Example No. 25
 def _load_entries(self):
     """
     loads the file entries
     """
     # decrypt entries file to a temporary file
     key = self.get_key()
     fname = "cryptbox.00000001"
     srcpath = os.path.join(self._rootpath, fname)
     if not os.path.isfile(srcpath):
         # entry file does not exist
         return
     tempname = NamedTemporaryFile().name
     decrypt_file(srcpath, tempname, key)
     # read decrypted file
     line = None
     try:
         tempfile = open(tempname, "r")
         line = tempfile.readline()
         tempfile.close()
     except IOError:
         show_error_message("Unable to read temporary file %s." % tempname, True)
     # parse JSON content
     try:
         store_dict = json.loads(line)
         if type(store_dict) == dict:
             self._max_id = store_dict["max_id"]
             entry_list = store_dict["entries"]
             self._entries = []
             self._entry_dict = {}
             for entry_dict in entry_list:
                 filepath = entry_dict["filepath"]
                 timestamp = entry_dict["timestamp"]
                 state = entry_dict["state"]
                 entry_id = entry_dict["entry_id"]
                 entry = CryptStoreEntry(filepath, timestamp, state, entry_id)
                 self._entries.append(entry)
                 self._entry_dict[unicode(filepath)] = entry
     except ValueError:
         show_error_message("Unable to parse entry file.", False)
     # delete temporary file
     try:
         os.remove(tempname)
     except OSError:
         show_error_message("Unable to remove temporary file %s." % tempname)
Example No. 26
def main():
    parser = argparse.ArgumentParser(description='Amazon S3 Backup.')
    parser.add_argument('config_file', help='JSON configuration file')
    args = parser.parse_args()

    try:
        config = read_config(args.config_file)
    except ConfigError as e:
        logger.exception("Error reading configuration file")
        sys.exit(1)

    logger.debug(config)

    logger.debug("Creating temporary tar file")
    tempfile = create_tempfile()

    try:
        create_tarfile(tempfile, config.get("sources"),
                       config.get("exclusions"))
    except TarFileCreationError as e:
        logger.error(f"Error creating tar file: {e}")
        sys.exit(1)

    tempfile.seek(0)  # Needed for S3 upload to work

    logger.debug("Creating S3 bucket object")
    s3_bucket = S3Bucket(
        config.get("s3").get("bucket_name"),
        config.get("s3").get("bucket_region"),
        config.get("s3").get("storage_class"))

    s3_metadata = {
        "sources": ",".join(config.get("sources")),
        "exclusions": ",".join(config.get("exclusions"))
    }
    s3_key = generate_s3_key(config.get("tarfile_name_prefix"))

    logger.info("Uploading file to S3")
    s3_bucket.upload_fileobj(tempfile, s3_key, s3_metadata)

    logger.debug("Closing temporary tar file")
    tempfile.close()

    logger.info("Done!")
Example No. 27
    def __from_wave__(cls, filename, wave_filename, compression=None):
        if (str(compression) not in cls.COMPRESSION_MODES):
            compression = cls.DEFAULT_COMPRESSION

        #mppenc requires files to end with .mpc for some reason
        if (not filename.endswith(".mpc")):
            import tempfile
            actual_filename = filename
            tempfile = tempfile.NamedTemporaryFile(suffix=".mpc")
            filename = tempfile.name
        else:
            actual_filename = tempfile = None

        ###Musepack SV7###
        #sub = subprocess.Popen([BIN['mppenc'],
        #                        "--silent",
        #                        "--overwrite",
        #                        "--%s" % (compression),
        #                        wave_filename,
        #                        filename],
        #                       preexec_fn=ignore_sigint)

        ###Musepack SV8###
        sub = subprocess.Popen([BIN['mpcenc'],
                                "--silent",
                                "--overwrite",
                                "--%s" % (compression),
                                wave_filename,
                                filename])

        if (sub.wait() == 0):
            if (tempfile is not None):
                filename = actual_filename
                f = file(filename, 'wb')
                tempfile.seek(0, 0)
                transfer_data(tempfile.read, f.write)
                f.close()
                tempfile.close()

            return MusepackAudio(filename)
        else:
            if (tempfile is not None):
                tempfile.close()
            raise EncodingError(u"error encoding file with mpcenc")
Example No. 28
    def __from_wave__(cls, filename, wave_filename, compression=None):
        if (str(compression) not in cls.COMPRESSION_MODES):
            compression = cls.DEFAULT_COMPRESSION

        #mppenc requires files to end with .mpc for some reason
        if (not filename.endswith(".mpc")):
            import tempfile
            actual_filename = filename
            tempfile = tempfile.NamedTemporaryFile(suffix=".mpc")
            filename = tempfile.name
        else:
            actual_filename = tempfile = None

        ###Musepack SV7###
        #sub = subprocess.Popen([BIN['mppenc'],
        #                        "--silent",
        #                        "--overwrite",
        #                        "--%s" % (compression),
        #                        wave_filename,
        #                        filename],
        #                       preexec_fn=ignore_sigint)

        ###Musepack SV8###
        sub = subprocess.Popen([BIN['mpcenc'],
                                "--silent",
                                "--overwrite",
                                "--%s" % (compression),
                                wave_filename,
                                filename])

        if (sub.wait() == 0):
            if (tempfile is not None):
                filename = actual_filename
                f = file(filename, 'wb')
                tempfile.seek(0, 0)
                transfer_data(tempfile.read, f.write)
                f.close()
                tempfile.close()

            return MusepackAudio(filename)
        else:
            if (tempfile is not None):
                tempfile.close()
            raise EncodingError(u"error encoding file with mpcenc")
Example No. 29
        def copy_to_temp(response):
            import tempfile
            import shutil

            (td, path) = tempfile.mkstemp()

            tempfile = os.fdopen(td, "w+")

            logger.debug("downloading to temporary file {0}".format(path))

            try:
                shutil.copyfileobj(response, tempfile)
            except:
                logger.debug("removing temporary file {0}".format(path))
                tempfile.close()
                os.remove(path)
                raise

            return tempfile, path
Example No. 30
def _configLogging():
    """Do some basic config of the logging module at package import time.
    The configuring is done only if the PYRO_LOGLEVEL env var is set.
    If you want to use your own logging config, make sure you do
    that before any Pyro imports. Then Pyro will skip the autoconfig.
    Set the env var PYRO_LOGFILE to change the name of the autoconfigured
    log file (default is pyro.log in the current dir). Use '{stderr}' to
    make the log go to the standard error output."""
    import os
    import logging

    level = os.environ.get("PYRO_LOGLEVEL")
    logfilename = os.environ.get("PYRO_LOGFILE", "pyro.log")
    if logfilename == "{stderr}":
        logfilename = None
    if level not in (None, ""):
        levelvalue = getattr(logging, level)
        if len(logging.root.handlers) == 0:
            # configure the logging with some sensible defaults.
            try:
                if logfilename:
                    import tempfile
                    logfile_dir = os.path.dirname(os.path.expanduser(logfilename))
                    tempfile = tempfile.TemporaryFile(dir=logfile_dir)
                    tempfile.close()
            except OSError:
                # cannot write in the desired logfile directory, use the default console logger
                logging.basicConfig(level=levelvalue)
                logging.getLogger("Pyro4").warn("unable to write to the desired logfile (access rights?), falling back to console logger")
            else:
                # set up a basic logfile in current directory
                logging.basicConfig(
                    level=levelvalue,
                    filename=logfilename,
                    datefmt="%Y-%m-%d %H:%M:%S",
                    format="[%(asctime)s.%(msecs)03d,%(name)s,%(levelname)s] %(message)s"
                )
            log = logging.getLogger("Pyro4")
            log.info("Pyro log configured using built-in defaults, level=%s", level)
    else:
        # PYRO_LOGLEVEL is not set, disable Pyro logging. No message is printed about this fact.
        log = logging.getLogger("Pyro4")
        log.setLevel(9999)
Example No. 31
        def copy_to_temp(response):
            import tempfile
            import shutil

            (td, path) = tempfile.mkstemp()

            tempfile = os.fdopen(td, "w+")

            logger.debug("downloading to temporary file {0}".format(path))

            try:
                shutil.copyfileobj(response, tempfile)
            except:
                logger.debug("removing temporary file {0}".format(path))
                tempfile.close()
                os.remove(path)
                raise

            return tempfile, path
Example No. 32
    def message_open(self, muuid, online=False):

        """Extracts the HTML Site to a Temp File and
        Shows it in Webbrowser.
        The Message is set read.
        Temp Files are deleted in __del__ function.
        """

        self.log.info('Open: %s'%muuid)
        if online:
            url = self.message_get_meta(muuid)['url']
            webbrowser.open_new_tab(url)
        else:
            mime = self.message_get_meta(muuid)['mimetype']
            extension = self.mimetypes.get_extension(mime)

            name = str(uuid.uuid4()) + extension[0]
            path = os.path.join(self.tempdir,name)
            tempfile = open(path,'wb')

            if extension[0] == '.html':
                meta = self.message_get_meta(muuid)
                data = self.message_get_data(muuid).encode(meta['encoding'])
                tempfile.write(data)
                tempfile.close()
                webbrowser.open_new_tab(path)
            else:

                data = self.message_get_data(muuid)
                tempfile.write(data)
                tempfile.close()
                if sys.platform.startswith('darwin'):
                    subprocess.call(('open', path))
                elif os.name == 'nt':
                    os.startfile(path)
                elif os.name == 'posix':
                    subprocess.call(('xdg-open', path))


        self.message_set_meta(muuid, 'read', True)
Example No. 33
def _configLogging():
    """Do some basic config of the logging module at package import time.
    The configuring is done only if the PYRO_LOGLEVEL env var is set.
    If you want to use your own logging config, make sure you do
    that before any Pyro imports. Then Pyro will skip the autoconfig.
    Set the env var PYRO_LOGFILE to change the name of the autoconfigured
    log file (default is pyro.log in the current dir). Use '{stderr}' to
    make the log go to the standard error output."""
    import os
    import logging

    level = os.environ.get("PYRO_LOGLEVEL")
    logfilename = os.environ.get("PYRO_LOGFILE", "pyro.log")
    if logfilename == "{stderr}":
        logfilename = None
    if level is not None:
        levelvalue = getattr(logging, level)
        if len(logging.root.handlers) == 0:
            # configure the logging with some sensible defaults.
            try:
                import tempfile
                tempfile = tempfile.TemporaryFile(dir=".")
                tempfile.close()
            except OSError:
                # cannot write in current directory, use the default console logger
                logging.basicConfig(level=levelvalue)
            else:
                # set up a basic logfile in current directory
                logging.basicConfig(
                    level=levelvalue,
                    filename=logfilename,
                    datefmt="%Y-%m-%d %H:%M:%S",
                    format="[%(asctime)s.%(msecs)03d,%(name)s,%(levelname)s] %(message)s"
                )
            log = logging.getLogger("Pyro4")
            log.info("Pyro log configured using built-in defaults, level=%s", level)
    else:
        # PYRO_LOGLEVEL is not set, disable Pyro logging. No message is printed about this fact.
        log = logging.getLogger("Pyro4")
        log.setLevel(9999)
Example No. 34
    def setUp(self):
        provision_device()

        self.client = Client()
        self.hash = hashlib.md5("DUMMYDATA".encode()).hexdigest()
        self.extension = file_formats.PDF
        self.filename = "{}.{}".format(self.hash, self.extension)
        self.title = "abc123!@#$%^&*();'[],./?><"
        self.contentnode = ContentNode(title=self.title)
        self.available = True
        self.preset = format_presets.DOCUMENT
        self.local_file = LocalFile(id=self.hash, extension=self.extension, available=self.available)
        self.file = File(local_file=self.local_file, available=self.available,
                         contentnode=self.contentnode, preset=self.preset)

        self.path = get_content_storage_file_path(self.filename)
        path_dir = os.path.dirname(self.path)
        if not os.path.exists(path_dir):
            os.makedirs(path_dir)
        tempfile = open(self.path, "w")
        tempfile.write("test")
        tempfile.close()
Example No. 35
    def setUp(self):
        # create DeviceOwner to pass the setup_wizard middleware check
        DeviceOwner.objects.create(username='******', password=123)

        self.client = Client()
        self.hash = hashlib.md5("DUMMYDATA".encode()).hexdigest()
        self.extension = file_formats.PDF
        self.filename = "{}.{}".format(self.hash, self.extension)
        self.title = "abc123!@#$%^&*();'[],./?><"
        self.contentnode = ContentNode(title=self.title)
        self.available = True
        self.preset = format_presets.DOCUMENT
        self.file = File(checksum=self.hash, extension=self.extension, available=self.available,
                         contentnode=self.contentnode, preset=self.preset)

        self.path = get_content_storage_file_path(self.filename)
        path_dir = os.path.dirname(self.path)
        if not os.path.exists(path_dir):
            os.makedirs(path_dir)
        tempfile = open(self.path, "w")
        tempfile.write("test")
        tempfile.close()
Example No. 36
 def _save_entries(self):
     """
     saves the file entries
     """
     tempname = NamedTemporaryFile().name
     # create a JSON dictionary
     store_dict = {}
     store_dict["max_id"] = self._max_id
     entry_list = []
     for entry in self._entries:
         entry_dict = {}
         entry_dict["filepath"] = entry.get_filepath()
         entry_dict["timestamp"] = entry.get_timestamp()
         entry_dict["state"] = entry.get_state()
         entry_dict["entry_id"] = entry.get_entry_id()
         entry_list.append(entry_dict)
     store_dict["entries"] = entry_list
     line = json.dumps(store_dict)
     # write JSON to temporary file
     try:
         tempfile = open(tempname, "w")
         tempfile.write(line)
         tempfile.close()
     except IOError:
         show_error_message(
             "Unable to create temporary file %s." % tempname, True)
     # copy encrypted temporary file to cryptstore
     key = self.get_key()
     fname = "cryptbox.00000001"
     destpath = os.path.join(self._rootpath, fname)
     encrypt_file(tempname, destpath, key)
     # delete temporary file
     try:
         os.remove(tempname)
     except OSError:
         show_error_message("Unable to remove temporary file %s." %
                            tempname)
Example No. 37
 def clean_all_temp(self):
     while self.tempfiles:
         tempfile = self.tempfiles.pop(0)
         tempfile.close()
Example No. 38
            raise err

        try:
            pcmreader.close()
        except DecodingError, err:
            raise EncodingError(err.error_message)
        sub.stdin.close()

        if sub.wait() == 0:
            if tempfile is not None:
                filename = actual_filename
                f = file(filename, "wb")
                tempfile.seek(0, 0)
                transfer_data(tempfile.read, f.write)
                f.close()
                tempfile.close()

            return M4AAudio(filename)
        else:
            if tempfile is not None:
                tempfile.close()
            raise EncodingError(u"unable to write file with faac")

    @classmethod
    def can_add_replay_gain(cls):
        """Returns False."""

        return False

    @classmethod
    def lossless_replay_gain(cls):
Example No. 39
    def from_pcm(cls,
                 filename,
                 pcmreader,
                 compression=None,
                 total_pcm_frames=None):
        """encodes a new file from PCM data

        takes a filename string, PCMReader object,
        optional compression level string and optional
        total_pcm_frames integer
        encodes a new audio file from pcmreader's data
        at the given filename with the specified compression level
        and returns a new M4AAudio object"""

        import subprocess
        import os
        from audiotools import PCMConverter
        from audiotools import transfer_data
        from audiotools import transfer_framelist_data
        from audiotools import ignore_sigint
        from audiotools import EncodingError
        from audiotools import DecodingError
        from audiotools import ChannelMask
        from audiotools import __default_quality__

        if ((compression is None)
                or (compression not in cls.COMPRESSION_MODES)):
            compression = __default_quality__(cls.NAME)

        if pcmreader.channels > 2:
            pcmreader = PCMConverter(pcmreader,
                                     sample_rate=pcmreader.sample_rate,
                                     channels=2,
                                     channel_mask=ChannelMask.from_channels(2),
                                     bits_per_sample=pcmreader.bits_per_sample)

        # faac requires files to end with .m4a for some reason
        if not filename.endswith(".m4a"):
            import tempfile
            actual_filename = filename
            tempfile = tempfile.NamedTemporaryFile(suffix=".m4a")
            filename = tempfile.name
        else:
            actual_filename = tempfile = None

        sub = subprocess.Popen(
            [
                BIN['faac'], "-q", compression, "-P", "-R",
                str(pcmreader.sample_rate), "-B",
                str(pcmreader.bits_per_sample), "-C",
                str(pcmreader.channels), "-X", "-o", filename, "-"
            ],
            stdin=subprocess.PIPE,
            stderr=subprocess.DEVNULL
            if hasattr(subprocess, "DEVNULL") else open(os.devnull, "wb"),
            stdout=subprocess.DEVNULL
            if hasattr(subprocess, "DEVNULL") else open(os.devnull, "wb"),
            preexec_fn=ignore_sigint)
        # Note: faac handles SIGINT on its own,
        # so trying to ignore it doesn't work like on most other encoders.

        try:
            if total_pcm_frames is not None:
                from audiotools import CounterPCMReader
                pcmreader = CounterPCMReader(pcmreader)

            transfer_framelist_data(pcmreader, sub.stdin.write)

            if ((total_pcm_frames is not None)
                    and (total_pcm_frames != pcmreader.frames_written)):
                from audiotools.text import ERR_TOTAL_PCM_FRAMES_MISMATCH
                raise EncodingError(ERR_TOTAL_PCM_FRAMES_MISMATCH)

        except (ValueError, IOError) as err:
            sub.stdin.close()
            sub.wait()
            cls.__unlink__(filename)
            raise EncodingError(str(err))
        except Exception:
            sub.stdin.close()
            sub.wait()
            cls.__unlink__(filename)
            raise

        sub.stdin.close()

        if sub.wait() == 0:
            if tempfile is not None:
                filename = actual_filename
                f = open(filename, 'wb')
                tempfile.seek(0, 0)
                transfer_data(tempfile.read, f.write)
                f.close()
                tempfile.close()

            return M4AAudio(filename)
        else:
            if tempfile is not None:
                tempfile.close()
            raise EncodingError(u"unable to write file with faac")
Example No. 40
def s3_upload(hashkey, size=ORIGINAL_SIZE):
    """
    Upload a screenshot PNG file to Amazon S3.

    This uses httplib directly and transfers the file in small chunks,
    so we don't have to load the whole PNG file into RAM.
    """

    from shotserver04.screenshots import s3
    aws = s3.AWSAuthConnection(settings.AWS_ACCESS_KEY_ID,
                               settings.AWS_SECRET_ACCESS_KEY,
                               is_secure=False)
    s3_bucket = settings.S3_BUCKETS[str(size)]
    s3_key = hashkey + '.png'
    server = s3.DEFAULT_HOST
    method = 'PUT'
    path = '/%s/%s' % (s3_bucket, urllib.quote_plus(s3_key))

    filename = png_filename(hashkey, size)
    f = file(filename, 'rb')
    f.seek(0, 2)  # os.SEEK_END for Python < 2.5
    bytes_total = f.tell()
    f.seek(0, 0)  # os.SEEK_SET for Python < 2.5

    headers = {
        'User-Agent': 'shotserver/0.4',
        'Host': server,
        'x-amz-acl': 'public-read',
        'Content-Type': 'image/png',
        'Content-Length': str(bytes_total),
    }
    query_args = {}
    aws._add_aws_auth_header(headers, method, s3_bucket, s3_key, query_args)

    host = '%s:%d' % (server, 80)
    conn = httplib.HTTPConnection(host)
    conn.putrequest(method, path)
    for header_key, header_value in headers.iteritems():
        conn.putheader(header_key, header_value)
    conn.endheaders()

    bytes_sent = 0
    while True:
        bytes = f.read(BUFFER_SIZE)
        if not bytes:
            break
        conn.send(bytes)
        bytes_sent += len(bytes)
        # print 'sent', bytes_sent, 'of', bytes_total, 'bytes',
        # print '(%.1f%%)' % (100.0 * bytes_sent / bytes_total)
    assert bytes_sent == bytes_total
    f.close()

    response = conn.getresponse()
    if response.status != 200:
        raise Fault(response.status, response.read())
    # print 'http://%s/%s' % (s3_bucket, s3_key)

    # Write response from S3 to tempfile for debugging
    if DEBUG_HEADERS and str(size) == '160':
        tempfile = file('/tmp/%s.txt' % hashkey, 'w')
        tempfile.write('==== Request headers ====\n')
        tempfile.write('%s %s HTTP/1.1\n' % (method, path))
        for header, value in headers.iteritems():
            tempfile.write('%s: %s\n' % (header, value))
        tempfile.write('\n')
        tempfile.write('==== Response headers ====\n')
        tempfile.write('HTTP/1.1 %s %s\n' % (response.status, response.reason))
        for header, value in response.getheaders():
            tempfile.write('%s: %s\n' % (header, value))
        tempfile.write('\n')
        tempfile.write(response.read())
        # tempfile.write(response.msg)
        tempfile.close()

    conn.close()
Example No. 41
def s3_upload(hashkey, size=ORIGINAL_SIZE):
    """
    Upload a screenshot PNG file to Amazon S3.

    This uses httplib directly and transfers the file in small chunks,
    so we don't have to load the whole PNG file into RAM.
    """

    from shotserver04.screenshots import s3
    aws = s3.AWSAuthConnection(
        settings.AWS_ACCESS_KEY_ID, settings.AWS_SECRET_ACCESS_KEY,
        is_secure=False)
    s3_bucket = settings.S3_BUCKETS[str(size)]
    s3_key = hashkey + '.png'
    server = s3.DEFAULT_HOST
    method = 'PUT'
    path = '/%s/%s' % (s3_bucket, urllib.quote_plus(s3_key))

    filename = png_filename(hashkey, size)
    f = file(filename, 'rb')
    f.seek(0, 2) # os.SEEK_END for Python < 2.5
    bytes_total = f.tell()
    f.seek(0, 0) # os.SEEK_SET for Python < 2.5

    headers = {
        'User-Agent': 'shotserver/0.4',
        'Host': server,
        'x-amz-acl': 'public-read',
        'Content-Type': 'image/png',
        'Content-Length': str(bytes_total),
        }
    query_args = {}
    aws._add_aws_auth_header(headers, method, s3_bucket, s3_key, query_args)

    host = '%s:%d' % (server, 80)
    conn = httplib.HTTPConnection(host)
    conn.putrequest(method, path)
    for header_key, header_value in headers.iteritems():
        conn.putheader(header_key, header_value)
    conn.endheaders()

    bytes_sent = 0
    while True:
        bytes = f.read(BUFFER_SIZE)
        if not bytes:
            break
        conn.send(bytes)
        bytes_sent += len(bytes)
        # print 'sent', bytes_sent, 'of', bytes_total, 'bytes',
        # print '(%.1f%%)' % (100.0 * bytes_sent / bytes_total)
    assert bytes_sent == bytes_total
    f.close()

    response = conn.getresponse()
    if response.status != 200:
        raise Fault(response.status, response.read())
    # print 'http://%s/%s' % (s3_bucket, s3_key)

    # Write response from S3 to tempfile for debugging
    if DEBUG_HEADERS and str(size) == '160':
        tempfile = file('/tmp/%s.txt' % hashkey, 'w')
        tempfile.write('==== Request headers ====\n')
        tempfile.write('%s %s HTTP/1.1\n' % (method, path))
        for header, value in headers.iteritems():
            tempfile.write('%s: %s\n' % (header, value))
        tempfile.write('\n')
        tempfile.write('==== Response headers ====\n')
        tempfile.write('HTTP/1.1 %s %s\n' % (response.status, response.reason))
        for header, value in response.getheaders():
            tempfile.write('%s: %s\n' % (header, value))
        tempfile.write('\n')
        tempfile.write(response.read())
        # tempfile.write(response.msg)
        tempfile.close()

    conn.close()
Example No. 42
def _download_and_unpack_file(url):
    """Downloads the database files created with setup-exfor-db.py as
    a tarball and unpacks them to the correct folder."""

    from tqdm import tqdm
    from glob import glob
    import requests
    import math
    import tarfile
    import tempfile
    import shutil

    # cleanup
    for f in [
        fullIndexFileName, fullErrorFileName,
        fullCoupledFileName, fullMonitoredFileName,
        fullReactionCountFileName, fullDBPath, dbTagFile
    ]:
        try:
            shutil.rmtree(f)
        except NotADirectoryError:
            os.remove(f)
        except FileNotFoundError:
            pass
    # Tag files:
    tag_files = [
        f for tag in ['X4-*', 'EXFOR-*'] for f in glob(os.path.join(DATAPATH, tag))
        ]
    for tagfile in tag_files:
        try:
            os.remove(tagfile)
        except FileNotFoundError:
            pass
        

    # Streaming, so we can iterate over the response.
    r = requests.get(url, stream=True)
    tarname = os.path.basename(url)

    # Total size in bytes.
    total_size = int(r.headers.get('content-length', 0))
    block_size = 1024 * 1024
    wrote = 0
    tempfile = tempfile.TemporaryFile()
    
    print('Downloading data file', tarname)
    for data in tqdm(r.iter_content(block_size), total=math.ceil(total_size // block_size),
                    unit='MB', unit_scale=True):
        wrote = wrote + len(data)
        tempfile.write(data)
    if total_size != 0 and wrote != total_size:
        raise Exception("ERROR, something went wrong")
    tempfile.flush()
    tempfile.seek(0)
    print('Decompressing archive', tarname)
    wrote = 0
    with tarfile.open(fileobj=tempfile, mode='r') as _tar:
        total = len(_tar.getmembers())
        for member in tqdm(_tar.getmembers(), total=total):
            wrote = wrote + len(data)
            _tar.extract(member, DATAPATH)
    tempfile.close()

    with open(dbTagFile,'wb') as f:
        print('Installed database version', dbTagFile)
        pass
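Example No. 42 streams the download into an anonymous TemporaryFile, rewinds it, and extracts the tarball from the open file object. A condensed sketch of that flow with requests and tarfile only; the URL and destination directory are placeholders, and the progress bars and cleanup from the original are left out:

import tarfile
import tempfile

import requests

def download_and_unpack(url, destination="."):
    with tempfile.TemporaryFile() as tmp:
        with requests.get(url, stream=True) as response:
            response.raise_for_status()
            for chunk in response.iter_content(chunk_size=1024 * 1024):
                tmp.write(chunk)                  # spool the archive to disk, not RAM
        tmp.seek(0)                               # rewind before handing the file to tarfile
        with tarfile.open(fileobj=tmp, mode="r") as archive:
            archive.extractall(destination)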
Example No. 43
def mosaic(options, remove, an, ow, fil):
    """Create a daily mosaic of HDF files convert to TIF and import it"""
    try:
        # try to import pymodis (modis) and some classes for i.modis.download
        from rmodislib import product, projection, get_proj
    except ImportError as e:
        grass.fatal("Unable to load i.modis library: {}".format(e))
    try:
        from pymodis.convertmodis import convertModis, createMosaic
        from pymodis.convertmodis_gdal import createMosaicGDAL, convertModisGDAL
        from pymodis.parsemodis import parseModis
    except ImportError as e:
        grass.fatal("Unable to import pymodis library: {}".format(e))
    dictfile, targetdir = list_files(options, True)
    pid = str(os.getpid())
    # for each day
    count = len(dictfile.keys())
    idx = 1
    for dat, listfiles in dictfile.items():
        grass.message(
            _("Processing <{d}> ({i}/{c})...").format(d=dat, i=idx, c=count))
        grass.percent(idx, count, 5)
        idx += 1

        pref = listfiles[0].split(os.path.sep)[-1]
        prod = product().fromcode(pref.split(".")[0])
        spectr = spectral(options, prod, an)
        spectr = spectr.lstrip("( ").rstrip(" )")
        outname = "%s.%s_mosaic" % (pref.split(".")[0], pref.split(".")[1])
        outname = outname.replace(" ", "_")
        # create mosaic
        if options["mrtpath"]:
            # create the file with the list of name
            tempfile = open(os.path.join(targetdir, pid), "w")
            tempfile.writelines(listfiles)
            tempfile.close()
            # basedir of tempfile, where hdf files are write
            basedir = os.path.split(tempfile.name)[0]
            # return the spectral subset in according mrtmosaic tool format
            cm = createMosaic(tempfile.name, outname, options["mrtpath"],
                              spectr)
            cm.run()
            hdfiles = glob.glob1(basedir, outname + "*.hdf")
        else:
            basedir = targetdir
            listfiles = [os.path.join(basedir, i) for i in listfiles]
            cm = createMosaicGDAL(listfiles, spectr)
            try:
                cm.write_vrt(os.path.join(basedir, outname), quiet=True)
            except:
                cm.write_vrt(os.path.join(basedir, outname))
            hdfiles = glob.glob1(basedir, outname + "*.vrt")
        for i in hdfiles:
            # the full path to hdf file
            hdf = os.path.join(basedir, i)
            try:
                pm = parseModis(hdf)
            except:
                out = i.replace(".vrt", "")
                data = doy2date(dat[1:])
                pm = grassParseModis(out, data)
            # create convertModis class and convert it in tif file
            if options["mrtpath"]:
                # create conf file fro mrt tools
                confname = confile(pm, options, an, True)
                execmodis = convertModis(hdf, confname, options["mrtpath"])
            else:
                confname = None
                projwkt = get_proj("w")
                projObj = projection()
                if projObj.returned() != "GEO":
                    res = int(prod["res"]) * int(projObj.proj["meters"])
                else:
                    res = None
                execmodis = convertModisGDAL(str(hdf),
                                             out,
                                             spectr,
                                             res,
                                             wkt=str(projwkt),
                                             vrt=True)

            # produce temporary files in input folder
            os.chdir(basedir)
            try:
                execmodis.run(quiet=True)
            except:
                execmodis.run()
            # remove hdf
            if remove:
                # import tif files
                import_tif(
                    basedir=basedir,
                    rem=remove,
                    write=ow,
                    pm=pm,
                    listfile=fil,
                    prod=prod,
                )
                try:
                    os.remove(hdf)
                    os.remove(hdf + ".xml")
                except OSError:
                    pass
            # move the hdf and hdf.xml to the dir where are the original files
            else:
                # import tif files
                import_tif(
                    basedir=basedir,
                    rem=remove,
                    write=ow,
                    pm=pm,
                    target=targetdir,
                    listfile=fil,
                    prod=prod,
                )
                if i not in os.listdir(targetdir):
                    try:
                        shutil.move(hdf, targetdir)
                        shutil.move(hdf + ".xml", targetdir)
                    except OSError:
                        pass
            # remove the conf file
            try:
                os.remove(confname)
            except (OSError, TypeError) as e:
                pass
        if options["mrtpath"]:
            grass.try_remove(tempfile.name)
        grass.try_remove(os.path.join(targetdir, "mosaic", pid))
Example No. 44
 def _remove_tempfile(self, tempfile, temppath):
     tempfile.close()
     os.remove(temppath)
Example No. 45
    def from_pcm(cls, filename, pcmreader,
                 compression=None, total_pcm_frames=None):
        """encodes a new file from PCM data

        takes a filename string, PCMReader object,
        optional compression level string and optional
        total_pcm_frames integer
        encodes a new audio file from pcmreader's data
        at the given filename with the specified compression level
        and returns a new M4AAudio object"""

        import subprocess
        import os
        from audiotools import PCMConverter
        from audiotools import transfer_data
        from audiotools import transfer_framelist_data
        from audiotools import ignore_sigint
        from audiotools import EncodingError
        from audiotools import DecodingError
        from audiotools import ChannelMask
        from audiotools import __default_quality__

        if ((compression is None) or (compression not in
                                      cls.COMPRESSION_MODES)):
            compression = __default_quality__(cls.NAME)

        if pcmreader.bits_per_sample not in {8, 16, 24}:
            from audiotools import UnsupportedBitsPerSample
            pcmreader.close()
            raise UnsupportedBitsPerSample(filename, pcmreader.bits_per_sample)

        if pcmreader.channels > 2:
            pcmreader = PCMConverter(pcmreader,
                                     sample_rate=pcmreader.sample_rate,
                                     channels=2,
                                     channel_mask=ChannelMask.from_channels(2),
                                     bits_per_sample=pcmreader.bits_per_sample)

        # faac requires files to end with .m4a for some reason
        if not filename.endswith(".m4a"):
            import tempfile
            actual_filename = filename
            tempfile = tempfile.NamedTemporaryFile(suffix=".m4a")
            filename = tempfile.name
        else:
            actual_filename = tempfile = None

        sub = subprocess.Popen(
            [BIN['faac'],
             "-q", compression,
             "-P",
             "-R", str(pcmreader.sample_rate),
             "-B", str(pcmreader.bits_per_sample),
             "-C", str(pcmreader.channels),
             "-X",
             "-o", filename,
             "-"],
            stdin=subprocess.PIPE,
            stderr=subprocess.DEVNULL if hasattr(subprocess, "DEVNULL") else
            open(os.devnull, "wb"),
            stdout=subprocess.DEVNULL if hasattr(subprocess, "DEVNULL") else
            open(os.devnull, "wb"),
            preexec_fn=ignore_sigint)
        # Note: faac handles SIGINT on its own,
        # so trying to ignore it doesn't work like on most other encoders.

        try:
            if total_pcm_frames is not None:
                from audiotools import CounterPCMReader
                pcmreader = CounterPCMReader(pcmreader)

            transfer_framelist_data(pcmreader, sub.stdin.write)

            if ((total_pcm_frames is not None) and
                (total_pcm_frames != pcmreader.frames_written)):
                from audiotools.text import ERR_TOTAL_PCM_FRAMES_MISMATCH
                raise EncodingError(ERR_TOTAL_PCM_FRAMES_MISMATCH)

        except (ValueError, IOError) as err:
            sub.stdin.close()
            sub.wait()
            cls.__unlink__(filename)
            raise EncodingError(str(err))
        except Exception:
            sub.stdin.close()
            sub.wait()
            cls.__unlink__(filename)
            raise

        sub.stdin.close()

        if sub.wait() == 0:
            if tempfile is not None:
                filename = actual_filename
                f = open(filename, 'wb')
                tempfile.seek(0, 0)
                transfer_data(tempfile.read, f.write)
                f.close()
                tempfile.close()

            return M4AAudio(filename)
        else:
            if tempfile is not None:
                tempfile.close()
            raise EncodingError(u"unable to write file with faac")
Exemplo n.º 46
0
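            # (fragment) flush the previous note once both fields are filled,
            # then start a new note from the current tag; the else branch
            # below accumulates the answer field instead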
            if None not in fields:
                my_note = MyNote(model=my_model,
                                 fields=[str(fields[0]),
                                         str(fields[1])])
                my_deck.add_note(my_note)
            fields[0] = tag.decode_contents()
            fields[1] = None
        else:
            if not fields[1]:
                fields[1] = ""
            if tag.name is not None:
                imgs = tag.find_all("img")
                for img in imgs:
                    img_file = img.attrs["src"]
                    fullpath = pathjoin(filedirname, img_file)
                    my_package.media_files.append(fullpath)
                    img.attrs["src"] = basename(
                        img.attrs["src"])  # remove subdir as anki cannot
                    img.attrs["alt"] = ""
            fields[1] += str(tag)

    if None not in fields:
        # Add remaining note
        my_note = MyNote(model=my_model,
                         fields=[str(fields[0]),
                                 str(fields[1])])
        my_deck.add_note(my_note)

my_package.write_to_file(outputname)
tempfile.close()
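
The fragment above depends on MyNote, my_deck and my_package objects built earlier in the script; judging from add_note, media_files and write_to_file, they appear to follow the genanki API. A self-contained sketch of that pattern, assuming genanki really is the underlying library (the ids, names and field layout below are purely illustrative):

# Hedged sketch of the genanki pattern the fragment appears to use.
# Model/deck ids, names and field layout are made up for illustration.
import genanki

my_model = genanki.Model(
    1607392319,                                   # arbitrary model id
    "Simple Model",
    fields=[{"name": "Front"}, {"name": "Back"}],
    templates=[{
        "name": "Card 1",
        "qfmt": "{{Front}}",
        "afmt": '{{FrontSide}}<hr id="answer">{{Back}}',
    }])

my_deck = genanki.Deck(2059400110, "Imported HTML deck")   # arbitrary deck id
my_deck.add_note(genanki.Note(model=my_model,
                              fields=["question text", "answer text"]))

my_package = genanki.Package(my_deck)
my_package.media_files = ["image.png"]            # media referenced by the notes
my_package.write_to_file("output.apkg")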
Exemplo n.º 47
0
    def __cleanAllTemp(self):
        while self.tempfiles:
            tempfile = self.tempfiles.pop(0)
            tempfile.close()
Exemplo n.º 48
0
    def namedFile() -> Path:
        tempfile = TempFileManager.namedFileDescriptor(delete=False)
        tempfile.close()
        return Path(tempfile.name)
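
TempFileManager.namedFileDescriptor is project-specific; with the standard library the same create-close-keep-the-path pattern looks roughly like the sketch below (the caller is assumed to delete the file when finished):

# Hedged stdlib sketch of the same pattern: create a named temp file that
# survives close() so its path can be handed to other code. The caller is
# responsible for removing it afterwards.
import tempfile
from pathlib import Path

def named_file() -> Path:
    handle = tempfile.NamedTemporaryFile(delete=False)
    handle.close()                 # keep the file on disk, release the descriptor
    return Path(handle.name)

path = named_file()
path.write_text("scratch data")
path.unlink()                      # clean up once finished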
Exemplo n.º 49
0
    def _remove_tempfile(self, tempfile, temppath):
        tempfile.close()
        os.remove(temppath)
Exemplo n.º 50
0
def mosaic(options, remove, an, ow, fil):
    """Create a daily mosaic of HDF files convert to TIF and import it
    """
    try:
        # try to import pymodis (modis) and some classes for i.modis.download
        from rmodislib import product, projection, get_proj
    except:
        grass.fatal("i.modis library is not installed")
    try:
        from pymodis.convertmodis import convertModis, createMosaic
        from pymodis.convertmodis_gdal import createMosaicGDAL, convertModisGDAL
        from pymodis.parsemodis import parseModis
    except:
        grass.fatal("pymodis library is not installed")
    dictfile, targetdir = list_files(options, True)
    pid = str(os.getpid())
    # for each day
    for dat, listfiles in dictfile.items():
        pref = listfiles[0].split('/')[-1]
        prod = product().fromcode(pref.split('.')[0])
        spectr = spectral(options, prod, an)
        spectr = spectr.lstrip('( ').rstrip(' )')
        outname = "%s.%s_mosaic" % (pref.split('.')[0], pref.split('.')[1])
        outname = outname.replace(' ', '_')
        # create mosaic
        if options['mrtpath']:
            # create the file with the list of names
            tempfile = open(os.path.join(targetdir, pid), 'w')
            tempfile.writelines(listfiles)
            tempfile.close()
            # basedir of tempfile, where the hdf files are written
            basedir = os.path.split(tempfile.name)[0]
            # pass the spectral subset in the format expected by the mrtmosaic tool
            cm = createMosaic(tempfile.name, outname, options['mrtpath'],
                              spectr)
            cm.run()
            hdfiles = glob.glob1(basedir, outname + "*.hdf")
        else:
            basedir = targetdir
            listfiles = [os.path.join(basedir, i) for i in listfiles]
            cm = createMosaicGDAL(listfiles, spectr)
            try:
                cm.write_vrt(os.path.join(basedir, outname), quiet=True)
            except:
                cm.write_vrt(os.path.join(basedir, outname))
            hdfiles = glob.glob1(basedir, outname + "*.vrt")
        for i in hdfiles:
            # the full path to the hdf file
            hdf = os.path.join(basedir, i)
            try:
                pm = parseModis(hdf)
            except:
                out = i.replace('.vrt', '')
                data = doy2date(dat[1:])
                pm = grassParseModis(out, data)
            # create convertModis class and convert it in tif file
            if options['mrtpath']:
                # create the conf file for the mrt tools
                confname = confile(pm, options, an, True)
                execmodis = convertModis(hdf, confname, options['mrtpath'])
            else:
                confname = None
                projwkt = get_proj('w')
                projObj = projection()
                if projObj.returned() != 'GEO':
                    res = int(prod['res']) * int(projObj.proj['meters'])
                else:
                    res = None
                execmodis = convertModisGDAL(str(hdf),
                                             out,
                                             spectr,
                                             res,
                                             wkt=str(projwkt),
                                             vrt=True)
            try:
                execmodis.run(quiet=True)
            except:
                execmodis.run()
            # remove hdf
            if remove:
                # import tif files
                import_tif(basedir=basedir,
                           rem=remove,
                           write=ow,
                           pm=pm,
                           listfile=fil,
                           prod=prod)
                try:
                    os.remove(hdf)
                    os.remove(hdf + '.xml')
                except OSError:
                    pass
            # move the hdf and hdf.xml to the dir where the original files are
            else:
                # import tif files
                import_tif(basedir=basedir,
                           rem=remove,
                           write=ow,
                           pm=pm,
                           target=targetdir,
                           listfile=fil,
                           prod=prod)
                if i not in os.listdir(targetdir):
                    try:
                        shutil.move(hdf, targetdir)
                        shutil.move(hdf + '.xml', targetdir)
                    except OSError:
                        pass
            # remove the conf file
            try:
                os.remove(confname)
            except (OSError, TypeError):
                pass
        if options['mrtpath']:
            grass.try_remove(tempfile.name)
        grass.try_remove(os.path.join(targetdir, 'mosaic', pid))
Exemplo n.º 51
0
def write_to_tempfile(tempfile_info, html):
    tempfile, tempfile_name = tempfile_info
    tempfile.write(html)
    tempfile.close()
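
A sketch of how the (file object, filename) pair passed as tempfile_info might be produced with the standard library before calling write_to_tempfile; mkstemp and fdopen are stdlib calls, the surrounding usage is assumed:

# Hedged usage sketch: build the (file object, filename) pair that
# write_to_tempfile() expects from a stdlib temporary file.
import os
import tempfile

fd, path = tempfile.mkstemp(suffix=".html")
tempfile_info = (os.fdopen(fd, "w"), path)

write_to_tempfile(tempfile_info, "<html><body>hello</body></html>")
print("wrote", tempfile_info[1])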