Example #1
1
def Craeate_addon_from_github(URL, local_repo_folder):
    """Download a GitHub repository's master branch and repackage it as a
    versioned addon zip inside the local addon repository.

    URL -- base GitHub repository URL (without any /archive/... suffix)
    local_repo_folder -- root folder of the local addon repository

    On any download/packaging failure a message is printed and the
    function returns None.
    """
    archive_suffix = "/archive/master.zip"
    print(URL)
    # The last path component of the URL names the repo folder locally.
    addonname = URL.strip("/").split("/")[-1]
    addon_folder = local_repo_folder + os.sep + addonname
    if not os.path.exists(addon_folder):
        print("Making folder for addon in repo: ", addonname)
        os.makedirs(addon_folder)
    master_zip = addon_folder + os.sep + "master.zip"
    download_file(URL + archive_suffix, master_zip)
    try:
        # zipfilehandler yields the addon.xml contents from the archive.
        xml_frm_file, ziptype = zipfilehandler(master_zip)
    except Exception:
        print("cannot create a zip from githuburl ", URL)
        return
    root = ET.fromstring(xml_frm_file)
    # FIX: guard against an addon.xml without an <addon> element, which
    # previously raised NameError on the first use of addon_name below.
    addon_name = addon_version = None
    for element in root.iter("addon"):
        addon_name = element.attrib["id"]
        addon_version = element.attrib["version"]
    if addon_name is None:
        print("no addon element found in addon.xml for ", URL)
        return
    try:
        currntzip = zipfile.ZipFile(master_zip)
        currntzip.extractall(addon_folder + os.sep)
        currntzip.close()
        # GitHub archives extract to "<name>-master"; rename to the addon id.
        shutil.move(
            addon_folder + os.sep + addon_name + "-master",
            addon_folder + os.sep + addon_name,
        )
        os.remove(master_zip)
        # FIX: the archive was previously rooted at
        # local_repo_folder/<addon id> while everything else in this
        # function lives under local_repo_folder/<repo name>; use the
        # same repo folder consistently.
        shutil.make_archive(
            addon_folder + os.sep + addon_name + "-" + addon_version,
            "zip",
            addon_folder,
            addon_name,
        )
        shutil.rmtree(addon_folder + os.sep + addon_name)
    except Exception:
        print("could not save file ", addonname)
Example #2
1
    def parallel(self, *tasklist):
        """Run tasks in parallel.

        Each task is a (callable, arg1, arg2, ...) tuple or list.  Every
        task is forked into a subprocess with its own numbered log file;
        once all tasks finish, the per-task logs are appended to the main
        log and deleted.

        NOTE(review): exceptions from subtasks are collected in
        ``exceptions`` but never re-raised, ``old_log`` is never closed,
        and ``self.log_filename`` is left pointing at the last subtask's
        log file -- confirm all three are intentional.
        """

        pids = []
        old_log_filename = self.log_filename
        for i, task in enumerate(tasklist):
            assert isinstance(task, (tuple, list))
            # Route each subtask's logging to "<main log>.<index>".
            self.log_filename = old_log_filename + (".%d" % i)
            # Assumes parallel.fork_start forks immediately (as the name
            # suggests), so the closure over the loop variable is
            # evaluated in this iteration's child; otherwise this would
            # be a late-binding bug -- confirm.
            task_func = lambda: task[0](*task[1:])
            pids.append(parallel.fork_start(self.resultdir, task_func))

        old_log_path = os.path.join(self.resultdir, old_log_filename)
        old_log = open(old_log_path, "a")
        exceptions = []
        for i, pid in enumerate(pids):
            # wait for the task to finish
            try:
                parallel.fork_waitfor(self.resultdir, pid)
            except Exception, e:
                exceptions.append(e)
            # copy the logs from the subtask into the main log
            new_log_path = old_log_path + (".%d" % i)
            if os.path.exists(new_log_path):
                new_log = open(new_log_path)
                old_log.write(new_log.read())
                new_log.close()
                old_log.flush()
                os.remove(new_log_path)
Example #3
1
 def test_water_file3(self):
     """water with the asis trick and GenBank file, output to a file."""
     query = "TGTTGTAATGTTTTAATGTTTCTTCTCCCTTTAGATGTACTACGTTTGGA"
     out_file = "Emboss/temp_test3.water"
     in_file = "GenBank/cor6_6.gb"
     self.assertTrue(os.path.isfile(in_file))
     # Start from a clean slate.
     if os.path.isfile(out_file):
         os.remove(out_file)
     # Build the command line parameter by parameter.
     cline = WaterCommandline(cmd=exes["water"])
     for key, value in [
         ("asequence", "asis:%s" % query),
         ("bsequence", in_file),
         # TODO - Tell water this is a GenBank file!
         ("gapopen", "1"),
         ("gapextend", "0.5"),
         ("outfile", out_file),
     ]:
         cline.set_parameter(key, value)
     # The command line must survive a repr() round trip.
     self.assertEqual(str(eval(repr(cline))), str(cline))
     self.run_water(cline)
     # Parse the output and sanity-check the alignments.
     self.pairwise_alignment_check(
         query, SeqIO.parse(in_file, "genbank"), AlignIO.parse(out_file, "emboss"), local=True
     )
     # Clean up,
     os.remove(out_file)
Example #4
1
 def _read_checkpoint(self):
     """Read checkpoint file from disk."""
     if not os.path.exists(self.checkpoint_path):
         self.log.info("No checkpoint found in %s." % self.checkpoint_path)
         open(self.checkpoint_path, "w").close()
         self.log.debug("Created checkpoint file in %s." % self.checkpoint_path)
         return ""
     try:
         f = open(self.checkpoint_path, "rb")
         read = pickle.load(f)
         f.close()
         if read != None:
             return read
         else:
             return ""
         self.log.debug("Checkpoint read from %s" % self.checkpoint_path)
     except EOFError:
         return ""
     except Exception, e:
         self.log.error("Error reading checkpoint in %s." % self.checkpoint_path)
         self.log.error(traceback.format_exc())
         if self.remove_corrupted_checkpoint_file:
             self.log.info("Removing corrupted checkpoint file %s." % self.checkpoint_path)
             f.close()
             os.remove(self.checkpoint_path)
             return ""
         sys.exit(-1)
Example #5
1
    def __enter__(self):
        """Enter context: Create temporary file for writing, copying stat() of original.

        NOTE(review): returns None, so ``with X() as y`` binds y to None --
        confirm callers rely only on the side effects.
        """
        from gruntle.memebot.exceptions import TrapErrors, TrapError, trapped, reraise

        # make sure the directory exists
        dirname, basename = os.path.split(self.file)
        if not os.path.exists(dirname):
            os.makedirs(dirname)

        # construct temporary file in the same directory as the original
        name, ext = os.path.splitext(basename)
        self.fd, self.temp_file = tempfile.mkstemp(suffix=ext, prefix=".%s-" % name, dir=dirname)

        try:
            with TrapErrors():
                exists = os.path.exists(self.file)
                # Apply the requested permissions to the original when it
                # exists (copystat below propagates them to the temp file),
                # otherwise directly to the temp file.
                if self.perms is not None:
                    os.chmod(self.file if exists else self.temp_file, self.perms)
                if exists:
                    # Mirror the original's stat() onto the temp file and,
                    # when requested, keep a one-deep ".bak" copy.
                    shutil.copystat(self.file, self.temp_file)
                    if self.backup:
                        backup_file = self.file + ".bak"
                        if os.path.exists(backup_file):
                            os.remove(backup_file)
                        shutil.copy2(self.file, backup_file)
                self.fp = os.fdopen(self.fd, "w")
        except TrapError, exc:
            # On any trapped failure: close the descriptor, delete the
            # temp file, reset state, then re-raise the original error.
            with trapped:
                os.close(self.fd)
            if os.path.exists(self.temp_file):
                with trapped:
                    os.remove(self.temp_file)
            self.reset()
            reraise(*exc.args)
Example #6
1
def WCS(cmd):
    """Run a single WCS chassis-manager CLI command.

    The command text is written to cmd.txt (wcscli reads it in batch
    mode via -b) and the tool's output is returned as
    [True, <payload lines>] on success, or [False, <error message>]
    when the CM connection line is missing from the output.
    """
    global CM_IP
    global CM_PORT
    global CM_USERNAME
    global CM_PASSWORD

    # Refresh the batch file with the requested command.
    if os.path.exists("cmd.txt"):
        os.remove("cmd.txt")
    with open("cmd.txt", "w") as f:
        f.write(cmd)

    # NOTE(review): the credentials appear on the command line and hence
    # in the process list -- confirm this is acceptable here.
    cmd = "wcscli.exe -h %s -u %s -x %s -p %s -s 1 -b cmd.txt" % (CM_IP, CM_USERNAME, CM_PASSWORD, CM_PORT)
    ret = subprocess.check_output(cmd, shell=True, universal_newlines=True).split("\n")
    for i in range(len(ret)):
        ret[i] = ret[i].strip()
        if DEBUG_MODE:
            Log("      [%02d] %s" % (i, ret[i]))

    # Remove the scratch batch file again.
    if os.path.exists("cmd.txt"):
        os.remove("cmd.txt")

    if ret[1] == "Connection to CM succeeded.":
        return [True, ret[3:]]
    else:
        return [False, "WCS Command Fail (Connected to CM Service)"]
def rename_files(rootfile, mode, output_path):
    '''
    Rename all the files that match the root of the filename input
    parameter, excluding the input filename itself.

    Files matching <basename>_sci* and <basename>_single* are copied to
    <basename>_<mode><matched suffix> -- into output_path when given,
    otherwise next to rootfile -- and the originals are deleted.
    '''
    print('Renaming Files')

    # Build the file list.
    rootfile = os.path.abspath(rootfile)
    basename = os.path.splitext(rootfile)[0]
    file_list = glob.glob(basename + '_sci*') + glob.glob(basename + '_single*')

    # Loop over the files and rename.
    for filename in file_list:
        # Insert the mode between the root name and the matched suffix.
        # FIX: use str methods instead of the long-deprecated functions
        # from the string module.
        dst = basename.split('/')[-1] + '_' + mode
        dst += filename.split(basename)[1]
        if output_path is None:
            dst = os.path.join(os.path.dirname(rootfile), dst)
        else:
            dst = os.path.join(output_path, dst)
        # Copy-then-remove keeps the destination even if the remove fails.
        shutil.copyfile(filename, dst)
        os.remove(filename)
Example #8
1
    def make_ps(self, tex, fontsize):
        """
        generates a postscript file containing latex's rendering of tex string

        returns the file name
        """
        basefile = self.get_basefile(tex, fontsize)
        psfile = "%s.epsf" % basefile

        # Only rerun dvips when the cached .epsf is missing (or always in
        # DEBUG mode so failures stay reproducible).
        if DEBUG or not os.path.exists(psfile):
            dvifile = self.make_dvi(tex, fontsize)
            outfile = basefile + ".output"
            command = self._get_shell_cmd(
                'cd "%s"' % self.texcache,
                'dvips -q -E -o "%s" "%s" > "%s"' % (os.path.split(psfile)[-1], os.path.split(dvifile)[-1], outfile),
            )
            mpl.verbose.report(command, "debug")
            exit_status = os.system(command)
            with open(outfile) as fh:
                if exit_status:
                    # FIX: the tool that runs here is dvips, not dvipng,
                    # and "flowing" was a typo for "following".
                    raise RuntimeError(
                        "dvips was not able to process the following "
                        "file:\n%s\nHere is the full report generated by "
                        "dvips: \n\n" % dvifile + fh.read()
                    )
                else:
                    mpl.verbose.report(fh.read(), "debug")
            os.remove(outfile)

        return psfile
Example #9
1
File: base.py Project: grigi/talkey
    def play(self, filename, translate=False):  # pragma: no cover
        """
        Plays the sounds.

        :filename: The input file name
        :translate: If True, it runs it through audioread which will translate from common compression formats to raw WAV.
        """
        # FIXME: Use platform-independent and async audio-output here
        # PyAudio looks most promising, too bad about:
        #  --allow-external PyAudio --allow-unverified PyAudio
        if translate:
            # Decode the source into a fresh temporary 16-bit WAV file.
            with tempfile.NamedTemporaryFile(suffix=".wav", delete=False) as tmp:
                wav_path = tmp.name
            with audioread.audio_open(filename) as source:
                with contextlib.closing(wave.open(wav_path, "w")) as sink:
                    sink.setnchannels(source.channels)
                    sink.setframerate(source.samplerate)
                    sink.setsampwidth(2)
                    for frame in source:
                        sink.writeframes(frame)
            filename = wav_path

        if winsound:
            winsound.PlaySound(str(filename), winsound.SND_FILENAME)
        else:
            cmd = ["aplay", str(filename)]
            self._logger.debug("Executing %s", " ".join([pipes.quote(arg) for arg in cmd]))
            subprocess.call(cmd)

        if translate:
            # Drop the temporary WAV created above.
            os.remove(wav_path)
Example #10
1
    def remove(self, ignore_errors=False):
        """Uninstall this package: run its pre/post remove hooks, delete
        its installed files, then clear its cache directory.

        NOTE(review): ignore_errors is currently unused -- honour it or
        drop it.
        """
        stream_logger.info("   - %s" % self.name)

        # If archive not already extracted, unpack it so files.lst and
        # the control scripts are available.
        if not os.path.exists("%s/%s" % (conf.get("settings", "cache"), self.name)):
            self.unarchive()

        self.import_control()
        # Pre Remove
        stream_logger.info("     | Pre Remove")
        self.control.pre_remove()

        # Remove
        stream_logger.info("     | Remove")
        # FIX: close files.lst deterministically instead of leaking the
        # handle.
        with open(os.path.join(conf.get("settings", "cache"), self.name, "files.lst")) as listing:
            files_list = listing.readlines()
        for _file in files_list:
            try:
                os.remove(os.path.join(conf.get("settings", "packages"), _file.replace("\n", "")))
            except OSError:
                # Best-effort: only swallow filesystem errors (e.g. a file
                # that is already gone), not arbitrary bugs.
                pass
        # Post Remove
        stream_logger.info("     | Post Remove")
        self.control.post_remove()

        stream_logger.info("     | Clean")
        shutil.rmtree(os.path.join(conf.get("settings", "cache"), self.name))
Example #11
0
File: httpd.py Project: ad3n/weeman
 def cleanup(self):
     printt(3, "\n:: Running cleanup ...")
     # In case weeman will not create ref.html, remove each file.
     for leftover in ("index.html", "ref.html"):
         if os.path.exists(leftover):
             os.remove(leftover)
Example #12
0
def demo(train_size=100, test_size=100, java_home=None, mallet_home=None):
    from nltk.corpus import brown
    import textwrap

    # Define a very simple feature detector
    def fd(sentence, index):
        word = sentence[index]
        return dict(word=word, suffix=word[-2:], len=len(word))

    # Let nltk know where java & mallet are.
    nltk.internals.config_java(java_home)
    nltk.classify.mallet.config_mallet(mallet_home)

    # Get the training & test corpus.  We simplify the tagset a little:
    # just the first 2 chars.
    def strip(corpus):
        return [[(w, t[:2]) for (w, t) in sent] for sent in corpus]

    brown_train = strip(brown.tagged_sents(categories="news")[:train_size])
    brown_test = strip(brown.tagged_sents(categories="editorial")[:test_size])

    crf = MalletCRF.train(fd, brown_train, transduction_type="VITERBI")  #'/tmp/crf-model',
    sample_output = crf.tag([w for (w, t) in brown_test[5]])
    acc = nltk.tag.accuracy(crf, brown_test)
    print "\nAccuracy: %.1f%%" % (acc * 100)
    print "Sample output:"
    print textwrap.fill(
        " ".join("%s/%s" % w for w in sample_output), initial_indent="  ", subsequent_indent="  "
    ) + "\n"

    # Clean up
    print "Clean-up: deleting", crf.filename
    os.remove(crf.filename)

    return crf
Example #13
0
    def batch_tag(self, sentences):
        """Tag a batch of sentences with the trained Mallet CRF model.

        Writes the sentences to a temporary test file, shells out to
        mallet's CRF runner, parses the emitted labels, and returns one
        list of (token, label) pairs per input sentence.
        """
        # Write the test corpus to a temporary file
        (fd, test_file) = mkstemp(".txt", "test")
        self.write_test_corpus(sentences, os.fdopen(fd, "w"))

        try:
            # Run mallet on the test file.
            stdout, stderr = call_mallet(
                [
                    self._RUN_CRF,
                    "--model-file",
                    os.path.abspath(self.crf_info.model_filename),
                    "--test-file",
                    test_file,
                ],
                stdout="pipe",
            )

            # Decode the output
            labels = self.parse_mallet_output(stdout)

            # strip the __start__ / __end__ entries when the model was
            # configured with those synthetic states
            if self.crf_info.add_start_state and self.crf_info.add_end_state:
                labels = [labs[1:-1] for labs in labels]
            elif self.crf_info.add_start_state:
                labels = [labs[1:] for labs in labels]
            elif self.crf_info.add_end_state:
                labels = [labs[:-1] for labs in labels]

            # Combine the labels and the original sentences.
            return [zip(sent, label) for (sent, label) in zip(sentences, labels)]

        finally:
            # Always delete the temporary corpus, even if mallet fails.
            os.remove(test_file)
    def test_process_schema_rule_mismatched_table(self):

        test_case_buf = """
SET search_path = bial, pg_catalog

--
-- Name: ab_reporting_beta_match_ins; Type: RULE; Schema: bial; Owner: table_owner
--

CREATE RULE ab_reporting_beta_match_ins AS ON INSERT TO weblog_mart_tbls.ab_reporting_beta_match DO INSTEAD INSERT INTO ab_reporting_beta_match VALUES new.search_session_id;
"""
        expected_out = ""

        in_name = os.path.join(os.getcwd(), "infile")
        out_name = os.path.join(os.getcwd(), "outfile")
        with open(in_name, "w") as fd:
            fd.write(test_case_buf)

        dump_schemas = set(["bial"])
        dump_tables = set([("bial", "ab_reporting_beta_match")])
        with open(out_name, "w") as fdout:
            with open(in_name, "r") as fdin:
                process_schema(dump_schemas, dump_tables, fdin, fdout)

        with open(out_name, "r") as fd:
            results = fd.read()

        self.assertEquals(results, expected_out)
        os.remove(in_name)
        os.remove(out_name)
Example #15
0
def package_module(manifest, mf, config):
    """Package an iPhone Titanium module into <moduleid>-iphone-<version>.zip.

    manifest -- dict with at least name/moduleid/version entries
    mf -- path of the manifest file to embed in the archive
    config -- module config handed to generate_doc()
    """
    name = manifest["name"].lower()
    moduleid = manifest["moduleid"].lower()
    version = manifest["version"]
    modulezip = "%s-iphone-%s.zip" % (moduleid, version)
    # Always rebuild the archive from scratch.
    if os.path.exists(modulezip):
        os.remove(modulezip)
    zf = zipfile.ZipFile(modulezip, "w", zipfile.ZIP_DEFLATED)
    modulepath = "modules/iphone/%s/%s" % (moduleid, version)
    zf.write(mf, "%s/manifest" % modulepath)
    libname = "lib%s.a" % moduleid
    zf.write("build/%s" % libname, "%s/%s" % (modulepath, libname))
    docs = generate_doc(config)
    # FIX: "is not None" instead of "!= None"; str.replace instead of the
    # deprecated string.replace; items() instead of iteritems(); avoid
    # shadowing the builtin "file".
    if docs is not None:
        for doc in docs:
            # Rendered markdown docs are stored under documentation/ as .html.
            for doc_file, html in doc.items():
                filename = doc_file.replace(".md", ".html")
                zf.writestr("%s/documentation/%s" % (modulepath, filename), html)

    apidoc_build_path = os.path.join(cwd, "build", "apidoc")
    if generate_apidoc(apidoc_build_path):
        for entry in os.listdir(apidoc_build_path):
            if entry in ignoreFiles or os.path.isdir(os.path.join(apidoc_build_path, entry)):
                continue
            zf.write(os.path.join(apidoc_build_path, entry), "%s/documentation/apidoc/%s" % (modulepath, entry))

    # Bundle assets/example/platform trees plus license and build metadata.
    zip_dir(zf, "assets", modulepath, [".pyc", ".js"])
    zip_dir(zf, "example", modulepath, [".pyc"])
    zip_dir(zf, "platform", modulepath, [".pyc", ".js"])
    zf.write("LICENSE", "%s/LICENSE" % modulepath)
    zf.write("module.xcconfig", "%s/module.xcconfig" % modulepath)
    exports_file = "metadata.json"
    if os.path.exists(exports_file):
        zf.write(exports_file, "%s/%s" % (modulepath, exports_file))
    zf.close()
Example #16
0
 def consume(self, msg):
     """Work through all free datagrepper jobs, then purge expired results.

     The message content is ignored; it only acts as a wake-up signal.
     A file lock keeps this to a single runner at a time.
     """
     print "****** STARTING CONSUME"
     # ignore the message, we do what we want
     lock = LockFile(app.config["RUNNER_LOCKFILE"])
     with lock:
         # get list of open jobs; keep looping until none remain, since
         # new jobs may become free while we work
         while True:
             jobs = Job.query.filter_by(status=dgrepm.STATUS_FREE)
             if jobs.count() == 0:
                 break
             for job in jobs:
                 # run query on jobs
                 dq = DataQuery.from_database(job)
                 job.set_status(dgrepm.STATUS_OPEN)
                 try:
                     job.filename = dq.run_query("datagrepper_{0}".format(job.id))
                 except:
                     # NOTE(review): bare except discards the failure
                     # cause entirely; the job is only marked failed.
                     job.set_status(dgrepm.STATUS_FAILED)
                 else:
                     job.set_status(dgrepm.STATUS_DONE)
         # get list of completed jobs to be deleted
         jobs = Job.query.filter(
             Job.status == dgrepm.STATUS_DONE, Job.complete_time < (datetime.now() - app.config["JOB_EXPIRY"])
         )
         for job in jobs:
             # delete the output file, then mark the job as deleted
             os.remove(os.path.join(app.config["JOB_OUTPUT_DIR"], job.filename))
             job.set_status(dgrepm.STATUS_DELETED)
     print "****** FINISHING CONSUME"
Example #17
0
def rmtree(path):
    """Recursively delete the directory tree rooted at path.

    FIX: the previous version recursed into every subdirectory *and*
    kept walking top-down over directories it had already deleted,
    relying on os.walk silently skipping them.  Walking bottom-up makes
    a single pass: files are removed first, then each emptied directory.
    """
    for dirpath, dirnames, filenames in os.walk(path, topdown=False):
        for filename in filenames:
            os.remove(os.path.join(dirpath, filename))
        os.rmdir(dirpath)
Example #18
0
def clean_compiled():
    """Delete compiled and coverage artifacts (*.pyc, *.pyo, *.cover)
    under the flexget package tree (relative to the current directory)."""
    for root, dirs, files in os.walk("flexget"):
        for name in files:
            fqn = os.path.join(root, name)
            # FIX: match real extensions only; the old suffix slicing
            # (fqn[-3:] == "pyc") also deleted files merely *ending* in
            # those letters, e.g. "xpyc" or "xcover".
            if fqn.endswith((".pyc", ".pyo", ".cover")):
                print("Deleting %s" % fqn)
                os.remove(fqn)
Example #19
0
 def testEvalFileException(self):
     tcl = self.interp
     filename = "doesnotexists"
     try:
         os.remove(filename)
     except Exception,e:
         pass
Example #20
0
    def test_get_method_name(self):
        """
        Test --get-method-name
        """
        # Delete foo_2.php from the repo and rewrite the load config
        # (presumably so the server picks up the change -- confirm
        # against write_load_config).
        os.remove(os.path.join(self.repo_dir, "foo_2.php"))
        self.write_load_config("foo_2.php")

        # Ask for the identifier at line 8, column 7 of the stdin source
        # (the "foo" in "$c->foo()"), in both plain and JSON output modes.
        self.check_cmd_and_json_cmd(
            ["Name: \\C::foo, type: method, position: line 8, characters 7-9"],
            [
                '{{"name":"\\\\C::foo","result_type":"method",'
                '"pos":{{"filename":"","line":8,"char_start":7,"char_end":9}},'
                '"internal_error":false}}'
            ],
            options=["--get-method-name", "8:7"],
            stdin="""<?hh

class C {
  public function foo() {}
}

function test(C $c) {
  $c->foo();
}
""",
        )
 def clean(self):
     """Remove all * .pyc and *.swp files"""
     # Walk every configured WordPress path and delete editor swap
     # files and compiled-python droppings.
     junk_exts = (".swp", ".pyc")
     for arch, root in self.wppath.items():
         for folder, subdirs, names in os.walk(root):
             for entry in names:
                 if os.path.splitext(entry)[1] in junk_exts:
                     os.remove(os.path.join(folder, entry))
Example #22
0
    def stop(self):
        """
        Stop the daemon.
        """
        # Read the daemon's pid from its pidfile.
        try:
            with open(self.pidfile, "r") as pf:
                pid = int(pf.read().strip())
        except IOError as err:
            sys.stderr.write(str(err) + "\nDaemon not running?\n")
            sys.exit(1)

        # Signal the daemon and give it a moment to shut down.
        try:
            os.kill(pid, SIGTERM)
            time.sleep(1)
        except OSError as err:
            print(str(err))
            sys.exit(1)

        # Finally, clean up the pidfile.
        try:
            if os.path.exists(self.pidfile):
                os.remove(self.pidfile)
        except IOError as err:
            sys.stderr.write(str(err) + "\nCannot remove pid file {}".format(self.pidfile))
            sys.exit(1)
Example #23
0
 def test_needle_file(self):
     """needle with the asis trick, output to a file."""
     # Build the command line parameter by parameter.
     cline = NeedleCommandline(cmd=exes["needle"])
     for key, value in [
         ("-asequence", "asis:ACCCGGGCGCGGT"),
         ("-bsequence", "asis:ACCCGAGCGCGGT"),
         ("-gapopen", "10"),
         ("-gapextend", "0.5"),
         # EMBOSS would guess this, but let's be explicit:
         ("-snucleotide", "True"),
         ("-outfile", "Emboss/temp with space.needle"),
     ]:
         cline.set_parameter(key, value)
     # The command line must survive a repr() round trip.
     self.assertEqual(str(eval(repr(cline))), str(cline))
     # Run the tool,
     stdout, stderr = cline()
     # Check it worked,
     self.assertTrue(stderr.strip().startswith("Needleman-Wunsch global alignment"), stderr)
     self.assertEqual(stdout.strip(), "")
     out_name = cline.outfile
     self.assertTrue(os.path.isfile(out_name))
     # Parse the output and verify both aligned sequences.
     alignment = AlignIO.read(out_name, "emboss")
     self.assertEqual(len(alignment), 2)
     self.assertEqual(str(alignment[0].seq), "ACCCGGGCGCGGT")
     self.assertEqual(str(alignment[1].seq), "ACCCGAGCGCGGT")
     # Clean up,
     os.remove(out_name)
Example #24
0
 def write(self, cr, uid, ids, vals, context=None):
     """Override write() to keep the image files on disk in sync with
     the records' filename/extension fields.

     Splits a supplied "filename" into name + extension, then for every
     record whose name or extension actually changes either deletes the
     old file (when new image data was uploaded) or renames it on disk.
     """
     if not isinstance(ids, list):
         ids = [ids]
     # A full filename implies the extension unless one is given explicitly.
     if vals.get("filename") and not vals.get("extension"):
         vals["filename"], vals["extension"] = os.path.splitext(vals["filename"])
     upd_ids = ids[:]
     if vals.get("filename") or vals.get("extension"):
         images = self.browse(cr, uid, upd_ids, context=context)
         for image in images:
             old_full_path = self._image_path(cr, uid, image, context=context)
             if not old_full_path:
                 continue
             # all the stuff below is there to manage the files on the filesystem
             if (
                 vals.get("filename")
                 and (image.name != vals["filename"])
                 or vals.get("extension")
                 and (image.extension != vals["extension"])
             ):
                 # Write this record individually so the file operations
                 # below see the updated values, and exclude it from the
                 # final bulk write at the bottom.
                 super(product_images, self).write(cr, uid, image.id, vals, context=context)
                 upd_ids.remove(image.id)
                 if "file" in vals:
                     # a new image has been loaded, so remove the old image
                     # TODO it looks like there is something wrong with the function
                     # field in openerp: the preview is always added in the write :(
                     if os.path.isfile(old_full_path):
                         os.remove(old_full_path)
                 else:
                     new_image = self.browse(cr, uid, image.id, context=context)
                     new_full_path = self._image_path(cr, uid, new_image, context=context)
                     # we have to rename the image on the file system
                     if os.path.isfile(old_full_path):
                         os.rename(old_full_path, new_full_path)
     return super(product_images, self).write(cr, uid, upd_ids, vals, context=context)
Example #25
0
 def test_water_file4(self):
     """water with the asis trick and SwissProt file, output to a file."""
     query = "DVCTGKALCDPVTQNIKTYPVKIENLRVMI"
     out_file = "Emboss/temp_test4.water"
     in_file = "SwissProt/sp004"
     self.assertTrue(os.path.isfile(in_file))
     # Start from a clean slate.
     if os.path.isfile(out_file):
         os.remove(out_file)
     # Build the command line parameter by parameter.
     cline = WaterCommandline(cmd=exes["water"])
     for key, value in [
         ("-asequence", "asis:%s" % query),
         ("-bsequence", in_file),
         # EMBOSS should work this out, but let's be explicit:
         ("-sprotein", True),
         # TODO - Tell water this is a SwissProt file!
         ("-gapopen", "20"),
         ("-gapextend", "5"),
         ("-outfile", out_file),
     ]:
         cline.set_parameter(key, value)
     # The command line must survive a repr() round trip.
     self.assertEqual(str(eval(repr(cline))), str(cline))
     self.run_water(cline)
     # Parse the output and sanity-check the alignments.
     self.pairwise_alignment_check(
         query, SeqIO.parse(in_file, "swiss"), AlignIO.parse(out_file, "emboss"), local=True
     )
     # Clean up,
     os.remove(out_file)
Example #26
0
 def cleanup_cdrom(path):
     """ Removes created iso image.

     When the image lives on a gluster volume ("gluster" appears in the
     path), the volume is temporarily mounted so the file can be removed
     with a plain os.remove(), then unmounted again.
     """
     if path:
         error.context("Cleaning up temp iso image '%s'" % path, logging.info)
         if "gluster" in path:
             # Mount the gluster volume locally; the last three path
             # components are taken as <server>/<volume>/<file>.
             g_mount_point = tempfile.mkdtemp("gluster")
             g_server, v_name, f_name = path.split("/")[-3:]
             if ":" in g_server:
                 # The server component may carry a ":" suffix
                 # (presumably a port) -- keep only the host part.
                 g_server = g_server.split(":")[0]
             g_mount_link = "%s:/%s" % (g_server, v_name)
             mount_cmd = "mount -t glusterfs %s %s" % (g_mount_link, g_mount_point)
             utils.system(mount_cmd, timeout=60)
             # Point path at the file inside the fresh mount.
             path = os.path.join(g_mount_point, f_name)
         try:
             logging.debug("Remove the file with os.remove().")
             os.remove("%s" % path)
         except OSError, err:
             logging.warn("Fail to delete %s" % path)
         # NOTE: still true after the reassignment above because
         # mkdtemp("gluster") embeds "gluster" in the mount point name.
         if "gluster" in path:
             # Best-effort unmount and mount-point cleanup.
             try:
                 umount_cmd = "umount %s" % g_mount_point
                 utils.system(umount_cmd, timeout=60)
                 os.rmdir(g_mount_point)
             except Exception, err:
                 msg = "Fail to clean up %s" % g_mount_point
                 msg += "Error message %s" % err
                 logging.warn(msg)
Example #27
0
 def delete(self, project, version=None):
     """Delete one egg version, or the whole project when no version is
     given.  Deleting the last version also drops the project."""
     if version is not None:
         remove(self._eggpath(project, version))
         # Remove the project directory once its last version is gone.
         if not self.list(project):
             self.delete(project)
     else:
         rmtree(path.join(self.basedir, project))
Example #28
0
def remove_old_logs(logs_by_dir, output_dir):
    """Prune files on disk that are no longer listed in logs_by_dir.

    logs_by_dir maps a subdirectory name to the collection of file
    basenames that should be kept inside output_dir/<subdirectory>;
    anything else found there is deleted.  Missing subdirectories are
    skipped.
    """
    for subdir, wanted in logs_by_dir.items():
        target = os.path.join(output_dir, subdir)
        if not os.path.exists(target):
            continue
        for name in os.listdir(target):
            if name not in wanted:
                os.remove(os.path.join(target, name))
Example #29
0
        def setup():
            # Fresh start: drop any file left over from a previous run,
            # then store the VHDL fixture below and write it to _FILENAME.
            if os.path.exists(_FILENAME):
                os.remove(_FILENAME)
            it._code = [
                "library ieee ;",
                "use ieee.std_logic_1164.all;",
                "USE IEEE.STD_LOGIC_ARITH.ALL;",
                "",
                "library work;",
                "use work.package_with_constants;",
                "",
                "library lib1,lib2;",
                "library lib3, lib4;",
                "",
                "entity clock_divider is",
                "    generic (",
                "        DIVIDER : integer := 10",
                "    );",
                "    port (",
                "        reset : in std_logic;",
                "        clk_input : in  std_logic;",
                "        clk_output : out std_logic",
                "    );",
                "end clock_divider;",
                "",
                "architecture clock_divider of clock_divider is",
                "",
                "begin",
                "",
                "end clock_divider;",
            ]

            writeListToFile(_FILENAME, it._code)
Example #30
0
 def tearDown(self):
     """Delete the temporary files created by the test."""
     for each in self.tempFiles:
         try:
             os.remove(each)
         except OSError:
             # Best-effort cleanup: the file may never have been created
             # or may already be gone.  FIX: was a bare except, which
             # also hid real bugs such as TypeError.
             pass