Example #1
1
def convert_notebook(name):
    """Convert a Jupyter notebook in ``docs_dir`` into reStructuredText.

    Executes the notebook with ``ipython nbconvert``, emits ``<name>.rst``,
    then repairs Sphinx markup mangled by the markdown conversion and
    prepends the project logo image.
    """
    # Execute the notebook and convert it to restructured text.
    subprocess.check_call(
        [
            "ipython",
            "nbconvert",
            "--execute",
            "--to",
            "rst",
            os.path.join(docs_dir, "%s.ipynb" % name),
            "--output",
            os.path.join(docs_dir, name),
        ]
    )

    # `rst_file` avoids shadowing the builtin `file`; the regexes are raw
    # strings so backslash groups read literally.
    with open(os.path.join(docs_dir, "%s.rst" % name), "r") as rst_file:
        content = rst_file.read()

    # Unmangle Sphinx cross-references: :role:``target`` -> :role:`target`.
    content = re.sub(r":([^:]+):``([^`]+)``", r":\1:`\2`", content)
    # Repair anchor targets: ".. <junk>_label:" -> ".. _label:".
    content = re.sub(r"[.][.].*(_[^:]+):", r".. \1:", content)

    content = (
        """
  .. image:: ../artwork/toyplot.png
    :width: 200px
    :align: right
  """
        + content
    )

    with open(os.path.join(docs_dir, "%s.rst" % name), "w") as rst_file:
        rst_file.write(content)
Example #2
1
    def parallel(self, *tasklist):
        """Run tasks in parallel.

        Each entry of *tasklist is a ``(callable, arg1, ...)`` tuple.  Every
        task is forked into its own subprocess with its own numbered log
        file; after all tasks finish, the per-task logs are appended to the
        main log and removed.
        """
        pids = []
        old_log_filename = self.log_filename
        for i, task in enumerate(tasklist):
            assert isinstance(task, (tuple, list))
            # Give each subtask its own numbered log file.
            self.log_filename = old_log_filename + (".%d" % i)
            # Bind `task` as a default argument so the lambda captures this
            # iteration's task even if it is invoked later (late binding).
            task_func = lambda task=task: task[0](*task[1:])
            pids.append(parallel.fork_start(self.resultdir, task_func))
        # Restore the main log filename; the original left it pointing at
        # the last subtask's log.
        self.log_filename = old_log_filename

        old_log_path = os.path.join(self.resultdir, old_log_filename)
        exceptions = []
        # `with` guarantees the main log handle is closed (it leaked before).
        with open(old_log_path, "a") as old_log:
            for i, pid in enumerate(pids):
                # wait for the task to finish
                try:
                    parallel.fork_waitfor(self.resultdir, pid)
                except Exception as e:  # `except X, e` was Python-2-only syntax
                    exceptions.append(e)
                # copy the logs from the subtask into the main log
                new_log_path = old_log_path + (".%d" % i)
                if os.path.exists(new_log_path):
                    with open(new_log_path) as new_log:
                        old_log.write(new_log.read())
                    old_log.flush()
                    os.remove(new_log_path)
        # NOTE(review): `exceptions` is collected but not re-raised here —
        # presumably handled by code outside this view; confirm.
Example #3
1
 def testGetTpls(self):
     """Exercise statserv.server.get_tpls() under several query strings.

     Covers: an unknown template name (expects the send_error payload),
     ``which=`` versus ``which=all`` (expect identical output), and a
     multi-template request with a JSONP callback.
     """
     app = Flask(__name__)
     # Unknown template name: get_tpls() should produce the same payload
     # send_error() builds for a missing template.
     with app.test_client() as c:
         testRequest = c.get("/tpls.json?which=asdfjdskfjs")
         self.assertEquals(
             statserv.server.send_error(request, "template does" " not exist"),
             statserv.server.get_tpls(),
             "The get_tpls method really shouldn't try to send " "back a template for 'asdfjdskfjs.'",
         )
     # An empty `which` should behave exactly like which=all; capture the
     # which= response first, then compare.
     tplsempty = False
     with app.test_client() as c:
         testRequest = c.get("/tpls.json?which=")
         tplsempty = statserv.server.get_tpls()
     with app.test_client() as c:
         testRequest = c.get("/tpls.json?which=all")
         self.assertEquals(
             statserv.server.get_tpls(),
             tplsempty,
             "The get_tpls method should send back all " "templates on both which=all and which=.",
         )
     # Two explicit templates plus a JSONP callback: the response should
     # wrap both template bodies in the callback function.
     with app.test_client() as c:
         testRequest = c.get("/tpls.json?callback=blah" "&which=header&which=home")
         header = open(statserv.server.determine_path() + "/tpls/header.tpl").read()
         home = open(statserv.server.determine_path() + "/tpls/home.tpl").read()
         response = statserv.server.make_response("blah", dict({"header": header, "home": home}))
         self.assertEquals(
             statserv.server.get_tpls(),
             response,
             "The single-template support does not seem to be " "working properly.",
         )
def main():
    """Convert a sparse-feature file into "<nbFeat> <nbSamples>" format.

    Reads whitespace-separated integer rows from ``args.filename``, drops
    the first three columns of each row, then writes a
    "<nbFeat> <nbSamples>" header followed by one "<length> <f1> ... <fn>"
    line per sample, to stdout or to ``args.outputfile``.
    """
    parser = argparser()
    args = parser.parse_args()

    try:
        nb_feat = -1
        # `samples` instead of `input`, which shadows the builtin.
        with open(args.filename) as f:
            samples = [[int(a) for a in line.strip().split()] for line in f]
        for k in range(len(samples)):
            samples[k] = samples[k][3:]
            if samples[k]:  # guard: max() over an empty row would raise
                nb_feat = max(nb_feat, max(samples[k]))

        header = "{0} {1}".format(nb_feat + 1, len(samples))  # Format => <nbFeat> <nbSamples>
        body = "\n".join(
            "{0} {1}".format(len(row), " ".join(map(str, row))) for row in samples
        )  # Format => <length> <f1> ... <fn>
        if args.outputfile == "":
            # Python 2 print statements were a SyntaxError under Python 3.
            print(header)
            print(body)
        else:
            with open(args.outputfile, "w") as f:
                f.write(header + "\n")
                f.write(body)
    except Exception as e:
        print("Oops, something wrong!", e, file=sys.stderr)
        print(sys.exc_info(), file=sys.stderr)
        # Exit non-zero so callers can detect failure (was sys.exit(0)).
        sys.exit(1)
def main():
    """Generate one fGI feature file per genome.

    Loads the fGI/core CSV statistics, the genome listing and the pickled
    cluster/locus dictionaries, assigns each genome an id (starting at 3),
    and writes a feature file per genome.
    """
    # path = 'ExampleData/input_files/'
    # endPath = 'ExampleData/test/'
    path, endPath = getFilePath()
    with open(path + "fGI_stats.csv", "rt", encoding="ascii") as f:
        reader_fgi = list(csv.reader(f, delimiter=","))
    with open(path + "Core_attfGI.csv", "rt", encoding="ascii") as f:
        reader_core = list(csv.reader(f, delimiter=","))
    with open(path + "db.txt", "r") as f:
        genomeListing = list(f)

    with open(path + "genomeCluster.dict", "rb") as f:
        genomeClusterDict = pickle.load(f)
    with open(path + "genomeLocus.dict", "rb") as f:
        genomeLocusDict = pickle.load(f)
    coreDict, fgiDict = createCoreClusterDict(reader_core)

    # Map each genome name to a numeric id, starting at 3.
    genomeIdDict = {}
    index = 3
    for genome2 in genomeListing:
        if "\n" in genome2:
            genome2 = genome2[0:-1]  # strip the trailing newline
        genomeIdDict[genome2] = index
        index += 1

    for genome in genomeIdDict:
        genomeDict = createfgiInsertDict(reader_fgi, genome)
        referenceList = createfGIFeatures(
            genomeDict, coreDict, fgiDict, genomeClusterDict, genomeLocusDict, genome, genomeIdDict[genome]
        )
        writeFile(endPath, genome, referenceList)
    # NOTE: the original repeated the loop body once more after the loop,
    # re-processing whichever genome the loop variable last held and
    # rewriting the same output file; that redundant duplicate was removed.
def main():
    """Print the lines that appear in both input files.

    Lines are compared after stripping trailing whitespace.  Every matching
    line of file 2 is reported (including repeats), in file-2 order.
    """
    parser = argparse.ArgumentParser(description="Simple program to show lines that are present in two files")

    parser.add_argument("-i1", "--input-file-1", dest="input_file_1", required=True, default=None, type=str)
    parser.add_argument("-i2", "--input-file-2", dest="input_file_2", required=True, default=None, type=str)

    args = parser.parse_args()

    _inform("Input file name 1: " + args.input_file_1)
    _inform("Input file name 2: " + args.input_file_2)

    # A set gives O(1) membership tests; the original scanned a list once
    # per line of file 2 (O(n) each, O(n*m) overall).
    file_1_lines = set()
    with open(args.input_file_1, "r") as input_file_1:
        for line in input_file_1:
            file_1_lines.add(line.rstrip())

    duplicate_lines = []
    with open(args.input_file_2, "r") as input_file_2:
        for line in input_file_2:
            line = line.rstrip()
            if line in file_1_lines:
                duplicate_lines.append(line)

    _inform("Start duplicate lines:")
    for line in duplicate_lines:
        _inform(line)

    _inform("End duplicate lines:")
Example #7
1
def generate_ovpn(metric):
    """Write vpn-up.sh / vpn-down.sh route scripts for every fetched network.

    Each (ip, mask) pair from fetch_ip_data() becomes a ``route add`` line
    in the up script (via the pre-VPN default gateway, at the given metric)
    and a matching ``route del`` line in the down script.  Both scripts are
    made executable.
    """
    results = fetch_ip_data()

    upscript_header = """\
#!/bin/bash -

OLDGW=$(ip route show 0/0 | head -n1 | grep 'via' | grep -Po '\d+\.\d+\.\d+\.\d+')

ip -batch - <<EOF
"""
    downscript_header = """\
#!/bin/bash -

ip -batch - <<EOF
"""

    # Context managers guarantee both scripts are flushed and closed even
    # if writing fails part-way.
    with open("vpn-up.sh", "w") as upfile, open("vpn-down.sh", "w") as downfile:
        upfile.write(upscript_header)
        downfile.write(downscript_header)

        for ip, _, mask in results:
            upfile.write("route add %s/%s via $OLDGW metric %s\n" % (ip, mask, metric))
            downfile.write("route del %s/%s\n" % (ip, mask))

        upfile.write("EOF\n")
        downfile.write("EOF\n")

    # 00755 was the Python 2 octal spelling and is a SyntaxError in
    # Python 3; 0o755 is accepted by both (2.6+).
    os.chmod("vpn-up.sh", 0o755)
    os.chmod("vpn-down.sh", 0o755)
Example #8
1
 def _read_checkpoint(self):
     """Read checkpoint file from disk.

     Returns the unpickled checkpoint contents, or "" when the file is
     missing, empty, or holds None.  A missing file is created empty; a
     corrupted file is optionally removed (else the process exits).
     """
     if not os.path.exists(self.checkpoint_path):
         self.log.info("No checkpoint found in %s." % self.checkpoint_path)
         open(self.checkpoint_path, "w").close()
         self.log.debug("Created checkpoint file in %s." % self.checkpoint_path)
         return ""
     try:
         # `with` closes the handle even if unpickling raises (the original
         # leaked it on the error path).
         with open(self.checkpoint_path, "rb") as f:
             read = pickle.load(f)
         # This debug line sat after a return in the original (dead code).
         self.log.debug("Checkpoint read from %s" % self.checkpoint_path)
         if read is not None:
             return read
         else:
             return ""
     except EOFError:
         # An empty checkpoint file is not an error.
         return ""
     except Exception:  # `except X, e` was Python-2-only syntax
         self.log.error("Error reading checkpoint in %s." % self.checkpoint_path)
         self.log.error(traceback.format_exc())
         if self.remove_corrupted_checkpoint_file:
             self.log.info("Removing corrupted checkpoint file %s." % self.checkpoint_path)
             os.remove(self.checkpoint_path)
             return ""
         sys.exit(-1)
    def updatePthFile(self, oldName, newName):
        """Searches site-packages for .pth files and replaces any instance of
        `oldName` with `newName`, where the names likely have the form PsychoPy-1.60.04

        Returns (nUpdates, info): the number of files rewritten and an info
        string; nUpdates is -1 when a file could not be written.
        """
        from distutils.sysconfig import get_python_lib

        siteDir = get_python_lib()
        pthFiles = glob.glob(os.path.join(siteDir, "*.pth"))
        # sometimes the site-packages dir isn't where the pth files are kept?
        enclosingSiteDir = os.path.split(siteDir)[0]
        pthFiles.extend(glob.glob(os.path.join(enclosingSiteDir, "*.pth")))
        nUpdates = 0  # no paths updated
        info = ""
        for filename in pthFiles:
            # `with` closes the read handle (the original leaked it).
            with open(filename, "r") as f:
                lines = f.readlines()
            needSave = False
            for lineN, line in enumerate(lines):
                if oldName in line:
                    lines[lineN] = line.replace(oldName, newName)
                    needSave = True
            if needSave:
                try:
                    with open(filename, "w") as f:
                        f.writelines(lines)
                    nUpdates += 1
                    logging.info("Updated PsychoPy path in %s" % filename)
                except (IOError, OSError):
                    # The original did `info += "...", filename`, which
                    # concatenates a tuple onto a str and raises TypeError;
                    # format the message properly instead.
                    info += "Failed to update PsychoPy path in %s" % filename
                    return -1, info
        return nUpdates, info
Example #10
0
def dofilehorz(filename):
    """Split over-long CSV cells horizontally into numbered columns.

    Pass 1 finds, per header, the maximum number of MAX_SIZE-sized chunks
    any cell needs; pass 2 writes <name>.trunc.csv with each column
    expanded into that many "<header>_<i>" columns, cells sliced into the
    chunks.
    """
    print(filename)  # `print x` was Python-2-only syntax
    with open(filename.replace(".csv", ".trunc.csv"), "w") as out:
        outcsv = UnicodeWriter(out)
        # do preparse: compute the chunk count per header column
        with open(filename, "r") as f:
            for i, row in enumerate(UnicodeReader(f)):
                if i == 0:
                    header = row
                    headers = Counter(row)
                    continue
                for c, cell in enumerate(row):
                    # Explicit floor division: plain `/` yields a float on
                    # Python 3 and breaks the chunk count.
                    size = (len(cell) // MAX_SIZE) + 1
                    headers[header[c]] = max(headers[header[c]], size)
        # pass 2: emit the expanded header then the sliced rows
        with open(filename, "r") as f:
            for i, row in enumerate(UnicodeReader(f)):
                if i == 0:
                    newrow = []
                    for c, cell in enumerate(header):
                        newrow.extend(["%s_%d" % (cell, r) for r in range(headers[cell])])
                    outcsv.writerow(newrow)
                    continue
                # populate dictionary: one key per expanded column
                d = OrderedDict()
                for c, cell in enumerate(row):
                    for r in range(headers[header[c]]):
                        d["%s_%d" % (header[c], r)] = cell[MAX_SIZE * r : MAX_SIZE * (r + 1)]
                outcsv.writerow(d.values())
Example #11
0
def test():
    """ pe_indicators.py: Unit test

    Requires a workbench server listening on tcp://127.0.0.1:4242.
    """
    import pprint

    # This worker test requires a local server running
    import zerorpc

    workbench = zerorpc.Client(timeout=300, heartbeat=60)
    workbench.connect("tcp://127.0.0.1:4242")

    # Generate the input data for this worker
    import os

    data_path = os.path.join(
        os.path.dirname(os.path.realpath(__file__)), "../data/pe/bad/033d91aae8ad29ed9fbb858179271232"
    )
    # `with` closes the sample handles (the original leaked them).
    with open(data_path, "rb") as f:
        md5_bad = workbench.store_sample(f.read(), "bad_pe", "exe")
    data_path = os.path.join(
        os.path.dirname(os.path.realpath(__file__)), "../data/pe/good/4be7ec02133544cde7a580875e130208"
    )
    with open(data_path, "rb") as f:
        md5_good = workbench.store_sample(f.read(), "good_pe", "exe")

    # Execute the worker (unit test); print statements were Python 2 syntax.
    worker = PEIndicators()
    output = worker.execute(workbench.get_sample(md5_bad))
    print("\n<<< Unit Test 1 >>>")
    pprint.pprint(output)
    output = worker.execute(workbench.get_sample(md5_good))
    print("\n<<< Unit Test 2 >>>")
    pprint.pprint(output)

    # Execute the worker (server test)
    output = workbench.work_request("pe_indicators", md5_bad)
    print("\n<<< Server Test >>>")
    pprint.pprint(output)
Example #12
0
def sanitize_open(filename, open_mode):
    """Try to open the given filename, and slightly tweak it if this fails.

    Attempts to open the given filename. If this fails, it tries to change
    the filename slightly, step by step, until it's either able to open it
    or it fails and raises a final exception, like the standard open()
    function.

    It returns the tuple (stream, definitive_file_name).
    """
    try:
        if filename == u"-":
            if sys.platform == "win32":
                import msvcrt

                msvcrt.setmode(sys.stdout.fileno(), os.O_BINARY)
            return (sys.stdout.buffer if hasattr(sys.stdout, "buffer") else sys.stdout, filename)
        stream = open(encodeFilename(filename), open_mode)
        return (stream, filename)
    except (IOError, OSError) as err:
        if err.errno in (errno.EACCES,):
            # Permission problems won't be fixed by renaming; re-raise.
            raise

        # In case of error, try to remove win32 forbidden chars.
        # NOTE: os.path.join takes *components* — the generator must be
        # unpacked with `*`; passing the generator object itself (as the
        # original did) raises TypeError.
        alt_filename = os.path.join(
            *(re.sub(u'[/<>:"\\|\\\\?\\*]', u"#", path_part) for path_part in os.path.split(filename))
        )
        if alt_filename == filename:
            raise
        else:
            # An exception here should be caught in the caller
            stream = open(encodeFilename(filename), open_mode)
            return (stream, alt_filename)
Example #13
0
 def _save_file(self, data, file):
     try:
         # write data to the file
         open(file, "w", encoding="utf-8").write(data.decode("utf-8"))
     except Exception as e:
         # oops
         print("An error occurred saving %s file!\n%s" % (file, e))
Example #14
0
def log_progress(app, mode="EDIT", filename=None, progress=0):
    """Append a progress entry to <app>/progress.log, creating it on first use.

    Each entry is "[timestamp] MODE filename: progress"; a START line is
    written when the log does not yet exist.
    """
    progress_file = os.path.join(apath(app, r=request), "progress.log")
    now = str(request.now)[:19]
    if not os.path.exists(progress_file):
        # First entry: create the log with a START marker.  `with` closes
        # the handle deterministically (the original never closed it).
        with open(progress_file, "w") as f:
            f.write("[%s] START\n" % now)
    if filename:
        with open(progress_file, "a") as f:
            f.write("[%s] %s %s: %s\n" % (now, mode, filename, progress))
Example #15
0
def main(argv):
    """Entry point: read -i/--inputfile, write its includified form to -o/--outputfile."""
    inputfile = "main.swift"
    outputfile = "app.swift"

    try:
        opts, args = getopt.getopt(sys.argv[1:], "i:o:", ["inputfile=", "outputfile="])
    except getopt.GetoptError as err:
        handle_error(str(err))

    for o, a in opts:
        if o in ["-i", "--inputfile"]:
            inputfile = a
        elif o in ["-o", "--outputfile"]:
            outputfile = a
        else:
            handle_error("unhandled option '{0}' detected".format(o))

    # Write the output file.  The original opened it twice (once with "w+"
    # only to create it), never closed the second handle, and referenced an
    # undefined name `benchmarkfile` in the error message (NameError).
    try:
        with open(outputfile, "w") as f:
            f.write(includify_file(inputfile))
    except IOError:
        handle_error("failed to write to file, '{0}'.".format(outputfile))
Example #16
0
def vsu_parse(fn, fn2, case_catalog, log_version):
    """Scan the VSU journal `fn` and write non-allowed test-set results to `fn2`.

    Returns a short report string naming the created failures file plus any
    TEST_PACKAGES / tetexec.cfg lines found in the journal.
    """
    report = "create " + os.path.basename(fn2) + "\n"
    with open(fn) as journal:  # JOURNAL
        journal_lines = journal.readlines()

    current_set = ""
    current_num = ""
    current_result = ""

    with open(fn2, "w+") as failures:  # FAILURES
        for entry in journal_lines:
            entry = entry.strip()
            if entry.startswith("30||TEST_PACKAGES"):
                report += "  " + entry + "\n"
            if entry.find("/tetexec.cfg") > 0:
                report += "  " + entry + "\n"
            if entry.startswith("10|"):
                # "10|" lines name the current test set.
                fields = entry.split("|")
                current_set = fields[1].split()[1]
            if entry.startswith("220|"):
                # "220|" lines carry a result; record any not allow-listed.
                fields = entry.split("|")
                current_result = fields[2]
                if current_result not in VSU_ALLOW_LIST:
                    current_num = fields[1].split()[1]
                    label = current_set + " " + current_num
                    failures.write("%-56s %s\n" % (label, current_result))
    return report
Example #17
0
    def __init__(self, server_path):
        """Set up a console wrapper for the server located at `server_path`.

        Side effects, in order: changes the process working directory to the
        server's folder, backs up residual logs, loads message/trigger data
        files, and starts the server process.
        """
        self.server_path = server_path
        # Per-server memory override, falling back to the module default.
        self.memory = memory_overrides.get(self.server_path, memory_default)
        self.server = None

        # Gives us the width
        self.term = Terminal()

        # Handles input nicely
        self.prompt = prompt.Prompt(self.run_command_from_prompt)

        # Queued server output
        self.server_output_queue = Queue()

        # Stuff to output (includes server output)
        self.console_output = []

        # Move to server directory
        os.chdir(os.path.join(servers_dir, self.server_path))

        # In case of any residual logs...
        self.backup()

        # Read random messages
        self.messages = open("messages.txt").read().splitlines()

        # Read triggers: each line is "pattern,response" split on the first comma.
        self.triggers = open("triggers.txt").read().splitlines()
        self.triggers = [x.split(",", 1) for x in self.triggers]

        # Start
        self.start_server()
Example #18
0
def merge_files():
    """
    Given a list of files where position\treads\nposition\treads...\n
    Returns a merged file containing the position and the reads, if the position between two files
    is the same, sums the reads.
    """

    # Set the variables
    outFile = sys.argv[1]
    inFile = sys.argv[2:]  # Takes the second argument and the possible ones being after it
    results_dic = {}

    # Accumulate reads per position across all input files.
    for fil in inFile:
        print(fil)
        with open(fil, "r") as filehandle:
            for line in filehandle:
                fields = line.strip().split()
                position = int(fields[0])
                # dict.get avoids the membership test / double lookup.
                results_dic[position] = results_dic.get(position, 0) + int(fields[1])

    # Export to a file, sorted by position.  (.iteritems() was Python 2
    # only and raises AttributeError on Python 3; sorted(.items()) works
    # everywhere and replaces the OrderedDict detour.)
    with open(outFile, "w") as fo:
        for k, v in sorted(results_dic.items()):
            fo.write(str(k) + "\t" + str(v) + "\n")
Example #19
0
    def test_rename_one_after_source_and_dest(self):
        """rename_one with after=False must fail when both source and target
        exist on disk; after=True records the rename in the tree without
        touching either file."""
        tree = self.make_branch_and_tree(".")
        self.build_tree(["a", "b/", "b/foo"])
        tree.add(["a", "b"], ["a-id", "b-id"])
        tree.commit("initial", rev_id="rev-1")
        root_id = tree.get_root_id()

        # TODO: jam 20070225 I would usually use 'rb', but assertFileEqual
        #       uses 'r'.
        # `with` replaces the verbose try/finally open/close pairs.
        with open("a", "r") as a_file:
            a_text = a_file.read()
        with open("b/foo", "r") as foo_file:
            foo_text = foo_file.read()

        self.assertTreeLayout([("", root_id), ("a", "a-id"), ("b/", "b-id")], tree)
        self.assertRaises(errors.RenameFailedFilesExist, tree.rename_one, "a", "b/foo", after=False)
        self.assertTreeLayout([("", root_id), ("a", "a-id"), ("b/", "b-id")], tree)
        self.assertFileEqual(a_text, "a")
        self.assertFileEqual(foo_text, "b/foo")
        # But you can pass after=True
        tree.rename_one("a", "b/foo", after=True)
        self.assertTreeLayout([("", root_id), ("b/", "b-id"), ("b/foo", "a-id")], tree)
        self.assertTreeLayout([("", root_id), ("a", "a-id"), ("b/", "b-id")], tree.basis_tree())
        # But it shouldn't actually move anything
        self.assertFileEqual(a_text, "a")
        self.assertFileEqual(foo_text, "b/foo")
Example #20
0
    def write_module(self, basedir, package, generated_modules):
        """create a module file to mark directory for python

        Writes an __init__.py that re-exports every generated module, and
        touches an empty __init__.py in the parent package if missing.
        """
        dir = self.outdir(basedir)
        if not os.path.exists(dir):
            os.makedirs(dir)
        elif not os.path.isdir(dir):
            raise self.exception("file preventing the creating of module directory: %s" % dir)
        p = os.path.join(dir, "__init__.py")
        if roslib.msgs.is_verbose():
            print("... creating module file", p)
        # this causes more problems than anticipated -- for pure python
        # packages it works fine, but in C++ packages doxygen seems to prefer python first.
        # f.write('## \mainpage\n') #doxygen
        # f.write('# \htmlinclude manifest.html\n')
        # `with` replaces the try/finally close.
        with open(p, "w") as f:
            for mod in generated_modules:
                f.write("from .%s import *\n" % mod)

        parentInit = os.path.dirname(dir)
        p = os.path.join(parentInit, "__init__.py")
        if not os.path.exists(p):
            # touch __init__.py in the parent package
            print("... also creating module file %s" % p)
            with open(p, "w"):
                pass
Example #21
0
 def drop(self):
     """Remove me from the list of running bot processes."""
     # drop all throttles with this process's pid, regardless of site
     self.checktime = 0
     processes = []
     try:
         f = open(self.ctrlfilename, "r")
     except IOError:
         # No control file: nothing to drop.
         return
     else:
         now = time.time()
         # `with` closes the read handle (the original leaked it — only the
         # later write handle was ever closed).
         with f:
             for line in f.readlines():
                 try:
                     fields = line.split(" ")
                     this_pid = int(fields[0])
                     ptime = int(fields[1].split(".")[0])
                     this_site = fields[2].rstrip()
                 except (IndexError, ValueError):
                     # Sometimes the file gets corrupted ignore that line
                     continue
                 # Keep only recent entries belonging to *other* processes.
                 if now - ptime <= self.releasepid and this_pid != pid:
                     processes.append({"pid": this_pid, "time": ptime, "site": this_site})
     processes.sort(key=lambda p: p["pid"])
     try:
         with open(self.ctrlfilename, "w") as f:
             for p in processes:
                 f.write("%(pid)s %(time)s %(site)s\n" % p)
     except IOError:
         return
Example #22
0
    def __init__(self, anAppName, aRecursive=True):
        """ Throws SysSingletonCreateError if such app singleton already exists (app is running), other exceptions for the rest """
        self.__isCreated = False
        self.__mutexPath = "/var/run/%s.pid" % anAppName

        if not os.access(self.__mutexPath, os.F_OK):
            # No pid file yet: claim the singleton by writing our pid.
            if not os.access("/var/run/", os.F_OK):
                os.makedirs("/var/run/")
            with open(self.__mutexPath, "w") as myMutexFile:
                myMutexFile.write("%d\n" % os.getpid())
            self.__isCreated = True
            return

        with open(self.__mutexPath, "r") as myMutexFile:
            myPid = int(myMutexFile.readline())
        if myPid == os.getpid():
            # We already hold the lock ourselves.
            if aRecursive:
                return
            raise SysSingletonCreateError(anAppName, myPid)

        if isPidExist(myPid):
            # Another live process owns the pid file.
            raise SysSingletonCreateError(anAppName, myPid)

        # Stale pid file left by a dead process: take it over.
        # NOTE(review): the access/open sequence is racy (TOCTOU) if two
        # processes start simultaneously — confirm whether that matters here.
        with open(self.__mutexPath, "w") as myMutexFile:
            myMutexFile.write("%d\n" % os.getpid())
        self.__isCreated = True
Example #23
0
 def testReadConf(self):
     """Verify read_conf() parses key=value pairs from stattrd.conf.

     Temporarily overwrites the real config with known contents, checks the
     parsed dict, and restores the original file afterwards — also on
     failure.
     """
     # Make sure determine_path() has been exercised first; a failure there
     # is deliberately ignored so this test can still report its own result.
     try:
         self.testDeterminePath()
     except:
         pass
     # Save the current config so it can be restored afterwards.
     oldconf = open(statserv.server.determine_path() + "/stattrd.conf").read()
     # Replace the config with known key=value content.
     thefile = open(statserv.server.determine_path() + "/stattrd.conf", "w")
     thefile.truncate(0)
     thefile.write("dbname=example\nport=54321\noptions=\nsitename=test")
     thefile.close()
     try:
         self.assertEquals(
             dict({"dbname": "example", "port": "54321", "options": "", "sitename": "test"}),
             statserv.server.read_conf(),
         )
     except:
         # On mismatch: capture the actual result, restore the original
         # config, then fail with a descriptive message.
         failmsg = statserv.server.read_conf()
         thefile = open(statserv.server.determine_path() + "/stattrd.conf", "w")
         thefile.truncate(0)
         thefile.write(oldconf)
         thefile.close()
         self.fail(
             "The read_conf method is broken. We expected: `"
             "{'dbname': 'example', 'port': '54321', "
             "'options': '', 'sitename': 'test'}`, but"
             " we got: `%s`" % failmsg
         )
     # Success path: restore the original config as well.
     thefile = open(statserv.server.determine_path() + "/stattrd.conf", "w")
     thefile.truncate(0)
     thefile.write(oldconf)
     thefile.close()
def main(args):
    """Replay recorded test cases and attach each command's output.

    Reads a JSON list of test cases from args[0], runs every case's
    command, stores the result under "command_output", and writes the
    augmented list to args[1].
    """
    recording_file, result_file = args[0], args[1]

    with open(recording_file) as fd:
        test_cases = json.load(fd)

    for case in test_cases:
        cmd = case["command"]
        exit_code, stdout, stderr, duration, did_timeout = run_command(
            case["name"], cmd["executable"], cmd["arguments"], cmd["timeout_limit"]
        )
        case["command_output"] = {
            "exit_code": exit_code,
            "stdout": stdout,
            "stderr": stderr,
            "duration": duration,
            "did_timeout": did_timeout,
        }

    with open(result_file, "w") as fd:
        json.dump(test_cases, fd)
    def __init__(self, filename=None, verbose=False, read=False):
        """Create a recorder.

        With read=True, replay entries from `filename`; otherwise, if a
        filename is given, truncate/create it for later writing.  Each input
        line is tab-separated: <label>\\t<list-literal>\\t<dict-literal>.
        """
        self.lst_of_data = []
        self.verbose = verbose
        self.lineCounter = 0

        if read and filename:
            with open(filename, "r") as f:
                for line in f:
                    ll = line.split("\t")
                    # SECURITY: eval() executes arbitrary code from the file;
                    # ast.literal_eval would be safer if the data is
                    # literal-only — flagged, not changed.
                    # `except Exception` (not bare except) so Ctrl-C and
                    # SystemExit still propagate.
                    try:
                        lst = eval(ll[1])
                    except Exception:
                        lst = None

                    try:
                        dct = eval(ll[2])
                    except Exception:
                        dct = None

                    self._smallaccept(lst, dct)
            return

        if filename:
            self.filename = filename
            # Truncate/create the output file.
            with open(self.filename, "w") as f:
                pass
Example #26
0
def generate(folder, todo):
    """Build <folder>/index.html listing the movies from <folder>/movies.json.

    `todo` selects the wording for a watch-list versus a seen-list; the
    last five movies get the "success" button style.  Returns early if the
    JSON has no "movies" key.
    """
    with open(folder + "start.txt") as fh:
        start_html = fh.read()
    with open(folder + "movies.json") as fh:
        payload = json.load(fh)
        if "movies" not in payload:
            return
        movies = payload["movies"]
    with open("search_form.txt") as fh:
        search_form = fh.read()
    with open("end.txt") as fh:
        end_html = fh.read()

    with open(folder + "index.html", "w") as page:
        page.write(start_html)
        page.write('<p class="lead">The ' + str(len(movies)) + " ")
        if todo:
            page.write("movies I want to watch are listed on this page. The 5 most ")
            page.write("recently added are marked green and the movies marked red are ")
            page.write("the longest in this list.</p>\n")
        else:
            page.write("movies I have seen are listed on this page. The last 5 ")
            page.write("movies I have seen are labeled green.</p>\n")
        page.write(search_form)
        page.write('<p class="movies" id="movies-list">\n')
        # The final five entries are highlighted.
        threshold = len(movies) - 5
        for idx, movie in enumerate(movies):
            if idx >= threshold:
                page.write(json_to_html(movie, btn_type="success"))
            else:
                page.write(json_to_html(movie))
        page.write(end_html)
 def inner():
     """Open the spellbook `recent`, asking to save pending changes first.

     NOTE(review): `recent`, `self`, `QtGui` and `SpellBookHandler` come
     from the enclosing scope, which is outside this view.
     """
     if self.isModified():
         # Unsaved changes: ask the user whether to save before switching.
         msgBox = QtGui.QMessageBox()
         msgBox.setText(self.tr("The spellbook has been modified."))
         msgBox.setInformativeText(self.tr("Do you want to save your changes?"))
         msgBox.setStandardButtons(QtGui.QMessageBox.Yes | QtGui.QMessageBox.No | QtGui.QMessageBox.Cancel)
         msgBox.setDefaultButton(QtGui.QMessageBox.Yes)
         ret = msgBox.exec_()
         if ret == QtGui.QMessageBox.No:
             # Discard changes and open the recent book directly.
             SpellBookHandler.open(recent, self)
             self.filename = recent
             self.modified = False
             self.updateRecents()
             with open(self.configfile, "w") as f:
                 json.dump(self.config, f, indent=2)
             self.updateWindowName()
         elif ret == QtGui.QMessageBox.Yes:
             # Save first.  NOTE(review): this branch never opens `recent`
             # afterwards — confirm whether that is intentional.
             self.saveBook()
         # Cancel falls through without opening anything.
     else:
         # No pending changes: open the recent book straight away.
         SpellBookHandler.open(recent, self)
         self.modified = False
         self.filename = recent
         self.updateRecents()
         with open(self.configfile, "w") as f:
             json.dump(self.config, f, indent=2)
         self.updateWindowName()
Example #28
0
def replaceInFile(fileIn, fileOut, textToSearch, newText):
    """Copy fileIn to fileOut, replacing each pattern in textToSearch with
    the corresponding entry of newText.

    Note: the search strings are treated as regular expressions by re.sub;
    wrap them in re.escape for literal replacement.
    """
    # `with` closes the input handle (the original leaked it).
    with open(fileIn) as src:
        data = src.read()
    # zip pairs each pattern with its replacement, replacing the
    # parallel-index loop.
    for pattern, replacement in zip(textToSearch, newText):
        data = re.sub(pattern, replacement, data)
    with open(fileOut, "w") as dst:
        dst.write(data)
Example #29
0
def writepac(enable):
    """Toggle the 'return "DIRECT";' short-circuit line in gtproxy.pac.

    enable=True removes the DIRECT line (traffic goes through the proxy);
    enable=False inserts it (traffic goes direct).  No-op when the file is
    missing or already in the requested state.
    """
    myenv = Gtenv("")
    path = myenv.getpath()
    # `pac_file` avoids shadowing the builtin `file`.
    pac_file = path + "/static/gtproxy.pac"
    if not os.path.exists(pac_file):
        return
    # `with` replaces the try/finally close pairs.
    with open(pac_file) as file_object:
        list_of_all_the_lines = file_object.readlines()
    # Line 1 holds the DIRECT short-circuit when the proxy is disabled;
    # find() > 0 mirrors the original's quirk of ignoring a match at col 0.
    line = list_of_all_the_lines[1]
    pos = line.find('return "DIRECT";')
    usingproxy = not pos > 0
    if enable:
        if not usingproxy:
            del list_of_all_the_lines[1]
    else:
        if usingproxy:
            list_of_all_the_lines.insert(1, '    return "DIRECT";\r\n')
    with open(pac_file, "w") as file_object:
        file_object.writelines(list_of_all_the_lines)
Example #30
0
def main():
    """Compile the Ada source given on the command line down to assembly.

    Parses sys.argv[1], type-checks it, generates code into
    ./assembly/<name>.asm, and finally runs the jump generator over the
    generated code sequence.
    """
    lexer = adalex.make_lexer()
    tokens = adalex.tokens
    parser = adaparse.make_parser()
    fpath = sys.argv[1]  # input file path
    global filepath
    filepath = fpath
    # `with` closes the source handle (the original leaked it).
    with open(fpath) as src:
        program = parser.parse(src.read())
    cwd = os.getcwd()  # getting the current working directory
    slash, dot = fpath.rfind("/"), fpath.rfind(".")
    gfilename = fpath[slash + 1 : dot]  # getting the input canonical file name stripping of the rest

    # Check the program
    typechecker = typecheck.typecheck()
    initialize_types(typechecker)
    env = typechecker.check_goal_symbol(program)

    if typechecker.get_error_count() > 0:
        # `print x` was Python-2-only syntax; use the function form.
        print("Fix the type errors and compile again")
        sys.exit(0)
    # If no errors occurred, generate code
    code = generate_code(program)
    gen_file = cwd + "/assembly/" + gfilename + ".asm"  # forming the output file name
    try:
        with open(gen_file, "w") as fd:
            fd.write(code)
    except IOError:
        print("folder cannot be created")
    print("Done")
    # Emit the code sequence
    JumpGenerator().visit(code)