Example #1
def read_users(input, delimiter=',', fields=_fields):
    """Read in users from flat text input. 
    
    If input is None or an empty list, STDIN is used.
    """
    f = FileInput(input)
    reader = csv.reader(f, delimiter=delimiter)

    users = {}
    for row in reader:

        if len(row) != len(fields):
            raise CLIError('EUSER', 
                    'Line %s of input has incorrect number of fields. Expecting "%s"' %
                    (f.lineno(), delimiter.join(fields)))
        u = dict([(k, v) for k, v in izip(fields, row)])

        name = u['display_name']
        grp = _fix_group(u['group'])
        u['group'] = grp
        if name in users:
            raise CLIError('EUSER', 'Duplicate entry present for user "%s"' % name)
        users[name] = u
    
    return users
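
Note: read_users above relies on module-level names (_fields, CLIError, izip) that the snippet does not show. The underlying pattern, FileInput feeding csv.reader, works on its own; a minimal Python 3 sketch follows (the file name and field list are hypothetical, and zip stands in for izip):

import csv
from fileinput import FileInput

fields = ('display_name', 'group', 'shell')   # hypothetical field list

# With no file names, FileInput reads from STDIN, which is the
# behaviour read_users documents for None/empty input.
with FileInput(files=['users.txt']) as f:     # 'users.txt' is hypothetical
    for row in csv.reader(f, delimiter=','):
        user = dict(zip(fields, row))
        print(user['display_name'], user['group'])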
Example #2
def processingAddAmendHistory(searchPattern):
    input=FileInput(files=tuple(glob.glob(searchPattern)),inplace=1)
    for line in input:
        if "*/" in line:#find out amend history last line
            sys.stdout.write(addamendhistory.substitute())
        sys.stdout.write(line)
    input.close()
Example #3
    def test_file_opening_hook(self):
        try:
            # cannot use openhook and inplace mode
            fi = FileInput(inplace=1, openhook=lambda f, m: None)
            self.fail("FileInput should raise if both inplace "
                             "and openhook arguments are given")
        except ValueError:
            pass
        try:
            fi = FileInput(openhook=1)
            self.fail("FileInput should check openhook for being callable")
        except ValueError:
            pass

        class CustomOpenHook:
            def __init__(self):
                self.invoked = False
            def __call__(self, *args):
                self.invoked = True
                return open(*args)

        t = writeTmp(1, ["\n"])
        self.addCleanup(remove_tempfiles, t)
        custom_open_hook = CustomOpenHook()
        with FileInput([t], openhook=custom_open_hook) as fi:
            fi.readline()
        self.assertTrue(custom_open_hook.invoked, "openhook not invoked")
Example #4
 def test_files_that_dont_end_with_newline(self):
     t1 = self.writeTmp("A\nB\nC")
     t2 = self.writeTmp("D\nE\nF")
     fi = FileInput(files=(t1, t2))
     lines = list(fi)
     self.assertEqual(lines, ["A\n", "B\n", "C", "D\n", "E\n", "F"])
     self.assertEqual(fi.filelineno(), 3)
     self.assertEqual(fi.lineno(), 6)
Example #5
 def test_files_that_dont_end_with_newline(self):
     try:
         t1 = writeTmp(1, ["A\nB\nC"])
         t2 = writeTmp(2, ["D\nE\nF"])
         fi = FileInput(files=(t1, t2))
         lines = list(fi)
         self.assertEqual(lines, ["A\n", "B\n", "C", "D\n", "E\n", "F"])
         self.assertEqual(fi.filelineno(), 3)
         self.assertEqual(fi.lineno(), 6)
     finally:
         remove_tempfiles(t1, t2)
Example #6
def update(adder, adder_path, report_path):
	adder = 'iadder_B16_{0}.v'.format(adder)
	f = FileInput('./pscripts/auto_power.tcl', inplace=True)
	for line in f:
		if KEY.ANALYZE_IADDER in line:
			print('analyze -f verilog ../{0}   ;# {1}\n'.format(adder, KEY.ANALYZE_IADDER), end='')
		elif KEY.IADDER_FOLDER in line:
			print('set saiffiles [glob {0}/*.saif] ;# {1}\n'.format(adder_path, KEY.IADDER_FOLDER), end='')
		elif KEY.REPORT_POWER in line:
			print("report_power > {0}/$fbasename.txt ; # {1}\n".format(report_path, KEY.REPORT_POWER),end='')
		else:
			print(line, end='')
	f.close()
Example #7
 def test_iteration_buffering(self):
     src = LineReader()
     fi = FileInput(files=['line1\nline2', 'line3\n'], openhook=src.openhook)
     self.assertEqual(src.linesread, [])
     self.assertEqual(next(fi), 'line1\n')
     self.assertEqual(src.linesread, ['line1\n'])
     self.assertEqual(next(fi), 'line2')
     self.assertEqual(src.linesread, ['line2'])
     self.assertEqual(next(fi), 'line3\n')
     self.assertEqual(src.linesread, ['', 'line3\n'])
     self.assertRaises(StopIteration, next, fi)
     self.assertEqual(src.linesread, [''])
     self.assertRaises(StopIteration, next, fi)
     self.assertEqual(src.linesread, [])
     fi.close()
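
The LineReader fixture used here (and in Example #12 below) is not included in the snippet. A plausible reconstruction that satisfies the assertions above is sketched here: the 'filename' string is treated as the file's content, and each line handed back to FileInput is recorded so the test can verify that no extra buffering happens.

class LineReader:
    """Hypothetical open hook: the 'filename' string is the file content."""

    def __init__(self):
        self._linesread = []

    @property
    def linesread(self):
        # Report what was read since the last check, then reset.
        try:
            return self._linesread[:]
        finally:
            self._linesread = []

    def openhook(self, filename, mode):
        self.it = iter(filename.splitlines(keepends=True))
        return self

    def readline(self, size=None):
        line = next(self.it, '')
        self._linesread.append(line)
        return line

    def close(self):
        pass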
Example #8
 def __init__(self, files=None, hook = None):
     self.fi = [] # an array of file input objects.  We push and pop onto this so we can resume.
     self.files = [] # an array of files that need to be read later.
     self.hook = hook # the open hook passed on to FileInput
     self.current = None # the current file input object.
     self.line = 0
     if files is not None:
         self.current = FileInput(files, openhook = self.hook)
Example #9
    def test_readline(self):
        with open(TESTFN, 'wb') as f:
            f.write('A\nB\r\nC\r')
            # Fill TextIOWrapper buffer.
            f.write('123456789\n' * 1000)
            # Issue #20501: readline() shouldn't read whole file.
            f.write('\x80')
        self.addCleanup(safe_unlink, TESTFN)

        fi = FileInput(files=TESTFN, openhook=hook_encoded('ascii'), bufsize=8)
        # The most likely failure is a UnicodeDecodeError due to the entire
        # file being read when it shouldn't have been.
        self.assertEqual(fi.readline(), u'A\n')
        self.assertEqual(fi.readline(), u'B\r\n')
        self.assertEqual(fi.readline(), u'C\r')
        with self.assertRaises(UnicodeDecodeError):
            # Read to the end of file.
            list(fi)
        fi.close()
Example #10
 def test_fileno(self):
     try:
         t1 = writeTmp(1, ["A\nB"])
         t2 = writeTmp(2, ["C\nD"])
         fi = FileInput(files=(t1, t2))
         self.assertEqual(fi.fileno(), -1)
         line = fi.next()
         self.assertNotEqual(fi.fileno(), -1)
         fi.nextfile()
         self.assertEqual(fi.fileno(), -1)
         line = list(fi)
         self.assertEqual(fi.fileno(), -1)
     finally:
         remove_tempfiles(t1, t2)
Example #11
 def test_fileno(self):
     t1 = self.writeTmp("A\nB")
     t2 = self.writeTmp("C\nD")
     fi = FileInput(files=(t1, t2))
     self.assertEqual(fi.fileno(), -1)
     line = next(fi)
     self.assertNotEqual(fi.fileno(), -1)
     fi.nextfile()
     self.assertEqual(fi.fileno(), -1)
     line = list(fi)
     self.assertEqual(fi.fileno(), -1)
Example #12
 def test_readline_buffering(self):
     src = LineReader()
     fi = FileInput(files=['line1\nline2', 'line3\n'], openhook=src.openhook)
     self.assertEqual(src.linesread, [])
     self.assertEqual(fi.readline(), 'line1\n')
     self.assertEqual(src.linesread, ['line1\n'])
     self.assertEqual(fi.readline(), 'line2')
     self.assertEqual(src.linesread, ['line2'])
     self.assertEqual(fi.readline(), 'line3\n')
     self.assertEqual(src.linesread, ['', 'line3\n'])
     self.assertEqual(fi.readline(), '')
     self.assertEqual(src.linesread, [''])
     self.assertEqual(fi.readline(), '')
     self.assertEqual(src.linesread, [])
     fi.close()
Example #13
    def readline(self):
        if self.current is not None:
            l = self.current.readline()
            if len(l) > 0:
                self.line = self.line + 1
                return l

        # Advance to the next file
        if len (self.fi) > 0:
            self.current = self.fi.pop()
            return self.readline()

        # see if there are any files left to open
        if len (self.files) > 0:
            self.current = FileInput(self.files, openhook = fileinput.hook_compressed)
            self.files = []
            return self.readline()

        return ""
Example #14
    def test_readline(self):
        with open(TESTFN, 'wb') as f:
            f.write('A\nB\r\nC\r')
            # Fill TextIOWrapper buffer.
            f.write('123456789\n' * 1000)
            # Issue #20501: readline() shouldn't read whole file.
            f.write('\x80')
        self.addCleanup(safe_unlink, TESTFN)

        fi = FileInput(files=TESTFN, openhook=hook_encoded('ascii'), bufsize=8)
        # The most likely failure is a UnicodeDecodeError due to the entire
        # file being read when it shouldn't have been.
        self.assertEqual(fi.readline(), u'A\n')
        self.assertEqual(fi.readline(), u'B\r\n')
        self.assertEqual(fi.readline(), u'C\r')
        with self.assertRaises(UnicodeDecodeError):
            # Read to the end of file.
            list(fi)
        fi.close()
Example #15
 def run(self, files):
     for line in FileInput(files):
         self.feed(line)
     return self
Example #16
def processIface(iface, h_tmplt, cpp_tmplt, ih_tmplt, h_dest, cpp_dest,
                 docstr_dest, ih_dest, msgcodes):
    curDocStrings = []
    values = []
    methods = []
    cmds = []

    # parse iface file
    fi = FileInput(iface)
    for line in fi:
        line = line[:-1]
        if line[:2] == '##' or line == '':
            #curDocStrings = []
            continue

        op = line[:4]
        if line[:2] == '# ':  # a doc string
            curDocStrings.append(line[2:])

        elif op == 'val ':
            parseVal(line[4:], values, curDocStrings)
            curDocStrings = []

        elif op == 'fun ' or op == 'set ' or op == 'get ':
            parseFun(line[4:], methods, curDocStrings, cmds, op == 'get ',
                     msgcodes)
            curDocStrings = []

        elif op == 'cat ':
            if line[4:].strip() == 'Deprecated':
                break  # skip the rest of the file

        elif op == 'evt ':
            pass

        elif op == 'enu ':
            pass

        elif op == 'lex ':
            pass

        else:
            print('***** Unknown line type: %s' % line)

    # process templates
    data = {}
    data['VALUES'] = processVals(values)
    data['CMDS'] = processVals(cmds)
    defs, imps, docstrings, idefs = processMethods(methods)
    data['METHOD_DEFS'] = defs
    data['METHOD_IDEFS'] = idefs
    data['METHOD_IMPS'] = imps

    # get template text
    h_text = open(h_tmplt).read()
    ih_text = open(ih_tmplt).read()
    cpp_text = open(cpp_tmplt).read()

    # do the substitutions
    h_text = h_text % data
    cpp_text = cpp_text % data
    ih_text = ih_text % data

    # write out destination files
    open(h_dest, 'w').write(h_text)
    open(cpp_dest, 'w').write(cpp_text)
    if docstr_dest:
        open(docstr_dest, 'w').write(docstrings)
    open(ih_dest, 'w').write(ih_text)
Example #17
    def test_zero_byte_files(self):
        t1 = self.writeTmp("")
        t2 = self.writeTmp("")
        t3 = self.writeTmp("The only line there is.\n")
        t4 = self.writeTmp("")
        fi = FileInput(files=(t1, t2, t3, t4))

        line = fi.readline()
        self.assertEqual(line, 'The only line there is.\n')
        self.assertEqual(fi.lineno(), 1)
        self.assertEqual(fi.filelineno(), 1)
        self.assertEqual(fi.filename(), t3)

        line = fi.readline()
        self.assertFalse(line)
        self.assertEqual(fi.lineno(), 1)
        self.assertEqual(fi.filelineno(), 0)
        self.assertEqual(fi.filename(), t4)
        fi.close()
Example #18
 def test__getitem___deprecation(self):
     t = self.writeTmp("line1\nline2\n")
     with self.assertWarnsRegex(DeprecationWarning,
                                r'Use iterator protocol instead'):
         with FileInput(files=[t]) as fi:
             self.assertEqual(fi[0], "line1\n")
Example #19
 def include(self, files):
     if self.current is not None:
         self.fi.append(self.current)
     self.current = FileInput(files, openhook = self.hook)
Example #20
def fix_includes(path):
    for line in FileInput(path, inplace=True):
        line = re.sub(r'#include "src/', '#include "catapult/', line.rstrip())
        line = re.sub(r'#include "plugins/.*/src/', '#include "catapult/',
                      line.rstrip())
        print(line)
Example #21
for round, bs in (0, 0), (1, 30):
    try:
        writeFiles()
        runTests(t1, t2, t3, t4, bs, round)
    finally:
        remove_tempfiles(t1, t2, t3, t4)

# Next, check for proper behavior with 0-byte files.
if verbose:
    print "13. 0-byte files"
try:
    t1 = writeTmp(1, [""])
    t2 = writeTmp(2, [""])
    t3 = writeTmp(3, ["The only line there is.\n"])
    t4 = writeTmp(4, [""])
    fi = FileInput(files=(t1, t2, t3, t4))
    line = fi.readline()
    verify(line == 'The only line there is.\n')
    verify(fi.lineno() == 1)
    verify(fi.filelineno() == 1)
    verify(fi.filename() == t3)
    line = fi.readline()
    verify(not line)
    verify(fi.lineno() == 1)
    verify(fi.filelineno() == 0)
    verify(fi.filename() == t4)
    fi.close()
finally:
    remove_tempfiles(t1, t2, t3, t4)

if verbose:
Example #22
'''
Example #23
def main(argv):
    args = parse_args()

    ## check the yaml of these files because the ruamel python lib has issues with loading them
    yaml_files_check_list = [
        'ml-operator/values.yaml', 'emailnotifier/values.yaml'
    ]

    ports_array = {
        "simapi": "3000",
        "reportapi": "3002",
        "testapi": "3003",
        "https": "80",
        "http": "80",
        "http-admin": "4001",
        "http-api": "4002",
        "mysql": "3306",
        "mongodb": "27017",
        "inboundapi": "{{ $config.config.schemeAdapter.env.INBOUND_LISTEN_PORT }}",
        "outboundapi": "{{ $config.config.schemeAdapter.env.OUTBOUND_LISTEN_PORT }}"
    }

    p = Path() / args.directory
    print(f"Processing helm charts in directory: [{args.directory}]")
    yaml = YAML()
    yaml.allow_duplicate_keys = True
    yaml.preserve_quotes = True
    yaml.width = 4096

    # walk the directory structure and process all the values.yaml files
    # replace solsson kafka with kymeric
    # replace kafka start-up check with netcat test (TODO check to see if this is ok)
    # replace mysql with the arm version of mysql and adjust the tag on the following line (TODO: check that the latest docker mysql/mysql-server tag is ok)
    # TODO: maybe don't do this line by line but rather read in the entire file => can match across lines and avoid the next_line_logic
    # for now disable metrics and metrics exporting
    # replace the mojaloop images with the locally built  ones

    if (args.all or args.values):
        print(
            "\n\n============================================================="
        )
        print("Processing values.yaml files.. ")
        print("=============================================================")

        for vf in p.rglob('*/values.yaml'):
            backupfile = Path(vf.parent) / f"{vf.name}_bak"
            print(f"{vf} : {backupfile}")
            copyfile(vf, backupfile)
            with FileInput(files=[vf], inplace=True) as f:
                next_line_is_mojaloop_tag = False
                for line in f:
                    line = line.rstrip()

                    # now update the mojaloop images
                    if (next_line_is_mojaloop_tag):
                        line = re.sub("tag:.*$", "tag: latest", line)
                        next_line_is_mojaloop_tag = False
                    # TODO: check that there is no mojaloop image with > 3 parts to its name i.e. > 3 hyphens
                    if re.match(r"(\s+)repository:\s*mojaloop", line):
                        line = re.sub(
                            r"(\s+)repository:\s*mojaloop/(\w+)-(\w+)-(\w+)-(\w+)",
                            r"\1repository: \2_\3_\4_\5_local", line)
                        line = re.sub(
                            r"(\s+)repository:\s*mojaloop/(\w+)-(\w+)-(\w+)",
                            r"\1repository: \2_\3_\4_local", line)
                        line = re.sub(
                            r"(\s+)repository:\s*mojaloop/(\w+)-(\w+)",
                            r"\1repository: \2_\3_local", line)
                        line = re.sub(r"(\s+)repository:\s*mojaloop/(\w+)",
                                      r"\1repository: \2_local", line)
                        next_line_is_mojaloop_tag = True

                    print(line)

    ## TODO  Need to modify the kafka requirements.yaml to update the zookeeper image
    ##       if I am fully automating this
    # walk the directory structure and process all the requirements.yaml files
    # kafka => local kafka chart
    # mysql/percona => local mysql chart with later arm64 based image
    # zookeeper => local zookeeper (this is in the requirements.yaml of the kafka local chart)

    if (args.all or args.requirements):
        print(
            "\n\n============================================================="
        )
        print("Processing requirements.yaml files ")
        print("=============================================================")
        for rf in p.rglob('*/requirements.yaml'):
            backupfile = Path(rf.parent) / f"{rf.name}_bak"
            print(f"{rf} : {backupfile}")
            copyfile(rf, backupfile)
            with open(rf) as f:
                reqs_data = yaml.load(f)
                #print(reqs_data)
            try:
                dlist = reqs_data['dependencies']
                for i in range(len(dlist)):
                    if (dlist[i]['name'] == "percona-xtradb-cluster"):
                        print(f"old was: {dlist[i]}")
                        dlist[i]['name'] = "mysql"
                        dlist[i]['version'] = "1.0.0"
                        dlist[i]['repository'] = "file://../mysql"
                        dlist[i]['alias'] = "mysql"
                        dlist[i]['condition'] = "enabled"
                        print(f"new is: {dlist[i]}")

                    if (dlist[i]['name'] == "kafka"):
                        print(f"old was: {dlist[i]}")
                        dlist[i]['repository'] = "file://../kafka"
                        dlist[i]['version'] = "1.0.0"
                        print(f"new is: {dlist[i]}")

                    if (dlist[i]['name'] == "zookeeper"):
                        print(f"old was: {dlist[i]}")
                        dlist[i]['version'] = "1.0.0"
                        dlist[i]['repository'] = "file://../zookeeper"
                        print(f"new is: {dlist[i]}")

                    if (dlist[i]['name'] == "mongodb"):
                        print(f"old was: {dlist[i]}")
                        dlist[i]['version'] = "1.0.0"
                        dlist[i]['repository'] = "file://../mongodb"
                        print(f"new is: {dlist[i]}")
            except Exception:
                continue
            #print(yaml.dump(reqs_data))
            with open(rf, "w") as f:
                yaml.dump(reqs_data, f)

    if (args.testonly):
        print(
            "\n\n==============================================================="
        )
        print("running toms code tests")
        print(
            "===============================================================")

        for vf in p.rglob('*/values.yaml'):
            backupfile = Path(vf.parent) / f"{vf.name}_bak"
            # print(f"{vf} : {backupfile}")
            copyfile(vf, backupfile)

            with open(vf) as f:
                skip = False
                for fn in yaml_files_check_list:
                    if vf == Path(fn):
                        print(
                            f"This yaml file needs checking skipping load/processing for now =>  {Path(fn)} "
                        )
                        skip = True
                if not skip:
                    print(f"      Loading yaml for ==> {vf.parent}/{vf.name}",
                          end="")
                    data = yaml.load(f)
                    print("  :[ok]")

            # update kafka settings
            count = 0
            for x, value in lookup("kafka", data):
                #print_debug(x,value)
                list(
                    update_key(
                        'command',
                        'until nc -vz -w 1 $kafka_host $kafka_port; do echo waiting for Kafka; sleep 2; done;',
                        value))
                list(update_key('repository', 'kymeric/cp-kafka', value))
                list(update_key('image', 'kymeric/cp-kafka', value))
                list(update_key('imageTag', 'latest', value))

            # turn off prometheus jmx and kafka exporter
            for x, value in lookup("prometheus", data):
                #print_debug(x,value , 2)
                if isinstance(value, dict):
                    if value.get("jmx"):
                        value['jmx']['enabled'] = False
                    if value.get("kafka"):
                        value['kafka']['enabled'] = False

            # update mysql settings
            for x, value in lookup("mysql", data):
                list(update_key('repository', 'mysql/mysql-server', value))
                list(update_key('tag', '8.0.28-1.2.7-server', value))
                if value.get("image"):
                    del value['image']
                    value['image'] = "mysql/mysql-server"
                    value['imageTag'] = "8.0.28-1.2.7-server"
                    value['pullPolicy'] = "ifNotPresent"

            # turn the side car off for the moment
            for x, value in lookup("sidecar", data):
                list(update_key('enabled', False, value))

            # turn metrics off
            # The simulator has metrics clause with no enabled setting  => hence need to test
            for x, value in lookup("metrics", data):
                try:
                    if value.get("enabled"):
                        value['enabled'] = False
                except Exception:
                    continue

            with open(vf, "w") as f:
                yaml.dump(data, f)

    if (args.ingress):
        print(
            "\n\n======================================================================================"
        )
        print(" Modify charts to implement networking/v1 ")
        print(
            " and to use bitnami mysql rather than percona (percona / busybox is broken on containerd) "
        )
        print(
            "==========================================================================================="
        )

        # modify the template files
        for vf in p.rglob('*.tpl'):
            backupfile = Path(vf.parent) / f"{vf.name}_bak"
            #print(f"{vf} : {backupfile}")
            #copyfile(vf, backupfile)
            with FileInput(files=[vf], inplace=True) as f:
                #with fileinput.input(files=([vf]), inplace=True)  as f:
                for line in f:
                    line = line.rstrip()
                    #replace networking v1beta1
                    line = re.sub(r"networking.k8s.io/v1beta1",
                                  r"networking.k8s.io/v1", line)
                    line = re.sub(r"extensions/v1beta1",
                                  r"networking.k8s.io/v1", line)
                    print(line)

        # modify the ingress.yaml files
        for vf in p.rglob('*/ingress.yaml'):
            backupfile = Path(vf.parent) / f"{vf.name}_bak"
            #print(f"{vf} : {backupfile}")
            #copyfile(vf, backupfile)

            with FileInput(files=[vf], inplace=True) as f:
                for line in f:
                    line = line.rstrip()
                    if re.search("path:", line):
                        line_dup = line
                        line_dup = re.sub(
                            r"- path:.*$",
                            r"  pathType: ImplementationSpecific", line_dup)
                        print(line)
                        print(line_dup)
                    elif re.search("serviceName:", line):
                        line_dup = line
                        line_dup = re.sub(r"serviceName:.*$", r"service:",
                                          line_dup)
                        print(line_dup)
                        line = re.sub(r"serviceName:", r"  name:", line)
                        print(line)
                    elif re.search("servicePort:", line):
                        line_dup = line
                        line_dup = re.sub(r"servicePort:.*$", r"  port:",
                                          line_dup)
                        line = re.sub(r"servicePort: ", r"    number: ", line)
                        # need to replace port names with numbers
                        for pname, pnum in ports_array.items():
                            line = re.sub(f"number: {pname}$",
                                          f"number: {pnum}", line)
                        print(line_dup)
                        print(line)
                        #servicePort {{ .Values.containers.api.service.ports.api.externalPort }}
                    elif re.search("spec:", line):
                        print(line)
                        print(
                            "  ingressClassName: public"
                        )  # well at least it is "public" for microk8s v1.22 => TODO fully figure the changes and settings out here and simplify!
                    else:
                        print(line)

        for vf in p.rglob('*/values.yaml'):
            with open(vf) as f:

                #print(f"{vf.parent}/{vf.name}")
                skip = False
                for fn in yaml_files_check_list:
                    if vf == Path(fn):
                        print(
                            f"This yaml file needs checking skipping load/processing for now =>  {Path(fn)} "
                        )
                        skip = True
                if not skip:
                    #print(f"      Loading yaml for ==> {vf.parent}/{vf.name}", end="")
                    data = yaml.load(f)
                    #print("  :[ok]")

                for x, value in lookup("mysql", data):
                    list(update_key('enabled', 'true', value))
                # => use these for now
                # TODO: update to later DB and get rid of default passwords
                for x, value in lookup("mysql", data):
                    list(update_key('repository', 'mysql/mysql-server', value))
                    list(update_key('tag', '5.6', value))
                    if value.get("image"):
                        del value['image']
                        value['image'] = "mysql"
                        value['imageTag'] = '8.0'
                        value['pullPolicy'] = "ifNotPresent"

                ### need to set nameOverride  for mysql for ml-testing-toolkit as it appears to be missing
                if vf == Path('mojaloop/values.yaml'):
                    print("Updating the ml-testing-toolkit / mysql config ")
                    for x, value in lookup("ml-testing-toolkit", data):
                        value['mysql'] = {"nameOverride": "ttk-mysql"}

            with open(vf, "w") as f:
                yaml.dump(data, f)

        # versions of k8s -> 1.20 use containerd not docker and the percona chart
        # or at least the busybox dependency of the percona chart has an issue
        # so just replace the percona chart with the mysql charts
        #  for now using the old one because it deploys => TODO fix this and update
        for rf in p.rglob('*/requirements.yaml'):
            with open(rf) as f:
                reqs_data = yaml.load(f)
                #print(reqs_data)
            try:
                dlist = reqs_data['dependencies']
                for i in range(len(dlist)):
                    if (dlist[i]['name'] == "percona-xtradb-cluster"):
                        print(f"old was: {dlist[i]}")
                        dlist[i]['name'] = "mysql"
                        #dlist[i]['version'] = "8.8.8"
                        #dlist[i]['repository'] = "https://charts.bitnami.com/bitnami"
                        dlist[i]['version'] = 8.0
                        dlist[i][
                            'repository'] = "https://charts.bitnami.com/bitnami"
                        dlist[i]['alias'] = "mysql"
                        dlist[i]['condition'] = "enabled"
                        print(f"new is: {dlist[i]}")

                    # if (dlist[i]['name'] == "mongodb"):
                    #     print(f"old was: {dlist[i]}")
                    #     dlist[i]['version'] = "11.1.7"
                    #     dlist[i]['repository'] = "file://../mongodb"
                    #     print(f"new is: {dlist[i]}")
            except Exception:
                continue

            with open(rf, "w") as f:
                yaml.dump(reqs_data, f)
Example #24
To increase the modularity and reusability of documentation snippets,
model documentation files might be composed from multiple different
reStructuredText files by means of '.. include::' directives.

In order to make the non-rendered help texts also complete and useful
when viewed via PyNEST's help() command, we replace lines containing
'include' directives with the content of the included file.

"""

import os
import re
import sys
import glob
from fileinput import FileInput

pattern = re.compile(r'^\.\. include:: (.*)')
path = sys.argv[1]

for rst_fname in glob.glob(os.path.join(path, 'spike_recorder.rst')):
    with FileInput(rst_fname, inplace=True, backup='.bak') as rst_file:
        for line in rst_file:
            match = pattern.match(line)
            if match:
                include_fname = os.path.join(path, match.group(1))
                with open(include_fname) as include_file:
                    print(include_file.read(), end='')
            else:
                print(line, end='')
Example #25
 def check(errors, expected_lines):
     with FileInput(files=TESTFN, mode='r',
                    openhook=hook_encoded('utf-8', errors=errors)) as fi:
         lines = list(fi)
     self.assertEqual(lines, expected_lines)
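
The check helper above is only a fragment; TESTFN and the file contents are set up elsewhere in the test. A self-contained sketch of how it might be exercised (file name and sample bytes are hypothetical; the errors argument of hook_encoded needs Python 3.6+):

import os
from fileinput import FileInput, hook_encoded

TESTFN = 'hook_encoded_errors_demo.txt'   # hypothetical file name

# Write a line containing an invalid UTF-8 byte (0x80).
with open(TESTFN, 'wb') as f:
    f.write(b'A\x80B\n')

def check(errors, expected_lines):
    with FileInput(files=TESTFN, mode='r',
                   openhook=hook_encoded('utf-8', errors=errors)) as fi:
        lines = list(fi)
    assert lines == expected_lines, lines

check('ignore', ['AB\n'])                  # the bad byte is dropped
check('replace', ['A\ufffdB\n'])           # the bad byte becomes U+FFFD
os.remove(TESTFN)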
Example #26
 def replace_in_file(self, file, target, replacement):
     with FileInput(file, inplace=True) as fileinput:
         for line in fileinput:
             print(line.replace(target, replacement), end='')
Example #27
def main():

    with FileInput(sys.argv[1], inplace=True, backup='.bak') as filename:
        for line in filename:
            u, v, p = line.split()
            print(u + "\t" + v + "\t" + str(int(float(p) * 1000)))
Example #28
def merge_log():
    """Merge files"""
    with FileInput([ACCESSLOG, ACCESSLOG2]) as input_lines:
        with open(MERGELOG, 'w') as output_file:
            output_file.writelines(input_lines)
Example #29
for round, bs in (0, 0), (1, 30):
    try:
        writeFiles()
        runTests(t1, t2, t3, t4, bs, round)
    finally:
        remove_tempfiles(t1, t2, t3, t4)

# Next, check for proper behavior with 0-byte files.
if verbose:
    print "13. 0-byte files"
try:
    t1 = writeTmp(1, [""])
    t2 = writeTmp(2, [""])
    t3 = writeTmp(3, ["The only line there is.\n"])
    t4 = writeTmp(4, [""])
    fi = FileInput(files=(t1, t2, t3, t4))
    line = fi.readline()
    verify(line == 'The only line there is.\n')
    verify(fi.lineno() == 1)
    verify(fi.filelineno() == 1)
    verify(fi.filename() == t3)
    line = fi.readline()
    verify(not line)
    verify(fi.lineno() == 1)
    verify(fi.filelineno() == 0)
    verify(fi.filename() == t4)
    fi.close()
finally:
    remove_tempfiles(t1, t2, t3, t4)

if verbose:
Example #30
    def test_zero_byte_files(self):
        t1 = t2 = t3 = t4 = None
        try:
            t1 = writeTmp(1, [""])
            t2 = writeTmp(2, [""])
            t3 = writeTmp(3, ["The only line there is.\n"])
            t4 = writeTmp(4, [""])
            fi = FileInput(files=(t1, t2, t3, t4))

            line = fi.readline()
            self.assertEqual(line, 'The only line there is.\n')
            self.assertEqual(fi.lineno(), 1)
            self.assertEqual(fi.filelineno(), 1)
            self.assertEqual(fi.filename(), t3)

            line = fi.readline()
            self.assertFalse(line)
            self.assertEqual(fi.lineno(), 1)
            self.assertEqual(fi.filelineno(), 0)
            self.assertEqual(fi.filename(), t4)
            fi.close()
        finally:
            remove_tempfiles(t1, t2, t3, t4)
Example #31
def runTests(t1, t2, t3, t4, bs=0, round=0):
    start = 1 + round*6
    if verbose:
        print '%s. Simple iteration (bs=%s)' % (start+0, bs)
    fi = FileInput(files=(t1, t2, t3, t4), bufsize=bs)
    lines = list(fi)
    fi.close()
    verify(len(lines) == 31)
    verify(lines[4] == 'Line 5 of file 1\n')
    verify(lines[30] == 'Line 1 of file 4\n')
    verify(fi.lineno() == 31)
    verify(fi.filename() == t4)

    if verbose:
        print '%s. Status variables (bs=%s)' % (start+1, bs)
    fi = FileInput(files=(t1, t2, t3, t4), bufsize=bs)
    s = "x"
    while s and s != 'Line 6 of file 2\n':
        s = fi.readline()
    verify(fi.filename() == t2)
    verify(fi.lineno() == 21)
    verify(fi.filelineno() == 6)
    verify(not fi.isfirstline())
    verify(not fi.isstdin())

    if verbose:
        print '%s. Nextfile (bs=%s)' % (start+2, bs)
    fi.nextfile()
    verify(fi.readline() == 'Line 1 of file 3\n')
    verify(fi.lineno() == 22)
    fi.close()

    if verbose:
        print '%s. Stdin (bs=%s)' % (start+3, bs)
    fi = FileInput(files=(t1, t2, t3, t4, '-'), bufsize=bs)
    savestdin = sys.stdin
    try:
        sys.stdin = StringIO("Line 1 of stdin\nLine 2 of stdin\n")
        lines = list(fi)
        verify(len(lines) == 33)
        verify(lines[32] == 'Line 2 of stdin\n')
        verify(fi.filename() == '<stdin>')
        fi.nextfile()
    finally:
        sys.stdin = savestdin

    if verbose:
        print '%s. Boundary conditions (bs=%s)' % (start+4, bs)
    fi = FileInput(files=(t1, t2, t3, t4), bufsize=bs)
    verify(fi.lineno() == 0)
    verify(fi.filename() == None)
    fi.nextfile()
    verify(fi.lineno() == 0)
    verify(fi.filename() == None)

    if verbose:
        print '%s. Inplace (bs=%s)' % (start+5, bs)
    savestdout = sys.stdout
    try:
        fi = FileInput(files=(t1, t2, t3, t4), inplace=1, bufsize=bs)
        for line in fi:
            line = line[:-1].upper()
            print line
        fi.close()
    finally:
        sys.stdout = savestdout

    fi = FileInput(files=(t1, t2, t3, t4), bufsize=bs)
    for line in fi:
        verify(line[-1] == '\n')
        m = pat.match(line[:-1])
        verify(m != None)
        verify(int(m.group(1)) == fi.filelineno())
    fi.close()
Example #32
 def parse(self):
     for line in FileInput(files=[self.filename], openhook=hook_compressed):
         self.parse_line(line)
Example #33
    def buffer_size_test(self, t1, t2, t3, t4, bs=0, round=0):
        pat = re.compile(r'LINE (\d+) OF FILE (\d+)')

        start = 1 + round*6
        if verbose:
            print('%s. Simple iteration (bs=%s)' % (start+0, bs))
        fi = FileInput(files=(t1, t2, t3, t4), bufsize=bs)
        lines = list(fi)
        fi.close()
        self.assertEqual(len(lines), 31)
        self.assertEqual(lines[4], 'Line 5 of file 1\n')
        self.assertEqual(lines[30], 'Line 1 of file 4\n')
        self.assertEqual(fi.lineno(), 31)
        self.assertEqual(fi.filename(), t4)

        if verbose:
            print('%s. Status variables (bs=%s)' % (start+1, bs))
        fi = FileInput(files=(t1, t2, t3, t4), bufsize=bs)
        s = "x"
        while s and s != 'Line 6 of file 2\n':
            s = fi.readline()
        self.assertEqual(fi.filename(), t2)
        self.assertEqual(fi.lineno(), 21)
        self.assertEqual(fi.filelineno(), 6)
        self.assertFalse(fi.isfirstline())
        self.assertFalse(fi.isstdin())

        if verbose:
            print('%s. Nextfile (bs=%s)' % (start+2, bs))
        fi.nextfile()
        self.assertEqual(fi.readline(), 'Line 1 of file 3\n')
        self.assertEqual(fi.lineno(), 22)
        fi.close()

        if verbose:
            print('%s. Stdin (bs=%s)' % (start+3, bs))
        fi = FileInput(files=(t1, t2, t3, t4, '-'), bufsize=bs)
        savestdin = sys.stdin
        try:
            sys.stdin = StringIO("Line 1 of stdin\nLine 2 of stdin\n")
            lines = list(fi)
            self.assertEqual(len(lines), 33)
            self.assertEqual(lines[32], 'Line 2 of stdin\n')
            self.assertEqual(fi.filename(), '<stdin>')
            fi.nextfile()
        finally:
            sys.stdin = savestdin

        if verbose:
            print('%s. Boundary conditions (bs=%s)' % (start+4, bs))
        fi = FileInput(files=(t1, t2, t3, t4), bufsize=bs)
        self.assertEqual(fi.lineno(), 0)
        self.assertEqual(fi.filename(), None)
        fi.nextfile()
        self.assertEqual(fi.lineno(), 0)
        self.assertEqual(fi.filename(), None)

        if verbose:
            print('%s. Inplace (bs=%s)' % (start+5, bs))
        savestdout = sys.stdout
        try:
            fi = FileInput(files=(t1, t2, t3, t4), inplace=1, bufsize=bs)
            for line in fi:
                line = line[:-1].upper()
                print(line)
            fi.close()
        finally:
            sys.stdout = savestdout

        fi = FileInput(files=(t1, t2, t3, t4), bufsize=bs)
        for line in fi:
            self.assertEqual(line[-1], '\n')
            m = pat.match(line[:-1])
            self.assertNotEqual(m, None)
            self.assertEqual(int(m.group(1)), fi.filelineno())
        fi.close()
Example #34
class ManyFile(object):
    """A generalized version of file input that allows for stopping
    reading the current file and switching to a different file.
    Reading the old file is resumed once the current file is complete.
    Useful for #include or similar constructs.
    """

    def __init__(self, files=None, hook = None):
        self.fi = [] # an array of file input objects.  We push and pop onto this so we can resume.
        self.files = [] # an array of files that need to be read later.
        self.hook = hook # the open hook passed on to FileInput
        self.current = None # the current file input object.
        self.line = 0
        if files is not None:
            self.current = FileInput(files, openhook = self.hook)

    def include(self, files):
        if self.current is not None:
            self.fi.append(self.current)
        self.current = FileInput(files, openhook = self.hook)
            
    def fileno(self):
        if self.current is None:
            return None
        return self.current.fileno()

    def filename(self):
        if self.current is None:
            return None
        return self.current.filename()

    def lineno(self):
        return self.line

    def filelineno(self):
        if self.current is None:
            return None
        return self.current.filelineno()

    def isfirstline(self):
        if self.current is None:
            return None
        return self.current.isfirstline()

    def isstdin(self):
        return False

    def nextfile(self):
        if self.current is not None:
            self.current.nextfile()

    def close(self):
        if self.current:
            self.current.close()
        self.fi = []
        self.files = []

    def readline(self):
        if self.current is not None:
            l = self.current.readline()
            if len(l) > 0:
                self.line = self.line + 1
                return l

        # Advance to the next file
        if len (self.fi) > 0:
            self.current = self.fi.pop()
            return self.readline()

        # see if there are any files left to open
        if len (self.files) > 0:
            self.current = FileInput(self.files, openhook = fileinput.hook_compressed)
            self.files = []
            return self.readline()

        return ""

    def __iter__(self):
        return self

    def __next__(self):
        l = self.readline()
        if l != '':
            return l
        raise StopIteration

    next = __next__  # keep the Python 2 iteration protocol working too
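
A short usage sketch for the ManyFile class above (file names are hypothetical): include() interrupts the current stream, and reading resumes from the outer file once the included files are exhausted.

import os

# Prepare two small hypothetical input files.
with open('outer.txt', 'w') as f:
    f.write('outer 1\nouter 2\n')
with open('inner.txt', 'w') as f:
    f.write('inner 1\n')

mf = ManyFile(['outer.txt'])
print(mf.readline(), end='')   # outer 1
mf.include(['inner.txt'])      # switch to the included file
print(mf.readline(), end='')   # inner 1
print(mf.readline(), end='')   # outer 2 -- resumed after the include
mf.close()

os.remove('outer.txt')
os.remove('inner.txt')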
Example #35
 def test_empty_files_list_specified_to_constructor(self):
     with FileInput(files=[]) as fi:
         self.assertEqual(fi._files, ('-', ))
Example #36
 def check(mode, expected_lines):
     with FileInput(files=TESTFN,
                    mode=mode,
                    openhook=hook_encoded('utf-7')) as fi:
         lines = list(fi)
     self.assertEqual(lines, expected_lines)
Example #37
    def test_zero_byte_files(self):
        t1 = self.writeTmp("")
        t2 = self.writeTmp("")
        t3 = self.writeTmp("The only line there is.\n")
        t4 = self.writeTmp("")
        fi = FileInput(files=(t1, t2, t3, t4))

        line = fi.readline()
        self.assertEqual(line, 'The only line there is.\n')
        self.assertEqual(fi.lineno(), 1)
        self.assertEqual(fi.filelineno(), 1)
        self.assertEqual(fi.filename(), t3)

        line = fi.readline()
        self.assertFalse(line)
        self.assertEqual(fi.lineno(), 1)
        self.assertEqual(fi.filelineno(), 0)
        self.assertEqual(fi.filename(), t4)
        fi.close()
Example #38
 def test_stdin_binary_mode(self):
     with mock.patch('sys.stdin') as m_stdin:
         m_stdin.buffer = BytesIO(b'spam, bacon, sausage, and spam')
         fi = FileInput(files=['-'], mode='rb')
         lines = list(fi)
         self.assertEqual(lines, [b'spam, bacon, sausage, and spam'])
Example #39
    def __prepare_script(self,
                         a1,
                         a2,
                         snapshot=None,
                         initial=None,
                         initialNum=None):
        '''modify job submission file'''
        from fileinput import FileInput
        import os

        lst = sorted([a1, a2])
        name = str(lst[0]) + '_' + str(lst[1])
        MSpath = self.parameter.crdPath + '/' + name

        if snapshot is not None:
            newScriptPath = MSpath + '/' + str(
                self.parameter.iteration) + "/" + str(snapshot)
            newScript = newScriptPath + "/submit"
        elif initial is not None:
            newScriptPath = self.parameter.seekPath + '/structure' + str(
                a1) + "/" + str(initialNum)
            newScript = newScriptPath + '/submit'
        else:
            newScriptPath = MSpath
            newScript = newScriptPath + "/MS" + name
        create_folder(newScriptPath)
        copyfile(self.parameter.inputPath + '/submit', newScript)

        with FileInput(files=newScript, inplace=True) as f:
            for line in f:
                line = line.strip()
                #                line = line.lower()
                info = line.split()
                #                 info = line.split("#")[0].split()
                if not info:
                    continue

                if "source" in line:
                    if self.parameter.nodes:
                        import numpy as np
                        rand = np.random.uniform(0, len(self.parameter.nodes),
                                                 1)
                        info[2] = 'hostname="' + self.parameter.nodes[int(
                            rand)] + '"'
                    else:
                        info.insert(0, '#')

                if "name" in line:
                    place = info.index('name')
                    if snapshot is not None:
                        info[place] = 'MILES' + '_' + str(a1) + '_' + str(
                            a2) + '_' + str(snapshot)
                    elif initial is not None:
                        info[place] = 'a' + str(a1)
                    else:
                        info[place] = 'MS' + str(a1) + '_' + str(a2)

                if snapshot is not None:
                    path = MSpath + '/' + str(
                        self.parameter.iteration) + '/' + str(snapshot)
                elif initial is not None:
                    path = self.parameter.seekPath + '/structure' + str(
                        a1) + "/" + str(initialNum)
                else:
                    path = MSpath

                if "path" in line:
                    place = info.index('path')
                    info[place] = path
                if "namd" in line:
                    place = info.index('namd')
                    if snapshot is None and initial is None:
                        info[place] = './sample.namd'
                    else:
                        info[place] = './free.namd'
                line = " ".join(str(x) for x in info)
                print(line)
        return newScript
Example #40
    def test_buffer_sizes(self):

        t1 = self.writeTmp(''.join("Line %s of file 1\n" % (i + 1)
                                   for i in range(15)))
        t2 = self.writeTmp(''.join("Line %s of file 2\n" % (i + 1)
                                   for i in range(10)))
        t3 = self.writeTmp(''.join("Line %s of file 3\n" % (i + 1)
                                   for i in range(5)))
        t4 = self.writeTmp(''.join("Line %s of file 4\n" % (i + 1)
                                   for i in range(1)))

        pat = re.compile(r'LINE (\d+) OF FILE (\d+)')

        if verbose:
            print('1. Simple iteration')
        fi = FileInput(files=(t1, t2, t3, t4))
        lines = list(fi)
        fi.close()
        self.assertEqual(len(lines), 31)
        self.assertEqual(lines[4], 'Line 5 of file 1\n')
        self.assertEqual(lines[30], 'Line 1 of file 4\n')
        self.assertEqual(fi.lineno(), 31)
        self.assertEqual(fi.filename(), t4)

        if verbose:
            print('2. Status variables')
        fi = FileInput(files=(t1, t2, t3, t4))
        s = "x"
        while s and s != 'Line 6 of file 2\n':
            s = fi.readline()
        self.assertEqual(fi.filename(), t2)
        self.assertEqual(fi.lineno(), 21)
        self.assertEqual(fi.filelineno(), 6)
        self.assertFalse(fi.isfirstline())
        self.assertFalse(fi.isstdin())

        if verbose:
            print('3. Nextfile')
        fi.nextfile()
        self.assertEqual(fi.readline(), 'Line 1 of file 3\n')
        self.assertEqual(fi.lineno(), 22)
        fi.close()

        if verbose:
            print('4. Stdin')
        fi = FileInput(files=(t1, t2, t3, t4, '-'))
        savestdin = sys.stdin
        try:
            sys.stdin = StringIO("Line 1 of stdin\nLine 2 of stdin\n")
            lines = list(fi)
            self.assertEqual(len(lines), 33)
            self.assertEqual(lines[32], 'Line 2 of stdin\n')
            self.assertEqual(fi.filename(), '<stdin>')
            fi.nextfile()
        finally:
            sys.stdin = savestdin

        if verbose:
            print('5. Boundary conditions')
        fi = FileInput(files=(t1, t2, t3, t4))
        self.assertEqual(fi.lineno(), 0)
        self.assertEqual(fi.filename(), None)
        fi.nextfile()
        self.assertEqual(fi.lineno(), 0)
        self.assertEqual(fi.filename(), None)

        if verbose:
            print('6. Inplace')
        savestdout = sys.stdout
        try:
            fi = FileInput(files=(t1, t2, t3, t4), inplace=1)
            for line in fi:
                line = line[:-1].upper()
                print(line)
            fi.close()
        finally:
            sys.stdout = savestdout

        fi = FileInput(files=(t1, t2, t3, t4))
        for line in fi:
            self.assertEqual(line[-1], '\n')
            m = pat.match(line[:-1])
            self.assertNotEqual(m, None)
            self.assertEqual(int(m.group(1)), fi.filelineno())
        fi.close()
Example #41
    def __prepare_namd(self,
                       a1=None,
                       a2=None,
                       snapshot=None,
                       frame=None,
                       initial=None,
                       initialNum=None):
        '''modify namd configuration file'''
        from fileinput import FileInput
        from random import randrange as rand
        import re

        enhanced = 0
        lst = sorted([a1, a2])
        name = str(lst[0]) + '_' + str(lst[1])

        if initial:
            template = self.parameter.inputPath + "/free.namd"
            MSpath = self.parameter.seekPath + '/structure' + str(a1)
            filename = "/" + str(initialNum) + "/free.namd"
        elif snapshot:
            template = self.parameter.inputPath + "/free.namd"
            MSpath = self.parameter.crdPath + '/' + name
            filename = "/" + str(
                self.parameter.iteration) + "/" + str(snapshot) + "/free.namd"
            if os.path.isfile(MSpath + "/" + str(self.parameter.iteration) +
                              "/" + str(snapshot) + '/enhanced'):
                enhanced = 1
            else:
                enhanced = 0
        else:
            template = self.parameter.inputPath + "/sample.namd"
            MSpath = self.parameter.crdPath + '/' + name
            filename = "/sample.namd"

        newNamd = MSpath + filename
        copyfile(template, newNamd)

        tmp = []
        colvar_commands = False
        with open(newNamd, 'r') as f:
            for line in f:
                #                line = line.lower()
                #                 info = line.split("#")[0].split()
                info = line.split()
                if info == []:
                    continue
                if "colvars" in info and "on" in info:
                    colvar_commands = True
                if "colvarsConfig" in info and colvar_commands:
                    if initial is not None or snapshot is not None:
                        info[1] = 'colvar_free.conf'
                    else:
                        info[1] = 'colvar.conf'
                    l = " ".join(str(x) for x in info) + "\n"
                    tmp.append(l)
                    continue

                if "run" in info or 'minimize' in info:
                    #                    if info[0] == '#':
                    #                        continue
                    if not colvar_commands:
                        tmp.append('colvars on\n')
                        info[0] = 'colvarsConfig'
                        if initial is not None or snapshot is not None:
                            info[1] = 'colvar_free.conf\n\n'
                        else:
                            info[1] = 'colvar.conf\n\n'
                        l = " ".join(str(x) for x in info)
                        tmp.append(l)
                        colvar_commands = True
                    if initial is not None:
                        with open(file=self.parameter.ScMilesPath +
                                  '/tclScript_seek.txt') as f_tcl:
                            for l in f_tcl:
                                if "qsub" in l:
                                    kill = l.strip()
                                    killswitch = kill.split()
                                    if self.parameter.jobsubmit == "qsub":
                                        killswitch.pop(0)
                                    else:
                                        killswitch[0] = '#'
                                    a = " ".join(str(x) for x in killswitch)
                                    tmp.append(a + '\n')
                                elif "sbatch" in l:
                                    kill = l.strip()
                                    killswitch = kill.split()
                                    if self.parameter.jobsubmit == "sbatch":
                                        killswitch.pop(0)
                                    else:
                                        killswitch[0] = '#'
                                    a = " ".join(str(x) for x in killswitch)
                                    tmp.append(a + '\n')
                                else:
                                    tmp.append(l)
                        tmp.append('\n')
                    if snapshot is not None:
                        with open(file=self.parameter.ScMilesPath +
                                  '/tclScript_step2.txt') as f_tcl:
                            for l in f_tcl:
                                if "qsub" in l:
                                    kill = l.strip()
                                    killswitch = kill.split()
                                    if self.parameter.jobsubmit == "qsub":
                                        killswitch.pop(0)
                                    else:
                                        killswitch[0] = '#'
                                    a = " ".join(str(x) for x in killswitch)
                                    tmp.append(a + '\n')
                                elif "sbatch" in l:
                                    kill = l.strip()
                                    killswitch = kill.split()
                                    if self.parameter.jobsubmit == "sbatch":
                                        killswitch.pop(0)
                                    else:
                                        killswitch[0] = '#'
                                    a = " ".join(str(x) for x in killswitch)
                                    tmp.append(a + '\n')
                                else:
                                    tmp.append(l)
                        tmp.append('\n')
                tmp.append(line)

        with open(newNamd, 'w') as f:
            for i in range(len(tmp)):
                f.write(tmp[i])

        if self.parameter.namd_conf == True:
            if not snapshot and (initial
                                 or self.parameter.milestone_search == 0):
                namd_conf_mod(self.parameter.inputPath, newNamd, a1)

        with FileInput(files=newNamd, inplace=True) as f:
            for line in f:
                line = line.strip()
                #                line = line.lower()
                info = line.split()

                if "coordinates" in line and 'bincoordinates' not in line.lower(
                ):
                    info[1] = self.parameter.inputPath + '/pdb/' + str(
                        lst[0]) + ".pdb"
                    if snapshot is None and initial is None and self.parameter.milestone_search == 1:
                        info[1] = "./seek.ms.pdb"

                if "outputname" in line:
                    info[1] = self.parameter.outputname

                if "seed" in line:
                    info[1] = rand(10000000, 99999999)
                if 'restartfreq' in line:
                    if initial:
                        info[1] = 2
                if "bincoordinates" in line or "binCoordinates" in line:
                    if snapshot is not None:
                        info[0] = 'bincoordinates'
                        if self.parameter.iteration == 1:
                            info[1] = '../../restarts/' + self.parameter.outputname + '.' + \
                                      str(frame*self.parameter.sampling_interval) + '.coor'
                        else:
                            info[1] = self.parameter.outputname + '.coor'

                if "binvelocities" in line or "binVelocities" in line:
                    if snapshot is not None:
                        info[0] = 'binvelocities'
                        if self.parameter.iteration == 1:
                            info[0] = '#binvelocities'
                            info[1] = '../../restarts/' + self.parameter.outputname + '.' + \
                                      str(frame*self.parameter.sampling_interval) + '.vel'
                        else:
                            if not self.parameter.NVT:
                                if enhanced == 0:
                                    info[0] = 'binvelocities'
                                else:
                                    info[0] = '#binvelocities'
                            info[1] = self.parameter.outputname + '.vel'

                if "extendedSystem" in line or "extendedsystem" in line:
                    if snapshot is not None:
                        info[0] = 'extendedSystem'
                        if self.parameter.iteration == 1:
                            info[1] = '../../restarts/' + self.parameter.outputname + '.' + \
                                      str(frame * self.parameter.sampling_interval) + '.xsc'
                        else:
                            info[1] = self.parameter.outputname + '.xsc'
                    if self.parameter.namd_conf == True and not initial and self.parameter.milestone_search == 1:
                        info[0] = 'extendedSystem'
                        info[1] = './sample.xsc'

                if "restartsave" in line:
                    if snapshot is not None or initial == 'yes':
                        info[1] = "off"

                if "binaryrestart" in line:
                    if initial == 'yes':
                        info[1] = "no"

                if "temperature" in line and "pressure" not in line:
                    if self.parameter.iteration > 1:
                        info[0] = '#temperature'
                    else:
                        info[0] = 'temperature'
                    if initial:
                        info[0] = 'temperature'
                    if not self.parameter.NVT:
                        if enhanced == 1:
                            info[0] = 'temperature'

                # if "langevin" in line and self.parameter.nve and snapshot is not None::
                #     info[0] = '#'

                if "lreplace" in line:
                    #                    line = re.sub(r'[^\w]', ' ', line)
                    if self.parameter.colvarsNum == 0:
                        info[0] = '#set'
                    else:
                        if ']' in info[-1]:
                            info[-1] = str(self.parameter.colvarsNum - 1)
                            info.append(']')
                        else:
                            info[-2] = str(self.parameter.colvarsNum - 1)

                if "a111" in line:
                    if snapshot is None:
                        info[2] = str(a1)
                    else:  # snapshot != None:
                        if self.parameter.iteration == 1:
                            info[2] = str(a1)
                        else:
                            path_start = MSpath + '/' + str(
                                self.parameter.iteration) + '/' + str(snapshot)
                            info[2] = str(get_initial_ms(path_start)[0])

                if "a222" in line:
                    if snapshot is None:
                        info[2] = str(a2)
                    else:  # snapshot != None:
                        if self.parameter.iteration == 1:
                            info[2] = str(a2)
                        else:
                            path_start = MSpath + '/' + str(
                                self.parameter.iteration) + '/' + str(snapshot)
                            info[2] = str(get_initial_ms(path_start)[1])

                if initial is not None and "run" in info:
                    info[1] = str(self.parameter.initialTime * 1000)

                line = " ".join(str(x) for x in info)
                print(line)
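All of the keyword rewriting above rests on a single fileinput idiom: with inplace=True, standard output is redirected into the file being read, so whatever the loop prints becomes the new file content. A minimal, self-contained sketch of that idiom (the file name namd.conf and the key outputname below are placeholders, not taken from the example above):

from fileinput import FileInput

# Rewrite one keyword in place; every other line is echoed back unchanged.
# print() writes into namd.conf because inplace=True swaps out sys.stdout
# for the duration of the loop.
with FileInput("namd.conf", inplace=True) as f:
    for line in f:
        tokens = line.split()
        if tokens and tokens[0] == "outputname":
            tokens[1] = "milestone_run"
        print(" ".join(tokens))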
Ejemplo n.º 42
0
#!/usr/bin/python
from sys import argv
from fileinput import FileInput

if len(argv) < 3:
    print(
        "2 arguments are required: (1) a text file containing the list of read tags, 1 per line, and (2) a file containing the corresponding diBELLA untagged alignment output"
    )
    quit()

tag_map = dict()
filename = argv[1]
with open(filename) as file:
    tag_map = {k: v for (k, v) in enumerate(file)}

#now open the alignments file and replace the first two words of each line with values from the dictionary
filename = argv[2]
with FileInput(filename, inplace=True, backup='.bak') as file:
    for line in file:
        linesplit = line.split()
        linesplit[0] = tag_map[(int(linesplit[0]) - 1)].rstrip()
        linesplit[1] = tag_map[(int(linesplit[1]) - 1)].rstrip()
        lineout = ' '.join(linesplit)
        print(lineout)
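A note on the mapping above: enumerate() keys tag_map by 0-based position, which is why the loop subtracts 1 from the read ids taken from the first two alignment columns. An equivalent sketch that keys the map by 1-based id directly (purely illustrative, same behaviour):

# Build the map with 1-based keys so the alignment ids can be used as-is.
with open(argv[1]) as tags:
    tag_map = {i: tag.rstrip() for i, tag in enumerate(tags, start=1)}
# ...and inside the rewrite loop:
#     linesplit[0] = tag_map[int(linesplit[0])]
#     linesplit[1] = tag_map[int(linesplit[1])]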
Ejemplo n.º 43
0
def main(config: Config) -> None:
    ssh = SSHClient()
    ssh.load_system_host_keys()
    ssh.set_missing_host_key_policy(DnssecPolicy())

    if config.ssh_key_file:
        print_verbose(
            f'Checking for backups on {config.ssh_username}@{config.ssh_hostname}:{config.ssh_port}'
            +
            f' with {config.ssh_key_type["name"]} key: {config.ssh_key_file}')
        pkey = config.ssh_key_type["class"].from_private_key_file(
            config.ssh_key_file, password=config.ssh_passphrase)
        ssh.connect(config.ssh_hostname,
                    port=config.ssh_port,
                    username=config.ssh_username,
                    pkey=pkey)
    else:
        print_verbose(
            f'Checking for backups on {config.ssh_username}@{config.ssh_hostname}:{config.ssh_port}'
        )
        try:
            ssh.connect(config.ssh_hostname,
                        port=config.ssh_port,
                        username=config.ssh_username,
                        password=config.ssh_password,
                        allow_agent=False)
        except BadAuthenticationType:
            print("Error: Password authentication is disabled! Exiting...")
            exit(1)
        except AuthenticationException as e:
            print("Error: Authentication failed! Exiting...")
            exit(1)

    sftp = ssh.open_sftp()

    # backup location on the server (remote path)
    server_path: str = config.server_location
    if not server_path.endswith('/'):
        server_path += "/"

    # list existing backups (from stdout)
    existing_backups = ssh.exec_command(
        f"ls -1 -d {server_path}*/ | xargs -n 1 basename")[1].readlines()

    # remove \n from list entries
    existing_backups = list(map(lambda s: s.strip(), existing_backups))

    # remove non pybackup specific directories
    existing_backups = list(filter(check_date, existing_backups))

    if not existing_backups:
        print(
            f'Error: Unable to find backups on {config.ssh_hostname}:{config.server_location}'
        )
        exit(1)

    # sort existing backups and get the directory name of the latest
    backup_date = sorted(existing_backups,
                         key=lambda x: datetime.strptime(x, '%Y-%m-%d'))[-1]
    server_path += backup_date

    print_verbose(
        f'Downloading backup from {config.ssh_username}@{config.ssh_hostname}:{config.ssh_port}'
        + f' from {config.server_location}')

    # local directory where the backup should be stored
    local_path: str = config.local_location
    if not os.path.isdir(local_path):
        os.mkdir(local_path)

    if not local_path.endswith('/'):
        local_path += "/"

    local_path += backup_date

    if not local_path.endswith('/'):
        local_path += "/"

    # create local directory for the backup
    if not os.path.isdir(local_path):
        os.mkdir(local_path)

    # download files, path is the starting point, server base path is for subtraction only
    download(sftp=sftp,
             path=server_path,
             local_path=local_path,
             server_base_path=server_path)

    methods: list = ["sha512", "sha384", "sha256", "sha224", "sha1", 'md5']

    # update path definitions to match local path
    for method in methods:
        if os.path.isfile(f'{local_path}/{method}sum.txt'):
            with FileInput(f'{local_path}/{method}sum.txt',
                           inplace=True,
                           backup='.bak') as file:
                for line in file:
                    # print() writes back into the file because of inplace=True;
                    # str.replace() returns a new string, so it must be printed.
                    print(line.replace(server_path, local_path), end='')

    # verify checksums
    best_available_method = next(
        (m for m in methods if os.path.isfile(f'{local_path}/{m}sum.txt')),
        None)
    if best_available_method:
        with open(f'{local_path}/{best_available_method}sum.txt',
                  'r') as check_sum_file:
            for entry in check_sum_file.readlines():
                checksum, filepath = entry.split("\t")
                filepath = os.path.basename(filepath.strip("\n"))
                local_filepath = Path(local_path) / filepath
                if not os.path.isfile(local_filepath):
                    logger.warning(
                        "There is a checksum for %s, but the file does not exist",
                        local_filepath)
                    continue
                if ChecksumLib.get_checksum_file(local_filepath) != checksum:
                    logger.warning("Checksum of %s is not correct!", filepath)
    else:
        logger.warning("Unable to find check sums, skipping integrity check!")
Ejemplo n.º 44
0
import os
import sys
from fileinput import FileInput

job_name = sys.argv[1]
initial_path = os.path.join(r"remote_workdirectory", job_name)
files = os.listdir(initial_path)
# keep only image files; the first one becomes the new Terraform "source" value
need = [f for f in files if os.path.splitext(f)[1].lower() in [".png", ".jpg", ".jpeg"]]
actual_path = os.path.join(initial_path, need[0])
li = f'source="{actual_path}" '
li = li.replace("\\", "/")
image_ext = os.path.splitext(need[0])[1]
print(li)

# rewrite the matching source= line of the Terraform file in place
with FileInput(r"path/to_s3.tf", inplace=True) as fp:
    for line in fp:
        if 'source="c' in line.lower():
            print(line.replace(line.strip(), li.strip()))
        else:
            print(line.strip())
Ejemplo n.º 45
0
"""
Convert .h definitions (especially constants and enums) to python code
"""
from fileinput import FileInput
from re import finditer
from pathlib import Path
from sys import stdin

if __name__ == '__main__':
    constants = set()
    enums = dict()
    enum = None
    with FileInput() as file:
        for line in file:
            for res in finditer(r"enum\s*([A-Z1-9_]+)\s*\{", line):
                enum = (res[1], set())
            for res in finditer(r"#define\s+([A-Z1-9_]+)\s+(\d+)", line):
                constants.add((res[1], res[2]))
            for res in finditer(r"([A-Z1-9_]+)\s*=\s*(\d+)\s*,", line):
                enum[1].add((res[1], res[2]))
            for res in finditer(r"\};", line):
                if enum:
                    enums[enum[0]] = enum[1]
                    enum = None

    print("# generated by h_to_py.py")
    print("from enum import Enum")
    print()
    print()
    print("class Constants:")
    for c_name, c_value in sorted(constants):
        # emit each #define as a class attribute
        print(f"    {c_name} = {c_value}")
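The printed output stops at the constants here; a plausible continuation that also emits the collected enums as Enum subclasses could look like the sketch below (an assumption, indented to continue the __main__ block above, not the original code):

    # Hypothetical continuation: one Enum subclass per parsed C enum.
    for e_name, members in sorted(enums.items()):
        print()
        print()
        print(f"class {e_name}(Enum):")
        for m_name, m_value in sorted(members):
            print(f"    {m_name} = {m_value}")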
Ejemplo n.º 46
0
def processIface(iface, h_tmplt, cpp_tmplt, ih_tmplt, h_dest, cpp_dest, docstr_dest, ih_dest, msgcodes):
    curDocStrings = []
    values = []
    methods = []
    cmds = []
    icat = 'Basics'

    # parse iface file
    fi = FileInput(iface)
    for line in fi:
        line = line[:-1]
        if line[:2] == '##' or line == '':
            #curDocStrings = []
            continue

        op = line[:4]
        if line[:2] == '# ':   # a doc string
            curDocStrings.append(line[2:])

        elif op == 'val ':
            parseVal(line[4:], values, curDocStrings, icat)
            curDocStrings = []

        elif op == 'fun ' or op == 'set ' or op == 'get ':
            parseFun(line[4:], methods, curDocStrings, cmds, op == 'get ', msgcodes, icat)
            curDocStrings = []

        elif op == 'cat ':
            icat = line[4:].strip()
            curDocStrings = []

        elif op == 'evt ':
            pass

        elif op == 'enu ':
            pass

        elif op == 'lex ':
            pass

        else:
            print('***** Unknown line type: %s' % line)

    # build the items for the table of contents in the interface header
    tableitems = ''
    firstitem = True
    for category, title, description in categoriesList:
        if firstitem:
            firstitem = False
        else:
            tableitems += '\n'
        tableitems += '    - @ref_member_group{' + category + ', ' + title + '}'

    # process templates
    data = {}
    data['VALUES'] = processVals(values)
    data['CMDS']   = processVals(cmds)
    defs, imps, docstrings, idefs = processMethods(methods)
    data['METHOD_DEFS'] = defs
    data['METHOD_IDEFS'] = idefs
    data['METHOD_IMPS'] = imps
    data['TABLE_OF_CONTENTS'] = tableitems

    # get template text
    h_text = open(h_tmplt).read()
    ih_text = open(ih_tmplt).read()
    cpp_text = open(cpp_tmplt).read()

    # do the substitutions
    h_text = h_text % data
    cpp_text = cpp_text % data
    ih_text = ih_text % data

    # write out destination files
    open(h_dest, 'w').write(h_text)
    open(cpp_dest, 'w').write(cpp_text)
    if docstr_dest:
        open(docstr_dest, 'w').write(docstrings)
    open(ih_dest, 'w').write(ih_text)
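The templates are plain %-style text files: each %(NAME)s placeholder is filled from the data dictionary built above. A tiny illustrative sketch of that mechanism (the template text here is hypothetical, not the real header/cpp templates):

# %-formatting with a dict pulls each %(KEY)s value from the mapping.
data = {'VALUES': 'val A = 1', 'METHOD_DEFS': 'def foo(): pass'}
template = "// values\n%(VALUES)s\n\n// methods\n%(METHOD_DEFS)s\n"
print(template % data)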
Ejemplo n.º 47
0
def check(mode, expected_lines):
    fi = FileInput(files=TESTFN, mode=mode,
                   openhook=hook_encoded('utf-7'))
    lines = list(fi)
    fi.close()
    self.assertEqual(lines, expected_lines)
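hook_encoded() is the standard way to have FileInput decode a file with a given encoding as it reads. Since TESTFN and the enclosing TestCase are not shown above, the following self-contained sketch exercises the same openhook against a temporary file (the temp-file handling is an assumption, not part of the test):

import os
import tempfile
from fileinput import FileInput, hook_encoded

payload = "héllo wörld\n"
fd, path = tempfile.mkstemp()
with os.fdopen(fd, "w", encoding="utf-7") as tmp:
    tmp.write(payload)

# The hook opens the file with the requested encoding, so lines come back
# already decoded.
with FileInput(files=path, openhook=hook_encoded("utf-7")) as fi:
    assert list(fi) == [payload]
os.remove(path)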
Ejemplo n.º 48
0
for round, bs in (0, 0), (1, 30):
    try:
        writeFiles()
        runTests(t1, t2, t3, t4, bs, round)
    finally:
        remove_tempfiles(t1, t2, t3, t4)

# Next, check for proper behavior with 0-byte files.
if verbose:
    print "13. 0-byte files"
try:
    t1 = writeTmp(1, [""])
    t2 = writeTmp(2, [""])
    t3 = writeTmp(3, ["The only line there is.\n"])
    t4 = writeTmp(4, [""])
    fi = FileInput(files=(t1, t2, t3, t4))
    line = fi.readline()
    verify(line == 'The only line there is.\n')
    verify(fi.lineno() == 1)
    verify(fi.filelineno() == 1)
    verify(fi.filename() == t3)
    line = fi.readline()
    verify(not line)
    verify(fi.lineno() == 1)
    verify(fi.filelineno() == 0)
    verify(fi.filename() == t4)
    fi.close()
finally:
    remove_tempfiles(t1, t2, t3, t4)

if verbose:
Ejemplo n.º 49
0
                    if accumulator and accumulator[-1].islower():
                        if should_yield():
                            yield accumulator
                            accumulator = ""
                    if accumulator and accumulator[-1].isupper() and (
                            location + 1 < len(word)):
                        if word[location + 1].islower():
                            yield accumulator
                            accumulator = ""
                accumulator += letter
            else:
                if should_yield():
                    yield accumulator
                accumulator = ""
        if should_yield():
            yield accumulator

    def mostUsedWords(self, number):
        return self.counts.most_common(number)


if __name__ == "__main__":
    counter = WordCounter()
    for line in FileInput():
        counter.count(line)

    index = 0
    for (word, count) in counter.mostUsedWords(100):
        index += 1
        print index, word, count
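With no arguments, FileInput() reads the files named in sys.argv[1:] and falls back to standard input when none are given, which is what lets this script run as a filter. A Python 3 sketch of the same pattern (the snippet above is Python 2; the line counter below is just a placeholder for the word counting):

from fileinput import FileInput

# Count lines from the files named on the command line, or from stdin.
if __name__ == "__main__":
    total = 0
    with FileInput() as stream:
        for _ in stream:
            total += 1
    print(total)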
Ejemplo n.º 50
0
    def test_zero_byte_files(self):
        t1 = t2 = t3 = t4 = None
        try:
            t1 = writeTmp(1, [""])
            t2 = writeTmp(2, [""])
            t3 = writeTmp(3, ["The only line there is.\n"])
            t4 = writeTmp(4, [""])
            fi = FileInput(files=(t1, t2, t3, t4))

            line = fi.readline()
            self.assertEqual(line, 'The only line there is.\n')
            self.assertEqual(fi.lineno(), 1)
            self.assertEqual(fi.filelineno(), 1)
            self.assertEqual(fi.filename(), t3)

            line = fi.readline()
            self.assertFalse(line)
            self.assertEqual(fi.lineno(), 1)
            self.assertEqual(fi.filelineno(), 0)
            self.assertEqual(fi.filename(), t4)
            fi.close()
        finally:
            remove_tempfiles(t1, t2, t3, t4)
Ejemplo n.º 51
0
def runTests(t1, t2, t3, t4, bs=0, round=0):
    start = 1 + round * 6
    if verbose:
        print '%s. Simple iteration (bs=%s)' % (start + 0, bs)
    fi = FileInput(files=(t1, t2, t3, t4), bufsize=bs)
    lines = list(fi)
    fi.close()
    verify(len(lines) == 31)
    verify(lines[4] == 'Line 5 of file 1\n')
    verify(lines[30] == 'Line 1 of file 4\n')
    verify(fi.lineno() == 31)
    verify(fi.filename() == t4)

    if verbose:
        print '%s. Status variables (bs=%s)' % (start + 1, bs)
    fi = FileInput(files=(t1, t2, t3, t4), bufsize=bs)
    s = "x"
    while s and s != 'Line 6 of file 2\n':
        s = fi.readline()
    verify(fi.filename() == t2)
    verify(fi.lineno() == 21)
    verify(fi.filelineno() == 6)
    verify(not fi.isfirstline())
    verify(not fi.isstdin())

    if verbose:
        print '%s. Nextfile (bs=%s)' % (start + 2, bs)
    fi.nextfile()
    verify(fi.readline() == 'Line 1 of file 3\n')
    verify(fi.lineno() == 22)
    fi.close()

    if verbose:
        print '%s. Stdin (bs=%s)' % (start + 3, bs)
    fi = FileInput(files=(t1, t2, t3, t4, '-'), bufsize=bs)
    savestdin = sys.stdin
    try:
        sys.stdin = StringIO("Line 1 of stdin\nLine 2 of stdin\n")
        lines = list(fi)
        verify(len(lines) == 33)
        verify(lines[32] == 'Line 2 of stdin\n')
        verify(fi.filename() == '<stdin>')
        fi.nextfile()
    finally:
        sys.stdin = savestdin

    if verbose:
        print '%s. Boundary conditions (bs=%s)' % (start + 4, bs)
    fi = FileInput(files=(t1, t2, t3, t4), bufsize=bs)
    verify(fi.lineno() == 0)
    verify(fi.filename() == None)
    fi.nextfile()
    verify(fi.lineno() == 0)
    verify(fi.filename() == None)

    if verbose:
        print '%s. Inplace (bs=%s)' % (start + 5, bs)
    savestdout = sys.stdout
    try:
        fi = FileInput(files=(t1, t2, t3, t4), inplace=1, bufsize=bs)
        for line in fi:
            line = line[:-1].upper()
            print line
        fi.close()
    finally:
        sys.stdout = savestdout

    fi = FileInput(files=(t1, t2, t3, t4), bufsize=bs)
    for line in fi:
        verify(line[-1] == '\n')
        m = pat.match(line[:-1])
        verify(m != None)
        verify(int(m.group(1)) == fi.filelineno())
    fi.close()
Ejemplo n.º 52
0
    def buffer_size_test(self, t1, t2, t3, t4, bs=0, round=0):
        pat = re.compile(r'LINE (\d+) OF FILE (\d+)')

        start = 1 + round * 6
        if verbose:
            print('%s. Simple iteration (bs=%s)' % (start + 0, bs))
        fi = FileInput(files=(t1, t2, t3, t4), bufsize=bs)
        lines = list(fi)
        fi.close()
        self.assertEqual(len(lines), 31)
        self.assertEqual(lines[4], 'Line 5 of file 1\n')
        self.assertEqual(lines[30], 'Line 1 of file 4\n')
        self.assertEqual(fi.lineno(), 31)
        self.assertEqual(fi.filename(), t4)

        if verbose:
            print('%s. Status variables (bs=%s)' % (start + 1, bs))
        fi = FileInput(files=(t1, t2, t3, t4), bufsize=bs)
        s = "x"
        while s and s != 'Line 6 of file 2\n':
            s = fi.readline()
        self.assertEqual(fi.filename(), t2)
        self.assertEqual(fi.lineno(), 21)
        self.assertEqual(fi.filelineno(), 6)
        self.assertFalse(fi.isfirstline())
        self.assertFalse(fi.isstdin())

        if verbose:
            print('%s. Nextfile (bs=%s)' % (start + 2, bs))
        fi.nextfile()
        self.assertEqual(fi.readline(), 'Line 1 of file 3\n')
        self.assertEqual(fi.lineno(), 22)
        fi.close()

        if verbose:
            print('%s. Boundary conditions (bs=%s)' % (start + 4, bs))
        fi = FileInput(files=(t1, t2, t3, t4), bufsize=bs)
        self.assertEqual(fi.lineno(), 0)
        self.assertEqual(fi.filename(), None)
        fi.nextfile()
        self.assertEqual(fi.lineno(), 0)
        self.assertEqual(fi.filename(), None)

        if verbose:
            print('%s. Inplace (bs=%s)' % (start + 5, bs))
        savestdout = sys.stdout
        try:
            fi = FileInput(files=(t1, t2, t3, t4), inplace=1, bufsize=bs)
            for line in fi:
                line = line[:-1].upper()
                print(line)
            fi.close()
        finally:
            pass  ###

        fi = FileInput(files=(t1, t2, t3, t4), bufsize=bs)
        for line in fi:
            self.assertEqual(line[-1], '\n')
            m = pat.match(line[:-1])
            self.assertNotEqual(m, None)
            self.assertEqual(int(m.group(1)), fi.filelineno())
        fi.close()