Example #1
0
    def inner_correlation(self, step):
        """Correlation of the sample with a copy of itself shifted by *step*.

        *step* must be an integer strictly between 0 and the sample size;
        otherwise a ValueError is raised.
        """
        check_integer(step, 'step should be integer')
        size = len(self._data)
        if step <= 0 or step >= size:
            raise ValueError('step should be in range ({0}; {1})'.format(0, size))

        # Pair each element with the one *step* positions ahead of it.
        head = self._data[:-step]
        tail = self._data[step:]
        return Sample.calc_correlation(head, tail)
Example #2
0
def populatePCB(pcb, printer=False):
    """Interactively fill in *pcb*'s file name, size, start region and r/w mode.

    Prompts on stdin until `check_integer` accepts the numeric fields and the
    read/write answer is exactly "r" or "w".  When *printer* is True the mode
    is forced to "w" without prompting.  Returns the mutated *pcb*.
    """
    file_name = input("Give me a file name: ")
    file_size = input("How big is the file: ")
    while not check_integer(file_size):
        file_size = input("Only enter integers for file size try again: ")
    memory_start_region = input("Enter a memory start region: ")
    while not check_integer(memory_start_region):
        memory_start_region = input(
            "Must be an integer. Enter a memory start region: ")
    if printer:
        # Printers are write-only devices.
        pcb.readwrite = "w"
    else:
        pcb.readwrite = input("Is this a read or write (r/w): ")
    # BUG FIX: re.match returns None (not False) on failure, so the original
    # `is False` comparison could never be true and invalid input was accepted.
    while match(r"^[rw]{1}$", pcb.readwrite) is None:
        pcb.readwrite = input(
            "Invalid input for read/write! You must only enter r or w! Try again: ")

    pcb.file_size = file_size
    pcb.file_name = file_name
    pcb.memory_start_region = memory_start_region
    return pcb
Example #3
0
    def sysgen(self):
        """Interactively configure the machine's printers, disks and CDRW drives.

        Each count is validated with `check_integer`; the matching device list
        attribute is only (re)assigned when the count is positive.
        """

        def _prompt_count(prompt):
            # Re-ask until check_integer accepts the answer, then convert.
            # FIX: the original only .strip()ped the first answer, not retries.
            value = input(prompt).strip()
            while not check_integer(value):
                value = input(
                    "Not a positive integer, enter a positive integer value: "
                ).strip()
            return int(value)

        print("Welcome to sysgen!")

        printer_cnt = _prompt_count("How many printers: ")
        disk_cnt = _prompt_count("How many disks: ")
        cdrw_cnt = _prompt_count("How many cdrw drives: ")

        # Devices are numbered starting at 1.
        if printer_cnt > 0:
            self.printers = [Printer(i + 1) for i in range(printer_cnt)]

        if disk_cnt > 0:
            self.disks = [Disk(i + 1) for i in range(disk_cnt)]

        if cdrw_cnt > 0:
            self.cdrws = [CDRW(i + 1) for i in range(cdrw_cnt)]
Example #4
0
    def __init__(self, seed):
        """Validate *seed* as an integer and initialise the generator state."""
        check_integer(seed, 'seed should be integer value')

        # Keep the original seed and use it as the starting internal value.
        self.seed = seed
        self._value = self.seed
Example #5
0
def tokenize(text, line_map, file_name="[unknown]"):
    """Split C-like source *text* into a list of Token objects.

    Handles string and char literals (with \\n / \\t escapes), line ("//")
    and multi-line ("/* */") comments, one- and two-character operators, and
    decimal points inside integer literals.  *line_map* and *file_name* are
    passed through to every Token for error reporting.

    NOTE(review): column numbers are not advanced for characters consumed via
    next()/lookahead (escapes, two-char operators), so columns after such
    constructs may be slightly off — pre-existing behavior, kept as-is.
    """
    # Duplicates in this list ("=", "&") are harmless: it is only used for
    # membership tests.
    symbols = [
        ".", ",", "{", "}", "(", ")", "=", ";", "&", "*", "-", "!", "+", "++",
        "--", "->", "[", "]", "?", ":", "=", "/", "||", "&&", "==", "!=", ">",
        "<", "<=", ">=", "|", "&", "^", "<<", ">>", "+=", "-=", "*=", "/=",
        "%=", "<<=", ">>=", "|=", "&=", "^="
    ]
    symbols_first = [v[0] for v in symbols]             # chars that can start a symbol
    beginnings = [v[0] for v in symbols if len(v) > 1]  # chars that can start a 2-char symbol

    current_word = ""
    # The trailing space guarantees the final word is flushed by the
    # whitespace branch below.
    text_iter = PeekIter(text + " ")

    tokens = []

    in_multiline_comment = False
    in_comment = False
    in_string = False
    in_char = False

    line_number = 1
    column_number = 0

    for c in text_iter:
        if c == "\n":
            if current_word != "":
                tokens.append(
                    Token(current_word, line_number,
                          column_number - len(current_word), line_map,
                          file_name))
                current_word = ""

            column_number = 0
            line_number += 1
        column_number += 1

        if in_multiline_comment:
            # BUG FIX: the original unconditionally consumed the character
            # after every '*' via next(), so "**/" never closed the comment
            # and a consumed '\n' broke line counting.  Peek first and only
            # consume the closing '/'.
            if c == '*' and text_iter.peek() == '/':
                next(text_iter)
                in_multiline_comment = False

        elif in_comment:
            if c == '\n':
                in_comment = False

        elif in_string:
            if c == '"':
                in_string = False
                current_word += c
                tokens.append(
                    Token(current_word, line_number,
                          column_number - len(current_word), line_map,
                          file_name))
                current_word = ""
            elif c == '\\':
                # Translate the two supported escapes; pass others through.
                p = next(text_iter)
                if p == "n":
                    current_word += "\n"
                elif p == "t":
                    current_word += "\t"
                else:
                    current_word += p
            else:
                current_word += c

        elif in_char:
            if c == "'":
                in_char = False
                current_word += c
                tokens.append(
                    Token(current_word, line_number,
                          column_number - len(current_word), line_map,
                          file_name))
                current_word = ""
            elif c == '\\':
                p = next(text_iter)
                if p == "n":
                    current_word += "\n"
                elif p == "t":
                    current_word += "\t"
                else:
                    current_word += p
            else:
                current_word += c
        else:
            if c in [" ", "\n", "\t"]:
                if current_word != "":
                    tokens.append(
                        Token(current_word, line_number,
                              column_number - len(current_word), line_map,
                              file_name))
                current_word = ""
            elif c == '/' and text_iter.peek() in ["*", "/"]:
                # BUG FIX: consume the opener's second character so that a
                # directly following '/' (as in "/*/") is not mistaken for
                # the comment closer.
                opener = next(text_iter)
                if opener == "*":
                    in_multiline_comment = True
                else:
                    in_comment = True

                if current_word != "":
                    tokens.append(
                        Token(current_word, line_number,
                              column_number - len(current_word), line_map,
                              file_name))
                current_word = ""
            elif c in symbols_first:
                # A '.' after an integer is a decimal point, not an operator.
                if check_integer(current_word) and c == ".":
                    current_word += c
                    continue

                if current_word != "":
                    tokens.append(
                        Token(current_word, line_number,
                              column_number - len(current_word), line_map,
                              file_name))
                current_word = ""

                if c in beginnings:
                    # Try to extend to a two-character operator.
                    next_c = text_iter.peek()
                    if c + next_c in symbols:
                        tokens.append(
                            Token(c + next(text_iter), line_number,
                                  column_number - len(current_word), line_map,
                                  file_name))
                    else:
                        tokens.append(
                            Token(c, line_number, column_number - 1, line_map,
                                  file_name))
                else:
                    tokens.append(
                        Token(c, line_number, column_number - 1, line_map,
                              file_name))
            elif c == '"':
                if current_word != "":
                    tokens.append(
                        Token(current_word, line_number,
                              column_number - len(current_word), line_map,
                              file_name))
                current_word = '"'
                in_string = True

            elif c == "'":
                if current_word != "":
                    tokens.append(
                        Token(current_word, line_number,
                              column_number - len(current_word), line_map,
                              file_name))
                current_word = "'"
                in_char = True

            else:
                current_word += c

    # Normalize integer literals (base prefixes like 0x/0o included, via
    # base 0) to their decimal string form; non-integers are left alone.
    for token in tokens:
        try:
            token.data = str(int(token.data, 0))
        except ValueError:
            pass

    return tokens