def load_conf(infile: str, flog: TextIOWrapper = None) -> dict:
    ''' Loads user configuration file.
    @param infile: Path and name of the user input YAML configuration file
    @type infile: str  
    @param flog: Log file to record raised events
    @type flog: TextIOWrapper (file object) 
    @return: Parsed user configuration as a dictionary
    @rtype: dict
    '''

    try:
        if infile and os.path.isfile(infile):
            with open(infile, 'r') as f:
                return yaml.safe_load(f)
        else:
            print('Error: User configuration file not specified or not found')
            return None
    except Exception:
        result = 'Error: "load_conf" function has error(vifi_server): '
        if flog:
            flog.write(result)
            traceback.print_exc(file=flog)
        else:
            print(result)
            traceback.print_exc()
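A minimal usage sketch for load_conf; the configuration path and log file name below are hypothetical, for illustration only:

# Illustration only: hypothetical paths.
with open('vifi_server.log', 'a') as log_file:
    conf = load_conf('conf/vifi_config.yml', flog=log_file)
if conf is not None:
    print(sorted(conf.keys()))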
Example #2
def write(ctx: click.Context, file: TextIOWrapper) -> None:
    """Writes results from last command to a file."""
    if file.name.split(".")[-1] == "wiki":
        file.write(PARSERS[ctx.obj["last"]](ctx.obj["export"],
                                            print_date=False))
    else:
        file.write(json.dumps(ctx.obj["export"]))
Example #3
def serialization(wf: _io.TextIOWrapper, pNode: TreeNode):
    if pNode is None:
        wf.write("$\n")
        return
    else:
        wf.write(str(pNode.val) + "\n")
    serialization(wf, pNode.left)
    serialization(wf, pNode.right)
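A counterpart sketch (not part of the original example) that rebuilds the tree from the preorder dump written by serialization, assuming TreeNode(val) accepts the node value and that values are integers:

def deserialization(rf: _io.TextIOWrapper) -> TreeNode:
    # Read the next preorder entry; "$" marks a missing child.
    line = rf.readline().rstrip("\n")
    if line == "$":
        return None
    node = TreeNode(int(line))          # assumes integer node values
    node.left = deserialization(rf)
    node.right = deserialization(rf)
    return node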
Example #4
    def read_to_file(self, file: _io.TextIOWrapper) -> str:
        """
        Decompress the object file, write it to the given text file and return the text
        :param file: Open text file object for writing decompressed text to
        :return text: Returns the decompressed text as a string
        """
        text = self.decompress()
        file.write(text)
        return text
Example #5
def _write_data_to_file(data: list, file: _io.TextIOWrapper) -> None:
    """
    write data to file
    :param data:
    :param file:
    :return:
    """
    file.writelines(data)
    file.flush()
Example #6
def is_csv(objects_file: TextIOWrapper, delim: str):
    try:
        csv.Sniffer().sniff(objects_file.read(1024), delimiters=delim)
        return True
    except csv.Error:
        return False
    finally:
        objects_file.seek(
            0
        )  # need this to move back to the beginning of the file after sampling
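A short usage sketch, assuming a hypothetical sample file name; sniffing succeeds for comma-separated content, and the finally block rewinds the file so it can be read again afterwards:

# Illustration only: create a small comma-separated sample and sniff it.
with open('objects_sample.csv', 'w') as f:
    f.write('id,name\n1,alpha\n2,beta\n')
with open('objects_sample.csv', 'r') as f:
    print(is_csv(f, ','))         # True
    print(f.readline().strip())   # 'id,name' -- the file was rewound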
Example #7
def load_toml_file(conf_file: _io.TextIOWrapper) -> Dict:
    """Create a dictionary from the configuration specified in the loaded TOML
    configuration file. Note that no Config object is created, just a dict.

    :param conf_file: file data as TextIOWrapper
    :return: hierarchical configuration in a dictionary
    """
    try:
        return toml.load(conf_file)
    finally:
        conf_file.close()
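A usage sketch, assuming a hypothetical settings.toml path; the finally block closes the handle whether or not parsing succeeds:

# Illustration only.
conf_file = open('settings.toml')
config = load_toml_file(conf_file)   # conf_file is closed by the finally block
print(config.get('server', {}))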
Example #8
def msmarco_write(query_ids: List[int], doc_ids: List[int], preds: List[int],
                  msmarco_file: _io.TextIOWrapper):
    assert len(set(query_ids)) == 1
    query_id = query_ids[0]
    rank = 1
    logger.info("writing to MSMarco file...")
    for idx in preds:
        doc_id = doc_ids[idx]
        msmarco_file.write("\t".join((str(query_id), str(doc_id), str(rank))) +
                           "\n")
        rank += 1
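A usage sketch (output path and ids are illustrative), assuming the module-level logger is configured; preds holds indices into doc_ids in ranked order:

# Illustration only: one query, three candidate documents.
with open('msmarco_run.tsv', 'w') as run_file:
    msmarco_write(query_ids=[42, 42, 42],
                  doc_ids=[900, 901, 902],
                  preds=[2, 0, 1],       # best document first
                  msmarco_file=run_file)
# Produces: 42<TAB>902<TAB>1, 42<TAB>900<TAB>2, 42<TAB>901<TAB>3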
Example #9
def count_decil(file: _io.TextIOWrapper):
    n = math.ceil(0.1 * sum(1 for l in file if l.startswith("open")))
    file.seek(0)
    dec = [0] * n
    pat = re.compile(r"open .* (\d+) usec")
    for l in file:
        match = pat.match(l)
        if match is not None:
            value = float(match.group(1))
            if value > dec[0]:
                heapq.heappushpop(dec, value)
    return dec[0]
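count_decil keeps the largest 10% of the "open ... usec" latencies in a min-heap and returns the smallest of them, i.e. an approximate 90th-percentile threshold. A sketch with synthetic log lines (io.StringIO stands in for the file; illustrative only):

import io

# Ten "open" lines with latencies 10..100 usec; the top decile holds one
# value, so the returned threshold is 100.0.
sample = "".join("open /tmp/f{} {} usec\n".format(i, i * 10) for i in range(1, 11))
print(count_decil(io.StringIO(sample)))   # -> 100.0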
Example #10
    def _write_select_content(cls, output: _io.TextIOWrapper,
                              select: SelectParser.SelectContent) -> None:
        output.write("{ " + cls._MARK_LINE)

        # If it doesn't have a `default` case, then it is a blocking `select`, so we have `while` in `define`
        output.write(
            f"{'if' if cls._select_has_default_case(select) else 'while'} (true) {cls._MARK_LINE}"
        )
        output.write("{ " + cls._MARK_LINE)
        for case in select:
            cls._write_case_content(output, case)

        output.write("} \\\n")  # /while
        output.write("}\n")  # /define
Example #11
def read_MeshFormat(file: TextIOWrapper, data: dict):
    """
    Read version of .msh file

    :param file: opened .msh file
    :param data: dictionary with data describing mesh
    """

    mesh_format = file.readline()
    mesh_format = mesh_format.split(' ')

    data['version'] = 'MSH ' + mesh_format[0] + ' ASCII'

    check_ending('$EndMeshFormat\n', file.readline())
Example #12
    def finalize_results_logging(self, csvf: _io.TextIOWrapper, loss: float,
                                 f1: float):
        """
        Finalize writing of result CSV
        :param csvf: CSV file handle
        :param loss: loss reached with result
        :param f1: F1 reached with result
        """

        csvf.close()
        os.rename(
            self.TMP_FNAME,
            f"introspection/introspection"
            f"_{str(self.__class__)}_A{f1:.6f}_L{loss:.6f}_{socket.gethostname()}.tsv",
        )
Example #13
def read_Elements(file: TextIOWrapper, data: dict):
    """
    Read Elements section from .msh file

    :param file: opened .msh file
    :param data: dictionary with data describing mesh
    """
    n_entities = entity_block_info(file)[0]

    data['element_lists'] = []
    data['id_list'] = []

    for i in range(n_entities):
        n_elements, elements_str_list, el_type, tag, dim = parse_entity(file)
        if dim != 0:
            data['id_list'].append(data['entities'][dim][tag])
            element_list = gmshTypes[el_type](n_elements)
            for j in range(n_elements):
                el = elements_str_list[j]
                connectivity = [(int(i) - 1) for i in el.split(' ')]
                element_list[j] = connectivity[1:]

            data['element_lists'].append(element_list)

    check_ending('$EndElements\n', file.readline())
Example #14
def addListEmail(request):
    if request.method == 'POST':
        form = FileForm(data=request.POST, files=request.FILES)
        if form.is_valid():
            redundant = 0
            success = 0
            f = request.FILES[list(request.FILES.keys())[0]]
            # request.FILES are binary files but csv needs a text file;
            # TextIOWrapper makes f.file a text file
            f = TextIOWrapper(f.file, encoding=request.encoding)
            csv_read = csv.reader(f, delimiter=',')
            for line in csv_read:
                [mail, type_mail] = line
                email = Email(email=mail, user_id=request.user.id)
                email.type = type_mail
                try:
                    email.save()
                    success += 1
                except IntegrityError:
                    if Email.objects.filter(email=mail,
                                            user_id=request.user.id):
                        redundant += 1
            done = True
    form = FileForm()
    return render(request, 'Mail_blacklist/Adding_mail_list.html', locals())
Example #15
def define_team_size(file: TextIOWrapper) -> int:
    """Find out how much players in every team

    Parameters
    ----------
    file : TextIOWrapper
        Log file to be parsed

    Returns
    -------
    int
        Players in team

    """

    players = 0
    got_team = False

    for line in file.readlines():

        if line.startswith('TeamName'):

            if not got_team:
                got_team = True
            else:
                return players

        elif line.startswith('NAME') and got_team:

            players += 1

    raise ValueError("Could not figure out team size")
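A short usage sketch with a synthetic log (io.StringIO stands in for the parsed file; the line prefixes follow the checks in the function):

import io

# Illustration only: two teams of two players each.
sample_log = io.StringIO(
    "TeamName: RED\n"
    "NAME Alice\n"
    "NAME Bob\n"
    "TeamName: BLUE\n"
    "NAME Carol\n"
    "NAME Dave\n"
)
print(define_team_size(sample_log))   # -> 2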
Example #16
def hasilCSV(request):
    if request.method == 'POST':
        name = request.FILES['fileInput'].name
        typeFile = name.split('.')[-1]
        if(typeFile == 'txt'):
            reader = TextIOWrapper(request.FILES['fileInput'].file, encoding='utf-8')
        elif(typeFile == 'csv'):
            try:
                text = TextIOWrapper(request.FILES['fileInput'].file, encoding='utf-8')
                reader = csv.reader(text)
            except Exception:
                text = StringIO(request.FILES['fileInput'].file.read().decode())
                reader = csv.reader(text)
        
        arrData = []
        for line in reader:
            line = ''.join(line)
            arrData.append(line)
        
        myfile = StringIO()
        
        metode = request.POST['metode']
        statusFormalisasi = request.POST.get('formalisasi', False)
        if(metode == 'EDR'):
            for line in arrData:
                hasil = F_EDR(line)
                myfile.write(hasil + os.linesep)
        elif(metode == 'ED'):
            for line in arrData:
                hasil = F_ED(line)
                myfile.write(hasil + os.linesep)
        elif(metode == 'BG'):
            for line in arrData:
                hasil = F_BG(line)
                myfile.write(hasil + os.linesep)
        
        myfile.flush()
        myfile.seek(0)
        
        response = HttpResponse(FileWrapper(myfile), content_type='text/csv')
        response['Content-Disposition'] = 'attachment; filename=hasil.csv'
        return response
    else:
        return render(request, 'index_preprocess.html', {})
        
        
Example #17
def f30kTokenize(anno: _io.TextIOWrapper) -> (dict, Counter):
    '''
    Tokenize all the annotations and return a dictionary with the results,
    indexed by sentence id, together with a word-frequency Counter.
    '''
    anno.seek(0)
    j_orig = json.load(anno)
    tokens = dict()
    wctr = Counter()
    for i, image in enumerate(j_orig['images']):
        for j, sent in enumerate(image['sentences']):
            raw, sid, iid = sent['raw'], sent['sentid'], sent['imgid']
            tok = spk.tokenize(raw)
            wctr.update(tok)
            tokens[int(sid)] = (tok, int(iid))
        print('.', end='')
    return tokens, wctr
Example #18
    def tokenize_file(self, file: _io.TextIOWrapper):
        """
        :param file:
        :return:
        """
        line = file.readline()
        line_num = 1
        in_doc = False
        doc = ""
        while line:
            tup = (line_num, self.file_name)
            last_index = len(self.tokens)
            in_doc, doc = self.proceed_line(line, tup, in_doc, doc)
            self.find_include(last_index, len(self.tokens))
            line = file.readline()
            line_num += 1

        self.tokens.append(stl.Token((stl.EOF, None)))
Example #19
def skip_section(file: TextIOWrapper, data: dict):
    """
    Skip section in .msh file

    :param file: opened .msh file
    :param data: dictionary with data describing mesh
    """

    while file.readline()[0:4] != '$End':
        pass
Example #20
    def _write_define_header(cls, output: _io.TextIOWrapper, index: int,
                             select: SelectParser.SelectContent) -> None:
        output.write(cls._DEFINE_PREFIX)
        output.write(str(index))

        parameters: typing.List[str] = []
        for case in select:
            receiver: str = case[cls._INDICES_CASE.receiver] if case[
                cls._INDICES_CASE.receiver] else "nullptr"
            sender: str = case[cls._INDICES_CASE.sender] if case[
                cls._INDICES_CASE.sender] else "nullptr"
            read_from_channel: bool = cls._is_read_from_channel(sender)

            # The default case is not appearing in the `define`'s header.
            if cls._is_default_case(case):
                continue
            parameters.append(sender if read_from_channel else receiver)
            parameters.append(str(read_from_channel).lower())
            parameters.append(
                receiver or "nullptr" if read_from_channel else sender)
Example #21
def read_PhysicalNames(file: TextIOWrapper, data: dict):
    """
    Read information about physical names of each entity

    :param file: opened .msh file
    :param data: dictionary with data describing mesh
    """
    n = int(file.readline()[:-1])
    data['phys_names'] = {}
    data['phys_names'][0] = {}
    data['phys_names'][1] = {}
    data['phys_names'][2] = {}
    data['phys_names'][3] = {}
    for i in range(n):
        entity_phys_info = file.readline()[:-1].split(' ')

        data['phys_names'][int(entity_phys_info[0])][int(
            entity_phys_info[1])] = entity_phys_info[2][1:-1]

    check_ending('$EndPhysicalNames\n', file.readline())
Example #22
def index(request):
    Topik = listfolder()  # existing topics are stored as folders => file-based
    FeatX = ['1  Bag of Word','2  TF Binary','3  TF-IDF','4  Bigram']  # feature extraction options
    kelasSentimen = ['2 Kelas Sentimen - (positif atau negatif)','3 Kelas Sentimen - (positif, negatif, atau netral)']  # choices for the number of sentiment classes
    tabledata = ''
    prediction = ''
    data = ''
    IA = ''
    topik = ''
    if request.method=="POST":
        if 'input' in request.POST:            
            tabledata = []
            topik = request.POST.get("topik")
            FE = request.POST.get("FE")
            IA = request.POST.get("inputArea")
            print(topik, FE, IA)
            if IA == "": #kalau menginput dengan file (multi input)
                inputFile = request.FILES["inputDataTest"]
                loadfile = TextIOWrapper(inputFile.file,encoding='utf-8')
                datatemp = []
                for i in loadfile:
                    datatemp.append(i)
                for i in datatemp:
                    prepros, prediction = predict(i,int(FE),topik)  # call the predict function from listFunction
                    tabledata.append({
                        'input': i,
                        'prepros': prepros,
                        'prediction': prediction,
                        'confirm': True
                        })
                data = json.dumps(tabledata)  # load into the results table
            else:  # input came from the text area (single input)
                prepros, prediction = predict(IA,int(FE),topik)  # call the predict function from listFunction
                tabledata.append({
                    'input': IA,
                    'prepros': prepros,
                    'prediction': prediction,
                    'confirm': True
                    })
                data = json.dumps(tabledata)  # load into the results table
        
        if 'create' in request.POST and request.FILES:  # build a new model
            topik = request.POST.get('inputTopik')  # topic title
            KS = request.POST.get('KS')  # number of sentiment classes
            dataS = request.FILES['inputData']  # dataset to use
            label = request.FILES['inputLabel']  # labels to use

            createfiletemp(dataS,label,topik)  # call the function from tsts to insert the dataset into the database
            crfolder(topik)  # create the topic folder
            call(int(KS),topik)  # call the `call` function to build the model from listfinction2
            q = kelasData(topik=topik, kategori=KS)
            q.save()
    return render(request, "index_dlnnFinal.html",{'selected_topic': topik,'data':FeatX,'sent':predict, 'IA':IA, 'hasil':data,'topik':Topik, 'kelasSentimen':kelasSentimen})
Example #23
def parse_entity(file: TextIOWrapper) -> (int, list, int, int, int):
    """
    Parse one entity block in .msh format

    :param file: opened .msh file
    :return: number of objects, objects, objects type, entity tag, entity dimension
    """
    info = file.readline()
    info = [int(i) for i in info[:-1].split(' ')]

    objects = []
    entity_dimension = info[0]
    entity_tag = info[1]
    obj_type = info[2]
    n_object = info[3]

    # read objects as strings
    for i in range(n_object):
        objects.append(file.readline()[:-2])  # append but delete <\n>

    return n_object, objects, obj_type, entity_tag, entity_dimension
Example #24
def entity_block_info(file: TextIOWrapper) -> (int, int):
    """
    Read information about current entity block

    :param file: opened .msh file
    :return: number of entity blocks, number of objects
    """
    block = file.readline()
    block = [int(i) for i in block[:-1].split(' ')]

    # number of entity blocks and number of objects in such block
    return block[0], block[1]
Example #25
def cocoTokenize(anno: _io.TextIOWrapper) -> (dict, Counter):
    '''
    Tokenize all the annotations and return a dictionary with the results,
    indexed by annotation id, together with a word-frequency Counter.
    '''
    anno.seek(0)
    j_orig = json.load(anno)
    tokens = dict()
    wctr = Counter()
    for i, annotation in enumerate(j_orig['annotations']):
        tok = spk.tokenize(annotation['caption'])
        wctr.update(tok)
        tokens[int(annotation['id'])] = (tok, int(annotation['image_id']))
        print('\0337\033[K>',
              i,
              '/',
              len(j_orig['annotations']),
              '-> {:.1f}%'.format(100 * i / len(j_orig['annotations'])),
              end='\0338')
        sys.stdout.flush()
    return tokens, wctr
Example #26
 def _read_csv(self):
     if self.quotechar == '':
         current_quoting = QUOTE_NONE
     else:
         current_quoting = QUOTE_ALL
     if 'csvcontent' in self.request.FILES.keys():
         csvfile = TextIOWrapper(
             self.request.FILES['csvcontent'].file, encoding=self.encoding, errors='replace')
         csvcontent = "".join(csvfile.readlines())
         for param_idx in range(0, int(len(csvcontent) / 2048) + 2):
             self.params['csvcontent%d' % param_idx] = csvcontent[
                 2048 * param_idx:2048 * (param_idx + 1)]
         csvfile.seek(0)
     else:
         csvcontent = ""
         for param_idx in range(0, 1000):
             curent_content = self.getparam('csvcontent%d' % param_idx)
             if curent_content is None:
                 break
             else:
                 csvcontent += "" + curent_content
         csvfile = StringIO(csvcontent)
     self.spamreader = DictReader(csvfile, delimiter=self.delimiter, quotechar=self.quotechar, quoting=current_quoting)
     try:
         if (self.spamreader.fieldnames is None) or (len(self.spamreader.fieldnames) == 0):
             raise Exception("")
     except Exception:
         raise LucteriosException(IMPORTANT, _('CSV file unvalid!'))
Example #27
 def parse_map(self, server_messages: TextIOWrapper) -> StateInit:
     """Parse the initial server message into a map."""
     # a level has a header with color specifications followed by the map
     # the map starts after the line "#initial"
     line = server_messages.readline().rstrip()
     initial = False  # mark start of level map
     goal = False  # mark start of goal map
     map = []
     goal_state = []
     col_count = 0
     while line:
         if goal:
             if line.find("#end") != -1:
                 len_line = max(len(l) for l in map)
                 for i in range(len(map)):
                     map[i] += "+" * (len_line - len(map[i]))
                     goal_state[i] += "+" * (len_line - len(goal_state[i]))
                 println("\n".join(["".join(line) for line in map]))
                 return self.build_map(map, goal_state)
             goal_state.append(list(self._formatl(line)))
         elif initial:
             if line.find("#goal") != -1:
                 goal = True
             else:
                 map.append(list(self._formatl(line)))
         else:
             if line.find("#initial") != -1:
                 initial = True
             else:
                 color_matched = self.colors_re.search(line)
                 if color_matched:
                     col_count += 1
                     color = color_matched[1]
                     self.colors[color_matched[2]] = color
                     for obj in line[len(color) + 5:].split(", "):
                         self.colors[obj] = color
         line = server_messages.readline().replace("\r",
                                                   "")[:-1]  # chop last
Example #28
    def replace_table_contents(file_content: _io.TextIOWrapper, old_table: str, new_table: str, homebrew_tap: str):
        """Replaces the old README project table string with the new
        project table string.
        """
        logger = woodchips.get(LOGGER_NAME)

        readme = ReadmeUpdater.determine_readme(homebrew_tap)

        if readme:
            with open(readme, 'w') as readme_contents:
                readme_contents.write(file_content.read().replace(old_table, new_table + '\n'))
            logger.debug(f'{readme} written successfully.')

            Git.add(homebrew_tap)
Example #29
def parse_nodes(file: TextIOWrapper, ref_node_tags: list) -> list:
    """
    Parse one $Nodes entity block in .msh format

    :param file: opened .msh file
    :param ref_node_tags: mutable node tags list
    :return: nodal coordinates as string
    """
    info = file.readline()
    info = [int(i) for i in info[:-1].split(' ')]

    nodes = []

    n_nodes = info[3]

    # read node tags in order to check if any nodes have been skipped
    for i in range(n_nodes):
        ref_node_tags.append(int(file.readline()[:-1]))

    # read nodal coordinates as strings
    for i in range(n_nodes):
        nodes.append(file.readline()[:-1])  # append but delete <\n>

    return nodes
Example #30
    def from_csv_file(self, file: TextIOWrapper, headers: bool = True):
        """
        Method make object from csv file.

        :param file:
        :param headers:
        :return:
        """
        read_file = file.read()
        splitted_data = read_file.split('\n')
        self.__make_keys_csv(splitted_data[0], headers)
        shift = 1 if headers else 0
        self.__data.extend(
            dict(zip(self.__keys, element.split(',')))
            for element in splitted_data[shift:])