def main(argv):
    """Parse CLI options, build the search graph from a file, launch the menu.

    Supports -h/--help, -d (debug) and -f/--file FILE (defaults to teste04.txt).
    """
    input_file = "teste04.txt"

    try:
        opts, args = getopt.getopt(argv, 'hf:d', ['help', 'file='])
    except getopt.GetoptError:
        usage()
        sys.exit(2)

    for option, value in opts:
        if option in ('-h', '--help'):
            usage()
            sys.exit()
        elif option == '-d':
            Settings.debug = True
        elif option in ('-f', '--file'):
            input_file = value

    content = FileParser(input_file).read_content()

    graph = Graph()
    graph.build(content['vertices'], content['caminho'], content['h'])

    start = graph.get_vertex(content['início'][0][0])
    final = graph.get_vertex(content['final'][0][0])

    # The goal node's heuristic distance to itself is zero.
    final.add_heuristic(final, 0)

    dijkstra = Dijkstra(graph, start, final)
    a_star = AStar(graph, start, final)

    Menu(graph, dijkstra, a_star).run()
Exemple #2
0
def main():
    """CLI entry point: filter parsed papers and dispatch the chosen action."""
    parser = build_parser()
    options = parser.parse_args()
    file_parser = FileParser(SECTION_REGEXES, PAPER_REGEXES)
    papers, sections = file_parser.parse_file(OVERVIEW_FILE)

    # Map each option's parsed *value* to the predicate it enables.
    # NOTE(review): the option values themselves are the dict keys, so two
    # options that parse to the same value (e.g. both falsy) collide and the
    # earlier predicate is silently dropped — confirm PaperParser expects this.
    paper_checks = {
        options.section:
        lambda paper, option: paper.section == Section(option),
        options.papers:
        lambda paper, option: PaperParser.fuzzy_paper_name_match(
            paper.title, option, FUZZY_PAPER_NAME_MATCH_PERCENT),
        options.changed:
        lambda paper, option: paper.changed,
        options.tags:
        lambda paper, option: PaperParser.check_tags(paper.tags, option),
        options.to_read:
        lambda paper, option: not paper.is_read
    }
    papers = PaperParser(paper_checks).parse_papers(papers)

    if not papers:
        print(red("No papers match your search!"))
        return

    if options.download:
        # Download every matching paper; Ctrl-C stops the current download.
        for paper in papers:
            try:
                PaperDownloader.download(paper)
            except KeyboardInterrupt:
                print(red("Stopped download!"))
    elif options.bibtex:
        # Collect BibTeX entries for all matches into a single bib file.
        with open(BIBFILE, 'w') as file:
            for paper in papers:
                print('- {}'.format(paper))
                bibtex = PaperDownloader.get_paper_bibtex(paper)
                if bibtex:
                    file.write(bibtex)
    else:
        if options.section or options.papers or options.changed or options.to_read or options.tags:
            # A filter was supplied: list the matching papers per section.
            papers_by_section = Section.gather_papers_by_section(papers)
            for section in papers_by_section.keys():
                print('{}\n{}'.format(
                    section, '\n'.join([
                        '- {}'.format(paper)
                        for paper in papers_by_section[section]
                    ])))
        else:
            # No filter: print a per-section paper count overview.
            for section in sections:
                print("{} ({} papers)".format(
                    section, len(section.get_papers_in_section(papers))))

        # Summary of all distinct tags across the matched papers.
        print("\nTags: [{}]".format(";".join(
            sorted(list(set([tag for paper in papers
                             for tag in paper.tags]))))))
Exemple #3
0
    def setUp(self):
        """Build one FileParser per spec fixture plus all fixture/output paths."""
        super(TestFileParser, self).setUp()

        # One parser per spec file shipped with the test resources.
        for attr_name, spec_name in (
                ("invalid_encoding_parser", "invalid_encoding_spec.json"),
                ("no_encoding_parser", "no_encoding_spec.json"),
                ("no_header_parser", "no_header_spec.json"),
                ("valid_parser", "spec.json")):
            spec_file = (self.resource_folder / spec_name).absolute()
            setattr(self, attr_name, FileParser(spec_file=spec_file))

        # Read-only input fixtures.
        self.existing_fixed_width_file = (
            self.resource_folder / "fixed_width_file.txt").absolute()
        self.existing_delimited_file = (
            self.resource_folder / "delimited_file.txt").absolute()
        self.existing_delimited_file_with_header = (
            self.resource_folder /
            "delimited_file_with_header.txt").absolute()

        # Scratch paths the tests write to.
        self.fixed_width_file = (
            self.temp_folder / "fixed_width_file.txt").absolute()
        self.delimited_file = (
            self.temp_folder / "delimited_file.txt").absolute()
        self.invalid_file = (
            self.temp_folder / "filedoesnotexist.txt").absolute()
        self.output_with_header_file = (
            self.temp_folder / "output_with_header.txt").absolute()
        self.output_without_header_file = (
            self.temp_folder / "output_without_header.txt").absolute()
def setup_app():
    """Parse the -f/--file option and start the Flask app.

    Fixed: the original bound ``fileParser`` to a *local* name, so a
    module-level parser consumed elsewhere was never replaced and the
    command-line file had no effect; declare it global so it does.
    """
    global fileParser
    parser = argparse.ArgumentParser()
    parser.add_argument('-f', dest='txtFile', default='./data.txt',
                        help='text file with structured data to be parsed')
    args = parser.parse_args()
    fileParser = FileParser(args.txtFile)
    # Development server; blocks until interrupted.
    app.run(debug=True)
Exemple #5
0
def basic_prepare():
    """Prompt for a board filename and iteration count, then load the board."""
    global board_data, iter_count
    print("Board filename : ")
    filename = prompt()
    print("Iteration count : ")
    # NOTE(review): prompt() presumably returns a string; iter_count is never
    # converted to int here — confirm downstream code performs the conversion.
    iter_count = prompt()
    board_data = FileParser.parse_data(filename)
Exemple #6
0
def execute():
    """Parse CLI arguments and run the file calculation, reporting errors.

    Returns the calculation result, or None when no arguments were parsed
    or an error/interrupt occurred.
    """
    try:
        arguments = ArgParser().parse_args()
        if not arguments:
            return None
        print("Calculating....\n")
        return FileParser(**arguments).parse_file_and_calculate()
    except KeyboardInterrupt:
        # User aborted with Ctrl-C.
        print(PrintUtils.main_screen_format(Logs.SEE_YOU_LATER_LOG))
    except Exception as err:
        # Top-level boundary: surface any failure as a formatted error line.
        print(PrintUtils.error_format(Logs.EXECUTION_FAILED_ERROR.format(err)))
 def __init__(self, path):
     """Set up DB, TVDB client and parsers, then scan *path* for media files.

     Fixed: the ``init_db`` line mixed spaces and a tab, which is a TabError
     under Python 3; indentation is now uniform spaces.
     """
     self.logger = logging.getLogger('pymetadatamanager.scanner')
     self.config = Config()
     self.dbTV = TVShowDB(self.config.tvshowdb)
     self.dbTV.init_db()
     self.TVDB = TVDB()
     self.FP = FileParser()
     self.nfo_reader = NfoReader()
     # Server time is captured before scanning so the update stamp is safe.
     self.new_time = self.TVDB.get_server_time()
     self.dbTV.clean_db_files()
     self.series_list = []
     self.set_file_list(path)
Exemple #8
0
def main():
    """Load the configuration file named on the command line and emit C++ sources."""
    if len(sys.argv) == 1:
        print("Usage: <command> <configuration-file>")
        return
    fp = FileParser(sys.argv[1])
    if not fp.exists():
        # Fixed typos in the user-facing message
        # ("Configuartion", "does not exists").
        print(
            "Configuration file does not exist! Please check the given path.")
        return
    fp.load_config()
    fp.generate_file_tree()
    print_tree_stats(fp)
    # NOTE(review): output root is hard-coded; consider making it configurable.
    gen = Generator("/home/janschon/", fp.get_tree())
    gen.generate_cpp()
    def test_parse_data(self):
        """parse_data yields two pieces per colour for each major piece type."""
        parsed_data = FileParser.parse_data("unit_test/testfile.txt")

        # Same 8 records as the original literal: white then black, each with
        # knight, bishop, rook, queen at count 2.
        expected = [
            {'color': color, 'count': 2, 'type': piece}
            for color in ('w', 'b')
            for piece in ('knight', 'bishop', 'rook', 'queen')
        ]

        self.assertEqual(parsed_data, expected)
class FaitManager:
    """Thin wrapper around FileParser for reading facts ("faits") and rules ("regles")."""

    def __init__(self, filename):
        self.filepath = filename
        # " et " is the conjunction separator used inside the knowledge file.
        self.file_parser = FileParser(filename, " et ")

    def read_faits(self):
        """Read the facts for their side effects; result is discarded."""
        self.file_parser.read_faits()

    def read_regles(self):
        """Read the rules for their side effects; result is discarded."""
        self.file_parser.read_regles()

    def get_faits(self):
        """Return the facts (delegates to the parser on every call)."""
        return self.file_parser.read_faits()

    def get_regles(self):
        """Return the rules (delegates to the parser on every call)."""
        return self.file_parser.read_regles()

    def test_value(self):
        """Print the facts, then each rule on its own line."""
        regles = self.get_regles()
        faits = self.get_faits()
        print(faits)
        for r in regles:
            print(r)
Exemple #11
0
class TestFileParser(TestBaseClass):
    """Exercises FileParser spec loading and fixed-width/delimited conversion."""

    def setUp(self):
        super(TestFileParser, self).setUp()

        # One parser per spec fixture shipped with the test resources.
        for attr_name, spec_name in (
                ("invalid_encoding_parser", "invalid_encoding_spec.json"),
                ("no_encoding_parser", "no_encoding_spec.json"),
                ("no_header_parser", "no_header_spec.json"),
                ("valid_parser", "spec.json")):
            spec_file = (self.resource_folder / spec_name).absolute()
            setattr(self, attr_name, FileParser(spec_file=spec_file))

        # Read-only input fixtures.
        self.existing_fixed_width_file = (
            self.resource_folder / "fixed_width_file.txt").absolute()
        self.existing_delimited_file = (
            self.resource_folder / "delimited_file.txt").absolute()
        self.existing_delimited_file_with_header = (
            self.resource_folder /
            "delimited_file_with_header.txt").absolute()

        # Scratch paths the tests write to.
        self.fixed_width_file = (
            self.temp_folder / "fixed_width_file.txt").absolute()
        self.delimited_file = (
            self.temp_folder / "delimited_file.txt").absolute()
        self.invalid_file = (
            self.temp_folder / "filedoesnotexist.txt").absolute()
        self.output_with_header_file = (
            self.temp_folder / "output_with_header.txt").absolute()
        self.output_without_header_file = (
            self.temp_folder / "output_without_header.txt").absolute()

    def test_invalid_spec(self):
        """A nonexistent spec file is rejected at construction time."""
        self.assertRaises(FileNotFoundError, FileParser,
                          spec_file=self.invalid_file)

    def test_generate_fixed_width_file_invalid_encoding(self):
        """An unknown encoding in the spec surfaces as LookupError."""
        with self.assertRaises(LookupError):
            self.invalid_encoding_parser.generate_fixed_width_file(
                fixed_width_file=self.fixed_width_file, lines_count=10)

    def test_generate_delimited_file_invalid_input_file(self):
        """Converting a missing fixed-width file raises FileNotFoundError."""
        with self.assertRaises(FileNotFoundError):
            self.invalid_encoding_parser.generate_delimited_file(
                fixed_width_file=self.invalid_file,
                delimited_file=self.delimited_file)

    def test_generate_delimited_file_invalid_encoding(self):
        """A valid input file still fails conversion under a bad encoding."""
        self.valid_parser.generate_fixed_width_file(
            fixed_width_file=self.fixed_width_file, lines_count=10)

        with self.assertRaises(LookupError):
            self.invalid_encoding_parser.generate_delimited_file(
                fixed_width_file=self.fixed_width_file)

    def test_generate_fixed_width_file_invalid_columns(self):
        """A spec with bad column definitions is rejected with ValueError."""
        self.assertRaises(
            ValueError, FileParser,
            spec_file="{folder}/invalid_columns_spec.json".format(
                folder=self.resource_folder))

    def test_generate_fixed_width_file_no_encoding(self):
        """Both conversion directions require an encoding key in the spec."""
        with self.assertRaises(KeyError):
            self.no_encoding_parser.generate_fixed_width_file(
                fixed_width_file=self.fixed_width_file, lines_count=10)

        self.valid_parser.generate_fixed_width_file(
            fixed_width_file=self.fixed_width_file, lines_count=10)

        with self.assertRaises(KeyError):
            self.no_encoding_parser.generate_delimited_file(
                fixed_width_file=self.fixed_width_file)

    def test_valid_spec(self):
        """A valid spec supports generation with and without a header."""
        self.valid_parser.generate_fixed_width_file(
            fixed_width_file=self.fixed_width_file, lines_count=10)
        self.valid_parser.generate_delimited_file(
            fixed_width_file=self.fixed_width_file,
            delimited_file=self.delimited_file)
        self.no_header_parser.generate_delimited_file(
            fixed_width_file=self.fixed_width_file,
            delimited_file=self.delimited_file)

    def test_file_content_with_header(self):
        """Conversion output with a header matches the golden file."""
        self.valid_parser.generate_delimited_file(
            fixed_width_file=self.existing_fixed_width_file,
            delimited_file=self.output_with_header_file)
        self.assertTrue(
            self.compare_files(self.existing_delimited_file_with_header,
                               self.output_with_header_file))

    def test_file_content_without_header(self):
        """Conversion output without a header matches the golden file."""
        self.no_header_parser.generate_delimited_file(
            fixed_width_file=self.existing_fixed_width_file,
            delimited_file=self.output_without_header_file)
        self.assertTrue(
            self.compare_files(self.existing_delimited_file,
                               self.output_without_header_file))
Exemple #12
0
 def test_invalid_spec(self):
     """Constructing FileParser with a nonexistent spec raises FileNotFoundError."""
     self.assertRaises(FileNotFoundError, FileParser,
                       spec_file=self.invalid_file)
Exemple #13
0
 def test_generate_fixed_width_file_invalid_columns(self):
     """A spec with inconsistent column definitions raises ValueError."""
     spec_path = "{folder}/invalid_columns_spec.json".format(
         folder=self.resource_folder)
     self.assertRaises(ValueError, FileParser, spec_file=spec_path)
Exemple #14
0
import sys
from file_parser import FileParser
from main_setup import main_setup

if __name__ == '__main__':
    # Fixed: Python 2 print statement; the parenthesised form runs on 2 and 3.
    if len(sys.argv) != 2:
        print("Usage: %s test_file.json" % sys.argv[0])
        sys.exit(-1)

    main_setup(None)

    # Build the network from the JSON test case and dump it as Graphviz dot.
    file_parser = FileParser()
    network = file_parser.create_network(sys.argv[1])
    network.print_dot()
class Scanner(object):
    """
    Methods for scanning files into the database.
    """

    def __init__(self, path):
        self.logger = logging.getLogger('pymetadatamanager.scanner')
        self.config = Config()
        self.dbTV = TVShowDB(self.config.tvshowdb)
        # Fixed: this line previously mixed spaces and a tab (TabError on Py3).
        self.dbTV.init_db()
        self.TVDB = TVDB()
        self.FP = FileParser()
        self.nfo_reader = NfoReader()
        # Server time is captured before scanning so the update stamp is safe.
        self.new_time = self.TVDB.get_server_time()
        self.dbTV.clean_db_files()
        self.series_list = []
        self.set_file_list(path)

    def __del__(self):
        # Persist the update stamp; attributes may already be gone at teardown.
        try:
            self.dbTV.set_update_time(self.new_time)
        except AttributeError:
            self.logger.error("Error setting update time.")

    def set_file_list(self, path):
        """Cache the parsed file tuples found under *path*."""
        self.file_list = self.FP.parse_files_by_path(path)

    def set_series_list(self):
        """Collect the distinct series names (file tuple index 2) seen so far."""
        for entry in self.file_list:
            series_name = entry[2]
            if series_name not in self.series_list:
                self.series_list.append(series_name)

    def get_series_id_list(self, series_name):
        """Return candidate (id, name) matches for *series_name*.

        Falls back to a TVDB search when the series is not yet in the DB.
        """
        series_id = self.dbTV.check_db_for_series(series_name)
        if not series_id:
            match_list = self.TVDB.find_series(series_name)
        else:
            # Fixed: the original referenced match_list before assignment here
            # (UnboundLocalError); pair the cached id with the queried name.
            match_list = [(series_id, series_name)]
        return match_list

    def add_series_to_db_by_id(self, series_id):
        """Fetch a series from TVDB by id and persist it with all metadata."""
        if not self.dbTV.check_db_for_series(series_id):
            series = self.TVDB.get_all_series_info(series_id)
            episodes = self.TVDB.get_series_episodes(series_id)
            actors = self.TVDB.get_series_actors(series_id)
            banners = self.TVDB.get_series_banners(series_id)

            if series is not None:
                self.dbTV.write_series_to_db(series)
                series_name = self.dbTV.get_series_name(series_id)
                self.logger.info("Adding series %s to DB" % (series_name,))
                if episodes is not None:
                    self.dbTV.write_episodes_to_db(episodes, series_id)
                if actors is not None:
                    self.dbTV.write_actors_to_db(actors)
                if banners is not None:
                    self.dbTV.write_banners_to_db(banners)

    def add_series_to_db_by_nfo(self, series_name):
        """Add a series with episodes/actors/banners read from local .nfo files.

        NOTE(review): if no file matches *series_name*, ``series_nfo`` is
        never bound and the get_series call raises — confirm callers always
        pass a name present in the file list.
        """
        episodes = []
        episode_nfos = []
        for entry in self.file_list:
            if entry[2] == series_name:
                episode_nfos.append(entry[6])
                series_nfo = entry[5]
        series = self.nfo_reader.get_series(series_nfo)
        for episode_nfo in episode_nfos:
            if not episode_nfo == '':
                episodes.append(self.nfo_reader.get_episode(episode_nfo))
        actors = self.nfo_reader.get_actors(series_nfo)
        self.logger.debug(actors)
        banners = self.nfo_reader.get_banners(series_nfo)

        self.logger.info("Adding series %s to DB" % (series_name,))
        if series is not None:
            self.dbTV.write_series_to_db(series)
            if episodes is not None:
                series_id = self.dbTV.get_series_id(series_name)
                self.dbTV.write_episodes_to_db(episodes, series_id)
            if actors is not None:
                self.dbTV.write_actors_to_db(actors)
            if banners is not None:
                self.dbTV.write_banners_to_db(banners)

    def add_files_to_db(self, series_id, series_name):
        """Register new files for a series and re-link previously unlinked ones."""
        # Files of this series not yet present in the DB.
        series_file_list = []
        for entry in self.file_list:
            if entry[2] == series_name:
                if not self.dbTV.check_db_for_file(entry[1], entry[0]):
                    series_file_list.append(entry)
        # Add any new files to the DB.
        if len(series_file_list):
            self.logger.info("Adding files from %s to DB" % (series_name,))
            self.dbTV.write_files_to_db(series_file_list, series_id)
        # Collect DB files that lost their series link but are on disk.
        unlinked = self.dbTV.find_unlinked_files()
        unlinked_list = []
        for unlinked_file in unlinked:
            for entry in self.file_list:
                file_path = entry[0]
                file_name = entry[1]
                if unlinked_file[1] == file_name \
                  and unlinked_file[2] == file_path:
                    unlinked_list.append(entry)
        if len(unlinked_list):
            # Fixed: the original rewrote series_file_list here; the collected
            # unlinked files are what should be written back.
            self.dbTV.write_files_to_db(unlinked_list, series_id)
Exemple #16
0
from file_parser import FileParser

# Module-level parser instance; run() performs the actual work.
parser = FileParser()
parser.run()
Exemple #17
0
from solver.factory import SolverModelFactory
from file_parser import FileParser
from solver.state_evaluator import StateEvaluator

# Genetic-algorithm hyper-parameters.
population_count = 10
mutation_prob = 0.15
solver = SolverModelFactory.create_model("genetic_algorithm", population_count,
                                         mutation_prob)

# Seed the population from the board description file, then evolve it.
order = FileParser.parse_data("example_board/14bishop.txt")
solver.generate_population(order)
for attempt in range(1000):
    print('try :' + str(attempt + 1))
    solver.next_step()
    # Report the current best individual's score after each step.
    best = max(solver.population)
    print(StateEvaluator().evaluate(best.chess_board))

# Final best individual and its evaluation.
best = max(solver.population)
print(best.chess_board)
print(StateEvaluator().evaluate(best.chess_board))
Exemple #18
0
from typing import List

from configuration.configuration_provider import ConfigurationProvider
from file_parser import FileParser
from generator.factory import GeneratorFactory
from model import Relation
from performance import performance
from sql_script_builder import SqlScriptBuilder

# Module-level collaborators shared by the pipeline steps defined below.
parser = FileParser()
configuration_provider = ConfigurationProvider()
generator_factory = GeneratorFactory()
builder = SqlScriptBuilder()


@performance
def parse(filepath: str) -> List[Relation]:
    """Parse *filepath* into a list of Relation models (execution is timed)."""
    relations = parser.parse(filepath)
    return relations


@performance
def configure(relations: List[Relation]):
    """Attach a value generator to every column of every relation (timed)."""
    for relation in relations:
        for column in relation.columns.values():
            # Generator choice is driven by the column's provided config.
            column.generator = generator_factory.create(
                column, configuration_provider.provide(relation, column))


@performance
def generate(relations: List[Relation]):
    """Emit the SQL script for *relations* via the shared builder (timed)."""
    builder.generate(relations)
Exemple #19
0
import globals_

from file_parser import FileParser
from main_setup import main_setup


def simulate(network):
    """Run the event-driven simulation over *network*.

    Registers the network with the global event manager, then drives the
    event loop to completion.
    """
    manager = globals_.event_manager
    manager.register_network(network)
    manager.run()


if __name__ == '__main__':
    # Fixed: this snippet used sys.argv without importing sys; a script-level
    # local import keeps the fix self-contained.
    import sys

    # Fixed: Python 2 print statement; parenthesised form runs on 2 and 3.
    if len(sys.argv) != 2 and len(sys.argv) != 3:
        print('Usage: %s test_file.json [output_name]' % sys.argv[0])
        sys.exit(-1)

    test_case_name = sys.argv[1]
    output_name = 'output'
    if len(sys.argv) > 2:
        output_name = sys.argv[2]

    main_setup(output_name)
    file_parser = FileParser()
    network = file_parser.create_network(test_case_name)
    simulate(network)
    globals_.stats_manager.output_graphs()
Exemple #20
0
parser.add_argument('--lines',
                    help='Number of lines of sample data',
                    default=10)
parser.add_argument('--generate_fix_width_file',
                    help='Generate fixed width file')
parser.add_argument('--fix_width_file',
                    help='Fixed width file for delimited file')
parser.add_argument('--generate_delimited_file',
                    help='Generate delimited file')
parser.add_argument('--generate_csv_file', help='Generate CSV file')
parser.add_argument('--csv_file', help='CSV file to hash')
parser.add_argument('--hash_csv_file', help='Hashed CSV file')
args = parser.parse_args()

if args.type == 'fp':
    fp = FileParser(spec_file=args.spec_file)
    if args.generate_fix_width_file:
        if args.lines:
            fp.generate_fixed_width_file(
                fixed_width_file=args.generate_fix_width_file,
                lines_count=args.lines)
        else:
            fp.generate_fixed_width_file(
                fixed_width_file=args.generate_fix_width_file)
    elif args.generate_delimited_file:
        fp.generate_delimited_file(fixed_width_file=args.fix_width_file,
                                   delimited_file=args.generate_delimited_file)
elif args.type == 'dp':
    dp = DataProcessor(spec_file=args.spec_file)
    if args.generate_csv_file:
        if args.lines:
import sys
import copy
import config
from utils import calc_base_weights, get_topn_nodes, prune_nodes, get_class_label, calc_tag_probs
from file_parser import FileParser
from tree_node import Node

# Declare all variables
# Positional CLI arguments: test set, boundaries, model, system output path,
# then three integer search parameters (beam size, top-N, top-K).
TEST_FILE = sys.argv[1]
BOUNDARY_FILE = sys.argv[2]
MODEL_FILE = sys.argv[3]
SYS_OUTPUT = sys.argv[4]  # NOTE(review): unused in this snippet — confirm later use
config.BEAM_SIZE = int(sys.argv[5])
config.TOPN = int(sys.argv[6])
config.TOPK = int(sys.argv[7])
data = FileParser(TEST_FILE, BOUNDARY_FILE, MODEL_FILE)

# Beam search
# NOTE(review): each iteration only fills final_probs[sent_idx][0] (the first
# token, seeded from the BOS history) — later positions are presumably
# handled elsewhere; confirm against the full script.
for sent_idx in range(len(data.test_set)):
    total_paths = {}
    data.final_probs[sent_idx] = {}
    # Root node tagged BOS with zero accumulated score.
    root = Node('BOS', None, None, 0)
    # Deep-copied so feature mutation cannot leak back into the test set.
    history = copy.deepcopy(data.test_set[sent_idx][0][1])
    tag1 = 'prevTwoTags=BOS+BOS'
    tag2 = 'prevT=BOS'

    base_weights = calc_base_weights(data.weights, data.tagset, history)
    tag_dict = calc_tag_probs(data.weights, base_weights, tag1, tag2)
    top_n = get_topn_nodes(tag_dict, root)
    pruned_nodes = prune_nodes(top_n, total_paths)
    data.final_probs[sent_idx][0] = get_class_label(pruned_nodes)
	def __init__(self,filename):
		# Keep the source path and build a parser that splits clauses on " et ".
		
		self.filepath=filename
		self.file_parser=FileParser(filename," et ")
Exemple #23
0
from coffee_finder import CoffeeFinder
from file_parser import FileParser

# Fixed: the two scenarios were copy-pasted and differed only by input file;
# one loop removes the duplication while producing identical output and
# leaving the same module-level bindings (from the last iteration).
for run_index, layout_path in enumerate(
        ("tests/test_one.txt", "tests/test_two.txt")):
    if run_index:
        # Blank-line separator printed between the two runs, as before.
        print("\n")
    parser = FileParser(layout_path)
    parser.get_initials()
    city = parser.get_city()
    city.print_layout()
    queries = parser.get_queries()
    coffee_finder = CoffeeFinder(city)
    for query in queries:
        optimum = coffee_finder.find_coffee(int(query))
        for coffee_number, cell in optimum.items():
            print("Found {0} coffeeshops near this cell: {1}".format(
                coffee_number, cell))
from flask import Flask, jsonify
from flask import request
import argparse
from file_parser import FileParser
app = Flask(__name__)

# Module-level parser consumed by the route handlers below.
fileParser = FileParser('./data.txt')


def setup_app():
    """Parse the -f/--file option and launch the Flask app.

    Fixed: the original assigned ``fileParser`` to a *local* name, so the
    module-level parser used by the route handlers was never swapped and the
    command-line file had no effect; declare it global so it does.
    """
    global fileParser
    parser = argparse.ArgumentParser()
    parser.add_argument('-f', dest='txtFile', default='./data.txt',
                        help='text file with structured data to be parsed')
    args = parser.parse_args()
    fileParser = FileParser(args.txtFile)
    # Development server; blocks until interrupted.
    app.run(debug=True)


@app.route('/', methods=['GET'])
def index_route():
    """Landing endpoint returning a short service description."""
    payload = {'about': 'python assignment'}
    return jsonify(payload)


@app.route('/interface/<path:interface>', methods=['GET'])
def interface_route(interface):
    """Return every interface for the literal 'all', otherwise the named one."""
    if interface == 'all':
        return jsonify(fileParser.getAllInterfaces())
    return jsonify(fileParser.getInterface(interface))