Ejemplo n.º 1
0
 def __init__(self, filename, logger=None):
     """Parse a stations configuration file.

     filename -- path to the stations file (e.g. "/apps/px/etc/stations.conf")
     logger   -- optional logger object used for error reporting
     """
     FileParser.__init__(self, filename) # Stations filename ("/apps/px/etc/stations.conf")
     self.logger = logger     # Logger Object
     self.stations = {}       # Indexed by header
     self.printErrors = True  # Whether errors are printed (consumer not visible here)
     self.type = 'INT'        # Must be in ['CA', 'US', 'COLL', 'INT']
     self.stationSearched = None  # Station for which we want to find the more recent occurence
Ejemplo n.º 2
0
def display_deck_selector():
    """Prompt the user for a deck file, parse it, and display the deck.

    Opens a file-selection dialog, feeds the chosen path to FileParser,
    builds a Deck from the parsed cards and hands both to display_deck().
    """
    # Renamed from `file`, which shadowed the builtin of the same name.
    deck_path = tk.filedialog.askopenfilename()
    file_reader = FileParser(str(deck_path))
    cards = file_reader.create_deck()
    deck = Deck(cards)

    display_deck(deck, file_reader)
Ejemplo n.º 3
0
 def __init__(self):
     """Load the "graph" table from file and set up charting state."""
     fileParser = FileParser()
     self.__table = fileParser.parse("graph")  # parsed "graph" data table
     self.__maxSize = fileParser.getMax()  # row count reported by the parser
     self.__price = []  # price series (filled elsewhere — not visible here)
     self.__date = []   # date series (filled elsewhere — not visible here)
     self.__range = 365  # default range, presumably days — TODO confirm
Ejemplo n.º 4
0
def extracte_token(dir):
    """Walk *dir* recursively and tokenize every file whose name ends in 'jack'."""
    for folder, _subdirs, filenames in os.walk(dir):
        for filename in filenames:
            if not re.match(r'.*jack$', filename):
                continue
            source_path = os.path.join(folder, filename)
            FileParser(source_path).generate_token()
Ejemplo n.º 5
0
 def __init__(self):
     """Set up the optimizer menu: parts, rules, and parsed statistics."""
     self.__parts = ["1", "2", "3"]  # part identifiers
     self.__optimizer = SqlOptimizer()
     self.__rules = self.__optimizer.GetOptions()  # available optimization rules
     self.__back = len(self.__rules) + 1  # presumably the "back" menu index — confirm
     self.__fileParser = FileParser()
     self.__fileParser.Parse("statistics.txt")  # load statistics from disk
Ejemplo n.º 6
0
 async def async_topPosts(self):
     """Background task: poll the subreddit's top-10 daily posts.

     Every TOP_REFRESH_RATE seconds, fetch the top 10 posts of the day,
     queue any post not seen before onto self.postQueue and remember its
     id in self.redditCache[3] (persisted to redditcache.txt).
     """
     await asyncio.sleep(self.TOP_REFRESH_RATE)
     while not self.gracefulExit:
         startTime = time.time()
         wasChanged = False
         self.logger.info("Refreshing top 10 posts...")
         topPosts = self.sr.top('day', limit=10)
         for post in topPosts:
             if not post.id in self.redditCache[3]:
                 wasChanged = True
                 self.redditCache[3].append(str(post.id))
                 postData = [
                     post.title, post.author.name, post.url, post.shortlink
                 ]
                 # Marker characters appended for the consumer of postQueue:
                 # '!' flags NSFW, '?' flags spoilers.
                 if post.over_18:
                     postData.append('!')
                 if post.spoiler:
                     postData.append('?')
                 self.postQueue.put(postData)
                 # Cap the seen-id cache: once it reaches 40 entries,
                 # keep only the most recent 30.
                 l = len(self.redditCache[3])
                 if l >= 40:
                     self.redditCache[3] = self.redditCache[3][l - 30:]
         if wasChanged:
             FileParser.writeNestedList("redditcache.txt", self.redditCache,
                                        'w')
         endTime = time.time()
         # NOTE(review): startTime - endTime is a negative elapsed time;
         # checkTime presumably compensates — confirm against its definition.
         await asyncio.sleep(
             self.checkTime(self.TOP_REFRESH_RATE, startTime - endTime,
                            "async_topPosts"))
Ejemplo n.º 7
0
def executeSimplePerceptron():
    """Train a simple perceptron on the OCR input file and plot the results.

    Python 2 code (print statements). Parses the hard-coded input file,
    trains a SimplePerceptron with a Sign activation function, then plots
    the error/validation/testing curves named after parameters.objective.
    """
    #fileParser = FileParser("./workspaceFacu/RedesNeuronales-TP1/src/OCR/input_and_or.txt") #open from arguments
    fileParser = FileParser("./workspaceFacu/RedesNeuronales-TP1/src/OCR/input_ocr.txt") #open from arguments
    #fileParser = FileParser('./workspaceFacu/RedesNeuronales-TP1/src/OCR/input_test.txt') #open from arguments
    #fileParser = FileParser('./workspaceFacu/RedesNeuronales-TP1/src/OCR/input_test_with_testing_data.txt') #open from arguments
    #fileParser = FileParser('./workspaceFacu/RedesNeuronales-TP1/src/OCR/input_test_with_same_testing_data.txt') #open from arguments
    #fileParser = FileParser('./workspaceFacu/RedesNeuronales-TP1/src/OCR/input_martin.txt') #open from arguments
    parameters = fileParser.parseInputFile()
    
    #function = Exponential(0.5)
    #function = Identity()
    function = Sign()

    VERBOSE = True
    SHUFFLE_TRAINING_SET = True
    # NOTE(review): the third argument is `not SHUFFLE_TRAINING_SET` (False
    # here) — confirm the parameter's meaning against SimplePerceptron.
    neuralAlgorithm = SimplePerceptron(parameters, function, not SHUFFLE_TRAINING_SET, VERBOSE)
    
    print 'ALGORITHM - Start'
    neuralAlgorithm.train()
    print 'ALGORITHM - Finish'
    
    trainingInformation = neuralAlgorithm.getTrainingInformation()

    # Output plot paths are derived from the objective named in the input file.
    errorFileName = './/..//graphics//error - ' + parameters.objective
    validationFileName = './/..//graphics//validation - ' + parameters.objective
    testingFileName = './/..//graphics//testing - ' + parameters.objective
    SAVE_TO_FILE = True
    SHOW = True
    # NOTE(review): `not SAVE_TO_FILE` (False) is passed — confirm intent.
    plotter = Plotter(errorFileName, validationFileName, testingFileName, not SAVE_TO_FILE)
    plotter.plot(trainingInformation, SHOW)
Ejemplo n.º 8
0
    def __init__(self, filename, pxLinkables=None, logger=None, version = 0):
        """Parse a routing configuration file.

        filename    -- routing file path (e.g. "/apps/px/etc/pxroute.conf")
        pxLinkables -- clients px can link to; defaults to an empty list.
                       (Was a mutable default argument `[]`, shared between
                       calls in Python; a `None` sentinel fixes that.)
        logger      -- optional logger object
        version     -- routing file version (0 or 1); currently forced to 1.
        """
        FileParser.__init__(self, filename) # Routing filename ("/apps/px/etc/pxroute.conf")
        self.logger = logger            # Logger object
        self.version = version          # Routing file version  (0 or 1)

        self.version = 1                # FORCING VERSION 1

        self.routingInfos = {}          # Addressed by header name: self.routingInfos[header]['clients']
                                        #                           self.routingInfos[header]['subclients']
                                        #                           self.routingInfos[header]['priority']

        self.keyMasks = []              # Key pattern list :        mask, 'clients','priority',cmask,t/f
        self.keyMaskMap = {}            # Addressed by Key pattern: self.keyMaskMap[key] = True

        self.subClients = {}            # Addressed by client name
        self.aliasedClients = {}        # Addressed by alias
        self.aftnMap = {}               # Addressed by AFTN address. aftnMap['DEFAULT'] give the default
        self.goodClients = {}           # Sub group of clients that are in header2clients.conf and are px linkable.
        self.badClients = {}            # Sub group of clients that are in header2clients.conf and are not px linkable.
        # Mutable-default fix: materialize a fresh list per instance.
        self.pxLinkables = pxLinkables if pxLinkables is not None else []

        self.originalClients = {}       # Indexed by header, remove duplicate, no alias expansion, subclients as is.
                                        # Only used to reconstruct the routing file

        if self.logger:
            self.logger.info("Routing table used: %s" % filename)
Ejemplo n.º 9
0
 def __init__(self, filename, logger=None):
     """Parse a stations configuration file.

     filename -- path to the stations file (e.g. "/apps/px/etc/stations.conf")
     logger   -- optional logger object used for error reporting
     """
     FileParser.__init__(self, filename)  # Stations filename ("/apps/px/etc/stations.conf")
     self.logger = logger  # Logger Object
     self.stations = {}  # Indexed by header
     self.printErrors = True  # Whether errors are printed (consumer not visible here)
     self.type = "INT"  # Must be in ['CA', 'US', 'COLL', 'INT']
     self.stationSearched = None  # Station for which we want to find the more recent occurence
Ejemplo n.º 10
0
    def __init__(self):
        """Configure the concept learner and immediately run it.

        NOTE(review): this constructor does real work — it reads
        sys.argv[1], parses three data files and runs training, a dev
        trial, classification and file output as side effects of
        construction.
        """
        # Attribute names of the 9-category loan dataset.
        self.dataAttr = [
            "Gender", "Age", "Student?", "PreviouslyDeclined?", "HairLength",
            "Employed?", "TypeOfColateral", "FirstLoan", "LifeInsurance"
        ]
        self.dataClasif = "Risk"          # classification column name
        self.posClassification = "high"   # positive class label
        self.negClassification = "low"    # negative class label
        self.matchAllChar = '?'           # wildcard character for the hypothesis

        self.hypothesis = ConjHypothesis(self.dataAttr, self.matchAllChar)
        self.printEvery = 30  # progress-reporting interval (unit not visible here)

        # files
        self.testFileName = sys.argv[1]  # test file path comes from the command line
        self.devFileName = "9Cat-Dev.labeled"
        self.trainFileName = "9Cat-Train.labeled"
        self.outFileName = "partA4.txt"

        # parse data
        self.trainingData = FileParser(self.trainFileName,
                                       self.dataClasif).getOutputData()
        self.developmentData = FileParser(self.devFileName,
                                          self.dataClasif).getOutputData()
        self.testData = FileParser(self.testFileName, self.dataClasif,
                                   False).getOutputData()

        # run program
        self.printInitialization()
        with open(self.outFileName, 'w') as f:
            self.runTraining(f, self.trainingData)
        self.runTrial(self.developmentData)
        self.runClassification(self.testData)
0
def main(top_dir="", exclude_dirs=None):
    """
    Run main program
    1) Use the FileScavenger.scavenge to get all dirs in top level
        excluding the exclude dirs
    2) Use FileParser.parseFileList to parse all the files in the project
        and return a list of objects
    3) Pass the object list to ObjectFormatter.resolveProjectHierarchy 
        to properly resolve the cpp objects and values
    4) Sort the returned object list using ObjectFormatter.sortObjectHierarchy
    5) Write the sorted object list to file

    exclude_dirs defaults to an empty list; the previous signature used a
    mutable default argument ([]), which is shared between calls in Python.
    """
    if exclude_dirs is None:
        exclude_dirs = []

    fScav = FileScavenger()
    file_list = fScav.scavenge(top_dir, exclude_dirs)

    fParser = FileParser()
    objList = fParser.parseFileList(file_list)

    objForm = ObjectFormatter()
    objList = objForm.resolveProjectHierarchy(objList)

    # Sort with mode/level 2 (meaning defined by ObjectFormatter).
    objList = objForm.sortObjectHierarchy(objList, 2)

    objForm.objListToFile(objList)
Ejemplo n.º 12
0
def extracte_vm(dir):
    """Walk *dir* recursively; print and translate to VM every file ending in 'xml'."""
    for folder, _subdirs, filenames in os.walk(dir):
        for filename in filenames:
            if not re.match(r'.*xml$', filename):
                continue
            source_path = os.path.join(folder, filename)
            print(source_path)
            FileParser(source_path).generate_vm()
Ejemplo n.º 13
0
 def __init__(self, filename, logger=None):
     """Parse a stations configuration file (collection-aware variant).

     filename -- path to the stations file (e.g. "/apps/px/etc/stations.conf")
     logger   -- optional logger object used for error reporting
     """
     FileParser.__init__(self, filename) # Stations filename ("/apps/px/etc/stations.conf")
     self.logger = logger     # Logger Object
     self.headers = {}        # Indexed by station
     self.stations = {}       # Indexed by header
     self.stationsColl = {}   # Indexed by header (only the ones that have to be collected)
     self.printErrors = False # Print errors
     self.logErrors = True    # Log errors
Ejemplo n.º 14
0
def predict():
    """Predict next-period movement from recent percent-change trends.

    Computes the per-row percent difference between close and open, then
    compares the average of the three most recent differences against every
    historical three-sample window. Among windows within +/- epsilon of the
    current average, counts how often the next sample rose.

    Returns a (probability_increase, probability_decrease) tuple; (.5, .5)
    when no historical window matched.
    """
    reader = FileParser()
    frame = reader.parse("predictor")
    n = reader.getMax()
    tolerance = .6  # percent error allowed when matching past trends
    changes = []

    # Percent difference between close (column 1) and open (column 0).
    for row in range(0, n):
        opened = frame.iloc[row, 0]
        closed = frame.iloc[row, 1]
        changes.append((closed - opened) / opened * 100)

    if DEBUG:
        print(frame)

    matches = 0
    rises = 0
    # Average of the three most recent percent differences.
    recent_avg = (changes[n - 1] + changes[n - 2] + changes[n - 3]) / 3
    if DEBUG:
        print("\"\"\"DEBUG START\"\"\"")
        print("LAST THREE DIFFERENCE:")
        print("1.", changes[n - 1])
        print("2.", changes[n - 2])
        print("3.", changes[n - 3])
        print("CURRENT AVG:", recent_avg)

    # Slide a three-sample window over history; count windows similar to the
    # current trend, and how many of those were followed by a rise.
    for idx in range(2, n - 1):
        window_avg = (changes[idx] + changes[idx - 1] + changes[idx - 2]) / 3
        if recent_avg - tolerance < window_avg < recent_avg + tolerance:
            matches += 1
            if changes[idx + 1] > 0:
                rises += 1

    # No similar trend found: fall back to a 50:50 prediction.
    if matches == 0:
        return (.5, .5)

    p_increase = rises / matches
    p_decrease = (matches - rises) / matches

    if DEBUG:
        print("ACCUM INCREASE:", rises)
        print("ACCUM MATCH:", matches)
        print("PROBABILITY TO INCREASE:", p_increase)
        print("PROBABILITY TO DECREASE:", p_decrease)
        print("\"\"\"DEBUG END\"\"\"")

    return (p_increase, p_decrease)
Ejemplo n.º 15
0
 def __init__(self, filename, logger=None):
     """Parse a stations configuration file (collection-aware variant).

     filename -- path to the stations file (e.g. "/apps/px/etc/stations.conf")
     logger   -- optional logger object used for error reporting
     """
     FileParser.__init__(
         self, filename)  # Stations filename ("/apps/px/etc/stations.conf")
     self.logger = logger  # Logger Object
     self.headers = {}  # Indexed by station
     self.stations = {}  # Indexed by header
     self.stationsColl = {
     }  # Indexed by header (only the ones that have to be collected)
     self.printErrors = False  # Print errors
     self.logErrors = True  # Log errors
Ejemplo n.º 16
0
    def run(self):
        """Repeatedly run FileParser over self._queue until it signals quit.

        fileParser.FileParser() presumably processes the queue and returns
        a truthy value when this thread should stop — confirm against the
        FileParser class.
        """
        while (not self.quitSaveTheFile):
            fileParser = FileParser(self._queue)
            start_time = time.time()
            # NOTE(review): this always prints ~0 seconds; the timer starts
            # on the line above and nothing happens in between.
            print("the Execution Time11:%s seconds " %
                  (time.time() - start_time))

            self.quitSaveTheFile = fileParser.FileParser()
            print("the Execution Time22:%s seconds " %
                  (time.time() - start_time))
Ejemplo n.º 17
0
 def parseDir(input):
     """Parse a single input file and update the aggregate SLOC counters.

     BUG(review): the directory branch recurses on the SAME argument
     (`return parseDir(input)`), so any directory input loops forever.
     It presumably meant to iterate over the directory's entries — the
     intended behavior cannot be determined from this snippet alone.
     """
     if (os.path.isdir(input)):
         return parseDir(input)
     filename, fileExt = os.path.splitext(input)
     init = InitParser()
     parsedObject = init.initialize(fileExt)  # select parse config by file extension
     parser = FileParser()
     newSloc = SlocObject()
     # NOTE(review): the SlocObject built above is immediately discarded;
     # parser.parse() returns the object actually used.
     newSloc = parser.parse(parsedObject, input)
     Tracker.computeAggregates(newSloc)
Ejemplo n.º 18
0
 def run(self):
     """Thread body: construct a FileParser and run its FileParser() pass.

     NOTE(review): `while True` with no break — this loops forever; both
     timing prints measure (almost) nothing because start_time is taken
     immediately before the first print.
     """
     while True:
          fileParser = FileParser()
          start_time = time.time()
          print("the Execution Time11:%s seconds " % (time.time() - start_time))

          fileParser.FileParser()
          print("the Execution Time22:%s seconds " % (time.time() - start_time))
Ejemplo n.º 19
0
 def __init__(self):
     """Load teams, events and coaches from file and build the schedule graph."""
     self.file_parser = FileParser()
     self.file_parser.parse_teams()
     self.file_parser.parse_events()
     self.file_parser.parse_coaches()
     self.teams = self.file_parser.get_teams()
     self.events = self.file_parser.get_events_as_map()
     self.events_remaining = list(self.events.keys())    # snapshot of the event keys
     self.coaches = self.file_parser.get_coaches_as_map()
     self.coaches_remaining = list(self.coaches.keys())  # snapshot of the coach keys
     self.graph = self.file_parser.create_graph()
Ejemplo n.º 20
0
 async def async_checkUsers(self):
     """Background task: periodically re-check tracked users via SheriAPI.

     redditCache[1] holds tracked usernames and redditCache[2] their
     remaining check counts (stored as strings) at matching indices.
     Qualified users are announced through self.acceptQueue and added to
     the accepted list; both structures are persisted to disk each cycle.
     """
     await asyncio.sleep(self.SNOOPSNOO_REFRESH_RATE)
     while not self.gracefulExit:
         startTime = time.time()
         wasChanged = False
         self.logger.info("Time to refresh SnoopSnoo for " +
                          str(len(self.redditCache[1])) + " users...")
         u = 0
         while u < len(self.redditCache[1]):
             usr = self.redditCache[1][u]
             res, usrobj = await SheriAPI.async_getUserInfo(usr)
             if res.find("ERROR") == 0 or res.find("EXCEPTION") == 0:
                 # See if the user exists, may be deleted or banned
                 ru = self.r.redditor(usr)
                 try:
                     if ru.id:
                         self.logger.warning("Error in SS refresh for '" +
                                             usr + "': " + res)
                         u += 1
                 except:
                     # Accessing .id raised: account is gone; drop the user
                     # from both parallel lists (no index advance needed).
                     self.logger.warning(
                         "User likely deleted or suspended - '" + usr +
                         "': " + res)
                     self.redditCache[1].pop(u)
                     self.redditCache[2].pop(u)
             else:
                 if self.isQualifiedUser(usr, res):
                     wasChanged = True
                     self.logger.info("CONGRATULATIONS!! " + usr +
                                      " is qualified to join!")
                     # Add user to queue so discord side can announce it
                     self.acceptQueue.put(usr)
                     self.acceptedUsers.append(usr.lower())
                     self.redditCache[1].pop(u)
                     self.redditCache[2].pop(u)
                 else:
                     # Not qualified: decrement the remaining-check counter.
                     self.redditCache[2][u] = str(
                         int(self.redditCache[2][u]) - 1)
                     if self.redditCache[2][u] == "0":
                         # Out of attempts; stop tracking this user.
                         self.redditCache[1].pop(u)
                         self.redditCache[2].pop(u)
                     else:
                         u += 1
         # Write the (hopefully changed) accepted users list
         if wasChanged:
             FileParser.writeList("acceptedusers.txt", self.acceptedUsers,
                                  'w')
         FileParser.writeNestedList("redditcache.txt", self.redditCache,
                                    'w')
         endTime = time.time()
         # NOTE(review): startTime - endTime is a negative elapsed time;
         # checkTime presumably compensates — confirm against its definition.
         await asyncio.sleep(
             self.checkTime(self.SNOOPSNOO_REFRESH_RATE,
                            startTime - endTime, "async_checkUsers"))
Ejemplo n.º 21
0
async def on_message(message):
    """Forward DMs to the mod-mail channel; route guild messages onward.

    DMs beginning with "anon" are forwarded without the sender's name.
    userMap layout (persisted to usermap.txt): [2] holds sender keys in
    reply-index order, [3] holds muted sender keys.
    """
    global lastMessageFrom, messageComboBreak
    # ignore other bots I guess
    if not message.author.bot:
        # isinstance is poor form, but what're you going to do?
        if isinstance(message.channel, discord.DMChannel):
            if message.content[:6].lower() == "b.help":
                await message.channel.send("No need for that! Just write me a message and I'll forward it to the mods, and then they can reply through me! If you wish to remain anonymous for your message, make sure it begins with the word \"anon\" (without quotes!)")
                return
            if message.content[:4].lower() == "anon":
                # Keep users anonymous
                logger.info("Received message from Anonymous User ; " + str(message.id))
                # Can't do much here since the bot needs their discord ID to persist between restarts
                # and any obfuscation could eventually be cracked
                key = str(message.author.id) + "anon"
                auth = "Anonymous User"
                msg = message.content[4:]
            else:
                logger.info("Received message from " + str(message.author.id) + " ; " + str(message.id))
                key = str(message.author.id)
                auth = message.author.name+" ("+message.author.mention+")"
                msg = message.content
            # userMap[3] is the mute list; muted senders only get a notice.
            if not (key in userMap[3]):
                if key in userMap[2]:
                    idx = userMap[2].index(key)
                else:
                    # First message from this sender: assign the next reply index
                    # and persist the map immediately.
                    idx = len(userMap[2])
                    userMap[2].append(key)
                    FileParser.writeNestedList("usermap.txt", userMap, 'w')
                # skip header if no other messages were posted to the channel since last message
                # and the author of the previous message is the same
                skipheader = (key == lastMessageFrom) and (messageComboBreak == False)
                if skipheader:
                    mail = msg
                else:
                    mail = "From: "+auth+"\n(reply with `b.reply "+str(idx)+" \"message here\"`, mute with `b.mute "+str(idx)+"`)\n\n"+msg
                lastMessageFrom = key
                # Get attachments, put in the URLs, don't save anything unknown to the bot
                try:
                    for item in message.attachments:
                        mail += "\n" + item.url
                except:
                    logger.warning("Could not get URL for all attachments")
                mailchannel = bot.get_channel(findChannel(config['general']['dm_channel']))
                messageComboBreak = False
                await mailchannel.send(mail)
            else:
                await message.channel.send("You are currently muted, DM the mods directly to appeal your mute")
        else:
            # Guild message: note whether it broke a DM "combo" in the mail
            # channel, then let the command framework handle it.
            mailchannel = bot.get_channel(findChannel(config['general']['dm_channel']))
            if (message.channel.id == mailchannel.id) and (message.author.id != bot.user.id):
                messageComboBreak = True
            await bot.process_commands(message)
Ejemplo n.º 22
0
async def me(ctx, redditname: str=None):
    """Associate a reddit username to your discord name"""
    if redditname is None:
        await ctx.send("You need to specify a reddit username,\neg. `b.ping me SimStart`")
    else:
        logger.info("b.ping me called: "+str(ctx.author.id)+" ; "+redditname)
        if not redditname.lower() in acceptedusers:
            await ctx.send("Couldn't find '"+redditname+"' in accepted users, are you sure their name is spelled correctly?")
        else:
            # Parallel lists: userMap[0] reddit names, userMap[1] discord ids.
            userMap[0].append(redditname.lower())
            userMap[1].append(str(ctx.author.id))
            FileParser.writeNestedList("usermap.txt", userMap, 'w')  # persist
            await ctx.send("Added "+fixUsername(redditname)+" to the ping list :thumbsup:")
Ejemplo n.º 23
0
async def user(ctx, member: discord.Member=None, redditname: str=None):
    """Associate a discord user to a reddit username (mods only)"""
    if member is None or redditname is None:
        await ctx.send("You need to specify both a Discord user and a reddit username,\neg. `b.ping user @SimStart SimStart`")
    else:
        logger.info("b.ping user called: "+str(member.id)+" ; "+redditname)
        if not redditname.lower() in acceptedusers:
            await ctx.send("Couldn't find '"+redditname+"' in accepted users, are you sure their name is spelled correctly?")
        else:
            # Parallel lists: userMap[0] reddit names, userMap[1] discord ids.
            userMap[0].append(redditname.lower())
            userMap[1].append(str(member.id))
            FileParser.writeNestedList("usermap.txt", userMap, 'w')  # persist
            await ctx.send("Added "+fixUsername(redditname)+" to the ping list :thumbsup:")
Ejemplo n.º 24
0
 def ShowFileContent(self):
     """Let the user pick a data file and chart its magnitude series."""
     file_name = self.OpenFileDialog()  # returns a sequence; [0] is the path
     logging.warning('FileName:{}'.format(file_name))
     date, magnitude = FileParser.GetMagnitudeTime(
         FileParser.Read4File(file_name[0]))
     qListP = []
     # One chart point per sample: x = sample index, y = magnitude.
     # NOTE(review): `date` is only used for its length here.
     for i in range(len(date)):
         qListP.append(QPointF(i, magnitude[i]))
     series = QtCharts.QLineSeries()
     series.append(qListP)
     # Replace any previously drawn series with the new one.
     self.chartView.chart().removeAllSeries()
     self.chartView.chart().addSeries(series)
     self.chartView.chart().createDefaultAxes()
     self.chartView.show()
Ejemplo n.º 25
0
async def remove(ctx, redditname: str=None):
    """Remove all associations used for a reddit user (mods only)"""
    if redditname is None:
        await ctx.send("You need to specify a reddit username,\neg. `b.ping remove SimStart`")
    else:
        logger.info("b.ping remove called: "+redditname)
        num = 0
        # Delete every (name, id) pair for this reddit name; userMap[0] and
        # userMap[1] are parallel lists, so pop both at the same index.
        while redditname.lower() in userMap[0]:
            idx = userMap[0].index(redditname.lower())
            userMap[0].pop(idx)
            userMap[1].pop(idx)
            num += 1
        FileParser.writeNestedList("usermap.txt", userMap, 'w')  # persist
        await ctx.send("Removed "+str(num)+" instances of "+fixUsername(redditname)+" from the ping list :thumbsup:")
Ejemplo n.º 26
0
 def __init__(self):
     """Register the dataset-processing pipelines and their cache files.

     Each entry in self.processes maps a dataset name to the name of the
     method that builds it ("process_data") and the pickle file used to
     persist it ("persistent_file").

     NOTE(review): self.path is read here but never assigned in this
     method — presumably a class attribute or set by a base class; confirm.
     """
     self.parser = FileParser()
     self.persistent = {}  # loaded/derived datasets, keyed like self.processes
     self.timer = Timer()
     self.processes = {
         "chapters_books": {
             "process_data": "_process_data_chapters_books",
             "persistent_file": os.path.join(self.path,
                                             "chapters_books.pkl")
         },
         "chapters_all_scigraph_citations": {
             "process_data":
             "_process_data_chapters_all_scigraph_citations",
             "persistent_file":
             os.path.join(self.path, "chapters_all_scigraph_citations.pkl")
         },
         "chapters_confproc_scigraph_citations": {
             "process_data":
             "_process_data_chapters_confproc_scigraph_citations",
             "persistent_file":
             os.path.join(self.path,
                          "chapters_confproc_scigraph_citations.pkl")
         },
         "books_conferences": {
             "process_data": "_process_data_books_conferences",
             "persistent_file": os.path.join(self.path,
                                             "books_conferences.pkl")
         },
         "author_id_chapters": {
             "process_data":
             "_process_data_author_id_chapters",
             "persistent_file":
             os.path.join(self.path, "author_id_chapters.pkl")
         },
         "author_name_chapters": {
             "process_data":
             "_process_data_author_name_chapters",
             "persistent_file":
             os.path.join(self.path, "author_name_chapters.pkl")
         },
         "confproc_scigraph_citations_chapters": {
             "process_data":
             "_process_data_confproc_scigraph_citations_chapters",
             "persistent_file":
             os.path.join(self.path,
                          "confproc_scigraph_citations_chapters.pkl")
         }
     }
class SpiderMain(object):
    """Crawler driver: extract URLs from root files, download and store them."""

    def __init__(self):
        self.manager = UrlManager()         # tracks new vs. already-crawled URLs
        self.downloader = FileDownLoader()  # fetches a single URL
        self.parser = FileParser()          # extracts URLs from a root file
        self.output = DataOutput()          # persists results and failure marks

    def crawl(self, root_files):
        """Crawl every URL referenced by each file in root_files.

        Sleeps 1-3 s between downloads to throttle requests; failures are
        recorded through self.output.mark_result.
        """
        for root_file in root_files:
            new_urls = self.parser.parser(root_file)
            self.manager.add_new_urls(new_urls)

            while (self.manager.has_new_url()):
                try:
                    new_url = self.manager.get_new_url()
                    data = self.downloader.download(new_url)
                    self.output.store_data(data, root_file, new_url)
                    # Progress message (Chinese): "crawled N links so far".
                    print("已经抓取%s个链接" % self.manager.old_url_size())

                    interval = random.randint(1, 3)

                    time.sleep(interval)
                    print("sleep: %d" % interval)

                except Exception as err:
                    # NOTE(review): if get_new_url() itself raises on the first
                    # loop iteration, new_url is unbound here and this line
                    # raises NameError instead of marking the failure.
                    self.output.mark_result(root_file, new_url, False)
                    print("crawl faild:" + str(err))
Ejemplo n.º 28
0
class ParityResolver:
    """Trains a multilayer perceptron on the parity problem.

    NOTE(review): all of this code sits directly in the class body inside a
    `with` block, so it executes once at class-definition (import) time
    rather than per instance — confirm this is intentional.
    """

    with open('settings.json') as config:

        configuration = json.load(config)

        input_nodes_qty = 35  # presumably 7x5 input patterns (see mlp_entries_parser(7, 5))
        hidden_nodes_qty = configuration['multilayer_hidden_nodes_parity']
        output_nodes_qty = 1
        lr = configuration['multilayer_lr']
        training_qty = configuration['multilayer_training_qty']
        max_training_epochs = configuration['multilayer_max_training_epochs']

        pb_entries = fp.mlp_entries_parser(7, 5)
        # Alternating parity targets for the ten entries.
        pb_targets = [('0'), ('1'), ('0'), ('1'), ('0'), ('1'), ('0'), ('1'),
                      ('0'), ('1')]
        mp = MultilayerPerceptron(pb_entries, pb_targets, input_nodes_qty,
                                  hidden_nodes_qty, output_nodes_qty,
                                  training_qty, lr, max_training_epochs)

        # for i in range(max_training_epochs):
        #     x = random.randint(0,training_qty)
        #     mp.train(np.matrix(pb_entries[x]).transpose(),np.matrix(pb_targets[x]).transpose())

        mp.train()
        # plot_entries = []
        # Report the network's output for every training entry.
        for pb_e in pb_entries:
            # plot_entries.append(mp.feed_forward(np.matrix(pb_e).transpose()).item(0))
            print(mp.feed_forward(np.matrix(pb_e).transpose()))
Ejemplo n.º 29
0
class PerceptronSimpleMain:
    """Entry point: run a simple perceptron (XOR/OR/AND) or the EJ2 variant.

    NOTE(review): the code executes in the class body at import time,
    inside the `with` block — confirm this is intentional.
    """

    with open('settings.json') as config:

        configuration = json.load(config)

        learning_rate = float(configuration["learning_rate"])
        operation = str(configuration["operation"]).upper()
        steps = int(configuration["steps"])
        isLinear = str(configuration["isLinear"]).upper() == "TRUE"
        betha = float(configuration["betha"])

        # Inputs in {-1, 1} encoding for the four boolean combinations.
        entries = [[-1, 1], [1, -1], [-1, -1], [1, 1]]
        if operation and (operation == "XOR" or operation == "OR"
                          or operation == "AND"):
            # Expected outputs aligned with `entries`, also in {-1, 1}.
            if operation == "XOR":
                output = [1, 1, -1, -1]
            elif operation == "OR":
                output = [1, 1, -1, 1]
            elif operation == "AND":
                output = [-1, -1, -1, 1]
            SimplePerceptron(learning_rate, entries, output, steps).perform()
        else:
            # Any other operation: run the exercise-2 perceptron on parsed data.
            [data, test_data, min_value, max_value] = fp.data_parser()
            spl = SimplePerceptronEJ2(learning_rate, data, test_data,
                                      max_value, min_value, steps, betha,
                                      isLinear)
            spl.perform()
Ejemplo n.º 30
0
def functionality(my_option):
    """Drive the interactive tree-builder menu.

    my_option -- initial command ('file', 'console', 'help', 'r', 'q'); any
                 other value prompts the user for a new command.
    Loops until the user enters 'q', which exits the process.

    Fix: the original recursed into functionality() for unrecognized input,
    growing the call stack once per retry and never returning; looping with
    the newly read option performs the same dispatch without the recursion.
    """
    while True:
        option = my_option.lower()
        if option == 'file':
            dir = input("Enter directory and name of your file.")
            opt = input(
                "Type 's' if you want only to save your file without displaying. "
                "Type whatever if you just want to display the tree.")
            p = FileParser(dir, opt)
            my_option = ""
        elif option == 'console':
            o = input("Type 'p' for primitive console tree, type everything else for normal tree.")
            newick = input("Please enter your input in newick format.")
            p = Parser(newick, o)
            my_option = ""
        elif option == 'help':
            print("- Type 'file' if you want to import tree in newick format from file. "
                  "Then, after pressing enter, you will have to provide the path. \n"
                  "- Type 'console' if you want to paste it to console.\n"
                  "- Type 'r' if you want to generate random tree.\n"
                  "- Press 'q' for exit.\n"
                  "For more information read README.md.")
            my_option = ""
        elif option == 'r':
            opt = input(
                "Type 's' if you want only to save your file without displaying. "
                "Type whatever if you just want to display the tree.")
            p = RandomTree(opt)
            my_option = ""
        elif option == 'q':
            sys.exit()
        else:
            print("Need help? Type 'help'.")
            # Loop again with the fresh input instead of recursing.
            my_option = input()
Ejemplo n.º 31
0
def main():
    """Parse a CSV named on the command line, translate and sentiment-score
    one column per row, then write the augmented rows to the results folder.

    Exits with status 1 when the command-line arguments are invalid.
    """
    command_line_arguments = sys.argv

    if has_valid_command_line_arguments(command_line_arguments):
        file_name = get_file_name(command_line_arguments)
        file_path = get_file_path(FILE_PATH_CSV, file_name)

        logging.info("Parsing file...")
        file_parser = FileParser()
        file_content = file_parser.parse(file_path)

        sentiment_score_provider = SentimentScoreProvider()
        translate_provider = TranslateProvider()

        logging.info("Getting and appending translations and sentiment score")
        number_of_requests = 0
        number_of_rows = len(file_content)

        for row in file_content:
            # Throttle: pause after every MAX_NUMBER_OF_REQUESTS requests.
            if number_of_requests > 0 and number_of_requests % MAX_NUMBER_OF_REQUESTS == 0:
                logging.info('Waiting %s seconds before sending more requests',
                             DELAY_SECONDS_BETWEEN_REQUEST_LIMIT)
                time.sleep(DELAY_SECONDS_BETWEEN_REQUEST_LIMIT)

            text_answer = row[COLUMN_TO_ANALYZE]
            translated_text_answer = ""
            sentiment_score = {'magnitude': "", 'score': ""}

            # Empty cells keep the empty defaults; no API calls are made.
            if text_answer:
                translated_text_answer = translate_provider.translate(
                    TRANSLATION_TARGET_LANGUAGE, text_answer)
                sentiment_score = sentiment_score_provider.analyze(
                    translated_text_answer)

            row['sentiment_magnitude'] = sentiment_score['magnitude']
            row['sentiment_score'] = sentiment_score['score']
            row['translated_' + COLUMN_TO_ANALYZE] = translated_text_answer
            logging.info("Analyzing line %s/%s", str(number_of_requests),
                         str(number_of_rows))
            number_of_requests += 1

        logging.info("Writing new csv file to results folder %s...", file_name)
        result_file_path = get_file_path(FILE_PATH_RESULTS, file_name)
        file_parser.write(result_file_path, file_content)
        logging.info("Done")
    else:
        sys.exit(1)
Ejemplo n.º 32
0
async def mute(ctx, *args):
    """Mute all modmail DMs from a user (mods only).

    args[0] is the numeric index shown with forwarded mail; the matching
    sender key from userMap[2] is appended to the mute list (userMap[3])
    and the map is persisted to usermap.txt.
    """
    if len(args) < 1:
        await ctx.send("You need to specify an ID!\neg. `b.mute 0`")
    else:
        logger.info("b.mute called by " + str(ctx.author.id) + " ; " + args[0])
        try:
            idx = int(args[0])
        except ValueError:  # narrowed from a bare except
            await ctx.send(args[0] + " is not a valid index! try again")
            return
        # Also reject negative indices: Python would silently treat them as
        # wrap-around positions and mute the wrong user.
        if idx < 0 or len(userMap[2]) <= idx:
            await ctx.send(args[0] + " is not a valid index! try again")
        else:
            muted_key = userMap[2][idx]  # renamed from `id` (shadowed the builtin)
            userMap[3].append(muted_key)
            FileParser.writeNestedList("usermap.txt", userMap, 'w')
            await ctx.send("Ignoring DMs from User ID " + str(idx) + ":thumbsup:")
    def run(self):
        """Thread body: run FileParser's FileParser() pass once and return.

        NOTE(review): both prints measure (almost) nothing — start_time is
        captured immediately before the first print. The triple-quoted
        block below is dead code kept from an earlier queue-based version.
        """

        fileParser = FileParser()
        start_time = time.time()
        print("the Execution Time11:%s seconds " % (time.time() - start_time))

        fileParser.FileParser()
        print("the Execution Time22:%s seconds " % (time.time() - start_time))
        """
        while True:
            if not q.full():
                item = random.randint(1,10)
                q.put(item)
                logging.debug('Putting ' + str(item)  
                              + ' : ' + str(q.qsize()) + ' items in queue')
                time.sleep(random.random())
        """
        return
Ejemplo n.º 34
0
    def _getFilesToParse(self, root, headers, excludedSources=None):
        """
        Given a root path (ex: PXPaths.DB + date + '/SA/') and a list of 
        headers (ex: ['SAAK31 KWBC', 'SAAK41 KNKA', 'SAUS20 KNKA', 'SAUS70 KWBC']),
        find the list of files matching these criterias.

        Returns a list of full file paths; empty when headers is [''] or
        when the root directory cannot be listed.
        """
        filesToParse = [] 
        
        if headers == ['']:
            pass
        else:
            # Unique originating centers (second token of each header).
            centers = FileParser.removeDuplicate([header.split()[1] for header in headers])
    
            # Request SA PATQ 
            # => headers = ['SAAK31 KWBC', 'SAAK41 KNKA', 'SAUS20 KNKA', 'SAUS70 KWBC']
            # => ttaaiis = {'KNKA': ['SAAK41', 'SAUS20'], 'KWBC': ['SAAK31', 'SAUS70']}
            ttaaiis = {}    
            for header in headers:
               ttaaiis.setdefault(header.split()[1], []).append(header.split()[0])
    
            try:
                if not excludedSources: excludedSources = []
                sources = os.listdir(root)

                for source in excludedSources:
                    if source in sources:
                        sources.remove(source)

            except:
                # NOTE(review): bare except; also `type` shadows the builtin.
                (type, value, tb) = sys.exc_info()
                if self.printout:
                    print("Type: %s, Value: %s" % (type, value))
                return filesToParse
    
            #print("Headers: %s" % headers)
            #print("ttaaiis: %s" % ttaaiis)
            #print("centers: %s" % centers)
            #print("sources: %s\n" % sources)
    
            # Keep every file under <root>/<source>/<center> whose name starts
            # with one of that center's TTAAii prefixes.
            for source in sources:
                for center in centers:    
                    pathToCenter = root + source + '/' + center
                    try:
                        for file in os.listdir(pathToCenter):
                            for ttaaii in ttaaiis[center]:
                                if file[:len(ttaaii)] == ttaaii:
                                    filesToParse.append(pathToCenter + '/' + file)
                                    break
                    except:
                        # Center directory may not exist for this source; skip.
                        # NOTE(review): bare except; `type`/`file` shadow builtins.
                        (type, value, tb) = sys.exc_info()
                        if self.printout:
                            if self.debug: print("Type: %s, Value: %s" % (type, value))
                        continue
    
            #print ("len(filesToParse) = %d\n" % len(filesToParse))

        return filesToParse
Ejemplo n.º 35
0
#-*- coding: utf-8 -*-
# Python 2 test driver: feed PCM files to FileParser, then run its
# streaming loop in a background thread.
import sys
sys.path.append("../")
from FileParser import FileParser
import threading
SUCCESS = 0  # FileParser.read_file return code for success
FAIL = 1

if __name__ == "__main__":
   #from audio import Audio
   #play = Audio()
   _files = sys.argv[1:] 
   if not _files:
       # Usage message (Chinese): run as "python <script> 1.pcm".
       print "需要执行命令\"python %s 1.pcm\"" % __file__
       sys.exit(1)
   stream_test = FileParser()
   # Echo each file that was read successfully.
   for _file in _files:
       if stream_test.read_file(_file) == SUCCESS:
           print _file
   
   # Run the parser's streaming loop on a worker thread and wait for it.
   t = threading.Thread(target=stream_test.run)
   #t = threading.Thread(target=stream_test.run, kwargs={"fun":play.play_stream})
   #t.setDaemon(True)
   t.start()
   t.join()