Esempio n. 1
0
 def on_message_handler(self, instmt, message):
     """
     Handle one incoming websocket message.
     :param instmt: Instrument
     :param message: Message (dict)
     """
     if 'info' in message:
         Logger.info(self.__class__.__name__, message['info'])
         return
     if 'subscribe' in message:
         status = 'successful' if message['success'] else 'failed'
         Logger.info(self.__class__.__name__,
                     'Subscription of %s is %s' %
                     (message['request']['args'], status))
         return
     if 'table' not in message:
         Logger.error(self.__class__.__name__, "Unrecognised message:\n" + json.dumps(message))
         return

     table = message['table']
     if table == 'trade':
         for raw in message['data']:
             if raw["symbol"] != instmt.get_instmt_code():
                 continue
             # Filter out the initial subscriptions
             trade = self.api_socket.parse_trade(instmt, raw)
             if trade.trade_id != instmt.get_exch_trade_id():
                 instmt.incr_trade_id()
                 instmt.set_exch_trade_id(trade.trade_id)
                 self.insert_trade(instmt, trade)
     elif table == 'orderBookL2':
         depth = self.api_socket.parse_l2_depth(instmt, message)
         # Record a new snapshot only when the book actually changed.
         if depth is not None and depth.is_diff(instmt.get_l2_depth()):
             instmt.set_prev_l2_depth(instmt.get_l2_depth())
             instmt.set_l2_depth(depth)
             instmt.incr_order_book_id()
             self.insert_order_book(instmt)
     else:
         Logger.info(self.__class__.__name__, json.dumps(message, indent=2))
    def get_order_book_worker(self, instmt):
        """
        Order book polling worker.

        Registers this worker as one more live connection (under the
        class-level lock), takes the initial order book id, then polls the
        order book forever and records a snapshot whenever it differs from
        the last one seen.

        :param instmt: Instrument
        """
        # Count this worker as a live connection; the counter is shared
        # across worker threads, hence the lock.
        ExchGwQuoine.num_of_connections_lock.acquire()
        ExchGwQuoine.num_of_connections += 1
        Logger.info(
            self.__class__.__name__, "Current number of connections = %d" %
            ExchGwQuoine.num_of_connections)
        ExchGwQuoine.num_of_connections_lock.release()
        instmt.set_order_book_id(self.get_order_book_init(instmt))

        while True:
            try:
                l2_depth = self.api_socket.get_order_book(instmt)
                # Record a new snapshot only when the book actually changed.
                if l2_depth is not None and l2_depth.is_diff(
                        instmt.get_l2_depth()):
                    instmt.set_prev_l2_depth(instmt.get_l2_depth())
                    instmt.set_l2_depth(l2_depth)
                    instmt.incr_order_book_id()
                    self.insert_order_book(instmt)
            except Exception as e:
                Logger.error(self.__class__.__name__,
                             "Error in order book: %s" % e)
            # Throttle: sleep proportionally to the number of connections.
            # NOTE(review): the sleep happens while holding the lock, which
            # serialises all workers' waits — presumably intentional global
            # rate limiting, but confirm it is not an accidental bottleneck.
            ExchGwQuoine.num_of_connections_lock.acquire()
            time.sleep(ExchGwQuoine.num_of_connections +
                       ExchGwQuoine.extra_waiting_sec)
            ExchGwQuoine.num_of_connections_lock.release()
Esempio n. 3
0
    def get_trades_worker(self, instmt):
        """
        Trade polling worker thread.

        Restores the last trade ids, then polls the exchange for trades
        forever, inserting each one and marking the instrument recovered
        after the first successful fetch.

        :param instmt: Instrument name
        """
        trade_id, last_exch_trade_id = self.get_trades_init(instmt)
        instmt.set_trade_id(trade_id)
        instmt.set_exch_trade_id(last_exch_trade_id)

        # Bug fix: initialise ret so the except-branch log below cannot
        # raise a NameError when the very first get_trades() call fails.
        ret = None
        while True:
            try:
                ret = self.api_socket.get_trades(instmt)
                for trade in ret:
                    instmt.incr_trade_id()
                    self.insert_trade(instmt, trade)

                # After the first time of getting the trade, indicate the instrument
                # is recovered
                if not instmt.get_recovered():
                    instmt.set_recovered(True)

            except Exception as e:
                Logger.error(self.__class__.__name__,
                             "Error in trades: %s\nReturn: %s" % (e, ret))
            time.sleep(0.5)
 def on_message_handler(self, instmt, message):
     """
     Handle one incoming websocket message.
     :param instmt: Instrument
     :param message: Message (dict)
     """
     if 'ping' in message:
         # Heartbeat: echo the timestamp back as a pong.
         self.api_socket.send(json.dumps({'pong': message['ping']}))
         return
     if 'ch' not in message:
         # Messages without a channel are silently ignored.
         return

     channel = message['ch']
     if 'trade.detail' in channel:
         for trade in self.api_socket.parse_trade(instmt,
                                                  message['tick']['data']):
             if trade.trade_id != instmt.get_exch_trade_id():
                 instmt.incr_trade_id()
                 instmt.set_exch_trade_id(trade.trade_id)
                 self.insert_trade(instmt, trade)
     elif 'depth.step' in channel:
         # Keep a copy of the previous book, then update in place and
         # insert only if something changed.
         instmt.set_prev_l2_depth(instmt.get_l2_depth().copy())
         self.api_socket.parse_l2_depth(instmt, message['tick'])
         if instmt.get_l2_depth().is_diff(instmt.get_prev_l2_depth()):
             instmt.incr_order_book_id()
             self.insert_order_book(instmt)
     else:
         Logger.error(self.__class__.__name__, 'Not Trade or Market')
Esempio n. 5
0
    def create_output_dir():
        """Create the output directory and the '.csv' output files (datasets)."""
        try:
            # If the output directory already exists, wipe it first so the
            # datasets are rebuilt from scratch.
            if os.path.exists(CSVParser.__output_dir):
                shutil.rmtree(CSVParser.__output_dir)

            # Create a fresh output directory.
            os.mkdir(CSVParser.__output_dir)

            # Create every '.csv' output file (dataset) with its header row.
            CSVParser.__create_csv_file(CSVParser.__periodos_csv,
                                        Periodo.get_csv_header())
            CSVParser.__create_csv_file(CSVParser.__turmas_csv,
                                        Turma.get_csv_header())
            CSVParser.__create_csv_file(CSVParser.__atividades_csv,
                                        Atividade.get_csv_header())
            CSVParser.__create_csv_file(CSVParser.__estudantes_csv,
                                        Estudante.get_csv_header())
            CSVParser.__create_csv_file(CSVParser.__execucoes_csv,
                                        Execucao.get_csv_header())
            CSVParser.__create_csv_file(CSVParser.__solucoes_csv,
                                        Solucao.get_csv_header())
            CSVParser.__create_csv_file(CSVParser.__erros_csv,
                                        Erro.get_csv_header())
        except OSError:
            # User-facing message intentionally kept in Portuguese
            # ("Error creating the output directory!").
            Logger.error('Erro ao criar diretório de saída!')
Esempio n. 6
0
def start(dog_id, level):
    """Run one Phase Two level (1-4) for the given dog."""
    if level < 1 or level > 4:
        Logger.error("You have entered an invalid level for Phase 2.")
        return
    Logger.info("Phase Two: Starting")
    # Dispatch table instead of a repetitive if/elif chain.
    runners = {1: level_one, 2: level_two, 3: level_three, 4: level_four}
    runner = runners.get(level)
    if runner is None:
        # Non-integer level inside the accepted range: nothing to run,
        # matching the original fall-through behaviour.
        return
    if runner(dog_id):
        Logger.info("Phase Two: Complete")
    else:
        Logger.warning("Phase Two: Failed")
Esempio n. 7
0
    def get_trades_worker(self, instmt):
        """
        Trade polling worker thread.

        Restores the trade id counters, then polls the exchange for new
        trades forever. Only trades with an exchange id strictly greater
        than the last recorded one are inserted.

        :param instmt: Instrument name
        """
        trade_id, exch_trade_id = self.get_trades_init(instmt)
        instmt.set_trade_id(trade_id)
        instmt.set_exch_trade_id(exch_trade_id)

        while True:
            try:
                ret = self.api_socket.get_trades(instmt)
                if ret is None or len(ret) == 0:
                    time.sleep(1)
                    continue
            except Exception as e:
                Logger.error(self.__class__.__name__, "Error in trades: %s" % e)
                # Bug fix: without this, the loop below would run with an
                # undefined (first iteration) or stale 'ret' after a
                # failed fetch.
                time.sleep(1)
                continue

            for trade in ret:
                assert isinstance(trade.trade_id, str), "trade.trade_id(%s) = %s" % (type(trade.trade_id), trade.trade_id)
                assert isinstance(instmt.get_exch_trade_id(), str), \
                       "instmt.get_exch_trade_id()(%s) = %s" % (type(instmt.get_exch_trade_id()), instmt.get_exch_trade_id())
                # Trade ids arrive as numeric strings; compare numerically
                # so only genuinely newer trades are recorded.
                if int(trade.trade_id) > int(instmt.get_exch_trade_id()):
                    instmt.set_exch_trade_id(trade.trade_id)
                    instmt.incr_trade_id()
                    self.insert_trade(instmt, trade)

            # After the first time of getting the trade, indicate the instrument
            # is recovered
            if not instmt.get_recovered():
                instmt.set_recovered(True)

            time.sleep(1)
 def get_order_book_worker(self, instmt):
     """
     Order book polling worker.

     Rate limits via a class-level, lock-protected last-query timestamp:
     a worker only queries when at least ``waiting_seconds`` have elapsed
     since the last query by any worker, otherwise it backs off for a
     random sub-second interval.

     :param instmt: Instrument
     """
     while True:
         ExchGwQuoine.last_query_time_lock.acquire()
         if datetime.now() - ExchGwQuoine.last_query_time < timedelta(
                 seconds=ExchGwQuoine.waiting_seconds):
             # Too soon since the last query: release the lock and retry
             # after random jitter to avoid thundering-herd wakeups.
             ExchGwQuoine.last_query_time_lock.release()
             time.sleep(random.uniform(0, 1))
         else:
             # Claim this query slot; the lock stays held for the whole
             # fetch/insert, fully serialising the workers' queries.
             ExchGwQuoine.last_query_time = datetime.now()
             try:
                 l2_depth = self.api_socket.get_order_book(instmt)
                 # Record a snapshot only when the book actually changed.
                 if l2_depth is not None and l2_depth.is_diff(
                         instmt.get_l2_depth()):
                     instmt.set_prev_l2_depth(instmt.get_l2_depth())
                     instmt.set_l2_depth(l2_depth)
                     instmt.incr_order_book_id()
                     self.insert_order_book(instmt)
             except Exception as e:
                 Logger.error(self.__class__.__name__,
                              "Error in order book: %s" % e)
             ExchGwQuoine.last_query_time_lock.release()
Esempio n. 9
0
    def create(self, table, columns, types, primary_key_index=(), is_ifnotexists=True):
        """
        Create table in the database.

        Caveat - Assign the first few column as the keys!!!

        :param table: Table name
        :param columns: Column array
        :param types: Type array
        :param primary_key_index: Indices into ``columns`` that form the
                                  primary key (kdb+ keyed-table syntax)
        :param is_ifnotexists: Create table if not exists keyword
        :return: True (also when the table already exists)
        :raises Exception: If the number of columns and types differ
        """
        # Note: the default for primary_key_index was a mutable list ([]);
        # an immutable tuple is the safe equivalent.
        if len(columns) != len(types):
            raise Exception("Incorrect create statement. Number of columns and that of types are different.\n%s\n%s" % \
                (columns, types))

        if is_ifnotexists:
            # "\\v" lists the variables (tables) defined in the kdb+ session.
            ret = self.conn("\\v")
            if ret is not None:
                for t in ret:
                    if table == self.decode_qtypes(t):
                        Logger.info(self.__class__.__name__, "Table %s has been created." % table)
                        return True

            Logger.info(self.__class__.__name__, "Table %s is going to be created." % table)

        # Build the q column declarations, e.g. "price:`float$()".
        c = columns[:]
        for i, raw_type in enumerate(types):
            t = self.convert_type(raw_type)
            if t is str:
                # Date/time columns become timestamps; other strings symbols.
                if columns[i].find('date_time') > -1:
                    c[i] += ":`timestamp$()"
                else:
                    c[i] += ":`symbol$()"
            elif t is float:
                c[i] += ":`float$()"
            elif t is int:
                c[i] += ":`int$()"

        # Split the key columns from the value columns for the keyed-table
        # form; delete in descending index order so indices stay valid.
        keys = [c[i] for i in primary_key_index]
        for i in sorted(primary_key_index, reverse=True):
            del c[i]

        if len(keys) > 0:
            command = '%s:([%s] %s)' % (table, '; '.join(keys), '; '.join(c))
        else:
            command = '%s:(%s)' % (table, '; '.join(c))

        self.lock.acquire()
        try:
            self.conn.sync(command)
        except Exception as e:
            # Fixed typo in the log message ("creat" -> "create").
            Logger.error(self.__class__.__name__, "Error in create statement(%s).\n%s" % (command, e))
        finally:
            self.lock.release()

        return True
Esempio n. 10
0
def start(dog_id, level):
    """Run Phase One (only level 1 exists) for the given dog."""
    if level != 1:
        Logger.error("You have entered an invalid level for Phase One.")
        return
    Logger.info("Phase One: Starting")
    passed = level_one(dog_id)
    if passed:
        Logger.info("Phase One: Complete")
    else:
        Logger.warning("Phase One: Failed")
def writeAllPaths(pathModel, outputFileName):
    """
    Pickle *pathModel* to *outputFileName*.

    :param pathModel: Object to serialise
    :param outputFileName: Destination file path
    """
    Logger.info("-----Write File Started-----")
    Logger.info("Writing file :" + str(outputFileName))
    try:
        with open(outputFileName, 'wb') as fp:
            pickle.dump(pathModel, fp)
        Logger.info("-----Write File Complete-----")
    except OSError as err:
        # Bug fix: Python 3 exceptions have no .message attribute;
        # format the exception object itself instead.
        Logger.error("OS error: %s" % err)
    except Exception as exp:
        Logger.error("exception: %s" % exp)
Esempio n. 12
0
 def get_order_book(cls, instmt):
     """
     Get order book
     :param instmt: Instrument
     :return: Object L2Depth, or None when the response reports an error
     """
     res = cls.request(cls.get_order_book_link(instmt))
     ok = len(res) > 0 and 'error' in res and len(res['error']) == 0
     if not ok:
         Logger.error(cls.__name__,
                      "Cannot parse the order book. Return:\n%s" % res)
         return None
     # The result payload is keyed by pair name; take the only entry.
     raw = list(res['result'].values())[0]
     return cls.parse_l2_depth(instmt=instmt, raw=raw)
def phase_three(dog_id,
                level,
                quiet_length,
                fail_max,
                treat_frequency_min,
                treat_frequency_max=-1):
    """
    Run a "quiet challenge": the dog must stay quiet for ``quiet_length``
    consecutive seconds (the timer resets on every bark) before barking
    ``fail_max`` times. Treats are dispensed at random intervals while the
    dog stays quiet.

    :param dog_id: Dog identifier, used for data logging
    :param level: Level number, used for data logging
    :param quiet_length: Seconds of continuous quiet needed to pass
    :param fail_max: Number of barks that fails the challenge
    :param treat_frequency_min: Minimum seconds between treats
    :param treat_frequency_max: Maximum seconds between treats; -1 (the
        default) means "same as the minimum", i.e. a fixed interval
    :return: True on pass, False on fail/cancellation; logs an error and
        implicitly returns None if neither holds (should be unreachable)
    """
    if treat_frequency_max == -1:
        treat_frequency_max = treat_frequency_min

    quiet_timer = 0          # consecutive quiet seconds so far
    treat_timer = 0          # seconds since the last treat
    treat_dispense_time = 0  # randomly chosen seconds until the next treat
    fail_count = 0           # barks so far
    # Reset quiet status
    NoiseUtil.reset_bark_status()
    while quiet_timer < quiet_length and fail_count < fail_max:
        # Wait 1 second
        time.sleep(1)

        if not Config.RUN_FLAG:
            Logger.data(dog_id, 3, level, "cancelled")
            return False

        if treat_timer == 0:
            # Pick when the next treat is due (randint is inclusive).
            treat_dispense_time = random.randint(treat_frequency_min,
                                                 treat_frequency_max)

        # NOTE(review): has_dog_barked is read as an attribute while
        # reset_bark_status() is called — confirm it is not meant to be
        # a method call here.
        if NoiseUtil.has_dog_barked:
            Logger.info("Dog just barked, incrementing fail count.")
            Logger.data(dog_id, 3, level, "dog_bark")
            fail_count += 1
            quiet_timer = 0
            NoiseUtil.reset_bark_status()
            continue
        else:
            quiet_timer += 1
            treat_timer += 1

        # Check if a treat should be dispensed
        if treat_timer == treat_dispense_time:
            PetTutorUtil.dispense_treat()
            treat_timer = 0
            fail_count = 0

    if quiet_timer >= quiet_length:  # Dog passed the Challenge.
        return True
    if fail_count >= fail_max:  # Dog has failed the challenge
        return False

    Logger.error("[Quiet Challenge] CODE SHOULD NEVER GET HERE!")
Esempio n. 14
0
    def on_message_handler(self, instmt, message):
        """
        Incoming message handler

        Handles three message shapes: a full order book snapshot (has
        "bids"), incremental updates under "create_update" /
        "delete_update" / "trade_updates", and anything else logged as
        unrecognised.

        :param instmt: Instrument
        :param message: Message
        """

        if not message:
            return

        keys = message.keys()

        if "bids" in keys:
            # Full snapshot: rebuild the book from scratch.
            self.order_book = self.api_socket.parse_l2_depth(instmt, message)
            # Insert only if the first 5 levels are different
            if instmt.get_l2_depth().is_diff(instmt.get_prev_l2_depth()):
                instmt.incr_order_book_id()
                self.insert_order_book(instmt)

        elif "create_update" in keys:
            # NOTE(review): the elif branches below index 'delete_update'
            # and 'trade_updates' directly; this assumes those keys are
            # always present whenever 'create_update' is — confirm against
            # the exchange schema (a missing key would raise KeyError).
            if message['create_update']:
                message['create_update'].update({"timestamp": message['timestamp']})
                self.api_socket.parse_l2_depth(instmt, message['create_update'])
                # Insert only if the first 5 levels are different
                if instmt.get_l2_depth().is_diff(instmt.get_prev_l2_depth()):
                    instmt.incr_order_book_id()
                    self.insert_order_book(instmt)

            elif message['delete_update']:
                message['delete_update'].update({"timestamp": message['timestamp']})
                self.api_socket.parse_l2_depth(instmt, message['delete_update'])
                # Insert only if the first 5 levels are different
                if instmt.get_l2_depth().is_diff(instmt.get_prev_l2_depth()):
                    instmt.incr_order_book_id()
                    self.insert_order_book(instmt)

            elif message['trade_updates']:
                for new_trade in message['trade_updates']:
                    new_trade.update({"timestamp": message['timestamp']})
                    trade = self.api_socket.parse_trade(instmt, new_trade)
                    # Trades also mutate the book on this exchange.
                    self.api_socket.parse_l2_depth(instmt, new_trade)
                    if trade.trade_id != instmt.get_exch_trade_id():
                        instmt.incr_trade_id()
                        instmt.set_exch_trade_id(trade.trade_id)
                        self.insert_trade(instmt, trade)

        else:
            Logger.error(self.__class__.__name__, "Unrecognised message:\n" + json.dumps(message))
 def get_order_book_worker(self, instmt):
     """
     Order book polling worker: fetch the book once a second forever and
     record a snapshot whenever it differs from the last one seen.
     :param instmt: Instrument
     """
     while True:
         try:
             depth = self.api_socket.get_order_book(instmt)
             changed = depth is not None and depth.is_diff(instmt.get_l2_depth())
             if changed:
                 instmt.set_prev_l2_depth(instmt.get_l2_depth())
                 instmt.set_l2_depth(depth)
                 instmt.incr_order_book_id()
                 self.insert_order_book(instmt)
         except Exception as err:
             Logger.error(self.__class__.__name__, "Error in order book: %s" % err)
         time.sleep(1)
def start(dog_id, level):
    """Run one Phase Five level: standing for '1'/'2', down for the 'D' levels."""
    lvl = str(level)
    valid_levels = ('1', '2', '1D', '2D', '3D', '4D', '5D', '6D', '7D',
                    '8D', '9D')
    if lvl not in valid_levels:
        Logger.error("You have entered an invalid level for Phase Five.")
        return
    Logger.info("Phase Five: Starting.")
    # Start mic recording
    NoiseUtil.record_bark_status()
    MoveUtil.record_down_status()

    if lvl == '1':
        dog_standing(dog_id, 1)
    elif lvl == '2':
        dog_standing(dog_id, 2)
    else:
        # 'D' levels receive the original (unconverted) level value.
        dog_down(dog_id, level)
    def get_trades_worker(self, instmt):
        """
        Trade polling worker thread.

        Registers this worker as one more live connection, restores the
        trade id counters, then polls for new trades forever. Only trades
        with an exchange id strictly greater than the last recorded one
        are inserted. The inter-poll sleep scales with the number of live
        connections to throttle the shared API.

        :param instmt: Instrument name
        """
        ExchGwQuoine.num_of_connections_lock.acquire()
        ExchGwQuoine.num_of_connections += 1
        Logger.info(
            self.__class__.__name__, "Current number of connections = %d" %
            ExchGwQuoine.num_of_connections)
        ExchGwQuoine.num_of_connections_lock.release()
        trade_id, exch_trade_id = self.get_trades_init(instmt)
        instmt.set_trade_id(trade_id)
        instmt.set_exch_trade_id(exch_trade_id)

        while True:
            try:
                ret = self.api_socket.get_trades(instmt)
                if ret is None or len(ret) == 0:
                    time.sleep(1)
                    continue
            except Exception as e:
                Logger.error(self.__class__.__name__,
                             "Error in trades: %s" % e)
                # Bug fix: skip this iteration — otherwise the loop below
                # would run with an undefined (first iteration) or stale
                # 'ret' after a failed fetch.
                time.sleep(1)
                continue

            for trade in ret:
                assert isinstance(trade.trade_id,
                                  str), "trade.trade_id(%s) = %s" % (type(
                                      trade.trade_id), trade.trade_id)
                assert isinstance(instmt.get_exch_trade_id(), str), \
                       "instmt.get_exch_trade_id()(%s) = %s" % (type(instmt.get_exch_trade_id()), instmt.get_exch_trade_id())
                # Trade ids arrive as numeric strings; compare numerically
                # so only genuinely newer trades are recorded.
                if int(trade.trade_id) > int(instmt.get_exch_trade_id()):
                    instmt.set_exch_trade_id(trade.trade_id)
                    instmt.incr_trade_id()
                    self.insert_trade(instmt, trade)

            # After the first time of getting the trade, indicate the instrument
            # is recovered
            if not instmt.get_recovered():
                instmt.set_recovered(True)

            # Throttle: sleep proportionally to the number of connections.
            ExchGwQuoine.num_of_connections_lock.acquire()
            time.sleep(ExchGwQuoine.num_of_connections +
                       ExchGwQuoine.extra_waiting_sec)
            ExchGwQuoine.num_of_connections_lock.release()
Esempio n. 18
0
def start(dog_id, level):
    """Run one Phase Three level (2-6) for the given dog."""
    if level < 2 or level > 6:
        Logger.error("You have entered an invalid level for Phase Three.")
        return
    Logger.info("Phase Three: Starting.")
    # Start mic recording
    NoiseUtil.record_bark_status()

    # Dispatch table instead of a repetitive if/elif chain.
    runners = {
        2: level_two,
        3: level_three,
        4: level_four,
        5: level_five,
        6: level_six,
    }
    runner = runners.get(level)
    if runner is None:
        # Non-integer level inside the accepted range: nothing to run,
        # matching the original fall-through behaviour.
        return
    if runner(dog_id):
        Logger.info("Phase Three: Complete.")
        return
    Logger.warning("Phase Three: Failed")
    def get_trades_worker(self, instmt):
        """
        Trade polling worker thread.

        Rate limits via a class-level, lock-protected last-query
        timestamp: a worker only queries when ``waiting_seconds`` have
        elapsed since any worker's last query, otherwise it backs off
        for a random sub-second interval. Only trades with an exchange
        id strictly greater than the last recorded one are inserted.

        :param instmt: Instrument name
        """
        while True:
            ExchGwQuoine.last_query_time_lock.acquire()
            if datetime.now() - ExchGwQuoine.last_query_time < timedelta(
                    seconds=ExchGwQuoine.waiting_seconds):
                # Too soon: release the lock and retry after random jitter.
                ExchGwQuoine.last_query_time_lock.release()
                time.sleep(random.uniform(0, 1))
            else:
                # Claim this query slot; the lock stays held for the whole
                # fetch/insert, fully serialising the workers' queries.
                ExchGwQuoine.last_query_time = datetime.now()
                try:
                    ret = self.api_socket.get_trades(instmt)
                    if ret is None or len(ret) == 0:
                        # Empty result: release here and restart the loop
                        # (the release at the bottom is bypassed by the
                        # continue, so the lock is released exactly once).
                        ExchGwQuoine.last_query_time_lock.release()
                        continue

                    for trade in ret:
                        # Trade ids arrive as numeric strings.
                        assert isinstance(
                            trade.trade_id,
                            str), "trade.trade_id(%s) = %s" % (type(
                                trade.trade_id), trade.trade_id)
                        assert isinstance(instmt.get_exch_trade_id(), str), \
                            "instmt.get_exch_trade_id()(%s) = %s" % (type(instmt.get_exch_trade_id()), instmt.get_exch_trade_id())
                        if int(trade.trade_id) > int(
                                instmt.get_exch_trade_id()):
                            instmt.set_exch_trade_id(trade.trade_id)
                            instmt.incr_trade_id()
                            self.insert_trade(instmt, trade)

                    # After the first time of getting the trade, indicate the instrument
                    # is recovered
                    if not instmt.get_recovered():
                        instmt.set_recovered(True)

                except Exception as e:
                    Logger.error(self.__class__.__name__,
                                 "Error in trades: %s" % e)

                ExchGwQuoine.last_query_time_lock.release()
def tracePath(fileName):
    """
    Load a pickled path model from *fileName* and draw its tracks.

    Each track is drawn in its own random colour, point by point, into an
    OpenCV window; pressing ESC stops drawing the current track.

    :param fileName: Path to the pickled path model
    """
    Logger.info("-----Read File Started-----")
    Logger.info("Reading File :" + str(fileName))
    try:
        with open(fileName, 'rb') as fp:
            pathModel = pickle.load(fp)
            outputImage = numpy.full((int(pathModel.height), int(pathModel.width), 3), dtype='uint8',
                                     fill_value=[255, 255, 255])
            for currentList in pathModel.tracks:
                # Random colour components for this track.
                i = (random.random() * 1000) % 255
                j = (random.random() * 1000) % 255
                k = (random.random() * 1000) % 255
                for points in currentList:
                    cv2.circle(outputImage, points[0], 3, color=(i, j, k))
                    cv2.imshow("output", outputImage)
                    # Bug fix: the key code used to be stored in 'k',
                    # clobbering the third colour component for the rest
                    # of the track.
                    key = cv2.waitKey(5) & 0xff
                    if key == 27:
                        break
        cv2.destroyAllWindows()
        Logger.info("-----Read File Complete-----")
    except OSError as err:
        # Bug fix: Python 3 exceptions have no .message attribute.
        Logger.error("OS error: %s" % err)
    except Exception as exp:
        Logger.error("exception: %s" % exp)
Esempio n. 21
0
if __name__ == "__main__":
    log.info("Starting contact management process")
    # Ensure the backing table exists before any action runs.
    Management().create_db_table()
    parser = argparse.ArgumentParser(
        prog='contact_book',
        description=
        'contact book management for storing, deleting and searching address book',
        usage='%(prog)s path')
    parser.add_argument('action',
                        type=str,
                        choices=['save', 'delete', 'edit', 'search'],
                        help='Select one of the actions '
                        'you want to perform')
    args = parser.parse_args()

    # Dispatch the requested action.
    if args.action == 'save':
        save_info()
    elif args.action == 'delete':
        delete_info()
    elif args.action == 'edit':
        edit_info()
    elif args.action == 'search':
        search_info()
    else:
        # Unreachable in practice: argparse 'choices' already rejects
        # anything outside the list; kept as a defensive guard.
        log.error(
            "Action Value can only be one of this: ['save', 'delete', 'edit', 'search']"
        )
        raise SystemExit(2)
    # Bug fix: removed a duplicated, dead re-parse of the arguments that
    # previously followed the dispatch block.
Esempio n. 22
0
def main():
    """
    Entry point of the P4E training protocol.

    Parses command-line configuration, connects to the PetTutor dispenser,
    then loops interactively: prompt for a dog ID, resolve the dog's size
    and next incomplete phase/level (or ask for one), and run the selected
    phase until the user quits with dog ID -1.
    """
    # NOTE(review): raw_input() implies this targets Python 2 — confirm
    # the intended runtime before porting.
    Logger.open_log_files()  # This must be run before any logging is done
    Logger.info("P4E Training Protocol is Starting...")

    # Graceful shutdown on Ctrl-C / termination.
    signal.signal(signal.SIGINT, handler)
    signal.signal(signal.SIGTERM, handler)

    # Read in Arguments
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '--debug',
        default='False',
        dest='debug',
        help="Boolean whether debug is on or off. Default False",
        type=argbool)
    parser.add_argument('--com',
                        default=Config.PETTUTOR_COMPORT,
                        dest='comport',
                        help="COM port to use. Default is %s" %
                        Config.PETTUTOR_COMPORT,
                        type=int)
    parser.add_argument(
        '--wait',
        default=Config.PETTUTOR_ERROR_WAIT_INTERVAL,
        dest='error_wait',
        help=
        "Number of seconds to wait if connection fails before retrying. -1 to not retry. Default is %s"
        % Config.PETTUTOR_ERROR_WAIT_INTERVAL,
        type=int)
    parser.add_argument(
        '--mic',
        default=Config.MIC_SENSITIVITY,
        dest='mic',
        type=float,
        help="Number in decibels for dog barking sensitivity. Default is %s" %
        Config.MIC_SENSITIVITY)
    parser.add_argument('--camera',
                        default=Config.CAMERA_ID,
                        dest='camera',
                        type=int,
                        help="Camera ID, generally 0 or 1. Default is %s" %
                        Config.CAMERA_ID)
    parser.add_argument(
        '--disable-pettutor',
        default='False',
        dest='disable_pettutor',
        help="True to disable connection to PetTutor (testing purposes only)",
        type=argbool)

    # Set values
    argvalues = parser.parse_args()
    Config.PRINT_DEBUG = argvalues.debug
    Config.PETTUTOR_COMPORT = argvalues.comport
    Config.PETTUTOR_ERROR_WAIT_INTERVAL = argvalues.error_wait
    Config.MIC_SENSITIVITY = argvalues.mic
    Config.CAMERA_ID = argvalues.camera
    Config.DISABLE_PETTUTOR = argvalues.disable_pettutor

    # Log Configuration
    Logger.debug("Debug is %s" % Config.PRINT_DEBUG)
    Logger.debug("PetTutor Comport: %s" % Config.PETTUTOR_COMPORT)
    Logger.debug("PetTutor Wait Time: %s" %
                 Config.PETTUTOR_ERROR_WAIT_INTERVAL)
    Logger.debug("Mic Sensitivity: %s" % Config.MIC_SENSITIVITY)

    # Connect to PetTutor
    PetTutorUtil.connect(Config.PETTUTOR_COMPORT)

    # Main interactive loop: one iteration per dog session.
    while True:
        Logger.prompt("Dog ID (-1 to Quit): ")
        dog_id = raw_input()
        Logger.debug("Dog ID is %s" % dog_id)

        if dog_id == '-1':
            break

        # Dog IDs are restricted to word characters and hyphens.
        if re.match('^[\w-]+$', dog_id) is None:
            Logger.error(
                "'%s' is not a valid dog ID. Please only use alphanumeric characters."
                % dog_id)
            continue

        Config.RUN_FLAG = True

        data = FileIOUtil.load(dog_id)

        # Ask for (and persist) the dog's size on first encounter.
        if data.get('dog_size') is None:
            Logger.prompt("Please enter dog's size (small/medium/large): ")
            dog_size = raw_input()
            Logger.debug("Dog Size is %s " % dog_size)
            if dog_size.lower() in ("small", "s", "0"):
                dog_size = 0
                FileIOUtil.save_size(dog_id, 0)
            elif dog_size.lower() in ("medium", "m", "1"):
                dog_size = 1
                FileIOUtil.save_size(dog_id, 1)
            elif dog_size.lower() in ("large", "l", "2"):
                dog_size = 2
                FileIOUtil.save_size(dog_id, 2)
            else:
                Logger.error(
                    "'%s' did not match expected input (small/medium/large)" %
                    dog_size)
                continue
        else:
            dog_size = data["dog_size"]

        # Suggest the first incomplete phase/level from the saved data;
        # (-1, -1) means everything is already complete.
        (phase, level) = find_next_level(data)
        input_required = False

        if phase != -1 and level != -1:
            Logger.info(
                "Dog %s's first incomplete level is Phase %s Level %s." %
                (dog_id, phase, level))
        else:
            Logger.info("Dog %s has completed all Phases/Levels." % dog_id)
            input_required = True

        if not input_required:
            Logger.prompt("Continue? (Y/n): ")
            input_required = raw_input().lower() == 'n'
            Logger.debug("Continue is %s" % (not input_required))

        # Manual phase/level selection when the suggestion was declined
        # or nothing is left to suggest.
        if input_required:
            Logger.info("Please specify which Phase you want to run.")
            Logger.prompt("Enter a Phase Number (-1 to go back): ")
            try:
                phase = int(raw_input())
            except ValueError:
                Logger.error("Phases can only be numbers.")
                continue
            Logger.debug("Selected phase is %d" % phase)
            if phase == -1:
                continue
            Logger.prompt("Enter a Level Number (-1 to go back): ")
            level = raw_input()
            Logger.debug("Selected level is %s" % level)
            if level == '-1':
                continue

        # Listen for keyboard commands while the phase runs.
        keyboard_thread = Thread(target=keyboard_listener)
        keyboard_thread.start()

        # Start a phase.
        if phase == 1:
            PhaseOne.start(dog_id, int(level))
            Config.RUN_FLAG = False
            continue
        elif phase == 2:
            PhaseTwo.start(dog_id, int(level))
            Config.RUN_FLAG = False
            continue
        elif phase == 3:
            PhaseThree.start(dog_id, int(level))
            Config.RUN_FLAG = False
            continue
        elif phase == 4:
            PhaseFour.start(dog_id, dog_size, int(level))
            Config.RUN_FLAG = False
            continue
        elif phase == 5:
            # Phase Five levels may be strings like '3D'; not int-cast.
            PhaseFive.start(dog_id, level.upper())
            Config.RUN_FLAG = False
            continue

        Logger.error("You entered an invalid Phase.")

    Config.RUN_FLAG = False
    time.sleep(1)
    Logger.close_log_files()
Esempio n. 23
0
class LarsRegression:
    """LassoLars (least-angle regression) model wrapper.

    Scales the training data, fits LassoLars models — optionally with
    cross-validation over several fold counts — and accumulates their
    coefficients into a single truncated prediction vector.
    """

    def __init__(self, cv_num_vec, repeat_num, coef_precision):
        """
        :param cv_num_vec: fold counts to try for cross-validated fitting
        :param repeat_num: max number of shuffled refit attempts
        :param coef_precision: decimal digits kept in the final coefficients
        """
        self._cv_num_vec = cv_num_vec
        self._repeat_num = repeat_num

        # Design matrix, its scaled copy and per-column scaling stats.
        self._x_train_matrix = None
        self._x_train_matrix_scaled = None
        self._x_train_norm_vec = None
        self._x_train_mean_vec = None

        # Targets, their zero-mean copy and distribution stats.
        self._y_train_vec = None
        self._y_train_vec_scaled = None
        self._y_train_mean = None
        self._y_train_std = None

        # The last training instance is held back as a validation point.
        self._x_train_remained_instance = None
        self._y_train_remained_instance = None

        self._predict_coef = None

        self._model_vec = []

        self._coef_precision = coef_precision

        self._logger = Logger()

    @property
    def predict_coef(self):
        """Accumulated (truncated) coefficient vector."""
        return self._predict_coef

    @property
    def predict_coef_nonzero_idx(self):
        """Indices of the non-zero coefficients."""
        nonzero_list = np.nonzero(self._predict_coef)[0].tolist()
        return nonzero_list

    @property
    def predict_triparams_list(self):
        """(coefficient, column mean, column norm) triples, one per feature."""
        return list(
            zip(self._predict_coef, self._x_train_mean_vec,
                self._x_train_norm_vec))

    @property
    def pred_train_mean(self):
        """Mean of the training targets."""
        return self._y_train_mean

    @property
    def pred_train_std(self):
        """Standard deviation of the training targets."""
        return self._y_train_std

    def filling(self, x_train, y_train):
        """Store and scale the training data, holding back the last instance.

        :param x_train: sequence of feature rows
        :param y_train: sequence of target values (same length as x_train)
        """
        assert (len(x_train) == len(y_train) and len(x_train) > 1)

        # remain the last instance left as a valid instance,
        # so as to avoid extremely situation in model fitting.
        self._x_train_remained_instance = x_train[-1]
        self._y_train_remained_instance = y_train[-1]
        try:
            self._x_train_matrix = np.array(x_train[0:-1])
            self._x_train_matrix_scaled, self._x_train_norm_vec, \
            self._x_train_mean_vec = scale_train_x(self._x_train_matrix)

            self._y_train_vec = np.array(y_train[0:-1])
            self._y_train_vec_scaled, self._y_train_mean = zeromean_y(
                self._y_train_vec)
            self._y_train_std = np.std(self._y_train_vec)
        except Exception:  # BUG FIX: narrowed from a bare except
            self._x_train_matrix = None
            self._y_train_vec = None
            self._logger.error(module='factormodel',
                               file='lasso.py',
                               content=traceback.format_exc())

    def fitting_with_cross_validation(self):
        """Fit with CV, validating each candidate on the held-back instance."""
        # init predict coefficients to be zero vector
        self._predict_coef = np.array([0.0] * len(self._x_train_mean_vec))

        if len(self._x_train_matrix) < 10:
            self._logger.warning(
                module='factormodel',
                file='lasso.py',
                content=
                "the num of training instances is less than 10. fitting exit.")
            return

        x_valid_instance_matrix = \
            scale_test_x(np.array([self._x_train_remained_instance]),
                         self._x_train_norm_vec,
                         self._x_train_mean_vec)

        y_valid_instance_value = self._y_train_remained_instance
        x_train_matrix_shuffled = self._x_train_matrix_scaled
        y_train_vec_shuffled = self._y_train_vec_scaled

        model_num = 0
        repeat_num = self._repeat_num

        # NOTE(review): repeat_num is shared across all cv_num values, so the
        # retry budget is global rather than per fold count — confirm intent.
        for cv_num in self._cv_num_vec:
            while True:
                if repeat_num == 0 or model_num > 0:
                    break
                repeat_num -= 1
                x_train_matrix_shuffled, y_train_vec_shuffled = \
                    shuffle(x_train_matrix_shuffled, y_train_vec_shuffled)
                try:
                    model = LarsRegression._model_fitting_cv(
                        x_train_matrix_shuffled, y_train_vec_shuffled, cv_num)
                    pred = model.predict(x_valid_instance_matrix)[0]
                    # Reject a model whose held-back prediction is more than
                    # 5 standard deviations from the true value.
                    dist_d = abs(
                        (pred + self._y_train_mean - y_valid_instance_value) /
                        self._y_train_std)
                    if dist_d > 5:
                        self._logger.warning(module='factormodel',
                                             file='lasso.py',
                                             content="fitting restart.")
                        continue
                    for i, v in enumerate(model.coef_):
                        self._predict_coef[i] += v
                    model_num += 1
                except Exception:  # BUG FIX: narrowed from a bare except
                    self._logger.error(module='factormodel',
                                       file='lasso.py',
                                       content=traceback.format_exc())
                    continue
        if model_num == 0:
            self._logger.warning(
                module='factormodel',
                file='lasso.py',
                content=
                "fitting with cross validation failed. model num is zero.")

        precision_base = float(np.power(10, self._coef_precision))
        # Truncate each coefficient to `_coef_precision` decimal digits.
        self._predict_coef = [
            int(v * precision_base) / precision_base
            for v in self._predict_coef
        ]

    def fitting_without_cross_validation(self):
        """Fit a single LassoLars model on the training data (no CV)."""
        self._predict_coef = np.array([0.0] * len(self._x_train_mean_vec))

        if len(self._x_train_matrix) < 10:
            self._logger.warning(
                module='factormodel',
                file='lasso.py',
                content=
                "the num of training instances is less than 10. fitting exit.")
            return
        try:
            # NOTE(review): `ndarray + [row]` broadcasts an elementwise
            # addition rather than appending the held-back instance; this
            # looks like it was meant to be a concatenation (np.vstack /
            # np.append). Preserved as-is — confirm intent before changing.
            model = LarsRegression._model_fitting(
                self._x_train_matrix_scaled +
                [self._x_train_remained_instance],
                self._y_train_vec_scaled + [self._y_train_remained_instance])
            for i, v in enumerate(model.coef_):
                self._predict_coef[i] += v
        except Exception:  # BUG FIX: narrowed from a bare except
            self._logger.error(
                module='factormodel',
                file='lasso.py',
                content="fitting without cross validation failed.")

    def predict(self, x_test_array):
        """Predict the target for one raw (unscaled) feature row."""
        x_test_matrix = scale_test_x(np.array([x_test_array]),
                                     self._x_train_norm_vec,
                                     self._x_train_mean_vec)
        pred = 0
        for v, p in list(zip(x_test_matrix[0], self._predict_coef)):
            pred += v * p
        pred += self._y_train_mean
        return pred

    def nonzero_list(self):
        """Sorted indices of the non-zero coefficients."""
        return sorted(self.predict_coef_nonzero_idx)

    # LassoLarsCV: least angle regression
    @classmethod
    def _model_fitting_cv(cls, x, y, num_cv, plotting=False):
        """Fit LassoLarsCV; optionally save a CV mean-square-error plot."""
        model = linear_model.LassoLarsCV(cv=num_cv).fit(x, y)
        # Display results
        if plotting:
            import matplotlib.pyplot as plt
            m_log_alphas = -np.log10(model.cv_alphas_)
            plt.figure(figsize=(20, 10))
            plt.plot(m_log_alphas, model.cv_mse_path_, ':')
            plt.plot(m_log_alphas,
                     model.cv_mse_path_.mean(axis=-1),
                     'k',
                     label='Average across the folds',
                     linewidth=2)
            plt.axvline(-np.log10(model.alpha_),
                        linestyle='--',
                        color='k',
                        label='alpha CV')
            plt.legend()
            plt.xlabel('-log(alpha)')
            plt.ylabel('Mean square error')
            plt.axis('tight')
            plt.savefig('cross_validation',
                        dpi=None,
                        facecolor='w',
                        edgecolor='w',
                        orientation='portrait',
                        papertype=None,
                        format=None,
                        transparent=False,
                        bbox_inches=None,
                        pad_inches=0.1,
                        frameon=None)
            plt.plot()
        return model

    @classmethod
    def _model_fitting(cls, x, y, alpha=0, max_iter=500):
        """Fit a plain LassoLars model."""
        model = linear_model.LassoLars(alpha=alpha,
                                       max_iter=max_iter).fit(x, y)
        return model
class BNPoissMF(Recommender):
    """Bayesian Nonparametric Poisson Matrix Factorization recommender."""

    def __init__(self, trainMatrix, testMatrix, configHandler):
        # BUG FIX: the original called Recommender.__init__ without `self`,
        # which raises (or initializes the wrong object) at runtime.
        Recommender.__init__(self, trainMatrix, testMatrix, configHandler)
        self.logger = Logger('BNPoissMF.log')


    def initModel(self):
        ''' Read the model parameters, and get some common values.
        '''
        self.numUsers, self.numItems = self.trainMatrix.shape()
        self.prediction = dok_matrix((self.numUsers, self.numItems))
        self.MAX_Iterations = int(self.configHandler.getParameter('BPoissMF', 'MAX_Iterations'))
        self.numFactors = int(self.configHandler.getParameter('BPoissMF', 'numFactors'))
        self.threshold = float(self.configHandler.getParameter('BPoissMF', 'threshold'))

        # Hyper-parameters read from the config.
        self.user_alpha = float(self.configHandler.getParameter('BPoissMF', 'user_alpha'))
        self.user_c = float(self.configHandler.getParameter('BPoissMF', 'user_c'))

        self.item_a = float(self.configHandler.getParameter('BPoissMF', 'item_a'))
        self.item_b = float(self.configHandler.getParameter('BPoissMF', 'item_b'))

        # The model parameters for users
        self.gamma0 = np.zeros(self.numUsers)
        self.gamma1 = np.zeros(self.numUsers)
        self.s = np.zeros(self.numUsers)
        self.nu = np.zeros((self.numUsers, self.numFactors))
        self.theta = np.zeros((self.numUsers, self.numFactors))

        # The model parameters for stick proportions
        self.tau = np.zeros((self.numUsers, self.numFactors))

        # The model parameters for item weights
        self.lambda0 = np.zeros((self.numItems, self.numFactors))
        self.lambda1 = np.zeros((self.numItems, self.numFactors))
        self.beta = np.zeros((self.numItems, self.numFactors))

        self.z = np.zeros((self.numUsers, self.numItems))

        self.pi = np.zeros((self.numUsers, self.numItems))
        self.logPi = np.zeros((self.numUsers, self.numItems))


    def buildModels(self):
        # Not implemented yet.
        pass

    def initUserScalingParameters(self):
        ''' initial equations for the user scaling parameters gamma_u0 and gamma_u1

        '''
        # Not implemented yet.

    def initStickProportions(self):
        ''' The update equations for the stick proportions tau_uk can be obtained by taking the derivative of the objective function with respect to tau_uk

        '''
        self.nu = 0.001 * np.random.random((self.numUsers, self.numFactors))


    def computePi(self):
        ''' Equation (10)

        '''
        # Not implemented yet.

    def initItemWeights(self):
        pass

    def updateUserScalingParameters(self):
        pass

    def updateStickProportions(self):
        pass

    def updateItemWeights(self):
        pass

    def calculateConjugacy(self):
        pass

    def GammaPoisson(self):
        pass

    def solveQuadratic(self, a, b, c):
        '''Return a root of a*x^2 + b*x + c = 0 that lies in (0, 1],
        clamping near-boundary roots with THRESHOLD.
        '''
        s1 = (-b + sqrt(b*b - 4*a*c)) / (2*a)
        s2 = (-b - sqrt(b*b - 4*a*c)) / (2*a)

        if s1 > .0 and s1 <= 1.0 and s2 > .0 and s2 <= 1.0:
            self.logger.error('s1 %f and s2 %f are out of range in solve_quadratic()' % (s1, s2))
            self.logger.error('a = %.5f, b = %.5f, c = %.5f\n' % (a, b, c))

            if s1 < s2:
                return s1 + THRESHOLD
            else:
                return s2 + THRESHOLD

        if s1 > .0 and s1 <= 1.0:
            return s1

        # BUG FIX: the original tested `s1 <= 1.0` here, so a valid s2 could
        # be skipped; the upper bound must apply to s2.
        if s2 > .0 and s2 <= 1.0:
            return s2

        if np.abs(s1 - .0) < THRESHOLD:
            return THRESHOLD

        if np.abs(1.0 - s1) < THRESHOLD:
            return 1.0 - THRESHOLD

        if np.abs(s2 - .0) < THRESHOLD:
            return THRESHOLD

        if np.abs(s2 - 1.0) < THRESHOLD:
            return 1.0 - THRESHOLD

        self.logger.error('WARNING: s1 %.10f and s2 %.10f are out of range in solve_quadratic()' % (s1, s2))
        return s1
Esempio n. 25
0
        python = "python"

    Logger.info("===================Begin===================")

    # Step1. Initialize the database
    # (drops and recreates the DB, then sources the trace schema/data file)
    Logger.info("Step1. Initialize the database.")
    sql_str = "mysql -u%s -p%s -e 'drop database if exists %s; create database %s; use %s; source '%s';'" \
              % (Configuration.configs['mysql_usr'],
                 Configuration.configs['mysql_pwd'],
                 Configuration.configs['mysql_db'],
                 Configuration.configs['mysql_db'],
                 Configuration.configs['mysql_db'],
                 Configuration.configs['TraceSqlPath'])
    # os.system returns the shell exit status; non-zero means failure.
    result = os.system(sql_str)
    if result != 0:
        Logger.error("Fail to create the database")
        exit()

    # Step2. Import trace data into the database
    Logger.info("Step2. Import trace data to the database.")
    result = os.system("%s %s/REP/recordToDatabase.py" %
                       (python, Configuration.configs['ProjectPath']))
    if result != 0:
        Logger.error("Fail to import trace data to the database")
        exit()

    # # Step2-2. Import log data into the database
    # Logger.info("Step2-2. Import log data to the database.")
    # result = os.system("%s %s/REP/process_log.py" % (python, Configuration.configs['ProjectPath']))
    # if result != 0:
    #     Logger.error("Fail to import log data to the database in Step2-2.")
def phase_four(dog_id,
               level,
               still_length,
               fail_max,
               treat_frequency_min,
               treat_frequency_max=-1):
    """Run the Phase 4 'stay still' challenge for a dog.

    The dog must stay quiet and motionless for `still_length` consecutive
    seconds; each bark or movement resets the timer and counts one failure.
    Treats are dispensed after a random interval (between the two frequency
    bounds) of continued stillness.

    :param dog_id: identifier used for data logging
    :param level: level number, logged with each event
    :param still_length: seconds of continuous stillness required to pass
    :param fail_max: number of failures allowed before the challenge ends
    :param treat_frequency_min: minimum seconds of stillness between treats
    :param treat_frequency_max: maximum seconds between treats; -1 means
        "same as treat_frequency_min" (a fixed interval)
    :returns: True when the dog passes, False on failure or cancellation
    """
    if treat_frequency_max == -1:
        treat_frequency_max = treat_frequency_min

    still_timer = 0           # consecutive still seconds so far
    treat_timer = 0           # still seconds since the last treat
    treat_dispense_time = 0   # randomized target for the next treat
    fail_count = 0
    # Reset quiet and move status
    NoiseUtil.reset_bark_status()
    MoveUtil.reset_move_status()
    while still_timer < still_length and fail_count < fail_max:
        # Wait 1 second
        time.sleep(1)

        # A cleared RUN_FLAG means the operator cancelled the session.
        if not Config.RUN_FLAG:
            Logger.data(dog_id, 4, level, "cancelled")
            return False

        # Pick the next random treat interval right after a treat (timer==0).
        if treat_timer == 0:
            treat_dispense_time = random.randint(treat_frequency_min,
                                                 treat_frequency_max)

        if NoiseUtil.has_dog_barked or MoveUtil.has_dog_moved:
            if NoiseUtil.has_dog_barked:
                Logger.data(dog_id, 4, level, "dog_bark")
            else:
                Logger.data(dog_id, 4, level, "dog_move")
            fail_count += 1
            still_timer = 0
            NoiseUtil.reset_bark_status()
            MoveUtil.reset_move_status()

            # Check if the dog has failed too many times
            if fail_count >= fail_max:
                break
            """
            Wait for 10 seconds to allow the dog to stop moving.
            """
            sleep_time = 0
            for i in range(1, Config.MOVE_PAUSE + 1):
                time.sleep(1)
                Logger.debug("Letting the dog stop moving... (%s seconds)" % i)
                if not MoveUtil.has_dog_moved:
                    sleep_time += 1
                else:
                    sleep_time = 0
                    MoveUtil.reset_move_status()
                # Two consecutive still seconds is enough to resume training.
                if sleep_time >= 2:
                    break

            continue  # Continue loop (timer has been reset, don't reward dog, etc)
        else:
            still_timer += 1
            treat_timer += 1

        # Check if a treat should be dispensed
        if treat_timer == treat_dispense_time:
            PetTutorUtil.dispense_treat()
            treat_timer = 0
            fail_count = 0
            """
            Wait for 5 seconds to allow the dog to eat the treat.
            """
            sleep_time = 0
            MoveUtil.reset_move_status(
            )  # This is just a precaution, it should already be reset
            for i in range(1, Config.TREAT_PAUSE + 1):
                time.sleep(1)
                if not MoveUtil.has_dog_moved:
                    sleep_time += 1  # Increment the amount of time the dog has not moved.
                else:
                    sleep_time = 0  # The dog moved, reset sleep time
                    MoveUtil.reset_move_status()
                if sleep_time >= 2:  # Once dog has not moved for X seconds, continue training.
                    break
                Logger.debug("Letting the dog eat... (%s seconds)" % i)

    if still_timer >= still_length:  # Dog passed the Challenge.
        return True
    if fail_count >= fail_max:  # Dog has failed the challenge
        return False

    # Unreachable: the while loop only exits via one of the two conditions above.
    Logger.error("[Still Challenge] CODE SHOULD NEVER GET HERE!")
Esempio n. 27
0
class Controller:
    """Crawls post lists and posts via a web driver and saves each post as a
    local HTML file together with its downloaded images."""

    def __exit__(self):
        # NOTE(review): a context-manager __exit__ normally takes
        # (exc_type, exc_val, exc_tb); this signature only works when the
        # method is called directly — confirm intended usage.
        self.driver.closeDriver()

    def __init__(self, setting):
        """
        :param setting: configuration carrying save/download paths and the
            chromedriver path
        :raises ValueError: when the helpers cannot be built from `setting`
        """
        try:
            self.logger = Logger(setting.saveDirPath)
            self.fm = FileManager(setting.saveDirPath, setting.downloadDirPath,
                                  self.logger)
            self.driver = IDriver(setting.chromeDriverPath)
            self.setting = setting
        except Exception as exc:
            # BUG FIX: the original raised a plain string, which is itself a
            # TypeError in Python 3; raise a real exception and keep the cause.
            raise ValueError('[error] setting is not valid') from exc

    def chrollPostList(self, postListUrl):
        """Crawl one list page and return the PostList parsed from it."""
        self.driver.movePage(postListUrl)
        postList = PostList(postListUrl)
        soup = BeautifulSoup(self.driver.getPageSource(), 'html.parser')
        trList = soup.find_all('tr', attrs={'class': 'ub-content us-post'})

        soup.get('')  # NOTE(review): no-op lookup; presumably leftover — confirm.
        for trElement in trList:
            # Rows containing <b> tags appear to be notices/ads; skip them.
            if len(trElement.find_all('b')) > 0: continue
            postList.addPost(self._initPost(trElement))
        self.logger.print('[done] ' + postListUrl + ' 게시글 목록 chrolling 완료')
        return postList

    def chrollInRange(self, startPostNumber, endPostNumber):
        """Crawl every post whose number lies in [startPostNumber, endPostNumber]."""
        for pageIdx in range(1, 1000):
            postList = self.chrollPostList(self.setting.getPageUrl() % pageIdx)
            postsInRange = postList.getPostsInRange(startPostNumber,
                                                    endPostNumber)
            for post in postsInRange:
                # Optionally skip posts already saved to disk.
                if self.setting.passExistFile and post.status == PostStatus.ALREADY_EXIST:
                    continue
                self.chrollPost(post)

            for post in postsInRange:
                if post.status == PostStatus.END_CHROLLING:
                    self.moveImages(post)

                if post.status == PostStatus.END_MOVING_IMAGES:
                    self.makeHtml(post)

                self.logger.print('[END]  ' + post.getTitle() + ' 상태 :' +
                                  str(post.status))

            if not postList.needToContinue(startPostNumber):
                break

    def _initPost(self, trElement):
        """Build a Post from one list-row element, flagging already-saved posts."""
        aTagElement = trElement.find('td', attrs={
            'class': 'gall_tit ub-word'
        }).find('a')
        postNumber = trElement.find('td', attrs={'class': 'gall_num'}).text

        title = aTagElement.text.strip()
        url = aTagElement['href']

        post = Post(url=url, number=postNumber, title=title)
        if self.fm.existFile(self.setting.saveDirPath + post.getTitle() +
                             '.html'):
            post.status = PostStatus.ALREADY_EXIST
        else:
            post.status = PostStatus.END_INITIALIZING

        return post

    def chrollPost(self, post):
        """Crawl one post page: body HTML, reply block and image downloads."""
        self.driver.movePage(Url.postUrlPrefixs + post.url)
        soup = BeautifulSoup(self.driver.getPageSource(),
                             features='html.parser')

        readInnerHTML_script = "return arguments[0].innerHTML;"
        contents = self.driver.excecuteScriptToElement(
            readInnerHTML_script, self.driver.findElement(Xpath.postBody))

        post.writingTime = self.driver.findElement(
            Xpath.postWritingTime).getAttribute('title')
        post.bodySoup = BeautifulSoup(contents, 'html.parser')
        post.replySoup = soup.find('div', attrs={'class': 'comment_wrap show'})

        # Click an empty area, apparently to dismiss any overlay before
        # clicking the per-image save buttons.
        emptyElement = self.driver.findElement(Xpath.emptyClickBox)
        emptyElement.click()

        downloadList = []
        for imageIndex in range(1, 50):
            if (self.driver.notExistElement(Xpath.saveBtn % imageIndex)): break
            saveBtn = self.driver.findElement(Xpath.saveBtn % imageIndex)
            saveBtn.click()

            # Mirror the character substitutions the browser applies to
            # downloaded file names.
            downloadFileName = saveBtn.getText().replace('~', '_').replace(
                '+', ' ')
            downloadList.append(downloadFileName)

        post.downloadList = downloadList
        post.status = PostStatus.END_CHROLLING
        self.logger.print('[done] ' + post.getTitle() + ' 게시글 chrolling')

    def moveImages(self, post):
        """Move the downloaded images into this post's save directory."""
        try:
            post.imgList = []
            for file in post.downloadList:
                post.imgList.append(
                    self.fm.moveFile2SaveDirectory(post.getTitle(), file))
            post.status = PostStatus.END_MOVING_IMAGES
        except Exception:  # BUG FIX: narrowed from a bare except
            self.logger.error('[error] ' + post.getTitle() +
                              ' image 파일 옮기는 과정에서 에러남')

    def makeHtml(self, post):
        """Render the crawled post into a standalone HTML file."""
        try:
            hb = HtmlBuilder(self.setting.saveDirPath, post.getTitle(),
                             post.writingTime, self.logger)
            self._mappingAttributes2SaveImages(post)
            hb.writeBody(str(post.bodySoup))
            hb.writeReply(str(post.replySoup))
            hb.close()
            post.status = PostStatus.END_MAKING_HTML
        except Exception:  # BUG FIX: narrowed from a bare except
            self.logger.error('[error] ' + post.getTitle() +
                              ' html 빌드 과정에서 에러남')

    def _mappingAttributes2SaveImages(self, post):
        """Point the body's <img> tags at the locally saved image files."""
        imgTagElementsList = post.bodySoup.find_all('img')
        tagListSize = len(imgTagElementsList)
        downloadImgSize = len(post.downloadList)
        # The body may contain more <img> tags than downloads (emoticons),
        # so only the first `downloadImgSize` tags are remapped.
        for i in range(downloadImgSize):
            filePath = './' + post.getTitle() + '/' + post.downloadList[i]
            element = imgTagElementsList[i]
            element['alt'] = ''
            element['onclick'] = ''
            element['src'] = filePath
Esempio n. 28
0
if options.check is not None:

    # --check: dump every config section as pretty-printed JSON.
    for section in config.sections():
        print("Section [%s]" % section)
        print(json.dumps(dict(config[section]), indent=4))
        print("\n")

elif options.start is not None:

    # --start: import the client class matching the configured running APP.
    app = config.get('Base', 'APP')

    if app == 'PB':
        from PB import PBClient as Client
    elif app == "PKURunner":
        from PKURunner import PKURunnerClient as Client
    elif app == "Joyrun":
        from Joyrun import JoyrunClient as Client
    else:
        raise ValueError("unsupported running APP -- %s !" % app)

    try:
        client = Client()
        client.run()
    except Exception as err:
        logger.error("upload record failed !")
        raise err
    else:
        # Reached only when run() raised nothing.
        logger.info("upload record success !")

Esempio n. 29
0
from util import LINE_MESSAGE_TEXTS
# https://github.com/line/line-bot-sdk-python
# https://developers.line.biz/en/reference/messaging-api/

# -- Setup
# TODO: encrypt keys on AWS
# TODO: mypy when matured
# TODO: pytest with testing db
# TODO: lambda layer to share enum and others
# Logger with a stream handler at DEBUG level.
logger = Logger(LAMBDA_NAME, (logging.StreamHandler(), logging.DEBUG))

# Get channel_secret from environment variable; abort startup without it.
channel_secret = os.getenv('LINE_CHANNEL_SECRET', None)
if channel_secret is None:
    logger.error('Specify LINE_CHANNEL_SECRET as environment variable.')
    sys.exit(1)

# LINE webhook dispatcher, validated with the channel secret.
line_handler = WebhookHandler(channel_secret)


# -- Main handler
def lambda_handler(requestEvent, context):
    # Get X-Line-Signature header value
    signature = requestEvent['headers']['X-Line-Signature']

    # Get request body as text
    body = requestEvent['body']

    # Give logger `lineUserId` info
Esempio n. 30
0
            """.format(file_path, THREAD_NUMS, TIMEOUT, RETRIES, RQ))
    # Statistics sink shared by both processing branches.
    writer = open("../stat", "w", encoding='utf8')
    # File type: "fulltext" (full text input) or "urls" (pre-extracted URLs).
    if file_type == "fulltext":
        file_from = "wu"

        dir_path, file_name = get_file_name(file_path)
        # Timestamp tag naming this run's output files.
        tag = time.strftime("%Y%m%d%H%M", time.localtime(time.time()))
        uni_format(file_path, file_from=file_from, id=RQ, logger=LOG, writer=writer)
        # The scheduler drives the crawl of the extracted referers.
        sch = Scheduler("./tmp/{}_referers.tsv".format(RQ), "utf8", "tsv")
        sch.scheduling()
        ret_file_path = "./Logs/{}_ret.txt".format(tag)
        LOG.info("开始合并文件")
        concat(ret_file_path, "./tmp/{}_{}.tsv".format(RQ, file_from), "./datas/{}_ret.csv".format(file_name),
               LOG,
               is_filter_by_word, is_filter_by_input, is_filter_by_country,
               writer=writer)
        # BUG FIX: this line was indented with a stray tab, which is a
        # TabError under Python 3; re-indented with spaces.
        writer.close()
    elif file_type == "urls":
        sch = Scheduler(file_path, "utf8", "tsv")
        sch.scheduling()
        writer.close()
    else:
        LOG.error("Wrong file type: {}, Please choose 'fulltext or 'urls'".format(file_type))
        writer.close()
        import sys

        sys.exit(-1)
Esempio n. 31
0
class Mail(object):
    """model for the Mail."""
    
    id_is_valid = staticmethod(lambda num: 0 < int(num) <= 1L << 31)
    
    def __init__(self, env, id=None, db=None, messageid=None, row=None):
        """Load a mail by numeric id, Message-ID or prefetched row.

        :param env: Trac-style environment providing config and DB access
        :param id: numeric mail id to fetch
        :param db: optional shared DB connection (reused by child lookups)
        :param messageid: Message-ID header value to fetch by
        :param row: prefetched mailarc result row to populate from
        """
        self.env = env
        self.db = db
        self.log = Logger(env)
        
        if id is not None:
            self.resource = Resource('mailarchive', str(id), None)
            self._fetch_mail(id)
        elif messageid is not None:
            self._fetch_mail_by_messageid(messageid)
            self.resource = Resource('mailarchive', self.id, None)
        elif row is not None:
            self._fetch_mail_by_row(row)
            self.resource = Resource('mailarchive', self.id, None)
        else:
            # No source given: start as an empty, unsaved mail
            # (note: no resource attribute is set on this path).
            self.messageid = ''
            self.subject = ''
            self.utcdate = 0
            self.localdate = ''
            self.zoneoffset = 0
            self.body = ''
        
    def __eq__(self, other):
        """Mails are equal when their Message-IDs match."""
        if isinstance(other, Mail):
            return self.messageid == other.messageid
        # BUG FIX: the original returned `super.__eq__(other)` — `super` here
        # is the builtin type, not a proxy, so that call raised a TypeError.
        # NotImplemented defers to Python's default comparison machinery.
        return NotImplemented
        
    def _get_db(self):
        """Return the injected connection, or a fresh one from the env."""
        return self.db or self.env.get_db_cnx()

    def _get_db_for_write(self):
        """Return (connection, needs_commit).

        needs_commit is True only when the connection was opened here.
        """
        if self.db:
            return (self.db, False)
        return (self.env.get_db_cnx(), True)
        
    def get_sanitized_fromaddr(self):
        """From-address with '@' replaced per the 'replaceat' config option."""
        replacement = self.env.config.get('mailarchive', 'replaceat', '@')
        return self.fromaddr.replace('@', replacement)
        
    def get_fromtext(self):
        """Display text combining the sender's name and address."""
        author = get_author(self.fromname, self.fromaddr)
        return author
        
    def get_category(self):
        """Category key: mailing-list id + 'YYYYMM' of the UTC send date."""
        month_key = time.strftime("%Y%m", time.gmtime(self.utcdate))
        return (self.mlid + month_key).encode('utf-8')
        
    def get_plain_body(self):
        """Body text with addresses obfuscated."""
        sanitized = self._sanitize(self.env, self.body)
        return sanitized
    
    def get_html_body(self, req):
        """Render the mail body as HTML markup for the given request."""
        
        # HTML mails (body already starts with a tag) pass through unescaped.
        if self.body.lstrip().startswith('<'):
            return Markup(self.body)
        
        contentlines = self.body.splitlines()
        htmllines = ['',]
        
        # Render each line through the wiki engine when both the config
        # option and the request's wikimode flag allow it; see
        # http://d.hatena.ne.jp/ohgui/20090604/1244114483
        wikimode = req.args.get('wikimode', 'on')
        for line in contentlines:
            if self.env.config.get('mailarchive', 'wikiview',' enabled') == 'enabled' and wikimode == 'on':
                htmllines.append(wiki_to_oneliner(line, self.env, self.db, False, False, req))
            else:
                # Plain mode: escape and keep leading spaces via &nbsp;.
                htmllines.append(Markup(Markup().escape(line).replace(' ','&nbsp;')))
            
        content = Markup('<br/>').join(htmllines)
        return content
        
    def _sanitize(self, env, text):
        """Obfuscate '@' using the configured replacement (default '_at_')."""
        replacement = env.config.get('mailarchive', 'replaceat', '_at_')
        return text.replace('@', replacement)
    
    def _fetch_mail(self, id):
        """Load this mail's fields from the DB by numeric id.

        :raises ResourceNotFound: when the id is invalid or unknown.
        """
        row = None
        if self.id_is_valid(id):
            db = self._get_db()
            cursor = db.cursor()
            cursor.execute(SELECT_FROM_MAILARC + " WHERE id=%s", (id,))

            row = cursor.fetchone()
        if not row:
            raise ResourceNotFound('Mail %s does not exist.' % id,
                                   'Invalid Mail Number')

        self._fetch_mail_by_row(row)
    
    def _fetch_mail_by_messageid(self, messageid):
        """Load this mail's fields from the DB by Message-ID.

        :raises ResourceNotFound: when no mail has this messageid.
        """
        row = None

        db = self._get_db()
        cursor = db.cursor()
        cursor.execute(SELECT_FROM_MAILARC + " WHERE messageid=%s",
                        (messageid,))

        row = cursor.fetchone()
        if not row:
            raise ResourceNotFound('Mail messageid %s does not exist.' % messageid,
                                   'Invalid Mail messageid Number')

        self._fetch_mail_by_row(row)
        
    def _fetch_mail_by_row(self, row):
        """Populate all mail fields from one mailarc result row."""
        (self.id, self.messageid, self.utcdate, self.zoneoffset,
         self.subject, self.fromname, self.fromaddr, self.header,
         self.body, self.thread_root, self.thread_parent) = row[:11]

        # Derived presentation fields.
        self.zone = self._to_zone(self.zoneoffset)
        self.localdate = self._to_localdate(self.utcdate, self.zoneoffset)
        
    def _to_localdate(self, utcdate, zoneoffset):
        """Format utcdate shifted by zoneoffset as 'YYYY/MM/DD HH:MM:SS'."""
        shifted = time.gmtime(utcdate + zoneoffset)
        return time.strftime("%Y/%m/%d %H:%M:%S", shifted)

    def _to_zone(self, zoneoffset):
        """Render an offset in seconds as ' +HHMM'/' -HHMM' ('' for zero).

        An empty-string offset (legacy rows) is treated as zero.
        """
        if zoneoffset == '':
            zoneoffset = 0
        if zoneoffset == 0:
            return ''
        sign = ' +' if zoneoffset > 0 else ' -'
        return sign + time.strftime('%H%M', time.gmtime(abs(zoneoffset)))
                
    def get_href(self, req):
        """URL of this mail inside the archive."""
        href_builder = req.href
        return href_builder.mailarchive(self.id)
    
    def get_subject(self):
        """Subject line, with a placeholder when empty."""
        return '(no subject)' if is_empty(self.subject) else self.subject
    
    def get_senddate(self):
        """Local date string with the timezone suffix appended."""
        senddate = '%s%s' % (self.localdate, self.zone)
        return senddate
    
    def get_thread_root(self):
        """Return the root Mail of this thread (self when already root).

        In legacy data thread_root may actually point at the *parent* mail,
        so when the referenced mail is not itself a root, the real root id
        is resolved via MailFinder.
        """
        if self.thread_root == '':
            return self
        try:
            root_mail = Mail(self.env, messageid=self.thread_root)
        except ResourceNotFound:
            return self

        if root_mail.thread_root == '':
            return root_mail
        if self.thread_parent != '':
            root_id = MailFinder.find_root_id(self.env, self.messageid)
            return Mail(self.env, messageid=root_id)
        # BUG FIX: the original fell off the end of this branch and returned
        # None; fall back to the best candidate found.
        return root_mail
    
    def get_thread_parent_id(self):
        """Message-id of the parent mail, or None when this is a root.

        threadparent is stored as '<messageid> ...'; only the first token
        is the id.
        """
        if self.thread_parent == '':
            return None
        return self.thread_parent.split(' ')[0]
    
    def get_thread_parent(self):
        """Parent Mail object; self when there is no parent."""
        parent_id = self.get_thread_parent_id()
        if parent_id is None:
            return self
        return Mail(self.env, db=self.db, messageid=parent_id)
    
    def get_children(self, desc=False, cached_mails=None):
        """Return the direct replies to this mail.

        :param desc: sort newest-first when True (DB path only)
        :param cached_mails: optional pre-fetched thread mails to filter
                             in memory instead of querying the DB
        """
        if cached_mails:
            self.log.debug("[%s] mail's threads is cached." % self.id)
            return [x for x in cached_mails if x.get_thread_parent_id() == self.messageid]
            
        db = self._get_db()
        cursor = db.cursor()
        # threadparent appears to store '<messageid> ...', hence the
        # prefix LIKE match below — confirm against the writer side.
        sql = SELECT_FROM_MAILARC + " WHERE threadparent LIKE %s ORDER BY utcdate"
        
        if desc:
            sql += " DESC"
        
        cursor.execute(sql, ('%s %%' % self.messageid,))
        
        children = []
        
        for row in cursor:
            child_mail = Mail(self.env, row=row, db=self.db)
            children.append(child_mail)
        return children
    
    def get_thread_mails(self, desc=False):
        """Return every mail belonging to this mail's thread, by utcdate."""
        root = self.get_thread_root()

        query = SELECT_FROM_MAILARC + " WHERE threadroot = %s ORDER BY utcdate"
        if desc:
            query += " DESC"

        cursor = self._get_db().cursor()
        cursor.execute(query, (root.messageid,))
        return [Mail(self.env, row=row, db=self.db) for row in cursor]
    
    def has_children(self, cached_mails=None):
        """Return True when this mail has at least one direct reply."""
        return bool(self.get_children(cached_mails=cached_mails))

    def get_related_tickets(self, req):
        """Return the tickets linked to this mail (module-level helper)."""
        return get_related_tickets(self.env, req, self._get_db(), self.id)
    
    def has_attachments(self, req):
        """Return True when this mail has stored attachments."""
        archive = MailArchiveAttachment(self.env, self.id)
        return archive.has_attachments(req)

    def populate(self, author, msg, mlid):
        """Populate the mail with 'suitable' values from a message.

        :param author: author name recorded on stored attachments
        :param msg: an ``email.message.Message`` instance
        :param mlid: mailing-list id this mail belongs to
        :raises ValueError: when the message has no Message-ID header
        """
        if 'message-id' not in msg:
            # Bug fix: raising a plain string has been illegal since
            # Python 2.6 (it raised TypeError, not the message).
            raise ValueError('Illegal Format Mail!')

        self.is_new_mail = False
        self.mlid = mlid

        self._parse_messageid(msg)
        self._parse_date(msg)
        self._parse_subject(msg)

        # Multipart mails also extract attachments; plain mails only text.
        if msg.is_multipart():
            self._parse_multipart(author, msg)
        else:
            self._parse_body(msg)

        ref_messageid = self._parse_reference(msg)
        self._make_thread(ref_messageid)
        
    def update_or_save(self):
        """Insert or replace this mail in ``mailarc`` and keep the
        per-category counter in ``mailarc_category`` up to date.

        The category key is ``<mlid><YYYYMM>`` derived from the UTC date.

        :raises ValueError: when ``self.messageid`` is missing, since the
            row could not be keyed.
        """
        if self.messageid is None or self.messageid == '':
            # Bug fix: raising a plain string has been illegal since
            # Python 2.6; use a real exception type.
            raise ValueError("Can't save mail to database.")

        db, has_tran = self._get_db_for_write()
        cursor = db.cursor()

        yearmonth = time.strftime("%Y%m", time.gmtime(self.utcdate))
        category = self.mlid + yearmonth
        cursor.execute("SELECT category, mlid, yearmonth, count FROM mailarc_category WHERE category=%s",
                        (category.encode('utf-8'),))
        row = cursor.fetchone()
        count = 0
        if row:
            count = row[3]
        else:
            # First mail in this category: create the counter row at 0.
            cursor.execute("INSERT INTO mailarc_category (category, mlid, yearmonth, count) VALUES(%s, %s, %s, %s)",
                            (category.encode('utf-8'),
                             self.mlid.encode('utf-8'),
                             yearmonth,
                             0))
        if self.is_new_mail:
            count = count + 1
        cursor.execute("UPDATE mailarc_category SET count=%s WHERE category=%s",
            (count, category.encode('utf-8')))

        # Upsert the mailarc row: delete any stale row for this messageid,
        # then insert the fresh values.
        cursor.execute("DELETE FROM mailarc where messageid=%s",
                       (self.messageid,))

        cursor.execute("INSERT INTO mailarc ("
            "id, category, messageid, utcdate, zoneoffset, subject,"
            "fromname, fromaddr, header, text, threadroot, threadparent) "
            "VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)",
            (str(self.id),
            category.encode('utf-8'),
            self.messageid,
            self.utcdate,
            self.zoneoffset,
            self.subject.encode('utf-8'), self.fromname.encode('utf-8'),
            self.fromaddr.encode('utf-8'), '', self.body.encode('utf-8'),
            self.thread_root, self.thread_parent))

        # Only commit when we opened the transaction ourselves.
        if has_tran:
            db.commit()

    def _parse_messageid(self, msg):
        self.messageid = msg['message-id'].strip('<>')

        #check messageid is unique
        self.log.debug("Creating new mailarc '%s'" % 'mailarc')
        
        db = self._get_db()
        cursor = db.cursor()
        cursor.execute("SELECT id from mailarc WHERE messageid=%s", (self.messageid,))
        row = cursor.fetchone()
        id = None
        if row:
            id = row[0]
            
        if id == None or id == "":
            # why? get_last_id return 0 at first.
            #id = db.get_last_id(cursor, 'mailarc')
            self.is_new_mail = True
            cursor.execute("SELECT Max(id)+1 as id from mailarc")
            row = cursor.fetchone()
            if row and row[0] != None:
                id = row[0]
            else:
                id = 1
        self.id = int(id) # Because id might be 'n.0', int() is called.

    def _parse_date(self, msg):
        if 'date' in msg:
            datetuple_tz = email.Utils.parsedate_tz(msg['date'])
            localdate = calendar.timegm(datetuple_tz[:9]) #toDB
            zoneoffset = datetuple_tz[9] # toDB
            utcdate = localdate - zoneoffset # toDB
            #make zone ( +HHMM or -HHMM
            zone = ''
            if zoneoffset > 0:
                zone = '+' + time.strftime('%H%M', time.gmtime(zoneoffset))
            elif zoneoffset < 0:
                zone = '-' + time.strftime('%H%M', time.gmtime(-1 * zoneoffset))
            #self.log.debug( time.strftime("%y/%m/%d %H:%M:%S %z",datetuple_tz[:9]))
            
            self.log.debug(time.strftime("%Y/%m/%d %H:%M:%S", time.gmtime(utcdate)))
            self.log.debug(time.strftime("%Y/%m/%d %H:%M:%S", time.gmtime(localdate)))
            self.log.debug(zone)
        
        fromname, fromaddr = email.Utils.parseaddr(msg['from'])
        
        self.fromname = self._decode_to_unicode(fromname)
        self.fromaddr = self._decode_to_unicode(fromaddr)
        self.zone = zone
        self.utcdate = utcdate
        self.zoneoffset = zoneoffset
        self.localdate = self._to_localdate(utcdate, zoneoffset)
        
        self.log.info('  ' + self.localdate + ' ' + zone +' '+ fromaddr)
        
    def _parse_subject(self, msg):
        if 'subject' in msg:
            self.subject = self._decode_to_unicode(msg['subject'])
            
    def _parse_reference(self, msg):
        # make thread infomations
        ref_messageid = ''
        if 'in-reply-to' in msg:
            ref_messageid = ref_messageid + msg['In-Reply-To'] + ' '
            self.log.debug('In-Reply-To:%s' % ref_messageid )

        if 'references' in msg:
            ref_messageid = ref_messageid + msg['References'] + ' '

        m = re.findall(r'<(.+?)>', ref_messageid)
        ref_messageid = ''
        for text in m:
            ref_messageid = ref_messageid + "'%s'," % text
            
        ref_messageid = ref_messageid.strip(',')
        
        self.log.debug('RefMessage-ID:%s' % ref_messageid)
        
        return ref_messageid

    def _parse_multipart(self, author, msg):
        body = ''
        # delete all attachement at message-id
        Attachment.delete_all(self.env, 'mailarchive', self.id, self.db)

        for part in msg.walk():
            content_type = part.get_content_type()
            self.log.debug('Content-Type:' + content_type)
            file_counter = 1

            if content_type == 'multipart/mixed':
                pass
            
            elif content_type == 'text/html' and self._is_file(part) == False:
                if body != '':
                    body += "\n------------------------------\n\n"
                    
                body = part.get_payload(decode=True)
                charset = part.get_content_charset()
                
                self.log.debug('charset:' + str(charset))
                # Todo:need try
                if charset != None:
                    body = self._to_unicode(body, charset)
                
            elif content_type == 'text/plain' and self._is_file(part) == False:
                #body = part.get_payload(decode=True)
                if body != '':
                    body += "\n------------------------------\n\n"
                    
                current_body = part.get_payload(decode=True)
                charset = part.get_content_charset()
                
                self.log.debug('charset:' + str(charset))
                # Todo:need try
                if charset != None:
                    #body = self._to_unicode(body, charset)
                    body += self._to_unicode(current_body, charset)
                else:
                    body += current_body
                
            elif part.get_payload(decode=True) == None:
                pass
            
            # file attachment
            else:
                self.log.debug(part.get_content_type())
                # get filename
                # Applications should really sanitize the given filename so that an
                # email message can't be used to overwrite important files
                
                filename = self._get_filename(part)
                if not filename:
                    import mimetypes
                    
                    ext = mimetypes.guess_extension(part.get_content_type())
                    if not ext:
                        # Use a generic bag-of-bits extension
                        ext = '.bin'
                    filename = 'part-%03d%s' % (file_counter, ext)
                    file_counter += 1

                self.log.debug("filename:" + filename.encode(OUTPUT_ENCODING))

                # make attachment
                tmp = os.tmpfile()
                tempsize = len(part.get_payload(decode=True))
                tmp.write(part.get_payload(decode=True))

                tmp.flush()
                tmp.seek(0,0)

                attachment = Attachment(self.env, 'mailarchive', self.id)

                attachment.description = '' # req.args.get('description', '')
                attachment.author = author #req.args.get('author', '')
                attachment.ipnr = '127.0.0.1'

                try:
                    attachment.insert(filename,
                            tmp, tempsize, None, self.db)
                except Exception, e:
                    try:
                        ext = filename.split('.')[-1]
                        if ext == filename:
                            ext = '.bin'
                        else:
                            ext = '.' + ext
                        filename = 'part-%03d%s' % (file_counter, ext)
                        file_counter += 1
                        attachment.description += ', Original FileName: %s' % filename
                        attachment.insert(filename,
                                tmp, tempsize, None, self.db)
                        self.log.warn('As name is too long, the attached file is renamed : ' + filename)

                    except Exception, e:
                        self.log.error('Exception at attach file of Message-ID:' + self.messageid)
                        traceback.print_exc(e)

                tmp.close()