Example #1
0
def validate_time_period(query_tokens):
    """Parse a unit-of-measure/units pair out of *query_tokens*.

    Each token of the form ``<uom>=<units>`` is inspected; the first one
    whose ``<uom>`` is a known UnitOfMeasure wins and its ``<units>`` part
    is converted to int (a non-numeric value raises ValueError, which
    callers handle).  When no token qualifies the pair defaults to
    ("days", 1).

    :param query_tokens: iterable of raw query-string tokens.
    :return: tuple ``(uom, units)``.
    """
    log = Logger().get('reportserver.manager.utilities')
    log.debug("given query_tokens:" + str(query_tokens))

    uom, units = None, None

    for candidate in query_tokens:
        if '=' not in candidate:
            continue
        measure, amount = candidate.split('=')
        if measure in UnitOfMeasure.get_values(UnitOfMeasure):
            uom, units = measure, int(amount)
            break

    # default if we aren't given valid uom and units
    #TODO:  get this from a config file.
    if uom is None or units is None:
        uom, units = "days", 1

    log.debug("validate_time_period: " + str(uom) + ": " + str(units))
    return (uom, units)
Example #2
0
def sjoin_noise_values(gdf,
                       noise_layers: dict,
                       log: Logger = None) -> gpd.GeoDataFrame:
    """Left-join noise values from every noise layer onto the sampling
    points of *gdf*, then collapse duplicate rows produced by overlapping
    noise polygons.

    :param gdf: GeoDataFrame of sampling point geometries.
    :param noise_layers: mapping of layer name -> noise polygon GeoDataFrame.
    :param log: optional logger; when None, no messages are emitted
                (the original crashed on the default).
    :return: GeoDataFrame with exactly one row per input sampling point.
    """
    sample_gdf = gdf.copy()
    # Remember each point's original index so duplicates can be collapsed later.
    sample_gdf['sample_idx'] = sample_gdf.index
    for name, noise_gdf in noise_layers.items():
        if log:
            log.debug(f'joining noise layer [{name}] to sampling points')
        sample_gdf = gpd.sjoin(sample_gdf, noise_gdf, how='left',
                               op='within').drop(['index_right'], axis=1)

    # A point falling inside several polygons of one layer yields extra rows.
    if len(sample_gdf.index) > len(gdf.index) and log:
        log.warning(
            f'joined multiple noise values for one or more sampling points ({len(sample_gdf.index)} != {len(gdf.index)})'
        )

    distinct_samples = remove_duplicate_samples(sample_gdf, 'sample_idx',
                                                noise_layers)

    if log:
        if len(distinct_samples.index) == len(gdf.index):
            log.info('successfully removed duplicate samples')
        else:
            log.error('error in removing duplicate samples')

    # BUG FIX: list.sort() returns None, so the original comparison was
    # always None != None (never true) and a schema change could never be
    # detected.  sorted() returns the sorted copies to compare.
    if sorted(sample_gdf.columns) != sorted(distinct_samples.columns):
        if log:
            log.error(
                'schema of the dataframe was altered during removing duplicate samples'
            )

    return distinct_samples.drop(columns=['sample_idx'])
 def read_excel_sheet(self):
     """Open the workbook at ``self.file_name`` and return the worksheet
     named ``self.sheet_name``."""
     Logger.debug(
         "TestDataReaderCommonUtil.read_excel_sheet file name is : {0}, sheet_name is : {1}"
         .format(self.file_name, self.sheet_name), self.__class__.__name__)
     workbook = load_workbook(self.file_name)
     return workbook[self.sheet_name]
Example #4
0
def validate_port_number(givenStr):
    """Convert *givenStr* to an int port number.

    :param givenStr: string expected to contain an integer port number.
    :return: the int value, or None when conversion fails.
    """
    log = Logger().get('reportserver.manager.utilities')
    log.debug("given str is: " + givenStr)

    try:
        return int(givenStr)
    except (TypeError, ValueError) as e:
        # Narrowed from a blanket Exception: only conversion failures are
        # expected here; anything else should propagate.
        log.error("Error:  Received invalid string to convert to int: " + givenStr)
        log.error(str(e))
        return None
def filter_out_features_outside_mask(log: Logger, gdf, mask_poly):
    """Keep only the rows of *gdf* whose geometry boundary intersects
    *mask_poly*.

    Side effect: a boolean 'inside' column is added to *gdf* (preserved
    from the original implementation, as callers may rely on it).

    :param log: logger with a ``debug`` method.
    :param gdf: (Geo)DataFrame with a 'geometry' column.
    :param mask_poly: polygon-like object exposing ``intersects``.
    :return: the filtered (Geo)DataFrame.
    """
    # intersects() already returns a bool; the original's
    # "True if ... else False" was redundant.
    gdf['inside'] = [
        mask_poly.intersects(geom.boundary) for geom in gdf['geometry']
    ]
    filtered = gdf[gdf['inside']]
    log.debug(
        f'Filtered out {len(gdf)-len(filtered)} rows outside the mask of total {len(gdf)} rows'
    )
    return filtered
class KeyWordConsumer:
    """Consumes key words from a RabbitMQ queue and searches each one on
    the Google Play store, persisting results through a MySQL connection.
    """

    def __init__(self, log_file, log_name):
        # Rotating logger: 10 MiB per file, 2 backup files.
        self.logger = Logger(log_file, log_name, 10*1024*1024, 2)
        self._db_conn = None

    def start(self):
        """Connect to RabbitMQ and the database, then consume forever.

        Returns early (after logging) when either the RabbitMQ consumer or
        the database connection cannot be established.
        """
        rabbit_topic = RabbitTopic.init_rabbitmq_consumer(EXCHANGE_NAME, QUEUE_NAME, QUEUE_LIMIT,
                                                          [ROUTING_KEY], self.logger)
        if not rabbit_topic:
            self.logger.debug('Construct key word consumer error')
            return

        self._conn_db()
        if not self._db_conn:
            self.logger.exception('Connect to database error')
            return

        while 1:
            try:
                rabbit_topic.start_consuming(self._callback, QUEUE_NAME)
            except ConnectionClosed:
                # Re-establish the consumer when the broker drops the connection.
                self.logger.debug('Connection to rabbitmq server closed, re-connecting...')
                rabbit_topic = RabbitTopic.init_rabbitmq_consumer(EXCHANGE_NAME, QUEUE_NAME, QUEUE_LIMIT,
                                                                  [ROUTING_KEY], self.logger)

    def _callback(self, channel, method, properties, key_word):
        """RabbitMQ delivery callback: search one key word, ack the message,
        then mark the key word consumed in the database.

        NOTE(review): the message is acked even when the search raises, so a
        failed key word is not redelivered — confirm this is intentional.
        """
        self.logger.info(os.linesep)
        self.logger.info('----> Get body message %s and start searching this key word...<----' % key_word)
        try:
            url = 'https://play.google.com/store/search?q=%s&c=apps' % key_word
            search_web_driver = SearchWebDriver(url, self._db_conn, self.logger)
            search_web_driver.search()
        except Exception:
            self.logger.exception('Search key word %s error' % key_word)

        channel.basic_ack(delivery_tag=method.delivery_tag)

        self.logger.info('Set key word %s as consumed' % key_word)
        self._set_key_word_consumed(key_word)

    def _conn_db(self):
        # Best-effort connect; failure leaves self._db_conn as None, which
        # start() checks before consuming.
        try:
            self._db_conn = util.conn_mysql_db()
        except Exception:
            self.logger.exception('Connect to database error')

    def _set_key_word_consumed(self, key_word):
        # Flip the key word's status so producers don't enqueue it again.
        query = 'UPDATE key_word SET status=%s WHERE key_word=%s'
        try:
            MySQLDBUtil.update(query, (CONSUMED, key_word), self._db_conn)
        except Exception:
            self.logger.exception('Set key word %s as consumed error' % key_word)
Example #7
0
class PortManager:
    """Port Manager: calls necessary managers and utilities to generate
    parameters for sql.

    The list of valid ports it can receive is taken from the Configuration
    setup.
    """

    validPortNumbers = ()

    def __init__(self):
        self.g_config = GlobalConfig()
        self.validPortNumbers = self.g_config.get_ports()
        self.date_time_field = self.g_config.get_db_datetime_name()
        self.log = Logger().get('reportserver.manager.PortManager.PortManager')

    def isPortValid(self, port_number):
        """Return True when *port_number* is one of the configured ports."""
        # `in` already yields a bool; no if/else needed.
        return port_number in self.validPortNumbers

    def getPort(self, port_number, uom, unit):
        """Build the JSON payload for one port over the given time span.

        :param port_number: port to report on (validated against config).
        :param uom: unit of measure, e.g. "days".
        :param unit: number of uom units to look back.
        :return: dict with 'port', 'timespan' and 'items' keys; 'items' is
                 empty when the port is not valid.
        """
        self.log.info("Retrieving port:" + str(port_number) + "uom:" + uom + " size: " + str(unit))

        items = []

        if self.isPortValid(port_number):
            results = DatabaseHandler().get_json_by_time(port_number, uom, unit)
            items = utilities.process_data(results)

        port_json = {
            'port': str(port_number),
            'timespan': uom + "=" + str(unit),
            'items': items
        }

        return port_json

    def get_port_attack_count(self, tablename, unit, uom):
        """Count distinct attack sessions in *tablename* since the start of
        the requested time window."""
        fromDate = dateTimeUtility.get_begin_date_iso(unit, uom)

        # Table/column names cannot be bound as SQL parameters; the date is
        # bound (?) instead of interpolated into the string.
        sql = "select count(distinct session) as total_attacks from %s where %s >= ? " % (tablename, self.date_time_field)
        self.log.debug("sql is:" + sql)
        result = DatabaseHandler().query_db(sql, (fromDate,))[0]
        return int(result['total_attacks'])

    def get_unique_ips(self, tablename, unit, uom):
        """Count distinct peer IP addresses in *tablename* since the start
        of the requested time window."""
        fromDate = dateTimeUtility.get_begin_date_iso(unit, uom)
        # Same parameter-binding treatment as get_port_attack_count.
        sql = "select count(distinct peerAddress) as unique_ips from %s where %s >= ? " % (tablename, self.date_time_field)
        self.log.debug("sql is:" + sql)
        result = DatabaseHandler().query_db(sql, (fromDate,))[0]
        return int(result['unique_ips'])
class DrawLots(object):
    """Fortune-stick ("draw lots") game backed by a CSV word list."""

    def __init__(self):
        self.log = Logger()
        '''获取抽签列表'''
        # Load the lots list: each CSV row provides (key, lot text,
        # interpretation text).
        # NOTE(review): lots_dict / words_dict look like module-level dicts
        # mutated here — confirm they are defined in this module.
        csv_file = csv.reader(open(DRAW_LOTS_WORDS_PATH, 'r', encoding='UTF-8'))
        for word in csv_file:
            lots_dict[word[0]] = word[1]
            words_dict[word[0]] = word[2]

    def play(self):
        # randint(1, n) - 1 yields a value in [0, n-1]; assumes the CSV keys
        # are the stringified numbers "0".."n-1" — TODO confirm.
        draw_random_num = random.randint(1, len(lots_dict)) - 1
        self.log.debug('draw_random_num:{}'.format(draw_random_num))
        return lots_dict[str(draw_random_num)], words_dict[str(draw_random_num)]
Example #9
0
class IpsServiceHandler:
    """Handles REST requests for IP-address based report data."""

    def __init__(self):
        self.log = Logger().get("reportserver.manager.IpsServiceHandler.py")

    def process(self, rqst, path_tokens, query_tokens):
        """Route an ip-address report request.

        A path of 5 tokens carries an ip address in token 4 (empty means
        "all addresses"); 4 tokens means the (not yet implemented) address
        list; anything else is a bad request.
        """
        uom = None
        units = None
        self.log.info("processing ipaddress request:" + str(path_tokens) + str(query_tokens))

        try:
            time_period = utilities.validate_time_period(query_tokens)
            uom = time_period[0]
            units = time_period[1]
        except ValueError:
            rqst.badRequest(units)
            return

        if len(path_tokens) == 5:
            ipaddress = path_tokens[4].strip()
            self.log.debug("requested: " + str(ipaddress))
            # BUG FIX: the original condition
            # `ipaddress is not None or ipaddress is not ""` was always true
            # (strip() never returns None, and `is not` compared identity with
            # a literal), so an empty address could never reach the
            # "all addresses" branch.  A simple truthiness test is correct:
            # non-empty -> validate and serve that address, empty -> serve all.
            if ipaddress:
                try:
                    ipaddress = utilities.validate_ipaddress(ipaddress)
                    self.get_ips_data_by_time(rqst, ipaddress, uom, units)
                except ValueError:
                    rqst.badRequest(badIpAddress)
                    return
            else:
                self.get_ips_data_by_time(rqst, "", uom, units)
        elif len(path_tokens) == 4:
            self.get_ips_list_json(rqst, uom, units)
        else:
            rqst.badRequest()
            return

    def get_ips_data_by_time(self, rqst, ipaddress, uom, units):
        """Fetch report data for *ipaddress* and send it as JSON; replies
        404 when no data exists."""
        ips_manager = IpsManager()
        addressjsondata = ips_manager.get_data(ipaddress, uom, units)
        if addressjsondata is not None:
            # send response:
            rqst.sendJsonResponse(addressjsondata, 200)
        else:
            rqst.notFound()

    def get_ips_list_json(self, rqst, uom, units):
        """Placeholder endpoint: the address listing is not implemented yet."""
        response = "{not implemented yet.}"
        rqst.sendJsonResponse(response, 200)
Example #10
0
class DatabaseHandler:
    """Thin sqlite3 access layer for the report server."""

    def __init__(self):
        self.global_config = GlobalConfig()
        self.db_path = self.global_config['Database']['path']
        self.log = Logger().get('reportserver.dao.DatabaseHandler.DatabaseHandler')

    def connect(self, database_name):
        """Connect to the given database file.

        Defaults to the honeypot db when *database_name* is None (another
        path can be passed in, mainly for testing).  The database needs to
        exist first.

        :return: a sqlite3 connection, or None when the file is missing or
                 the connection fails.
        """
        if database_name is None:
            database_name = self.db_path

        if not os.path.exists(database_name):
            self.log.error("Database does not exist in path: " + database_name)
            return None
        try:
            return sqlite3.connect(database_name)
        except sqlite3.OperationalError as oe:
            self.log.error("****Problem connecting to database*** at: " + database_name)
            self.log.error(oe)
            # Explicit None instead of the original implicit fall-through.
            return None

    def query_db(self, query, args=(), one=False, db=None):
        """Run *query* with bound *args* and return rows as dicts.

        :param one: when True return only the first row (or None).
        :param db: optional database path; defaults to the configured one.
        """
        conn = self.connect(db)
        cur = conn.cursor()
        try:
            cur.execute(query, args)
            # Map each row to {column_name: value} via the cursor description.
            rows = [dict((cur.description[i][0], value)
                         for i, value in enumerate(row)) for row in cur.fetchall()]
        finally:
            # Close even when execute/fetch raises (the original only closed
            # on the success path and leaked the connection otherwise).
            conn.close()
        return (rows[0] if rows else None) if one else rows

    def get_json_by_time(self, portnumber, uom, units):
        """Return all rows of *portnumber*'s table newer than the start of
        the requested window (uom is "weeks", "days", "hours" or "minutes").
        """
        begin_date_iso = dateTimeUtility.get_begin_date_iso(uom, units)
        tableName = self.global_config.get_plugin_config(portnumber)['table']
        date_time_field = self.global_config.get_db_datetime_name()

        # Table/column names cannot be bound as SQL parameters; the date is
        # bound (?) instead of interpolated into the string.
        queryString = "SELECT * FROM %s where %s >= ? order by id, %s" % (tableName, date_time_field, date_time_field)
        self.log.info("queryString is: " + str(queryString))
        results = self.query_db(queryString, (begin_date_iso,))
        self.log.debug("results: " + str(results))

        return results
Example #11
0
class DataManager(Thread):
    """
    This is the DataManager class, it creates the database, data queue and
    the condition variable for synchronization between it, the framework and
    the plugins
    """
    def __init__(self):
        super().__init__()
        # Create the database (default schema) and the shared work queue.
        self.db = Database()
        self.db.create_default_database()
        self.q = DataQueue()
        # Guards queue access and wakes the worker thread when data arrives.
        self.condition = Condition()
        # Set by shutdown() to make the run() loop exit.
        self.kill = False
        self.logger = Logger().get('database.datamanager.DataManager')

    def run(self):
        """
        This will insert all data in the queue and then once finished give up
        control of the condition variable
        """
        while not self.kill:
            self.condition.acquire()
            # Sleep until a producer (or shutdown) signals the condition.
            if self.q.check_empty():
                self.condition.wait()

            # Drain the queue, writing each queued item to the database.
            while not self.q.check_empty():
                value = self.q.get_next_item()
                Table_Insert.prepare_data_for_insertion(
                    self.q.dv.table_schema, value)
                # NOTE(review): notify() fires once per drained item while the
                # lock is still held — confirm this producer hand-off is the
                # intended pattern rather than a single notify after the loop.
                self.condition.notify()
            self.condition.release()

    def insert_data(self, data):
        """
        Synchronously inserts data into the database.

        :param data: A dictionary with a table name as its key and a dictionary
                     of column names and corresponding values as its value.
        """
        self.condition.acquire()
        # Only wake the worker when the item was actually enqueued.
        if self.q.insert_into_data_queue(data):
            self.condition.notify()
        self.condition.release()

    def shutdown(self):
        # Flip the kill flag, wake the worker if it is waiting, and join it.
        self.kill = True
        self.condition.acquire()
        self.condition.notify()
        self.condition.release()
        self.join()
        self.logger.debug('Data manager has shut down.')
Example #12
0
class IpsManager:
    """Ips Manager: calls necessary managers and utilities to generate
    parameters for sql, collecting per-port report data for one address."""

    validPortNumbers = ()

    def __init__(self):
        self.g_config = GlobalConfig()
        self.valid_port_numbers = self.g_config.get_ports()
        self.date_time_field = self.g_config.get_db_datetime_name()
        self.log = Logger().get('reportserver.manager.IpsManager.py')

    def get_data(self, ipaddress, uom, unit):
        """Collect data for *ipaddress* across every configured port.

        :return: dict with 'ipaddress', 'timespan' and 'ports' keys.
        """
        self.log.info("Retrieving ipaddress data: " + str(ipaddress) + "  uom:  " + uom + " size: " + str(unit))

        port_data = []

        for port in self.valid_port_numbers:
            results = self.get_json_by_ip(port, ipaddress, uom, unit)
            items = utilities.process_data(results)
            port_data.append({port: items})

        port_json = {
            'ipaddress': str(ipaddress),
            'timespan': uom + "=" + str(unit),
            'ports': port_data
        }
        return port_json

    def get_json_by_ip(self, portnumber, ipaddress, uom, units):
        """Query the port's table for rows newer than the window start that
        match *ipaddress*.

        SECURITY FIX: *ipaddress* originates from the request path, so it is
        now bound as a query parameter instead of interpolated into the SQL
        string (table/column names still cannot be bound).
        """
        begin_date_iso = dateTimeUtility.get_begin_date_iso(uom, units)
        table_name = self.g_config.get_plugin_config(portnumber)['table']
        date_time_field = self.g_config.get_db_datetime_name()

        queryString = "SELECT * FROM %s where %s >= ? and peerAddress = ? order by id, %s" % (
            table_name, date_time_field, date_time_field)
        self.log.info("queryString is: " + str(queryString))
        results = DatabaseHandler().query_db(queryString, (begin_date_iso, ipaddress))
        self.log.debug("results: " + str(results))

        return results
 def get_cel_value_by_row_col(self, row, col):
     """
     Read the value of a single Excel cell.
     :param row: row index passed to openpyxl's ``cell``
     :param col: column index passed to openpyxl's ``cell``
     :return: cell value; whole-number floats are narrowed to int
     """
     Logger.debug(
         "TestDataReaderCommonUtil.get_cel_value_by_row_col 获取excel中单元格内容 "
         "file name is : {0}, row:{1}, col:{2}".format(
             self.file_name, row, col), self.__class__.__name__)
     # PERF FIX: read the sheet once — the original called
     # read_excel_sheet() twice, loading the workbook a second time just to
     # inspect the value's type.  isinstance replaces the type() == compare.
     cell_value = self.read_excel_sheet().cell(row=row, column=col).value
     # Whole-number floats (e.g. 3203.0) are narrowed to int (3203) so codes
     # such as dept_code round-trip without a trailing ".0".
     if isinstance(cell_value, float) and cell_value % 1 == 0:
         cell_value = int(cell_value)
     return cell_value
Example #14
0
def validate_time_period(query_tokens):
    """Extract a ``(uom, units)`` pair from the request's query tokens.

    Scans for the first ``key=value`` token whose key is a known
    UnitOfMeasure; its value is converted to int (non-numeric values raise
    ValueError).  Tokens without '=' or with an unknown key are skipped.

    :param query_tokens: iterable of raw query-string tokens.
    :return: ``(uom, units)``, or ``(None, None)`` when nothing matches.
    """
    log = Logger().get('reportserver.manager.utilities')
    log.debug("given query_tokens:" + str(query_tokens))

    uom, units = None, None

    for token in query_tokens:
        if '=' not in token:
            continue
        key, value = token.split('=')
        if key in UnitOfMeasure.get_values(UnitOfMeasure):
            uom, units = key, int(value)
            break

    return (uom, units)
def explode_multipolygons_to_polygons(
        log: Logger, polygon_gdf: gpd.GeoDataFrame) -> gpd.GeoDataFrame:
    """Explode every MultiPolygon row of *polygon_gdf* into one row per
    member polygon; plain Polygon rows are kept as they are.

    :param log: logger for reporting how many rows were produced.
    :param polygon_gdf: GeoDataFrame of (Multi)Polygon geometries.
    :return: new GeoDataFrame (EPSG:3879) containing only single polygons.
    """
    row_accumulator = []

    def explode_multipolygons(row):
        # One output row per member geometry; all other columns are copied.
        if row['geometry'].type == 'MultiPolygon':
            for geom in row['geometry'].geoms:
                new_row = row.to_dict()
                new_row['geometry'] = geom
                row_accumulator.append(new_row)
        else:
            row_accumulator.append(row.to_dict())

    polygon_gdf.apply(explode_multipolygons, axis=1)
    gdf = gpd.GeoDataFrame(row_accumulator, crs=CRS.from_epsg(3879))
    if len(polygon_gdf) != len(gdf):
        # Typo fix in the log message: "Exploaded" -> "Exploded".
        log.debug(
            f'Exploded {len(gdf)} polygons from {len(polygon_gdf)} multipolygons'
        )
    return gdf
Example #16
0
class ReadLocator:
    """Reads element locators from a YAML file.

    All locators live in YAML files; instantiate this class and call
    ``read()`` to fetch one.  Pages inside a file are separated by '---'.

    :param filename: YAML file name (the '.yaml' suffix is appended when missing)
    :param page_number: 1-based page number inside the YAML file
    """
    def __init__(self, filename: str, page_number: int):
        self._filename = filename if filename.endswith('.yaml') else filename + '.yaml'
        self._page_number = page_number
        self._all_result = self._read_all_page()
        self._log = Logger('读取定位器').get_logger()

    def _read_all_page(self):
        """Load the configured page of the YAML file and return its data."""
        path = os.path.join(constant.yaml_path, self._filename)
        with open(path, encoding='utf-8') as fp:
            content = fp.read()
        pages = yaml.load_all(content, Loader=yaml.FullLoader)
        return list(pages)[self._page_number - 1]

    def read(self, locator_name: str):
        """Return the locator stored under *locator_name*, or None (after
        logging an error) when it is absent."""
        locator = self._all_result.get(locator_name)
        if not locator:
            self._log.error('定位器不存在({})!!'.format(locator_name))
            return None
        self._log.debug(f'获取到定位器({locator_name}):{locator}')
        return locator
class OperateConfig:
    """Read/write helper around a ``configparser``-style ini file.

    :param file_path: path of the configuration file
    """
    def __init__(self, file_path):
        self._file_name_path = file_path
        self._cg = configparser.ConfigParser()
        self._cg.read(self._file_name_path, encoding='utf-8')
        self._log = Logger('读取配置').get_logger()

    def get_str(self, section, option):
        """Return the raw string value of *option* under *section*."""
        result = self._cg.get(section, option)
        self._log.debug('获取到:{} = {}'.format(option, result))
        return result

    def get_int(self, section, option):
        """Return the value of *option* under *section* as an int."""
        result = self._cg.getint(section, option)
        self._log.debug('获取到:{} = {}'.format(option, result))
        return result

    def get_bool(self, section, option):
        """Return the value of *option* under *section* as a bool."""
        result = self._cg.getboolean(section, option)
        self._log.debug('获取到:{} = {}'.format(option, result))
        return result

    def write_data(self, section, option, value):
        """Write ``option = value`` under *section* back to the file.

        :return: None
        """
        self._cg.set(section, option, value)
        with open(self._file_name_path, 'w') as fp:
            self._cg.write(fp)
        self._log.info('成功写入参数:{} = {}'.format(option, value))
Example #18
0
class ReadApi:
    """Reads API definitions from a YAML file.

    :param filename: YAML file name (the '.yaml' suffix is appended when missing)
    """
    def __init__(self, filename: str):
        self._filename = filename
        if not self._filename.endswith('.yaml'):
            self._filename = filename + '.yaml'
        self._log = Logger('读取API信息').get_logger()

    def read(self, api_name: str):
        """Read one interface's definition.

        :param api_name: interface name (top-level YAML key)
        :return: dict with everything recorded for this interface
                 (None when the key is absent)
        """
        path = os.path.join(constant.api_data_path, self._filename)
        # FIX: open with an explicit utf-8 encoding — ReadLocator in this
        # codebase already does so, and the data files contain non-ASCII
        # text that would break under a non-utf-8 platform default.
        with open(path, encoding='utf-8') as fp:
            reader = fp.read()
        all_ = yaml.load(reader, Loader=yaml.FullLoader)
        single = all_.get(api_name)
        self._log.debug('获取到接口{}:{}'.format(api_name, single))
        return single
Example #19
0
class IPInfoAgent(Thread):
    """Background thread that looks up ipinfo.io data for one peer address,
    caches it, and hands a cleaned record to the framework for insertion."""

    def __init__(self, peer_address, framework, instance_name):
        super().__init__()
        self.log = Logger().get('recon.ipinfoagent.IPInfoAgent')
        self.peer_address = peer_address
        self.cache = IPInfoCache()
        self.framework = framework
        self.instance_name = instance_name

    def run(self):
        peer_info = self.cache[self.peer_address]
        if not peer_info:
            request_str = 'http://ipinfo.io/' + self.peer_address
            self.log.debug('making REST request to ' + request_str)
            response = requests.get(request_str, timeout=10)
            peer_info = response.json()

            peer_info['timestamp'] = datetime.now()

            # Split the combined "lat,long" field into separate float columns.
            if 'loc' in peer_info:
                lat, long = peer_info['loc'].split(',')
                peer_info['lat'] = float(lat)
                peer_info['long'] = float(long)
                # idiom: del is a statement, not a function call
                del peer_info['loc']

            self.cache[self.peer_address] = peer_info
        else:
            self.log.debug('ipinfo.io data for ' +
                           self.peer_address +
                           ' is still cached')

        peer_info['plugin_instance'] = self.instance_name

        # Keep only the columns the ipInfo table knows about
        # (dict comprehension replaces the original build-up loop).
        known_cols = ['ip', 'hostname', 'city', 'region', 'country', 'lat',
                      'long', 'org', 'postal', 'timestamp', 'plugin_instance']
        clean_dict = {col: peer_info[col] for col in known_cols
                      if col in peer_info}
        self.framework.insert_data({'ipInfo': clean_dict})

        self.log.debug('ipinfo.io data for ' +
                       self.peer_address +
                       ': ' +
                       str(peer_info))
Example #20
0
class P0fAgent(Thread):
    """Background thread that queries a local p0f filesystem socket for
    passive-fingerprint data on one peer and stores it via the framework."""

    def __init__(self, peer_address, framework, session):
        super().__init__()
        self.config = GlobalConfig()
        self.framework = framework
        self.fs_sock = self.config['Framework']['p0f.fs_sock']
        self.log = Logger().get('recon.p0fagent.P0fAgent')
        self.peer_address = peer_address
        self.session = session

    def run(self):
        p0f = P0f(self.fs_sock)
        peer_info = None
        try:
            peer_info = p0f.get_info(self.peer_address)
        except P0fException as e:
            self.log.warn('p0f request failed for ' +
                          self.peer_address +
                          ': ' + str(e))
            return
        except KeyError:
            # unused `as e` bindings removed from the no-detail handlers
            self.log.warn('p0f couldn\'t find any info for ' +
                          self.peer_address)
            return
        except ValueError:
            self.log.warn('p0f returned bad data for ' +
                          self.peer_address)
            return
        except FileNotFoundError:
            self.log.error('p0f filesystem socket not found')
            return
        except Exception as e:
            self.log.error('unknown p0f error occurred on address ' +
                           self.peer_address + ': ' + str(e))
            return

        # prettify C/null-terminated byte arrays in p0f info dict
        # (isinstance replaces the original type(...) == ... comparisons)
        for key in peer_info.keys():
            value = peer_info[key]
            if isinstance(value, bytes):
                # Keep only the text before the first NUL terminator.
                decoded = value.decode('utf-8')
                peer_info[key] = decoded.partition('\x00')[0]
            elif isinstance(value, datetime):
                peer_info[key] = value.isoformat()
            elif isinstance(value, timedelta):
                peer_info[key] = str(int(value.total_seconds()))
            elif isinstance(value, int):
                peer_info[key] = str(value)

        data = { 'p0f': {
                 'session': self.session,
                 'first_seen': peer_info['first_seen'],
                 'last_seen': peer_info['last_seen'],
                 'uptime': peer_info['uptime'],
                 'last_nat': peer_info['last_nat'],
                 'last_chg': peer_info['last_chg'],
                 'distance': peer_info['distance'],
                 'bad_sw': peer_info['bad_sw'],
                 'os_name': peer_info['os_name'],
                 'os_flavor': peer_info['os_flavor'],
                 'os_match_q': peer_info['os_match_q'],
                 'http_name': peer_info['http_name'],
                 'http_flavor': peer_info['http_flavor'],
                 'total_conn': peer_info['total_conn'],
                 'link_type': peer_info['link_type'],
                 'language': peer_info['language']
             }
        }
        self.framework.insert_data(data)

        self.log.debug('p0f info for ' +
                       self.peer_address +
                       ': ' +
                       str(peer_info))
Example #21
0
def convert_otp_graph_to_igraph(
    node_csv_file: str,
    edge_csv_file: str,
    hma_poly_file: str,
    igraph_out_file: str,
    b_export_otp_data_to_gpkg: bool = False,
    b_export_decomposed_igraphs_to_gpkg: bool = False,
    b_export_final_graph_to_gpkg: bool = False,
    debug_otp_graph_gpkg: str = 'debug/otp_graph_features.gpkg',
    debug_igraph_gpkg: str = 'debug/otp2igraph_features.gpkg',
    log: Logger = Logger(printing=True)
) -> dict:

    hma_poly = geom_utils.project_geom(
        gpd.read_file(hma_poly_file)['geometry'][0])

    # 1) read nodes nodes from CSV
    n = pd.read_csv(node_csv_file, sep=';')
    log.info(f'read {len(n.index)} nodes')
    log.debug(f'node column types: {n.dtypes}')
    log.debug(f'nodes head: {n.head()}')
    log.info('creating node gdf')
    n[Node.geometry.name] = [
        shapely.wkt.loads(geom) if isinstance(geom, str) else Point()
        for geom in n[Node.geometry.name]
    ]
    n[Node.geom_wgs.name] = n[Node.geometry.name]
    n = gpd.GeoDataFrame(n,
                         geometry=Node.geometry.name,
                         crs=CRS.from_epsg(4326))
    log.info('reprojecting nodes to etrs')
    n = n.to_crs(epsg=3879)
    log.debug(f'nodes head: {n.head()}')

    # 2) read edges from CSV
    e = pd.read_csv(edge_csv_file, sep=';')
    log.info(f'read {len(e.index)} edges')
    log.debug(f'edge column types: {e.dtypes}')
    log.debug(f'edges head: {e.head()}')
    log.info('creating edge gdf')
    e[Edge.geometry.name] = [
        shapely.wkt.loads(geom) if isinstance(geom, str) else LineString()
        for geom in e[Edge.geometry.name]
    ]
    e[Edge.geom_wgs.name] = e[Edge.geometry.name]
    e = gpd.GeoDataFrame(e,
                         geometry=Edge.geometry.name,
                         crs=CRS.from_epsg(4326))
    log.info('reprojecting edges to etrs')
    e = e.to_crs(epsg=3879)
    log.debug(f'edges head: {e.head()}')

    # 3) export graph data to gpkg
    if (b_export_otp_data_to_gpkg == True):
        log.info('writing otp graph data to gpkg')
        e.drop(columns=[Edge.geom_wgs.name]).to_file(debug_otp_graph_gpkg,
                                                     layer='edges',
                                                     driver='GPKG')
        log.info(f'exported edges to {debug_otp_graph_gpkg} (layer=edges)')
        n.drop(columns=[Edge.geom_wgs.name]).to_file(debug_otp_graph_gpkg,
                                                     layer='nodes',
                                                     driver='GPKG')
        log.info(f'exported nodes to {debug_otp_graph_gpkg} (layer=nodes)')

    # 4) filter out edges that are unsuitable for both walking and cycling
    def filter_df_by_query(df: pd.DataFrame, query: str, name: str = 'rows'):
        count_before = len(df.index)
        df_filt = df.query(query).copy()
        filt_ratio = (count_before - len(df_filt.index)) / count_before
        log.info(
            f'filtered out {count_before-len(df_filt.index)} {name} ({round(filt_ratio * 100, 1)} %) by {query}'
        )
        return df_filt

    e_filt = filter_df_by_query(
        e,
        f'{Edge.allows_walking.name} == True or {Edge.allows_biking.name} == True',
        name='edges')
    e_filt = filter_df_by_query(e_filt,
                                f'{Edge.is_no_thru_traffic.name} == False',
                                name='edges')

    # 5) create a dictionaries for converting otp ids to ig ids and vice versa
    log.debug('create maps for converting otp ids to ig ids')
    n[Node.id_ig.name] = np.arange(len(n.index))
    ids_otp_ig = {}
    ids_ig_otp = {}
    for node in n.itertuples():
        ids_otp_ig[getattr(node,
                           Node.id_otp.name)] = getattr(node, Node.id_ig.name)
        ids_ig_otp[getattr(node,
                           Node.id_ig.name)] = getattr(node, Node.id_otp.name)

    # 6) add nodes to graph
    log.info('adding nodes to graph')
    G = ig.Graph(directed=True)
    G.add_vertices(len(n.index))
    for attr in Node:
        if (attr.name in n.columns):
            G.vs[attr.value] = list(n[attr.name])
        else:
            log.warning(f'node column {attr.name} not present in dataframe')

    # 7) add edges to graph
    log.info('adding edges to graph')

    # get edge lengths by projected geometry
    e_filt[Edge.length.name] = [
        round(geom.length, 4) if isinstance(geom, LineString) else 0.0
        for geom in e_filt[Edge.geometry.name]
    ]

    def get_ig_uv(edge):
        return (ids_otp_ig[edge['node_orig_id']],
                ids_otp_ig[edge['node_dest_id']])

    e_filt['uv_ig'] = e_filt.apply(lambda row: get_ig_uv(row), axis=1)
    e_filt[Edge.id_ig.name] = np.arange(len(e_filt.index))
    G.add_edges(list(e_filt['uv_ig']))
    for attr in Edge:
        if (attr.name in e_filt.columns):
            G.es[attr.value] = list(e_filt[attr.name])
        else:
            log.warning(f'edge column {attr.name} not present in dataframe')

    # 8) delete edges outside Helsinki Metropolitan Area (HMA)
    hma_buffered = hma_poly.buffer(100)

    def intersects_hma(geom: LineString):
        if (geom.is_empty == True): return True
        return True if geom.intersects(hma_buffered) else False

    e_gdf = ig_utils.get_edge_gdf(G)
    log.info('finding edges that intersect with HMA')
    e_gdf['in_hma'] = [
        intersects_hma(line) for line in e_gdf[Edge.geometry.name]
    ]
    e_gdf_del = e_gdf.query('in_hma == False').copy()
    out_ratio = round(100 * len(e_gdf_del.index) / len(e_gdf.index), 1)
    log.info(f'found {len(e_gdf_del.index)} ({out_ratio} %) edges outside HMA')

    log.info('deleting edges')
    before_count = G.ecount()
    G.delete_edges(e_gdf_del.index.tolist())
    after_count = G.ecount()
    log.info(f'deleted {before_count-after_count} edges')

    # check if id_ig:s need to be updated to edge attributes
    mismatch_count = len([
        edge.index for edge in G.es
        if edge.attributes()[Edge.id_ig.value] != edge.index
    ])
    log.info(f'invalid edge ids: {mismatch_count}')
    # reassign igraph indexes to edge and node attributes
    G.es[Edge.id_ig.value] = [e.index for e in G.es]
    G.vs[Node.id_ig.value] = [v.index for v in G.vs]
    # check if id_ig:s need to be updated to edge attributes
    mismatch_count = len([
        edge.index for edge in G.es
        if edge.attributes()[Edge.id_ig.value] != edge.index
    ])
    log.info(f'invalid edge ids: {mismatch_count} (after re-indexing)')

    # 9) find and inspect subgraphs by decomposing the graph
    sub_graphs = G.decompose(mode='STRONG')
    log.info(f'found {len(sub_graphs)} subgraphs')

    graph_sizes = [graph.ecount() for graph in sub_graphs]
    log.info(
        f'subgraphs with more than 10 edges: {len([s for s in graph_sizes if s > 10])}'
    )
    log.info(
        f'subgraphs with more than 50 edges: {len([s for s in graph_sizes if s > 50])}'
    )
    log.info(
        f'subgraphs with more than 100 edges: {len([s for s in graph_sizes if s > 100])}'
    )
    log.info(
        f'subgraphs with more than 500 edges: {len([s for s in graph_sizes if s > 500])}'
    )
    log.info(
        f'subgraphs with more than 10000 edges: {len([s for s in graph_sizes if s > 10000])}'
    )

    small_graphs = [graph for graph in sub_graphs if graph.ecount() <= 15]
    medium_graphs = [
        graph for graph in sub_graphs
        if (graph.ecount() > 15 and graph.ecount() <= 500)
    ]
    big_graphs = [graph for graph in sub_graphs if graph.ecount() > 500]

    small_graph_edges = []
    for graph_id, graph in enumerate(small_graphs):
        edges = ig_utils.get_edge_dicts(
            graph, attrs=[Edge.id_otp, Edge.id_ig, Edge.geometry])
        for edge in edges:
            edge['graph_id'] = graph_id
        small_graph_edges.extend(edges)

    medium_graph_edges = []
    for graph_id, graph in enumerate(medium_graphs):
        edges = ig_utils.get_edge_dicts(
            graph, attrs=[Edge.id_otp, Edge.id_ig, Edge.geometry])
        for edge in edges:
            edge['graph_id'] = graph_id
        medium_graph_edges.extend(edges)

    big_graph_edges = []
    for graph_id, graph in enumerate(big_graphs):
        edges = ig_utils.get_edge_dicts(
            graph, attrs=[Edge.id_otp, Edge.id_ig, Edge.geometry])
        for edge in edges:
            edge['graph_id'] = graph_id
        big_graph_edges.extend(edges)

    if (b_export_decomposed_igraphs_to_gpkg == True):
        log.info('exporting subgraphs to gpkg')
        # graphs with <= 15 edges
        small_graph_edges_gdf = gpd.GeoDataFrame(small_graph_edges,
                                                 crs=CRS.from_epsg(3879))
        small_graph_edges_gdf.to_file(debug_igraph_gpkg,
                                      layer='small_graph_edges',
                                      driver='GPKG')
        # graphs with  15–500 edges
        medium_graph_edges_gdf = gpd.GeoDataFrame(medium_graph_edges,
                                                  crs=CRS.from_epsg(3879))
        medium_graph_edges_gdf.to_file(debug_igraph_gpkg,
                                       layer='medium_graph_edges',
                                       driver='GPKG')
        # graphs with > 500 edges
        big_graph_edges_gdf = gpd.GeoDataFrame(big_graph_edges,
                                               crs=CRS.from_epsg(3879))
        big_graph_edges_gdf.to_file(debug_igraph_gpkg,
                                    layer='big_graph_edges',
                                    driver='GPKG')
        log.info(f'graphs exported')

    # 10) delete smallest subgraphs from the graph
    del_edge_ids = [edge[Edge.id_ig.name] for edge in small_graph_edges]
    log.info(f'deleting {len(del_edge_ids)} isolated edges')
    before_count = G.ecount()
    G.delete_edges(del_edge_ids)
    after_count = G.ecount()
    del_ratio = round(100 * (before_count - after_count) / before_count, 1)
    log.info(f'deleted {before_count-after_count} ({del_ratio} %) edges')

    # 11) delete isolated nodes from the graph
    del_node_ids = [v.index for v in G.vs.select(_degree_eq=0)]
    log.info(f'deleting {len(del_node_ids)} isolated nodes')
    before_count = G.vcount()
    G.delete_vertices(del_node_ids)
    after_count = G.vcount()
    del_ratio = round(100 * (before_count - after_count) / before_count, 1)
    log.info(f'deleted {before_count-after_count} ({del_ratio} %) nodes')

    # check if id_ig:s need to be updated to edge attributes
    mismatch_count = len([
        edge.index for edge in G.es
        if edge.attributes()[Edge.id_ig.value] != edge.index
    ])
    log.info(f'invalid edge ids: {mismatch_count}')
    # reassign igraph indexes to edge and node attributes
    G.es[Edge.id_ig.value] = [e.index for e in G.es]
    G.vs[Node.id_ig.value] = [v.index for v in G.vs]
    # check if id_ig:s need to be updated to edge attributes
    mismatch_count = len([
        edge.index for edge in G.es
        if edge.attributes()[Edge.id_ig.value] != edge.index
    ])
    log.info(f'invalid edge ids: {mismatch_count} (after re-indexing)')

    # 12) export graph data to GeoDataFrames fro debugging

    if (b_export_final_graph_to_gpkg == True):
        log.info(f'exporting final graph to {debug_igraph_gpkg} for debugging')
        e_gdf = ig_utils.get_edge_gdf(G,
                                      attrs=[Edge.id_otp, Edge.id_ig],
                                      ig_attrs=['source', 'target'])
        n_gdf = ig_utils.get_node_gdf(G, ig_attrs=['index'])
        e_gdf.to_file(debug_igraph_gpkg,
                      layer='final_graph_edges',
                      driver='GPKG')
        n_gdf.to_file(debug_igraph_gpkg,
                      layer='final_graph_nodes',
                      driver='GPKG')

    if (igraph_out_file != None and igraph_out_file != ''):
        ig_utils.export_to_graphml(G, igraph_out_file)

    return G
Example #22
0
class DeveloperProducer:
    """Publishes un-published developer names from MySQL to a RabbitMQ topic.

    Runs forever: polls the `developer` table for rows with status
    UN_PUBLISHED, publishes each name, then marks it PUBLISHED.
    """

    def __init__(self):
        # Rotating log file: 10 MB per file, 2 backups kept.
        self.logger = Logger(LOG_FILE, LOG_NAME, 10 * 1024 * 1024, 2)
        self._db_conn = None

    def start(self):
        """Run the produce loop until the process is killed."""
        rabbit_topic = RabbitTopic.init_rabbitmq_producer(
            EXCHANGE_NAME, self.logger)
        if not rabbit_topic:
            return

        self._conn_db()
        if not self._db_conn:
            # Not inside an exception handler, so use error() rather than
            # exception() — exception() here would append a bogus
            # "NoneType: None" traceback to the log record.
            self.logger.error('Connect database error')
            return

        while True:
            try:
                if self._is_no_more_records():
                    self.logger.info('There are no more records, wait...')
                    time.sleep(PRODUCE_WAIT_TIME)
                    # Re-check from the top instead of querying a table we
                    # just found empty.
                    continue

                developer_list = self._fetch_developer_list()
                for developer in developer_list:
                    if RabbitTopic.is_queue_full(QUEUE_NAME, QUEUE_LIMIT,
                                                 self.logger):
                        self.logger.info('Queue %s is full, wait...' %
                                         QUEUE_NAME)
                        time.sleep(PRODUCE_WAIT_TIME)
                        continue
                    try:
                        rabbit_topic.publish(ROUTING_KEY, developer)
                        self.logger.info(
                            'Publish developer %s and update status' %
                            developer)
                        self._update_status(PUBLISHED, developer)
                    except ConnectionClosed:
                        self.logger.debug(
                            'Connection to rabbitmq server closed, re-connecting...'
                        )
                        rabbit_topic = RabbitTopic.init_rabbitmq_producer(
                            EXCHANGE_NAME, self.logger)
            except Exception:
                # Top-level loop boundary: log with traceback and keep going.
                self.logger.exception('Publish developer error')

    def _conn_db(self):
        """Open the MySQL connection; self._db_conn stays None on failure."""
        try:
            self._db_conn = util.conn_mysql_db()
        except Exception:
            self.logger.exception('Connect database error')

    def _fetch_developer_list(self):
        """Return up to QUEUE_LIMIT un-published developer names ([] on error)."""
        developer_list = []

        self.logger.info('Get un-published developer list...')
        query = 'SELECT name FROM developer WHERE status=%s LIMIT %s'
        try:
            results = MySQLDBUtil.fetch_multiple_rows(
                query, (UN_PUBLISHED, QUEUE_LIMIT), self._db_conn)
            for result in results:
                (developer, ) = result
                developer_list.append(developer)
        except Exception:
            self.logger.exception('Query un-published developer error')
        return developer_list

    def _is_no_more_records(self):
        """Return True when no un-published developers remain (False on error)."""
        query = 'SELECT COUNT(*) FROM developer WHERE status=%s'
        try:
            result = MySQLDBUtil.fetch_single_row(query, (UN_PUBLISHED, ),
                                                  self._db_conn)
            if result:
                (count, ) = result
                if count == 0:
                    return True
        except Exception:
            self.logger.exception('Check if there is no more developers error')
        return False

    def _update_status(self, status, developer=None):
        """Set `status` for one developer, or for every row when developer is None."""
        if not developer:
            query = 'UPDATE developer SET status=%s'
        else:
            query = 'UPDATE developer SET status=%s WHERE name=%s'
        try:
            if not developer:
                MySQLDBUtil.update(query, (status, ), self._db_conn)
            else:
                MySQLDBUtil.update(query, (status, developer), self._db_conn)
        except Exception:
            self.logger.exception('Update record status')
Example #23
0
class PortsServiceHandler():
    """Handles /ports report requests: port summary lists and per-port data."""

    def __init__(self):
        self.log = Logger().get('reportserver.server.PortServiceHandler.PortServiceHandler')

    def process(self, rqst, path_tokens, query_tokens):
        """Dispatch a ports request.

        :param rqst: request/response helper (badRequest/sendJsonResponse/...)
        :param path_tokens: URL path tokens; with 5 tokens, token index 4 is
                            the requested port number
        :param query_tokens: query-string tokens, e.g. ['days=1']
        """
        uom = None
        units = None
        self.log.info("processing ports request:" + str(path_tokens) + str(query_tokens))

        if len(query_tokens) > 0:
            try:
                uom, units = utilities.validate_time_period(query_tokens)
            except ValueError:
                # units is still None here; badRequest simply echoes it back.
                rqst.badRequest(units)
                return

        # default if we aren't given valid uom and units
        if uom is None or units is None:
            uom = "days"
            units = 1

        if len(path_tokens) == 5:
            portNbr = utilities.validate_port_number(path_tokens[4])
            self.log.debug("requested: " + str(portNbr))
            if portNbr is not None and 0 < portNbr < 9000:
                self.get_port_data_by_time(rqst, portNbr, uom, units)
            elif portNbr is None:
                # Fixed from `portNbr == None or ""`: the `or ""` arm was dead
                # ("" is falsy), so the condition was effectively `is None`.
                self.get_port_list_json(rqst, uom, units)
            else:
                # A port number outside the accepted (0, 9000) range.
                rqst.badRequest()
                return
        elif len(path_tokens) == 4:
            self.get_port_list_json(rqst, uom, units)
        else:
            rqst.badRequest()
            return

    def get_port_list_json(self, rqst, uom, units):
        """Send the summary list for all configured ports as JSON."""
        jsondata = self.construct_port_summary_list(rqst, uom, units)
        rqst.sendJsonResponse(jsondata, 200)

    def get_port_data_by_time(self, rqst, portnumber, uom, units):
        """Send the data for one port over the given time window, or 404."""
        portmgr = PortManager()
        portjsondata = portmgr.getPort(portnumber, uom, units)
        if portjsondata is not None:
            # send response:
            rqst.sendJsonResponse(portjsondata, 200)
        else:
            rqst.notFound()

    def construct_port_summary_list(self, rqst, uom, units):
        """Build one summary dict per plugin-configured port."""
        g_config = GlobalConfig()
        plugins_dictionary = g_config.get_plugin_dictionary()

        json_list = []
        for key, val in plugins_dictionary.items():
            json_list.append(self.construct_port_summary(rqst, val['port'], val['table'], uom, units))

        return json_list

    def construct_port_summary(self, rqst, portnumber, tablename, uom, units):
        """Build the summary dict (attack counts, unique IPs, link) for one port."""
        portmgr = PortManager()
        port_attacks = portmgr.get_port_attack_count(tablename, uom, units)
        unique_ips = portmgr.get_unique_ips(tablename, uom, units)
        timespan = uom + "=" + str(units)

        response_json = {
            'port': str(portnumber),
            'total_attacks': str(port_attacks),
            'unique_ipaddresses': str(unique_ips),
            'timespan': timespan,
            'rel_link': rqst.get_full_url_path() + "/ports/" + str(portnumber) + "?" + timespan
        }

        return response_json
class BackFillToExcel:
    """
    Writes API test results back into the result excel workbook.

    Usage contract (established by fill_case_number's max_row + 1 write):
    fill_case_number() is called first for each test case and opens a new
    row; every other fill_* method then writes into the current last row.
    """
    def __init__(self):
        # Logger for this writer (the Chinese logger name is a runtime value).
        self._log = Logger('API测试结果回写').get_logger()

        # Absolute path of the result excel file.
        self._path = constant.api_result_excel_path

        # If the excel file does not exist yet, create it first.
        if not os.path.exists(self._path):
            BackupOrNewFile().create_result_file()

        # Load the workbook and select its first sheet (a WorkSheet object).
        self._wb = openpyxl.load_workbook(self._path)
        self._ws = self._wb[self._wb.sheetnames[0]]

        self._log.debug('成功加载测试结果文件:{}'.format(os.path.basename(self._path)))
        self._log.debug('成功定位到回写数据表:{}'.format(self._wb.sheetnames[0]))

    def save_excel(self):
        """
        Save all back-filled results for the current test case to disk.
        :return: None
        """
        try:
            # self._merge_cells_before_save()  # cell merging currently disabled
            self._wb.save(self._path)
        except PermissionError as e:
            # Save fails when the file is open in Excel (file lock).
            self._log.error('保存失败!!EXCEL文件被打开,请关闭后重新执行测试:{}'.format(e))
        else:
            self._log.debug('成功保存当前用例测试结果。')

    def fill_api_name(self, api_name):
        """
        Write the API name into the current row.
        :param api_name: API name
        :return: None
        """
        self._ws.cell(self._ws.max_row, API_NAME_COL).value = api_name
        self._log.debug('成功回写当前接口名称:{}'.format(api_name))

    def fill_api_url(self, api_url):
        """
        Write the API URL into the current row.
        :param api_url: URL
        :return: None
        """
        self._ws.cell(self._ws.max_row, API_URL_COL).value = api_url
        self._log.debug('成功回写当前接口URL:{}'.format(api_url))

    def fill_case_number(self, case_number):
        """
        Write the test-case number.

        NOTE: this is the first fill_* call per case (invoked in the base
        class's setUp). It writes to max_row + 1, opening the row that all
        other fill_* methods (which write at max_row) then target.
        :param case_number: test-case number
        :return: None
        """
        # First writer for this case, hence max_row + 1.
        self._ws.cell(self._ws.max_row + 1,
                      CASE_NUMBER_COL).value = case_number
        self._log.debug('成功回写当前用例编号为{},并确定当前用例回写行是第{}行'.format(
            case_number, self._ws.max_row))

    def fill_case_name(self, case_name):
        """
        Write the test-case name into the current row.
        :param case_name: test-case name
        :return: None
        """
        self._ws.cell(self._ws.max_row, CASE_NAME_COL).value = case_name
        self._log.debug('成功回写当前用例名称:{}'.format(case_name))

    def fill_judgement_result(self, result=1):
        """
        Write the pass/fail verdict for the current row.
        :param result: 0 = failure, 1 = success (default)
        :return: None
        """
        if result:
            self._ws.cell(self._ws.max_row,
                          JUDGEMENT_RESULT_COL).value = 'SUCCESS'
            self._log.debug('默认回写当前用例执行判定结果为:SUCCESS')
        else:
            self._ws.cell(self._ws.max_row,
                          JUDGEMENT_RESULT_COL).value = 'FAILURE'
            self._log.debug('当前用例断言失败或执行失败,改写判定结果为:FAILURE')
            # Failed rows get a red background (see _set_color_if_failure).
            self._set_color_if_failure(self._ws.max_row)

    def fill_excepted(self, excepted_result):
        """
        Write the expected result into the current row.
        :param excepted_result: expected result (stringified before writing)
        :return: None
        """
        self._ws.cell(self._ws.max_row,
                      EXPECTED_RESULT_COL).value = str(excepted_result)
        self._log.debug('成功回写当前用例预期结果:{}'.format(excepted_result))

    def fill_compare_result(self, compare_result):
        """
        Write the comparison result into the current row.
        :param compare_result: list of differences; empty/falsy means a match
        :return: None
        """
        if not compare_result:
            self._ws.cell(self._ws.max_row,
                          COMPARE_RESULT_COL).value = '比对结果一致'
        else:
            self._ws.cell(self._ws.max_row,
                          COMPARE_RESULT_COL).value = str(compare_result)
            # NOTE(review): only the mismatch branch logs — confirm intended.
            self._log.debug('成功回写当前用例比对结果:{}'.format(compare_result))

    def fill_response(self, response):
        """
        Write the returned JSON into the current row.
        :param response: response JSON (stringified before writing)
        :return: None
        """
        self._ws.cell(self._ws.max_row, RESPONSE_COL).value = str(response)
        self._log.debug('成功回写当前用例返回结果:{}'.format(response))

    def fill_test_data(self, curr_case_data):
        """
        Write this case's test data into the current row.
        :param curr_case_data: test data (stringified before writing)
        :return: None
        """
        self._ws.cell(self._ws.max_row,
                      CASE_DATA_COL).value = str(curr_case_data)
        self._log.debug('成功回写当前用例测试数据:{}'.format(curr_case_data))

    def _set_color_if_failure(self, row):
        """
        Fill the given row's result cells with a red background to mark failure.
        :param row: row number
        :return: None
        """
        fill = PatternFill(fill_type='solid', fgColor="FF0000")
        for col in range(API_NAME_COL, CASE_DATA_COL + 1):
            self._ws.cell(row, col).fill = fill
        self._log.debug('当前用例执行失败标记为红色!')

    def _merge_cells_before_save(self):
        """
        Merge the API-name and URL cells spanning the current case's rows
        (meant to be called just before saving).
        :return: None
        """
        # Lowest row of the merge range.
        # NOTE(review): assumes the CASE_NUMBER cell of the last row holds the
        # number of rows this case spans — confirm against the writer side.
        min_row_ = self._ws.max_row - self._ws.cell(self._ws.max_row,
                                                    CASE_NUMBER_COL).value + 1
        # Merge the cells.
        self._ws.merge_cells(start_row=min_row_,
                             start_column=API_NAME_COL,
                             end_row=self._ws.max_row,
                             end_column=API_NAME_COL)
        self._ws.merge_cells(start_row=min_row_,
                             start_column=API_URL_COL,
                             end_row=self._ws.max_row,
                             end_column=API_URL_COL)
        # Vertically center the merged cells.
        align = Alignment(vertical='center')
        self._ws.cell(min_row_, API_NAME_COL).alignment = align
        self._ws.cell(min_row_, API_URL_COL).alignment = align
Example #25
0
    class _Framework:
        """Top-level coordinator: loads configuration, sets up logging,
        starts the data manager and one network listener per plugin port."""

        def __init__(self, plugin_cfg_path, global_cfg_path):
            self._global_config = GlobalConfig(plugin_cfg_path, global_cfg_path)
            self._pid = os.getpid()
            self._log = None
            self._data_manager = None
            self._shutting_down = False
            self._plugin_imports = {}
            self._listener_list = {}
            self._running_plugins_list = []

        def start(self):
            """Bring the framework up; returns early if privileges can't
            be dropped."""
            self.set_shutdown_hook()
            print('Press Ctrl+C to exit.')
            if not self.drop_permissions():
                return

            self._global_config.read_global_config()

            self.start_logging()

            self._global_config.read_plugin_config()
            self._data_manager = DataManager()
            self._data_manager.start()

            self.start_listeners()

        def start_logging(self):
            """Create the framework logger from the global config section."""
            framework_cfg = self._global_config['Framework']
            self._log = Logger(
                framework_cfg['logName'],
                framework_cfg['logLevel']).get('framework.frmwork.Framework')
            self._log.info('RECCE7 started (PID %d)' % self._pid)

        @staticmethod
        def drop_permissions():
            """Drop root privileges to an unprivileged user/group.

            Returns True when running unprivileged (nothing to drop) or
            after a successful drop; False when the target user cannot be
            determined from RECCE7_OS_DIST.
            """
            if os.getuid() != 0:
                # Already running unprivileged.
                return True

            dist_name = os.getenv('RECCE7_OS_DIST')
            users_dict = {
                'centos': ('nobody', 'nobody'),
                'debian': ('nobody', 'nogroup')
            }
            if dist_name not in users_dict:
                print(
                    'Unable to lower permission level - not continuing as\n'
                    'superuser. Please set the environment variable\n'
                    'RECCE7_OS_DIST to one of:\n\tcentos\n\tdebian\n'
                    'or rerun as a non-superuser.')
                return False
            user_name, group_name = users_dict[dist_name]
            target_uid = pwd.getpwnam(user_name).pw_uid
            target_gid = grp.getgrnam(group_name).gr_gid

            # Order matters: supplementary groups and gid must be dropped
            # while still root, uid last.
            os.setgroups([])
            os.setgid(target_gid)
            os.setuid(target_uid)
            os.umask(0o077)

            return True

        def create_import_entry(self, port, name, clsname):
            """Import plugin module `name` and map its class to `port`."""
            module = import_module('plugins.' + name)
            self._plugin_imports[port] = getattr(module, clsname)

        def start_listeners(self):
            """Start one NetworkListener per configured plugin port."""
            address = self._global_config['Framework']['listeningAddress']
            for port in self._global_config.get_ports():
                plugin_config = self._global_config.get_plugin_config(port)
                self.create_import_entry(port, plugin_config['module'],
                                         plugin_config['moduleClass'])

                listener = NetworkListener(address, plugin_config, self)
                listener.start()
                self._listener_list[port] = listener

        def set_shutdown_hook(self):
            """Install a SIGINT handler that triggers shutdown()."""
            signal.signal(signal.SIGINT, self.shutdown)

        def shutdown(self, *args):
            """Stop listeners, then plugins, then the data manager."""
            self._shutting_down = True

            self._log.debug('Shutting down network listeners')
            for net_listener in self._listener_list.values():
                net_listener.shutdown()

            self._log.debug('Shutting down plugins')
            for running_plugin in self._running_plugins_list:
                running_plugin.shutdown()

            self._log.debug('Shutting down data manager')
            self._data_manager.shutdown()

            print('Goodbye!')

        #
        # Framework API
        #

        def get_config(self, port):
            """
            Returns the configuration dictionary for the plugin
            running on the specified port.

            :param port: a port number associated with a loaded plugin
            :return: a plugin configuration dictionary
            """
            return self._global_config.get_plugin_config(port)

        def spawn(self, socket, config):
            """
            Spawns the plugin configured by 'config' with the provided
            (accepted) socket.

            :param socket: an open, accepted socket returned by
                           socket.accept()
            :param config: the plugin configuration dictionary describing
                           the plugin to spawn
            :return: a reference to the plugin that was spawned
            """
            # ToDo Throw exception if plugin class not found
            plugin = self._plugin_imports[config['port']](socket, config, self)
            plugin.start()
            self._running_plugins_list.append(plugin)
            return plugin

        def insert_data(self, data):
            """
            Inserts the provided data into the data queue so that it can
            be pushed to the database.

            :param data: data object to add to the database
            """
            self._data_manager.insert_data(data)

        def plugin_stopped(self, plugin):
            """
            Tells the framework that the specified plugin has stopped
            running and doesn't need to be shutdown explicitly on program
            exit.

            :param plugin: a reference to a plugin
            """
            if not self._shutting_down:
                self._running_plugins_list.remove(plugin)
Example #26
0
class CategoryProducer:
    """Publishes un-published categories from MySQL to a RabbitMQ topic.

    Runs forever: polls the `category` table for UN_PUBLISHED rows, publishes
    each category and marks it PUBLISHED; when the table is exhausted twice
    in a row, resets every row back to UN_PUBLISHED.
    """

    def __init__(self):
        # Rotating log file: 10 MB per file, 2 backups kept.
        self.logger = Logger(LOG_FILE, LOG_NAME, 10*1024*1024, 2)
        self._db_conn = None

    def start(self):
        """Run the produce loop until the process is killed."""
        rabbit_topic = RabbitTopic.init_rabbitmq_producer(EXCHANGE_NAME, self.logger)
        if not rabbit_topic:
            return

        self._conn_db()
        if not self._db_conn:
            # Not inside an exception handler, so use error() rather than
            # exception() — exception() here would append a bogus
            # "NoneType: None" traceback to the log record.
            self.logger.error('Connect database error')
            return

        while True:
            try:
                if self._is_no_more_records():
                    self.logger.info('There are no more records, wait...')
                    time.sleep(PRODUCE_WAIT_TIME)
                    # Still no more available records, then reset...
                    if self._is_no_more_records():
                        self.logger.info('Still there are no more records, reset all records...')
                        self._reset_category()

                category_list = self._fetch_category_list()
                for category in category_list:
                    if RabbitTopic.is_queue_full(QUEUE_NAME, QUEUE_LIMIT, self.logger):
                        self.logger.info('Queue %s is full, wait...' % QUEUE_NAME)
                        time.sleep(PRODUCE_WAIT_TIME)
                        continue
                    try:
                        rabbit_topic.publish(ROUTING_KEY, category)
                        self.logger.info('Publish category %s and update the status' % category)
                        self._update_status(PUBLISHED, category)
                    except ConnectionClosed:
                        self.logger.debug('Connection to rabbitmq server closed, re-connecting...')
                        rabbit_topic = RabbitTopic.init_rabbitmq_producer(EXCHANGE_NAME, self.logger)
            except Exception:
                # Top-level loop boundary: log with traceback and keep going.
                self.logger.exception('Publish category error')

    def _conn_db(self):
        """Open the MySQL connection; self._db_conn stays None on failure."""
        try:
            self._db_conn = util.conn_mysql_db()
        except Exception:
            self.logger.exception('Connect database error')

    def _fetch_category_list(self):
        """Return up to QUEUE_LIMIT un-published categories ([] on error)."""
        category_list = []

        self.logger.info('Get un-published category list...')
        query = 'SELECT category FROM category WHERE status=%s LIMIT %s'
        try:
            results = MySQLDBUtil.fetch_multiple_rows(query, (UN_PUBLISHED, QUEUE_LIMIT), self._db_conn)
            for result in results:
                (category,) = result
                category_list.append(category)
        except Exception:
            self.logger.exception('Query un-published category error')
        return category_list

    def _is_no_more_records(self):
        """Return True when no un-published categories remain (False on error)."""
        query = 'SELECT COUNT(*) FROM category WHERE status=%s'
        try:
            result = MySQLDBUtil.fetch_single_row(query, (UN_PUBLISHED,), self._db_conn)
            if result:
                (count,) = result
                if count == 0:
                    return True
        except Exception:
            self.logger.exception('Check available records error')
        return False

    def _reset_category(self):
        """Flip every category back to UN_PUBLISHED so publishing restarts."""
        try:
            self.logger.info('All category have been published, reset all to un-published status')
            self._update_status(UN_PUBLISHED)
        except Exception:
            self.logger.exception('Reset category table error')
            return

    def _update_status(self, status, category=None):
        """Set `status` for one category, or for every row when category is None."""
        if not category:
            query = 'UPDATE category SET status=%s'
        else:
            query = 'UPDATE category SET status=%s WHERE category=%s'
        try:
            if not category:
                MySQLDBUtil.update(query, (status,), self._db_conn)
            else:
                MySQLDBUtil.update(query, (status, category), self._db_conn)
        except Exception:
            self.logger.exception('Update record status')
 def get_row_count(self):
     """Return the number of rows (max_row) of the excel sheet."""
     # NOTE(review): Logger.debug is invoked on the class itself here,
     # matching the surrounding file's usage — confirm Logger's API.
     Logger.debug(
         "TestDataReaderCommonUtil.get_row_count 获取excel的行数 file name is : "
         + self.file_name, self.__class__.__name__)
     sheet = self.read_excel_sheet()
     return sheet.max_row
Example #28
0
class Replier(object):
    """Message replier: dispatches chat messages to the matching feature
    handler (quotes, games, red packets, lot drawing, virtual real estate)."""

    # Integral level -> user-facing title (values are sent into chat).
    level_map = {
        1: "隔壁村的幼稚鬼",
        2: "村子里的弟中弟",
        3: "村民中的初学者",
        4: "村子里的高中生",
        5: "合格的成年村民",
    }

    # Red-packet quiz bank: keyword -> [question text, correct option].
    q_a_list = {
        "生日": ["杨超越的生日是什么时候 a)7月31 b)7月29 c)7月28", "a"],
        "演艺": ["杨超越获得了哪年的演艺人物奖?a)2017 b)2018 c)2019", "b"],
        "主食": ["杨超越最新欢的主食是?a)土豆 b)米饭 c)燕麦片", "b"]
    }

    # Canned jokes returned by the "笑话" command in extra().
    jokes = [
        '''美国外交代表团到苏联访问,苏修接待官员陪他们参观“建设的伟大成就”,并且得意的说:“到了下一个五年计划,每个苏联家庭都可以拥有一架私人飞机!”\n美国人惊讶的问:“ 他们要飞机干什么呢?”\n苏修官员说:“当然有用啊……譬如你在莫斯科听说列宁格勒开始供应面包了,你可以马上开飞机赶去排队啊。''',
        '''斯大林、赫鲁晓夫和勃列日涅夫乘坐火车出门。开着开着,火车突然停了。\n斯大林把头伸出车窗外,怒吼道:“枪毙火车司机!”可是车还是没有动。\n接着赫鲁晓夫说:“给火车司机恢复名誉!”车仍然没有动。\n勃列日涅夫说:“同志们,不如拉上窗帘,坐在座位上自己摇动身体,做出列车还在前进的样子……”''',
        '''美术馆里有一幅描写亚当和夏娃的画。\n一个英国人看了,说:“他们一定是英国人,男士有好吃的东西就和女士分享。\n一个法国人看了,说:“他们一定是法国人,情侣裸体散步。\n一个苏联人看了,说:“他们一定是苏联人,他们没有衣服,吃得很少,却还以为自己在天堂!''',
        '''杨村制度的优越性在哪里?\n成功克服了其他制度里不存在的问题。''',
        '''黑子:哈哈哈,杨超越这么重要的场合居然口误?\n路人:什么场合?\n黑子:不知道?\n路人:那你看了这么久的视频在看什么?\n黑子:在看杨超越!'''
    ]

    def __init__(self, bot):
        # self.group = group
        self.api_key = api_key
        self.log = Logger()
        self.ycy = YcyReplier()
        self.tuling = Tuling(api_key=TULING_KEY)
        self.user = User()
        self.group = Group()
        self.rsp_game_player_map = {}
        self.bot = bot
        self.draw_lots_game = DrawLots()
        self.real_estate = RealEstate()
        self.user_lots_map = {}  # user puid -> today's lot text
        self.user_lots_read_map = {}  # user puid -> today's lot explanation
        self.answer = ""  # active red-packet answer ("" = none active)
        self.red_bag_num = 0  # remaining red-packet rewards
        '''
        开启每日定时器,每日零时清空抽签内容
        '''
        # Daily cron job: clear the lot-drawing state at midnight.
        self.scheduler = BackgroundScheduler()
        self.scheduler.add_job(self.init_lots_map, 'cron', hour='0')
        self.scheduler.start()

    def init_lots_map(self):
        """
        Clear the daily lot-drawing state so every member can draw again.
        """
        self.user_lots_map = {}
        self.user_lots_read_map = {}
        self.log.info("=== Init Lots ===")

    def random_img(self, msg) -> tuple:
        """
        Reply with a random picture from resources/pics when the trigger
        phrase appears in the message.
        :return: ('img', path, '') tuple, or empty_result when not triggered
        """
        print("===天降超越===")
        print(msg.text)

        if "天降超越" in msg.text or "天将超越" in msg.text:  # TODO: more trigger phrases to be added
            list_dir = os.listdir(os.path.join('resources', 'pics'))
            path = choice(list_dir)
            self.log.info('choose:-->{}'.format(path))
            self.log.debug(os.path.join('resources', 'pics', path))
            return 'img', os.path.join('resources', 'pics', path), ''
        return empty_result

    def robot_init(self, msg) -> tuple:
        """
        Admin-only commands: re-initialize user data, or set up a
        password-protected red packet.
        :param msg: incoming group message
        :return: ('text', reply, '') tuple, or empty_result
        """
        real_msg = msg.text.split()
        if msg.member.puid == self.group.admin_puid and len(
                real_msg) != 1:  # only the group admin, and only with arguments
            if real_msg[len(real_msg) - 1] == "初始化":
                self.log.info(msg.sender)
                # self.group.update_group(msg.sender, self.api_key)
                self.user.update_users(msg.sender, self.api_key)
                self.log.info("初始化完成!")
                return 'text', "初始化完成!", ''
            elif real_msg[1] == "口令红包":
                self.log.info("设置口令红包!")
                print("===口令红包信息===")
                print(real_msg[2])
                print(real_msg[3])
                try:
                    self.red_bag_num = int(real_msg[2])
                # NOTE(review): bare except — any non-numeric count silently
                # falls back to 0 (no packets).
                except:
                    self.red_bag_num = 0
                if real_msg[3] in self.q_a_list:
                    item = self.q_a_list[real_msg[3]]
                    self.answer = item[1]
                    return 'text', item[0], ''
                else:
                    self.answer = real_msg[3]
                    return 'text', "口令红包设置完成!", ''
            else:
                return empty_result
        return empty_result

    def update_user_info(self, msg):
        """Refresh the stored user records for the message's chat."""
        self.log.info("更新用户信息中……")
        self.user.update_users(msg.sender, self.api_key)
        self.log.info("用户信息更新完毕……")

    def chaoyue_ana(self, msg) -> tuple:
        """
        Reply with a canned quote when the last word of the message matches.
        :return: ('text', reply, '') tuple, or empty_result
        """
        real_msg = msg.text.split()
        respond_msg = self.ycy.reply_text(real_msg[len(real_msg) -
                                                   1])  # quote lookup needs no @-mention
        if respond_msg:
            return 'text', '@' + msg.member.name + ' ' + respond_msg, ''
        return empty_result

    def set_group(self, puid):
        # Bind this replier to the group identified by puid.
        self.group.set_group(puid)

    def handle_leave_message(self, msg) -> tuple:
        """
        Handle message-board requests ("留言:…" posts a note).
        :param msg: incoming group message
        :return: ('text', reply, '') tuple, or empty_result
        """
        if "村头留言板" in msg.text:
            return "text", "@我并回复:「留言:你想说的话」,则可在村头留言板上留下你的留言内容哦", ""
        is_leave_message = re.search(r'(留言:|留言:)(.*)', msg.text)
        if is_leave_message:
            content = is_leave_message.group(2).strip()  # second group: the note body, stripped
            self.log.info('留言内容:{}'.format(content))
            status = create_messages(
                name=msg.member.name,
                content=content,
                fans_id=msg.member.puid,
            )
            if status == "ok":
                return 'text', '@' + msg.member.name + ' ' + "留言成功!点击 {} 可查看你的留言".format(
                    'http://ahasmarter.com/', ), ''
            else:
                return 'text', '@' + msg.member.name + ' ' + "留言失败!稍后再尝试吧", ''
        return empty_result

    def get_group_introduction(self, msg) -> tuple:
        """
        Return the group introduction text when asked for it.
        :param msg: incoming group message
        :return: ('text', intro, '') tuple, or empty_result
        """
        real_msg = msg.text.split()
        if real_msg[len(real_msg) - 1] == "群信息" or real_msg[len(real_msg) -
                                                            1] == "群简介":
            return 'text', self.group.intro, ''
        return empty_result

    def finger_guessing_game(self, msg) -> tuple:
        """
        Start a rock-paper-scissors round for the sender.
        :param msg: incoming group message
        :return: ('text', reply, '') tuple, or empty_result
        """
        group_id = msg.member.group.puid  # unique group id
        name = msg.member.name  # player name
        user_id = msg.member.puid  # player id
        real_msg = msg.text.split()
        if real_msg[len(real_msg) - 1] == "石头剪刀布" or real_msg[len(real_msg) - 1] == "剪刀石头布" \
                or real_msg[len(real_msg) - 1] == "猜拳":
            self.log.debug('---init猜拳----')
            # NOTE(review): stores {group_id: [user_id, RspGame, start_time,
            # name]} — a flat list, not the nested dict an older comment
            # described; play_game's commented-out code indexes it as a list.
            self.rsp_game_player_map.update(
                {
                    group_id: [user_id,
                               RspGame(1),
                               now_to_datetime4(), name],
                }, )
            self.rsp_game_player_map[group_id][1].start(name)  # start the round
            return 'text', '@' + msg.member.name + \
                   " 石头剪刀布开始,你先出吧,赢了我有奖励哦(1局定胜)", ''
        return empty_result

    def play_game(self, msg) -> tuple:
        """
        Play one stateless rock-paper-scissors round against the bot.
        :param msg: incoming group message
        :return: ('both', pic, result) tuple, or empty_result
        """
        if "超越猜拳" in msg.text:
            return "text", "@我并回复你的出招(比如「剪刀」)就能跟我玩猜拳游戏,赢了我会奖励3积分,输了扣除3积分,如果积分不够则不会进行奖惩", ""
        real_msg = msg.text.split()
        if "石头" in real_msg[len(real_msg) - 1]  or  "剪刀" in real_msg[len(real_msg) - 1]  \
                or "布" in real_msg[len(real_msg) - 1]:
            game = RspGame(1)
            game.start(msg.member.name)
            cancel, result, pic = game.play(msg)
            return 'both', pic, result
        else:
            return empty_result
        # NOTE(review): the string literal below is unreachable old code
        # (both branches above return); kept as an inert block string.
        """
        group_id = msg.member.group.puid
        user_id = msg.member.puid
        player_map = self.rsp_game_player_map
        self.log.info(player_map)
        # 如果字典中包含群组id并且 玩家id在字典中
        if player_map.get(group_id):
            is_overtime = now_to_datetime4() > two_minutes_later(player_map[group_id][2])
            self.log.info('游戏是否超时:%s' % is_overtime)
            if is_overtime:
                msg = '@' + str(player_map[group_id][3]) + ' 游戏已经超时自动终止了呀!'
                msg.chat.send_msg(msg)
                player_map.pop(group_id)  # 超时删除群组id对应的字典
        if player_map.get(group_id):  # 超时可能会pop掉该key,需要重新判断
            if user_id not in player_map.get(group_id, []):  # 不是玩家的消息,不进行回应
                return 'text', '@' + msg.member.name + " 先等等哦,我正在跟@" + \
                       player_map[group_id][3] + " 玩石头剪刀布", ''
            else:
                cancel, result, pic = player_map[group_id][1].play(msg)  # 玩游戏
                self.log.debug('game result:{} pic:{}'.format(result, pic))
                if cancel == 1:
                    player_map.pop(group_id)  # 如果游戏结束, 删除群组id对应的字典
                return 'both', pic, result
        typ, content1, content2 = self.finger_guessing_game(msg)  # 猜拳游戏
        if typ == 'text':
            return typ, content1, content2
        return empty_result"""

    def red_bag(self, msg) -> tuple:
        """Award integral points for answering the active red-packet question."""
        if "口令红包" in msg.text:
            return "text", "管理员会在某些时间在群里发出超越百科抢答红包,回答正确会得到超越积分,多多留意~", ""
        real_msg = msg.text.split()
        if self.red_bag_num == 0:  # no packets left
            self.answer = ""  # clear the stored answer
        else:
            print(self.answer)
            if self.answer == real_msg[1] and msg.is_at:
                user_puid = msg.member.puid
                bot_id = self.bot.self.puid
                result = self.user.transfer(bot_id, user_puid, self.group.puid,
                                            3, self.api_key)
                self.red_bag_num -= 1
                if result["status"] == "success":
                    return 'text', " 口令正确!奖励给" + msg.member.name + " 3 个超越积分!", ''
                else:
                    return 'text', '红包领完啦!', ''
        return empty_result

    def draw_lots(self, msg) -> tuple:
        """Daily fortune lots: draw once per day per user, then explain it."""
        if "超越抽签" in msg.text:
            return "text", "每日0点过后,@我并回复「抽签」,可以抽出你当日的运势签,@我并回复「解签」会解释抽签内容~", ""
        real_msg = msg.text.split()
        user_id = msg.member.puid
        if real_msg[len(real_msg) - 1] == "抽签":
            if user_id in self.user_lots_map:
                return 'text', '@' + msg.member.name + ' 今日你的运势签: ' + self.user_lots_map[
                    user_id], ''
            else:
                msg1, msg2 = self.draw_lots_game.play()
                self.user_lots_map[user_id] = msg1
                self.user_lots_read_map[user_id] = msg2
                return 'text', '@' + msg.member.name + ' 今日你的运势签: ' + msg1, ''
        elif real_msg[len(real_msg) - 1] == "解签":
            if user_id in self.user_lots_read_map:
                return 'text', '@' + msg.member.name + ' 解签: ' + self.user_lots_read_map[
                    user_id], ''
            else:
                return 'text', '@' + msg.member.name + ' 今日还未进行抽签哦,请@我回复抽签', ''
        else:
            return empty_result

    def reward(self, msg) -> tuple:
        """
        Tip ("打赏") integral points from the sender to a named member.
        :param msg: incoming group message
        :return: ('text', reply, '') tuple, or empty_result
        """
        if str.find(msg.text, "打赏") != -1:
            # +3 skips the two chars of "打赏" plus one separator char —
            # presumably a space; verify against actual message format.
            str_after_dashang = msg.text[str.find(msg.text, "打赏") + 3:].split()
            to = self.user.find_user_by_name(msg.sender, str_after_dashang[0])
            from_puid = msg.member.puid
            self.log.info(from_puid)
            self.log.info(to.puid)
            result = self.user.transfer(
                from_puid,
                to.puid,
                self.group.puid,
                int(str_after_dashang[1], ),
                self.api_key,
            )
            if result["status"] == "success":
                payload = '打赏成功!' + msg.member.name + " 打赏给 " + \
                          to.name + " " + str_after_dashang[1] + "个超越积分!"
                return 'text', payload, ''
            else:
                return 'text', '打赏失败!', ''
        return empty_result

    def integral(self, msg) -> tuple:
        """
        Integral-point queries: explanation, balance, and level.
        :return: ('text', reply, '') tuple, or empty_result
        """
        real_msg = msg.text.split()
        if real_msg[len(real_msg) - 1] == "超越积分":
            # NOTE(review): rebinding 'msg' shadows the message object from
            # here on; harmless only because the function returns right away.
            msg = "超越积分可以用来干很多好玩的事情,后续推出虚拟房产和虚拟商店,可作为购买力进行交易哦,还支持个人打赏,@我并回复「余额」来查看你的积分总额。"
            return 'text', msg, ''
        if real_msg[len(real_msg) - 1] in ["余额", "积分"]:
            user_puid = msg.member.puid
            print("想拿余额的puid:")
            print(user_puid)
            balance = self.user.get_balance_by_puid(user_puid, self.group.puid)
            msg = "你有" + str(balance) + "超越积分"
            return 'text', msg, ''
        if real_msg[len(real_msg) - 1] == "等级":
            user_puid = msg.member.puid
            level = self.user.get_level_by_puid(user_puid, self.group.puid)
            msg = "你现在是" + str(level) + "级: " + self.level_map[int(level)]
            return 'text', msg, ''
        return empty_result

    def houses(self, msg) -> tuple:
        """
        Virtual real-estate commands: help, list, sign, and buy.
        :return: ('text', reply, '') tuple, or empty_result
        """
        real_msg = msg.text.split()
        try:
            if "超越买房" in msg.text:
                return "text", "超越买房是实验性功能,@我并回复「看房」查看目前「超越大陆」上的房产所有者\n\n"+\
                    "@我并回复「买房 房产名 价格」可以进行房产购买,例如「@全村的希望 买房 火炉堡 30」\n"+\
                        "注意!!!你出的价格至少要比当前的价格大 1,才能买房成功 \n" +\
                        "如果你是房产所有者,@我并回复「房产名 签名:「你要签名的内容」」可进行签名,例如「@全村的希望 火炉堡 签名:靓仔」", ""
            if real_msg[len(real_msg) - 1] == "看房":
                print("=== 看房ing ===")
                msg = self.real_estate.look()
                return 'text', msg, ''
            elif re.search(r'(签名:|签名:)(.*)', msg.text):
                print("=== 签名ing ===")
                house_name = real_msg[1]
                print(msg.text)
                signature = msg.text[msg.text.find("签名") + 3:]
                print(house_name)
                print(self.api_key)
                print(msg.member.puid)
                print(self.group.puid)
                print(signature)
                res = self.real_estate.leave_sig(msg.member.puid,
                                                 self.group.puid, signature,
                                                 house_name, self.api_key)
                print(res)
                if res["result"] == "success":
                    payload = "你在" + house_name + "上留下了你的签名:" + signature
                    return 'text', payload, ''
                else:
                    payload = "签名失败!"
                    return 'text', payload, ''
            elif real_msg[1] == "买房":
                print("=== 买房ing ===")
                house_name = real_msg[2]
                amount = int(real_msg[3])
                print(house_name)
                print(amount)
                res = self.real_estate.buy_house(msg.member.puid,
                                                 self.group.puid, house_name,
                                                 amount, self.api_key)
                if res["result"] == "success":
                    payload = "买房成功!\n你现在是 " + house_name + " 的领主!"
                    return 'text', payload, ''
                else:
                    payload = "买房失败!"
                    return 'text', payload, ''

            return empty_result
        # NOTE(review): bare except silently swallows short/malformed
        # commands (IndexError, ValueError) as well as backend failures.
        except:
            return empty_result

    def extra(self, msg) -> tuple:
        """
        Miscellaneous commands: credits, help, vote link, and jokes.
        :param msg: incoming group message
        :return: ('text', reply, '') tuple, or empty_result
        """
        real_msg = msg.text.split()
        if real_msg[len(real_msg) - 1] in ["致谢", "鸣谢"]:
            return 'text', "感谢「心理医生聪」与「禹sen」,提供超越语录的支持!", ''
        if real_msg[len(real_msg) - 1] in ["帮助", "?", "?"]:
            payload = "本 AI 目前支持以下功能: \n" + \
                        "- 超越积分\n" + \
                        "- 天降超越\n" + \
                        "- 超越猜拳\n" + \
                        "- 村头留言板\n" + \
                        "- 超越抽签\n" + \
                        "- 超越接龙\n" + \
                        "- 口令红包(管理员功能)\n" + \
                        "- 超越买房"

            return 'text', payload, ''
        if real_msg[len(real_msg) - 1] == "投票":
            payload = "https://ke.qq.com/cates/ccyy/index.html?act_id=1&work_id=29&mmticket= ⬅ 欢迎猛戳链接投票"
            return 'text', payload, ''
        if real_msg[len(real_msg) - 1].find("笑话") != -1:
            payload = choice(self.jokes)
            return 'text', payload, ''
        return empty_result

    def handle_solo_msg(self, msg):
        """
        Handle a private (one-to-one) message: greet the new friend.
        :param msg: incoming private message
        :return: None (replies are sent directly)
        """
        friend = msg.sender
        # Send a welcome message to the new friend.
        friend.send('你好呀,我是全村的希望!')
        friend.send('参与内测看我朋友圈的图片,扫二维码加群')
        # friend.send_image('group.jpeg')

    def handle_group_msg(self, msg) -> tuple:
        """
        Handle a group message: try each feature handler in priority order
        and return the first non-empty reply; fall back to the Tuling bot.
        :param msg: incoming group message
        :return: (type, content1, content2) tuple
        """
        self.log.info('receive: %s' % msg.text)

        typ, content1, content2 = self.reward(msg)  # tipping works with or without @-mention
        if typ:
            self.log.info(content1)
            return typ, content1, content2

        typ, content1, content2 = self.robot_init(msg)  # admin re-init and red-packet setup
        if typ:
            self.log.info(content1)
            return typ, content1, content2
        typ, content1, content2 = self.red_bag(msg)  # red-packet answers
        if typ:
            self.log.info(content1)
            return typ, content1, content2

        typ, content1, content2 = self.random_img(msg)  # random picture drop
        if typ:
            self.log.info(content1)
            return typ, content1, content2

        typ, content1, content2 = self.chaoyue_ana(msg)  # canned quotes
        if typ:
            self.log.info(content1)
            return typ, content1, content2

        if msg.is_at:  # the handlers below run only when the bot is @-mentioned
            typ, content1, content2 = self.play_game(
                msg)  # game play: high priority, may intercept other replies
            if typ:
                self.log.info(content1)
                user_puid = msg.member.puid
                bot_id = self.bot.self.puid
                user_balance = self.user.get_balance_by_puid(
                    user_puid, msg.member.group.puid)
                bot_balance = self.user.get_balance_by_puid(
                    bot_id, msg.member.group.puid)
                if user_balance < 3:
                    payload = " 由于你余额不足 3 积分,所以本次游戏没有奖惩哦~"
                elif bot_balance < 3:
                    payload = " 超越宝宝的钱包瘪了,所以本次游戏没有奖惩哦~"
                else:
                    if "游戏结束,恭喜你赢了" in content2:
                        from_puid = bot_id
                        print(from_puid)
                        to_puid = user_puid
                        result = self.user.transfer(from_puid, to_puid,
                                                    self.group.puid, 3,
                                                    self.api_key)
                        if result["status"] == "success":
                            payload = " 奖励给 " + msg.member.name + " 3 个超越积分!"
                        else:
                            payload = " 但是我没钱啦~"
                    elif "你输了" in content2:
                        from_puid = user_puid
                        to_puid = bot_id
                        result = self.user.transfer(from_puid, to_puid,
                                                    self.group.puid, 3,
                                                    self.api_key)
                        if result["status"] == "success":
                            payload = " 扣除 " + msg.member.name + " 3 个超越积分!"
                        else:
                            payload = " 你钱不够,接下来的游戏会没有奖励哦~"
                    else:
                        payload = ""
                return typ, content1, content2 + payload
            typ, content1, content2 = self.draw_lots(msg)  # lot drawing
            if typ:
                self.log.info(content1)
                return typ, content1, content2
            typ, content1, content2 = self.handle_leave_message(msg)  # message-board requests
            if typ:
                self.log.info(content1)
                return typ, content1, content2
            typ, content1, content2 = self.get_group_introduction(msg)  # group intro
            if typ:
                self.log.info(content1)
                return typ, content1, content2
            typ, content1, content2 = self.integral(msg)  # integral commands
            if typ:
                self.log.info(content1)
                return typ, content1, content2

            typ, content1, content2 = self.houses(msg)  # real-estate commands (old comment was a stale copy of the one above)
            if typ:
                self.log.info(content1)
                return typ, content1, content2

            typ, content1, content2 = self.extra(msg)  # misc commands
            if typ:
                self.log.info(content1)
                return typ, content1, content2
            tuling_reply = self.tuling.reply_text(msg).replace(
                "图灵机器人", "超越宝宝").replace("清华大学硕士杨超?",
                                         "杨超越最美不允许反驳").replace("你接错了", "我不会接")
            self.log.info(tuling_reply)
            return 'text', tuling_reply, ''

        return empty_result
Example #29
0
def log1():
    """Spin forever, emitting the debug message "test1" on every iteration.

    Never returns; presumably used to stress-test the logger from a thread.
    """
    while 1:
        Logger.debug("test1")
Example #30
0
class AndroidEngine:
    """
    Android engine: starts the appium server automatically, connects the
    device (real devices can be switched to a WiFi connection) and builds
    the driver.
    1. Make sure the emulator is already running, or the real device is
       plugged in with USB debugging enabled, before instantiating.
    2. Single-device only for now; when several devices are connected the
       first one in the adb device list is used.
    """
    def __init__(self):
        self._log = Logger('安卓引擎').get_logger()
        # Load runtime configuration.
        self._reader = OperateConfig(constant.config_pro_app)
        self._if_wifi = self._reader.get_bool('server', 'if_wifi')
        self._android_mode = self._reader.get_str('server', 'android_mode')

    def get_driver(self):
        """
        Build a driver according to the configured android mode.
        :return: driver object (exits the process on bad configuration)
        """
        self._start_server()
        devices = self._get_device_names()
        version = self._get_android_version(devices[0])
        app_path = publicFunctions.get_apk_path()
        ports = eval(self._reader.get_str('connected', 'server_ports'))
        # The two modes differed only in the capability template, so the
        # template is picked from a table instead of duplicating the branch.
        caps_by_mode = {
            'simulator': DesiredCaps.caps_android_simulator,
            'machine': DesiredCaps.caps_android_machine,
        }
        desired_caps = caps_by_mode.get(self._android_mode)
        if desired_caps is None:
            self._log.error('启动模式配置有误,请确认:{}'.format(self._android_mode))
            self._kill_server()
            sys.exit()
        desired_caps['platformVersion'] = version
        desired_caps['deviceName'] = devices[0]
        desired_caps['app'] = app_path
        return self._get_driver(desired_caps, ports[0])

    def quit_driver(self, driver):
        """
        Quit the driver, disconnect emulators and kill the node process.
        :param driver: driver object
        :return: None
        """
        if self._android_mode == 'simulator':
            self._disconnect_simulators()
        driver.quit()
        self._kill_server()
        sleep(3)
        self._log.info('已退出驱动')

    def _start_server(self):
        """
        Start the appium server(s) from the command line.
        :return: None
        """
        if self._android_mode == 'simulator':
            self._connect_simulators()
        devices = self._get_device_names()
        if self._if_wifi is True and self._android_mode == 'machine':
            self._switch_to_wifi()
            # After the WiFi switch only ip:port entries are real devices.
            devices = [device for device in self._get_device_names() if ':' in device]
        commands = self._create_appium_commands(devices)
        for cmd in commands:
            cmd = r"start {}".format(cmd)
            os.system(cmd)
            sleep(3)
            self._log.info('appium server已启动:{}'.format(cmd))

    def _get_driver(self, desired_caps: dict, port: str):
        """
        Connect to a local appium server and return the driver.
        :param desired_caps: connection capabilities
        :param port: appium server port
        :return: driver object (exits the process on connection failure)
        """
        try:
            driver = webdriver.Remote(command_executor='http://127.0.0.1:{}/wd/hub'.format(port),
                                      desired_capabilities=desired_caps)
            sleep(1)
            self._log.info('appium server已连接')
            return driver
        except WebDriverException as e:
            self._log.error('appium server连接失败:{}'.format(e))
            sys.exit()

    def _connect_simulators(self):
        """
        Connect every configured emulator to adb (call after the emulators
        have been started).
        :return: None
        """
        simulators = self._reader.get_str('server', 'simulator').split(';')
        for simulator in simulators:
            cmd = 'adb connect {}'.format(simulator)
            os.system(cmd)
            self._log.debug('模拟器({})已连接'.format(simulator))

    def _disconnect_simulators(self):
        """
        Disconnect every configured emulator from adb.
        :return: None
        """
        devices = self._reader.get_str('server', 'simulator').split(';')
        for device in devices:
            cmd = 'adb disconnect {}'.format(device)
            os.system(cmd)
            self._log.debug('设备({})已断开'.format(device))

    def _switch_to_wifi(self):
        """
        Switch USB-attached real devices to a WiFi (adb tcpip) connection.
        Call after the device is attached with USB debugging enabled.
        :return: None
        """
        devices = self._get_device_names()
        simulators = self._reader.get_str('server', 'simulator').split(';')
        machines = list(set(devices) - set(simulators))
        ports = self._create_useful_ports(5555, machines)
        for machine, port in zip(machines, ports):
            if str(port) in '|'.join(self._get_device_names()):
                # Ask the device for its wlan0 IP, restart adbd in tcpip
                # mode on the chosen port, then connect over the network.
                cmd_1 = 'adb -s {} shell ip -f inet addr show wlan0'.format(machine)
                result_1 = self._execute_command(cmd_1)
                ip = re.search(r"inet\s(\d+\.\d+\.\d+\.\d+)", result_1).group(1)
                cmd_2 = 'adb -s {} tcpip {}'.format(machine, port)
                os.system(cmd_2)
                cmd_3 = 'adb connect {}:{}'.format(ip, port)
                result_2 = self._execute_command(cmd_3)
                if 'connected' in result_2:
                    self._log.debug('设备({})成功切换至WIFI连接:{}'.format(machine, result_2.strip()))
                    self._log.warning('请拔掉设备({})USB线!!'.format(machine))
                else:
                    self._log.error('设备({})切换至WIFI连接失败:{}'.format(machine, result_2.strip()))

    def _get_device_names(self):
        """
        List the connected android device names via `adb devices`.
        :return: list of device names (exits the process when none found)
        """
        cmd = 'adb devices'
        result = self._execute_command(cmd)
        devices = re.findall(r"(.*[^\s])\s*device", result)
        # Drop the first match, which comes from the header line of the
        # `adb devices` output rather than from a real device.
        devices.pop(0)
        if devices:
            self._log.debug('获取到已连接设备列表:{}'.format(devices))
            return devices
        else:
            self._log.error('未检测到安卓设备。')
            sys.exit()

    def _get_android_version(self, device: str):
        """
        Query the android version of a connected device.
        :param device: device name
        :return: version string
        """
        cmd = f'adb -s {device} shell getprop ro.build.version.release'
        result = self._execute_command(cmd)
        self._log.debug('获取到设备版本号:{}'.format(result))
        return result.strip()

    def _get_package_and_activity(self, apk_path=publicFunctions.get_apk_path()):
        """
        Extract appPackage and appActivity from the apk via `aapt`.
        :param apk_path: apk path
        :return: (appPackage, appActivity) tuple
        """
        sdk_path = self._get_sdk_path()
        adb_disk = sdk_path.split(':')[0]
        build_tools_path = os.path.join(sdk_path, 'build-tools')
        aapt_path = os.path.join(build_tools_path, os.listdir(build_tools_path)[0])
        cmd = f'{adb_disk}:&cd {aapt_path}&aapt dump badging {apk_path}'
        result = self._execute_command(cmd)
        package = re.search(r"package: name='([\w\\.]+)'", result).group(1)
        activity = re.search(r"launch.*activity: name='([\w\\.]+)'", result).group(1)
        return package, activity

    def _get_sdk_path(self):
        """
        Extract the Android SDK path from the PATH environment variable.
        :return: Android SDK path (exits when the variable is not set)
        """
        path_env = os.environ['PATH']
        sdk_search = re.search(r'(.+?)\\platform-tools', path_env)
        if sdk_search:
            sdk_path = sdk_search.group(1).split(';')[-1]
            if '%' in sdk_path:
                # Resolve an unexpanded %VAR% reference in the PATH entry.
                sdk_path = os.environ[sdk_path.strip('%')]
            return sdk_path
        else:
            self._log.error('Android SDK环境变量未配置!!')
            exit()

    @staticmethod
    def _execute_command(cmd: str):
        """
        Run a shell command and capture its output.
        :param cmd: shell command
        :return: command-line output
        """
        with os.popen(cmd) as f:
            result = f.read()
        return result

    def _kill_server(self):
        """
        Kill the appium (node.exe) processes after a run.
        :return: None
        """
        cmd1 = 'tasklist | find "node.exe"'
        if self._execute_command(cmd1):
            # taskkill selects by image name with the IM switch; the old
            # '-PID node.exe' passed an image name to the PID switch and
            # always failed, leaving node.exe running.
            cmd2 = 'taskkill -F -IM node.exe'
            self._execute_command(cmd2)
            self._log.info('杀掉appium server进程')

    def _create_appium_commands(self, devices_list: list):
        """
        Build the appium command-line start commands, one per device.
        :param devices_list: device name list
        :return: list of cmd commands
        """
        p_port_list = self._create_useful_ports(4723, devices_list)
        bp_port_list = self._create_useful_ports(4900, devices_list)
        self._reader.write_data('connected', 'server_ports', str(p_port_list))
        cmd_list = ['appium -a 127.0.0.1 -p {} -bp {}'.format(
            p_port_list[i], bp_port_list[i]
            ) for i in range(len(devices_list))
        ]
        self._log.debug('已生成启动命令:{}'.format(cmd_list))
        return cmd_list

    def _create_useful_ports(self, start_port: int, devices_list: list):
        """
        Find one free port per connected device, scanning up from start_port.
        :param start_port: first candidate port
        :param devices_list: device list obtained from the command line
        :return: list of free ports
        """
        port_list = []
        while len(port_list) != len(devices_list):
            # Re-build the probe for the current candidate each iteration;
            # the old code formatted the command once before the loop and
            # kept re-checking the original port forever.
            cmd = 'netstat -ano | findstr {}'.format(start_port)
            if not self._execute_command(cmd):
                port_list.append(start_port)
            start_port += 1
        self._log.debug('已生成可用端口:{}'.format(port_list))
        return port_list
class CategoryConsumer:
    """Consumes category names from RabbitMQ and crawls each category's
    "see more" detail pages from the Play store into the database."""

    def __init__(self, log_file, log_name):
        # Rotating file logger: 10 MiB per file, keep 2 backups.
        self.logger = Logger(log_file, log_name, 10 * 1024 * 1024, 2)
        self._db_conn = None  # opened by _conn_db() in start()

    def start(self):
        """Connect to RabbitMQ and MySQL, then consume until killed.

        Returns early (after logging) when either connection cannot be
        established; reconnects to RabbitMQ whenever the broker closes
        the connection.
        """
        rabbit_topic = RabbitTopic.init_rabbitmq_consumer(
            EXCHANGE_NAME, QUEUE_NAME, QUEUE_LIMIT, [ROUTING_KEY], self.logger)
        if not rabbit_topic:
            self.logger.debug('Construct category consumer error')
            return

        self._conn_db()
        if not self._db_conn:
            # logger.error, not logger.exception: there is no active
            # exception here, so exception() would append a bogus
            # 'NoneType: None' traceback to the log record.
            self.logger.error('Connect to database error')
            return

        while True:
            try:
                rabbit_topic.start_consuming(self._callback, QUEUE_NAME)
            except ConnectionClosed:
                self.logger.debug(
                    'Connection to rabbitmq server closed, re-connecting...')
                rabbit_topic = RabbitTopic.init_rabbitmq_consumer(
                    EXCHANGE_NAME, QUEUE_NAME, QUEUE_LIMIT, [ROUTING_KEY],
                    self.logger)

    def _callback(self, channel, method, properties, category):
        """Handle one queued category: crawl its detail urls, ack the
        message and mark the category consumed.

        Crawl failures are logged and the message is still acked so a
        poisonous category cannot block the queue.
        """
        self.logger.info(os.linesep)
        self.logger.info(
            '----> Get body message %s and start query this category... <----'
            % category)
        try:
            detail_urls = self._parse_detail_urls(category)
            if not detail_urls:
                # NOTE(review): this early return skips the ack below, so
                # the broker will eventually redeliver — confirm that the
                # retry is intended.
                self.logger.debug('No detail category urls got')
                return
            for detail_url in detail_urls:
                self.logger.info('Query detail category url %s' % detail_url)
                category_web_driver = CategoryWebDriver(
                    detail_url, self._db_conn, self.logger)
                category_web_driver.query()
                time.sleep(10)  # throttle between detail pages
        except Exception:
            self.logger.exception('Query category %s error' % category)

        channel.basic_ack(delivery_tag=method.delivery_tag)

        self.logger.info('Set category %s as consumed' % category)
        self._set_category_consumed(category)

    def _conn_db(self):
        """Open the MySQL connection; on failure log and leave it None."""
        try:
            self._db_conn = util.conn_mysql_db()
        except Exception:
            self.logger.exception('Connect database error')

    # Follows the "see more" links on the category landing page.
    def _parse_detail_urls(self, category):
        """Return the set of absolute 'see more' detail urls for *category*.

        Returns an empty set on any network/parse failure (logged).
        """
        detail_urls = set()
        category_url = '%s/%s' % (CATEGORY_HOST_URL, category)
        try:
            response = requests.get(category_url)
            if response.status_code == requests.codes.ok:
                # html is lxml.html, used to parse the landing page
                html_tree = html.fromstring(response.content)
                category_detail_links = html_tree.xpath(
                    '//a[@class="see-more play-button small id-track-click apps id-responsive-see-more"]//@href'
                )
                for category_detail_link in category_detail_links:
                    category_detail_link = category_detail_link.strip()
                    detail_urls.add('https://play.google.com%s' %
                                    category_detail_link)
            else:
                self.logger.debug(
                    'Access category url %s and returns wrong response code %d'
                    % (category_url, response.status_code))
        except Exception:
            self.logger.exception('Get category web page error')
        return detail_urls

    def _set_category_consumed(self, category):
        """Mark *category* as CONSUMED in the category table (errors logged)."""
        query = 'UPDATE category SET status=%s WHERE category=%s'
        try:
            MySQLDBUtil.update(query, (CONSUMED, category), self._db_conn)
        except Exception:
            self.logger.exception('Set category %s as consumed error' %
                                  category)
class SimilarConsumer:
    """Consumes package names from RabbitMQ, scrapes Google Play for apps
    similar to each one, and stores the discovered names in MySQL."""

    def __init__(self, log_file, log_name):
        # Rotating logger: 10 MB per file, 2 backup files.
        self.logger = Logger(log_file, log_name, 10 * 1024 * 1024, 2)
        self._db_conn = None

    def start(self):
        """Connect to RabbitMQ and MySQL, then consume forever,
        re-establishing the RabbitMQ connection when it drops."""
        rabbit_topic = RabbitTopic.init_rabbitmq_consumer(
            EXCHANGE_NAME, QUEUE_NAME, QUEUE_LIMIT, [ROUTING_KEY], self.logger)
        if not rabbit_topic:
            self.logger.debug('Construct similar consumer error')
            return

        self._conn_db()
        if not self._db_conn:
            # error(), not exception(): there is no active exception here,
            # so exception() would log a spurious "NoneType: None" traceback.
            self.logger.error('Connect to database error')
            return

        while 1:
            try:
                rabbit_topic.start_consuming(self._callback, QUEUE_NAME)
            except ConnectionClosed:
                self.logger.debug(
                    'Connection to rabbitmq server closed, re-connecting...')
                rabbit_topic = RabbitTopic.init_rabbitmq_consumer(
                    EXCHANGE_NAME, QUEUE_NAME, QUEUE_LIMIT, [ROUTING_KEY],
                    self.logger)

    def _callback(self, channel, method, properties, package_name):
        """Handle one queued package name: scrape similar apps, store them,
        ack the message, and mark the row consumed."""
        self.logger.info(os.linesep)
        self.logger.info(
            '----> Get body message %s and start query apps similar to this... <----'
            % package_name)
        try:
            url = '%s=%s' % (SIMILAR_HOST_URL, package_name)
            self.logger.info('Query similar apps with url %s' % url)
            package_names = self._extract_package_names(url)
            self.logger.info('Store package names...')
            self._store_package_names(package_names)
        except Exception:
            self.logger.exception('Query similar apps %s error' % package_name)

        # Ack even on failure so a poison message cannot block the queue.
        channel.basic_ack(delivery_tag=method.delivery_tag)

        self.logger.info('Set package name %s as consumed' % package_name)
        self._set_package_consumed(package_name)

    def _conn_db(self):
        """Open the shared MySQL connection; log on failure."""
        try:
            self._db_conn = util.conn_mysql_db()
        except Exception:
            self.logger.exception('Connect database error')

    def _extract_package_names(self, url):
        """Fetch the similar-apps page and extract package names from the
        app links. Returns a (possibly empty) set of names."""
        package_names = set()

        try:
            response = requests.get(url)
        except Exception:
            self.logger.exception('Get content with url %s error' % url)
            return package_names

        if response.status_code == requests.codes.ok:
            html_tree = html.fromstring(response.content)
            app_links = html_tree.xpath('//a[@class="title"]/@href')
            for app_link in app_links:
                try:
                    package_names.add(app_link.split('id=')[-1])
                except Exception:
                    self.logger.exception(
                        'Extract package from link %s error' % app_link)
        else:
            self.logger.debug(
                'Access similar app url %s and returns wrong response code %d'
                % (url, response.status_code))
        return package_names

    def _store_package_names(self, package_names):
        """Bulk-insert the scraped package names.

        Parameterized query: the names come from an untrusted web page, so
        they must not be spliced into the SQL string.
        """
        names = list(package_names)
        if names:
            placeholders = ','.join(['(%s)'] * len(names))
            query = ('INSERT IGNORE INTO package_name (package_name) VALUES '
                     + placeholders)
            try:
                # NOTE(review): assumes MySQLDBUtil.insert forwards params
                # like MySQLDBUtil.update does — TODO confirm.
                MySQLDBUtil.insert(query, tuple(names), self._db_conn)
            except Exception:
                self.logger.exception('Store package names into database fail')

    def _set_package_consumed(self, package_name):
        """Mark the package-name row as consumed in MySQL."""
        query = 'UPDATE similar_app SET status=%s WHERE package_name=%s'
        try:
            MySQLDBUtil.update(query, (CONSUMED, package_name), self._db_conn)
        except Exception:
            self.logger.exception('Set package name %s as consumed error' %
                                  package_name)
class DBOracle:
    """Thin wrapper around cx_Oracle for queries, DML, functions and stored
    procedures, with logging on every operation.

    Connection settings are read from the shared config file.
    """
    def __init__(self):
        # Class-scoped logger.
        self._log = Logger("ORACLE").get_logger()

        # Database configuration from the common config file.
        self._db_config = OperateConfig(config_common_path)

        self._conn_str = '{}/{}@{}:{}/{}'.format(
            self._db_config.get_str('oracle', 'username'),
            self._db_config.get_str('oracle', 'password'),
            self._db_config.get_str('oracle', 'host'),
            self._db_config.get_str('oracle', 'port'),
            self._db_config.get_str('oracle', 'database'))
        # Pre-set to None so disconnect()/__del__ stay safe when the connect
        # below fails (previously __del__ raised AttributeError in that case).
        self._conn = None
        try:
            self._conn = cx_Oracle.connect(self._conn_str)
            self._log.info('成功连接数据库')
        except cx_Oracle.Error as e:
            self._log.error('数据库连接失败:{}'.format(e))

    @property
    def conn(self):
        """Underlying cx_Oracle connection (None if connecting failed)."""
        return self._conn

    def disconnect(self):
        """Close the connection if it is open; safe to call twice."""
        if self._conn is not None:
            self._conn.close()
            self._conn = None
            self._log.info('成功断开数据库')

    def __del__(self):
        # Never let destructor errors propagate during interpreter teardown.
        try:
            self.disconnect()
        except Exception:
            pass

    def select_all(self, sql_string: str):
        """Run a SELECT and return all rows as a list of tuples."""
        c = self._conn.cursor()
        self._log.info('执行查询语句:%s' % sql_string)
        x = c.execute(sql_string)
        # Full result set as a list of tuples, e.g. datalist[0][1].
        datalist = x.fetchall()
        self._log.info('查询结果如下:')
        # enumerate() instead of list.index(): index() is O(n) per row and
        # reports the wrong position for duplicate rows.
        for i, data in enumerate(datalist, 1):
            self._log.debug('第 {} 条数据:{}'.format(i, data))
        c.close()
        return datalist

    def select_one(self, sql_string: str):
        """Run a SELECT and return the first row; a bare value when the row
        has exactly one column; None when there is no row."""
        c = self._conn.cursor()
        self._log.info('执行查询语句:%s' % sql_string)
        x = c.execute(sql_string)
        data = x.fetchone()
        self._log.debug('查询结果如下:{}'.format(data))
        c.close()
        if data is None:
            # No matching row (previously crashed with TypeError).
            return None
        # Bug fix: single-column test is len(data) == 1; the original
        # len(data[0]) == 1 measured the *first field's* length.
        if len(data) == 1:
            return data[0]
        return data

    def execute_sql(self, sql_string: str):
        """Execute an INSERT/UPDATE/DELETE statement, committing on success
        and rolling back on error."""
        try:
            c = self._conn.cursor()
            self._log.info('执行%s语句:%s' % (sql_string.split()[0], sql_string))
            c.execute(sql_string)
            self._conn.commit()
            c.close()
        except cx_Oracle.Error as e:
            self._log.error('执行失败:%s' % str(e))
            self._conn.rollback()
            self._log.error('成功回滚操作')

    def exec_function(self, function_name: str, *parameters,
                      **keyword_parameters):
        """Call a stored function by name with optional positional and
        keyword arguments; rolls back on error."""
        try:
            c = self._conn.cursor()
            self._log.info('执行函数:{}'.format(function_name))
            c.callfunc(function_name, *parameters, **keyword_parameters)
            c.close()
        except cx_Oracle.Error as e:
            self._log.error('执行失败:%s' % str(e))
            self._conn.rollback()
            self._log.error('成功回滚操作')

    def exec_process(self, process_name, *parameters, **keyword_parameters):
        """Call a stored procedure by name with optional positional and
        keyword arguments; rolls back on error."""
        try:
            c = self._conn.cursor()
            self._log.info('执行过程:{}'.format(process_name))
            c.callproc(process_name, *parameters, **keyword_parameters)
            c.close()
        except cx_Oracle.Error as e:
            self._log.error('执行失败:%s' % str(e))
            self._conn.rollback()
            self._log.error('成功回滚操作')
class DBMySql:
    """Thin wrapper around pymysql for queries and DML, with logging on
    every operation.

    Connection settings are read from the shared config file.
    """
    def __init__(self):
        # Class-scoped logger.
        self._log = Logger("MYSQL").get_logger()

        # Database configuration from the common config file.
        self._db_config = OperateConfig(config_common_path)

        # Pre-set to None so disconnect()/__del__ stay safe when the connect
        # below fails (previously __del__ raised AttributeError in that case).
        self._conn = None
        try:
            self._conn = pymysql.connect(
                user=self._db_config.get_str('mysql', 'username'),
                password=self._db_config.get_str('mysql', 'password'),
                host=self._db_config.get_str('mysql', 'host'),
                # pymysql requires an int port; get_str returns a string.
                port=int(self._db_config.get_str('mysql', 'port')),
                database=self._db_config.get_str('mysql', 'database'))
            self._log.info('成功连接数据库')
        except (pymysql.Error, ValueError) as e:
            self._log.error('数据库连接失败:{}'.format(e))

    @property
    def conn(self):
        """Underlying pymysql connection (None if connecting failed)."""
        return self._conn

    def disconnect(self):
        """Close the connection if it is open; safe to call twice."""
        if self._conn is not None:
            self._conn.close()
            self._conn = None
            self._log.info('成功断开数据库')

    def __del__(self):
        # Never let destructor errors propagate during interpreter teardown.
        try:
            self.disconnect()
        except Exception:
            pass

    def select_all(self, sql_string: str):
        """Run a SELECT and return all rows."""
        c = self._conn.cursor()
        self._log.info('执行查询语句:%s' % sql_string)
        # Bug fix: pymysql's Cursor.execute() returns the affected row
        # count, not the cursor, so rows must be fetched from the cursor.
        c.execute(sql_string)
        datalist = c.fetchall()
        self._log.info('查询结果如下:')
        # enumerate() instead of list.index(): index() is O(n) per row and
        # reports the wrong position for duplicate rows.
        for i, data in enumerate(datalist, 1):
            self._log.debug('第 {} 条数据:{}'.format(i, data))
        c.close()
        return datalist

    def select_one(self, sql_string: str):
        """Run a SELECT and return the first row; a bare value when the row
        has exactly one column; None when there is no row."""
        c = self._conn.cursor()
        self._log.info('执行查询语句:%s' % sql_string)
        # See select_all: fetch from the cursor, not execute()'s return.
        c.execute(sql_string)
        data = c.fetchone()
        self._log.debug('查询结果如下:{}'.format(data))
        c.close()
        if data is None:
            # No matching row (previously crashed with TypeError).
            return None
        # Bug fix: single-column test is len(data) == 1; the original
        # len(data[0]) == 1 measured the *first field's* length.
        if len(data) == 1:
            return data[0]
        return data

    def execute_sql(self, sql_string: str):
        """Execute an INSERT/UPDATE/DELETE statement, committing on success
        and rolling back on error."""
        try:
            c = self._conn.cursor()
            self._log.info('执行%s语句:%s' % (sql_string.split()[0], sql_string))
            c.execute(sql_string)
            self._conn.commit()
            c.close()
        except pymysql.Error as e:
            self._log.error('执行失败:%s' % str(e))
            self._conn.rollback()
            self._log.error('成功回滚操作')
# Example #35
# 0
class AppProducer:
    """Publishes un-published package names from MySQL to RabbitMQ for the
    downstream consumers."""

    def __init__(self):
        # Rotating logger: 10 MB per file, 2 backup files.
        self.logger = Logger(LOG_FILE, LOG_NAME, 10*1024*1024, 2)
        self._db_conn = None

    def start(self):
        """Main loop: fetch un-published packages, publish each one, and
        mark it PUBLISHED; waits when the queue is full or the table empty."""
        rabbit_topic = RabbitTopic.init_rabbitmq_producer(EXCHANGE_NAME, self.logger)
        if not rabbit_topic:
            return

        self._conn_db()
        if not self._db_conn:
            # error(), not exception(): there is no active exception here,
            # so exception() would log a spurious "NoneType: None" traceback.
            self.logger.error('Connect database error')
            return

        while 1:
            try:
                if self._is_no_more_records():
                    self.logger.info('There are no more available records, wait...')
                    time.sleep(PRODUCE_WAIT_TIME)
                    # Re-check instead of pointlessly fetching an empty list.
                    continue

                package_list = self._fetch_package_list()
                for package_name in package_list:
                    if RabbitTopic.is_queue_full(QUEUE_NAME, QUEUE_LIMIT, self.logger):
                        self.logger.info('Queue %s is full, wait to consume...' % QUEUE_NAME)
                        time.sleep(PRODUCE_WAIT_TIME)
                        continue
                    try:
                        rabbit_topic.publish(ROUTING_KEY, package_name)
                        self.logger.info('Publish package %s and update status' % package_name)
                        self._update_status(PUBLISHED, package_name)
                    except ConnectionClosed:
                        self.logger.debug('Connection to rabbitmq server closed, re-connecting...')
                        rabbit_topic = RabbitTopic.init_rabbitmq_producer(EXCHANGE_NAME, self.logger)
            except Exception:
                self.logger.exception('Publish similar app package name error')

    def _init_rabbitmq(self):
        """Build a producer directly from RabbitTopic.

        NOTE(review): appears superseded by RabbitTopic.init_rabbitmq_producer
        used in start(); kept for backward compatibility — confirm callers.
        """
        try:
            rabbit_topic = RabbitTopic(EXCHANGE_NAME)
            rabbit_topic.construct_producer()
        except Exception:
            self.logger.exception('Construct app producer error')
            return None
        return rabbit_topic

    def _conn_db(self):
        """Open the shared MySQL connection; log on failure."""
        try:
            self._db_conn = util.conn_mysql_db()
        except Exception:
            self.logger.exception('Connect database error')

    def _fetch_package_list(self):
        """Return up to QUEUE_LIMIT un-published package names from MySQL."""
        package_list = []

        self.logger.info('Get un-published package list...')
        query = 'SELECT package_name FROM package_name WHERE status=%s LIMIT %s'
        try:
            results = MySQLDBUtil.fetch_multiple_rows(query, (UN_PUBLISHED, QUEUE_LIMIT), self._db_conn)
            for result in results:
                (package_name,) = result
                package_list.append(package_name)
        except Exception:
            self.logger.exception('Query un-used package name error')
        return package_list

    def _is_no_more_records(self):
        """True when no un-published package names remain; False on any
        error (best-effort check)."""
        query = 'SELECT COUNT(*) FROM package_name WHERE status=%s'
        try:
            result = MySQLDBUtil.fetch_single_row(query, (UN_PUBLISHED,), self._db_conn)
            if result:
                (count, ) = result
                if count == 0:
                    return True
        except Exception:
            self.logger.exception('Check if there is no more packages error')
        return False

    def _update_status(self, status, package_name=None):
        """Set status on one package row, or on every row when
        package_name is None."""
        if not package_name:
            query = 'UPDATE package_name SET status=%s'
            params = (status,)
        else:
            query = 'UPDATE package_name SET status=%s WHERE package_name=%s'
            params = (status, package_name)
        try:
            MySQLDBUtil.update(query, params, self._db_conn)
        except Exception:
            self.logger.exception('Update record status')
class RspGame(object):
    """
    Rock-scissors-paper game: a random integer in [1, 3] picks the bot's
    move — 1: rock; 2: scissors; 3: paper.
    """
    def __init__(self, game_num):
        # game_num: total rounds in a match; a side wins the match when its
        # score reaches (game_num + 1) / 2.
        self.log = Logger()
        self.game_all_num = game_num
        self.player_score = 0
        self.com_score = 0
        self.player_name = ''
        # Image sub-directories under resources/game; index 3 holds
        # emoticon replies for non-move messages.
        self.img_dir = ['rock', 'scissors', 'paper', 'emoticon']
        # self.rsp_img = [os.path.join(GAME_IMAGE_PATH, 'rock_1.jpg'), os.path.join(GAME_IMAGE_PATH, 'scissors_1.jpg'), os.path.join(GAME_IMAGE_PATH, 'paper_1.jpg')]
        # print(self.rsp_img)
        # User-facing reply strings (kept in Chinese on purpose).
        self.draw_msg = " 平局了,继续来~"
        self.fail_msg = " 我输了 "
        self.win_msg = " 我赢了 "
        self.over_msg = [" 游戏结束,你输了", " 游戏结束,恭喜你赢了"]
        # Maps a player's move text to its numeric code (rock/scissors/paper).
        self.msg_code = {"石头": 0, "剪刀": 1, "布": 2}

    def start(self, player_name):
        """Reset both scores and record the challenger's name."""
        self.player_score = 0
        self.com_score = 0
        self.player_name = player_name

    def random_img(self, random_num):
        """Pick a random image file from the directory for move/emoticon
        index *random_num* and return its path."""
        list_dir = os.listdir(
            os.path.join('resources', 'game', self.img_dir[random_num]))
        path = choice(list_dir)
        self.log.info('choose:-->{}'.format(path))
        return os.path.join('resources', 'game', self.img_dir[random_num],
                            path)

    def get_result(self, winer):
        """Score the round and report match status.

        winer is expected to be 1 (player won) or -1 (bot won); 0 (draw)
        is handled separately in play(). Returns (over_flag, message).
        """
        if winer == 1:
            self.player_score += 1
            # First to (game_all_num + 1) / 2 points takes the match.
            if self.player_score == (self.game_all_num + 1) / 2:
                return 1, '@' + self.player_name + self.over_msg[1]
            else:
                return 0, '@' + self.player_name + self.fail_msg
        elif winer == -1:
            self.com_score += 1
            if self.com_score == (self.game_all_num + 1) / 2:
                return 1, '@' + self.player_name + self.over_msg[0]
            else:
                return 0, '@' + self.player_name + self.win_msg

    def play(self, msg):
        """
        Returns (a, b, c):
        a — whether the match is over: 1 ended, 0 ongoing
        b — text result of this round
        c — image path replied for this round
        :param msg:
        :return:
        """
        self.log.info('play:{}'.format(msg))
        # The move word is the last whitespace-separated token of the message.
        real_msg = msg.text.split()
        valid_msg = real_msg[len(real_msg) - 1]
        self.log.debug('commond:{}'.format(valid_msg))
        if str.find(valid_msg, "不玩") != -1 or str.find(valid_msg, "退出") != -1:
            # Player quit ("不玩"/"退出"): end with an emoticon reply.
            return 1, '@' + self.player_name + " 虽然半途而废不怎么好听,但有时候放弃也是一种聪明的选择", self.random_img(
                3)
        elif valid_msg != "石头" and valid_msg != "剪刀" and valid_msg != "布":
            # Not a recognized move: prompt without ending the match.
            return 0, '@' + self.player_name + " 你这是要跟我石头剪刀布吗?", self.random_img(
                3)
        random_num = random.randint(1, 3) - 1
        self.log.debug('random_num:{}'.format(random_num))
        self.log.debug('msg_code:{}'.format(self.msg_code[valid_msg]))
        # 1: player wins, -1: bot wins, 0: draw — derived from the cyclic
        # rock(0) > scissors(1) > paper(2) > rock ordering.
        winer = (random_num - self.msg_code[valid_msg] + 4) % 3 - 1
        if winer == 0:
            return 0, '@' + self.player_name + self.draw_msg, self.random_img(
                random_num)
        else:
            can, res_msg = self.get_result(winer)
            return can, res_msg, self.random_img(random_num)
class AppConsumer:
    """Consumes package names from RabbitMQ, scrapes each app's detail page
    from Google Play, and stores the results in MySQL and MongoDB."""

    def __init__(self, log_file, log_name):
        # Rotating logger: 10 MB per file, 2 backup files.
        self.logger = Logger(log_file, log_name, 10*1024*1024, 2)
        self._mysql_db_conn = None
        self._mongo_db_conn = None

    def start(self):
        """Connect to RabbitMQ and both databases, then consume forever,
        re-establishing the RabbitMQ connection when it drops."""
        rabbit_topic = RabbitTopic.init_rabbitmq_consumer(EXCHANGE_NAME, QUEUE_NAME, QUEUE_LIMIT,
                                                          [ROUTING_KEY], self.logger)
        if not rabbit_topic:
            self.logger.debug('Construct app consumer error')
            return

        self._conn_db()
        if not self._mysql_db_conn or not self._mongo_db_conn:
            # error(), not exception(): there is no active exception here,
            # so exception() would log a spurious "NoneType: None" traceback.
            self.logger.error('Connect to database error')
            return

        while 1:
            try:
                rabbit_topic.start_consuming(self._callback, QUEUE_NAME)
            except ConnectionClosed:
                self.logger.debug('Connection to rabbitmq server closed, re-connecting...')
                rabbit_topic = RabbitTopic.init_rabbitmq_consumer(EXCHANGE_NAME, QUEUE_NAME, QUEUE_LIMIT,
                                                                  [ROUTING_KEY], self.logger)

    def _callback(self, channel, method, properties, package_name):
        """Handle one queued package name: scrape the detail page, persist
        the results, ack the message, and mark the row consumed."""
        self.logger.info(os.linesep)
        self.logger.info('----> Get body message %s and start get app detail... <-----' % package_name)
        try:
            url = '%s=%s' % (APP_HOST_URL, package_name)
            self.logger.info('Query app detail with url %s' % url)
            app_detail = self._parse_web_content(url)

            if not app_detail:
                self.logger.info('App detail extraction fail')
            else:
                self.logger.info('Store app detail...')
                app_detail.package_name = package_name
                self._store_app_detail(app_detail)

            self.logger.info('Insert package name %s into similar app table...' % package_name)
            self._store_package_name_similar(package_name)

            self.logger.info('Store app description...')
            self._store_app_description(app_detail)

            self.logger.info('Store app developer...')
            self._store_app_developer(app_detail)

        except Exception:
            self.logger.exception('Query app detail %s error' % package_name)

        # Ack even on failure so a poison message cannot block the queue.
        channel.basic_ack(delivery_tag=method.delivery_tag)

        self.logger.info('Set package name %s as consumed' % package_name)
        self._set_package_consumed(package_name)

    def _conn_db(self):
        """Open the MySQL and MongoDB connections; log on failure."""
        try:
            self._mysql_db_conn = util.conn_mysql_db()
            self._mongo_db_conn = util.conn_mongo_db()
        except Exception:
            self.logger.exception('Connect database error')

    def _parse_web_content(self, url):
        """Fetch *url* and parse it into an app-detail object: lxml parser
        first, BeautifulSoup as fallback. Returns None on any failure."""
        app_detail = None
        try:
            response = requests.get(url)
        except Exception:
            self.logger.exception('Get web content from url %s error' % url)
            return app_detail
        try:
            web_content = util.decode_utf8(response.content)
        except Exception:
            self.logger.exception('Decode web content error')
            return app_detail
        if not web_content:
            self.logger.debug('Web content is empty, no need to parse')
            return app_detail
        else:
            self.logger.info('Get web content successfully,try to parse it...')

        app_detail_lxml_parser = AppDetailLxmlParser(web_content, self.logger)
        try:
            app_detail_lxml_parser.parse()
            app_detail = app_detail_lxml_parser.app_detail
        except Exception:
            self.logger.exception('Use lxml to parse the web content error, try to use the backup one beautiful soup...')
            app_detail_b4_parser = AppDetailB4Parser(response.content, self.logger)
            try:
                app_detail_b4_parser.parse()
                app_detail = app_detail_b4_parser.app_detail
            except Exception:
                self.logger.exception('Use beautiful soup to parse the web content error')

        return app_detail

    def _store_app_detail(self, app_detail):
        """Insert the full app-detail document into MongoDB."""
        if not app_detail:
            self.logger.debug('No app detail content, cannot store into database')
            return
        app_detail_json = app_detail.to_json()
        try:
            MongoDBUtil.insert(app_detail_json, self._mongo_db_conn, 'app_detail')
        except Exception:
            self.logger.exception('Store app detail content into mongo db error')

    def _store_package_name_similar(self, package_name):
        """Queue the package for the similar-app crawler.

        Parameterized query: the value originates from an external message
        and must not be spliced into the SQL string.
        NOTE(review): assumes MySQLDBUtil.insert forwards params like
        MySQLDBUtil.update does — TODO confirm.
        """
        query = 'INSERT IGNORE INTO similar_app (package_name) VALUES (%s)'
        try:
            MySQLDBUtil.insert(query, (package_name,), self._mysql_db_conn)
        except Exception:
            self.logger.exception('Store package name into similar app database fail')

    def _store_app_description(self, app_detail):
        """Store the scraped description text, parameterized — the text
        comes from an untrusted web page."""
        if not app_detail:
            return
        description = ' '.join(app_detail.description)
        # Quote stripping kept for backward-compatible stored text, even
        # though parameterization already makes it safe.
        description = description.replace('"', '').replace('\'', '')
        if not description:
            return
        query = 'INSERT INTO raw_text (text) VALUES (%s)'
        try:
            MySQLDBUtil.insert(query, (description,), self._mysql_db_conn)
        except Exception:
            self.logger.exception('Store app description error')

    def _store_app_developer(self, app_detail):
        """Extract the developer name from the developer link and store it
        (parameterized — scraped, untrusted input)."""
        if not app_detail:
            return
        developer_link = app_detail.developer_link
        if not developer_link:
            return
        items = developer_link.split('id=')
        if len(items) == 2:
            developer_name = items[-1]
            query = 'INSERT IGNORE INTO developer (name) VALUES (%s)'
            try:
                MySQLDBUtil.insert(query, (developer_name,), self._mysql_db_conn)
                self.logger.info('Stored app developer %s' % developer_name)
            except Exception:
                self.logger.exception('Store app developer error')

    def _set_package_consumed(self, package_name):
        """Mark the package-name row as consumed in MySQL."""
        query = 'UPDATE package_name SET status=%s WHERE package_name=%s'
        try:
            MySQLDBUtil.update(query, (CONSUMED, package_name), self._mysql_db_conn)
        except Exception:
            self.logger.exception('Set package name %s as consumed error' % package_name)
 def read_excel(self):
     """Open the workbook at self.file_name with load_workbook and return it.

     Logs the file name being read before loading.
     """
     Logger.debug(
         "TestDataReaderCommonUtil.read_excel file name is : " +
         self.file_name, self.__class__.__name__)
     excel = load_workbook(self.file_name)
     return excel
class DeveloperConsumer:
    """Consumes developer identifiers from RabbitMQ and crawls each
    developer's Google Play page."""

    def __init__(self, log_file, log_name):
        # Rotating logger: 10 MB per file, 2 backup files.
        self.logger = Logger(log_file, log_name, 10 * 1024 * 1024, 2)
        self._db_conn = None

    def start(self):
        """Connect to RabbitMQ and MySQL, then consume forever,
        re-establishing the RabbitMQ connection when it drops."""
        rabbit_topic = RabbitTopic.init_rabbitmq_consumer(
            EXCHANGE_NAME, QUEUE_NAME, QUEUE_LIMIT, [ROUTING_KEY], self.logger)
        if not rabbit_topic:
            self.logger.debug('Construct developer consumer error')
            return

        self._conn_db()
        if not self._db_conn:
            # error(), not exception(): there is no active exception here,
            # so exception() would log a spurious "NoneType: None" traceback.
            self.logger.error('Connect to database error')
            return

        while 1:
            try:
                rabbit_topic.start_consuming(self._callback, QUEUE_NAME)
            except ConnectionClosed:
                self.logger.debug(
                    'Connection to rabbitmq server closed, re-connecting...')
                rabbit_topic = RabbitTopic.init_rabbitmq_consumer(
                    EXCHANGE_NAME, QUEUE_NAME, QUEUE_LIMIT, [ROUTING_KEY],
                    self.logger)

    def _callback(self, channel, method, properties, developer):
        """Handle one queued developer: choose the id- or name-based URL,
        crawl it, ack the message, and mark the row consumed."""
        self.logger.info(os.linesep)
        self.logger.info(
            '----> Get body message %s and start query this developer... <----'
            % developer)
        try:
            # All-digit identifiers use the id-based URL; otherwise the
            # name-based URL.
            if developer.isdigit():
                self.logger.info('Developer info is all digit numbers')
                url = '%s=%s' % (DEVELOPER_ID_HOST_URL, developer)
            else:
                self.logger.info('Developer info is non digit numbers')
                url = '%s=%s' % (DEVELOPER_NAME_HOST_URL, developer)
            self.logger.info('Query developer apps with url %s' % url)
            developer_web_driver = DeveloperWebDriver(url, self._db_conn,
                                                      self.logger)
            developer_web_driver.query()
        except Exception:
            self.logger.exception('Query developer %s error' % developer)

        # Ack even on failure so a poison message cannot block the queue.
        channel.basic_ack(delivery_tag=method.delivery_tag)

        self.logger.info('Set developer %s as consumed' % developer)
        self._set_developer_consumed(developer)

    def _conn_db(self):
        """Open the shared MySQL connection; log on failure."""
        try:
            self._db_conn = util.conn_mysql_db()
        except Exception:
            self.logger.exception('Connect database error')

    def _set_developer_consumed(self, developer):
        """Mark the developer row as consumed in MySQL."""
        query = 'UPDATE developer SET status=%s WHERE name=%s'
        try:
            MySQLDBUtil.update(query, (CONSUMED, developer), self._db_conn)
        except Exception:
            # Log-message typo fixed: "devloper" -> "developer".
            self.logger.exception('Set developer %s as consumed error' %
                                  developer)
# Example #40
# 0
def log2():
    while True:
        Logger.debug("test2")