class YcyReplier(object):
    def __init__(self):
        self.log = Logger()
        '''Train on the QA samples and build the QA mapping table.'''
        csv_file = csv.reader(open(RESOURCES_PATH, 'r', encoding='UTF-8'))
        for QA in csv_file:
            tags = jieba.analyse.extract_tags(QA[0], topK=3)
            key = ''
            for tag in tags:
                key += tag
            if key:
                QA_dict[key] = QA[1]
            else:
                QA_dict[QA[0]] = QA[1]
        self.log.info("Dict:{}".format(QA_dict))

    def reply_text(self, msg):
        tags = jieba.analyse.extract_tags(msg, topK=3)
        key = ''
        for tag in tags:
            key += tag
        self.log.info("KeyWords:{}".format(key))
        if key:
            return QA_dict.get(key)
        else:
            return QA_dict.get(msg)
Example 2
def get_gsv_gvi_list_by_way_id(
        log: Logger, edge_gdf: GeoDataFrame,
        gsv_gvi_gdf: GeoDataFrame) -> Dict[int, List[float]]:
    """Returns a dictionary of lists of GSV point GVI values by edge id. 
    Only point GVI values within 30m from edge geometry are included in the lists. 
    """

    edges = edge_gdf[[E.id_way.name, 'geometry']].copy()
    gvi_points = gsv_gvi_gdf[['geometry', 'GVI']].copy()

    edges['geom_b_30'] = [geom.buffer(30) for geom in edges['geometry']]
    edges = edges.set_geometry('geom_b_30')

    edge_gvi_points = gpd.sjoin(edges,
                                gvi_points,
                                how='inner',
                                op='intersects')
    gvi_points_by_way_id = edge_gvi_points.groupby(E.id_way.name)

    gvi_list_by_way_id = {}
    for way_id, g_points in gvi_points_by_way_id:
        gvi_list_by_way_id[way_id] = list(g_points['GVI'])

    log.info(
        f'Found GVI point samples for {sample_ratio(len(edges), len(gvi_list_by_way_id))} % edges'
    )

    return gvi_list_by_way_id
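
# sample_ratio is not defined in this snippet; a plausible helper consistent
# with its call sites (total count first, matched count second, a percentage
# out) might look like this:
def sample_ratio(total_count: int, found_count: int) -> float:
    """Share of found items as a percentage (one decimal), guarding against zero."""
    return round(100 * found_count / total_count, 1) if total_count else 0.0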
Example 3
def sjoin_noise_values(gdf,
                       noise_layers: dict,
                       log: Logger = None) -> gpd.GeoDataFrame:
    sample_gdf = gdf.copy()
    sample_gdf['sample_idx'] = sample_gdf.index
    for name, noise_gdf in noise_layers.items():
        log.debug(f'joining noise layer [{name}] to sampling points')
        sample_gdf = gpd.sjoin(sample_gdf, noise_gdf, how='left',
                               op='within').drop(['index_right'], axis=1)

    if (len(sample_gdf.index) > len(gdf.index)):
        log.warning(
            f'joined multiple noise values for one or more sampling points ({len(sample_gdf.index)} != {len(gdf.index)})'
        )

    distinct_samples = remove_duplicate_samples(sample_gdf, 'sample_idx',
                                                noise_layers)

    if (len(distinct_samples.index) == len(gdf.index)):
        log.info('successfully removed duplicate samples')
    else:
        log.error('error in removing duplicate samples')

    if sorted(sample_gdf.columns) != sorted(distinct_samples.columns):
        log.error(
            'schema of the dataframe was altered while removing duplicate samples'
        )

    return distinct_samples.drop(columns=['sample_idx'])
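
# Usage sketch with toy data: a standard logging.Logger duck-types for the
# Logger used above, and 'road'/'db_low' are hypothetical layer and column
# names. Assumes remove_duplicate_samples from this module is in scope.
import logging
from shapely.geometry import Point, Polygon

toy_points = gpd.GeoDataFrame(geometry=[Point(0, 0), Point(5, 5)])
toy_layer = gpd.GeoDataFrame({'db_low': [55]},
                             geometry=[Polygon([(-1, -1), (-1, 1), (1, 1), (1, -1)])])
samples = sjoin_noise_values(toy_points, {'road': toy_layer}, logging.getLogger('noise'))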
Example 4
def save_ckpt(cfg, state):
    # ckpt name: best_method_year_month_day_time
    ckpt_name = cfg.get_project_name() + '_' + cfg.get_method() \
        + '_' + cfg.get_start_time() + '.pth.tar'
    file_dir = os.path.join(cfg.get_backup_path(), ckpt_name)
    torch.save(state, file_dir)
    log.info('Saved at %s' % file_dir)
class Context:
    """
    上下文关联参数读写
    """
    def __init__(self):
        self._cg = configparser.ConfigParser()
        self._cg.read(constant.context_data_path.encode('utf-8'))
        self._log = Logger('关联参数').get_logger()

    def get(self, option):
        """
        获取参数值
        :param option: key
        :return: value
        """
        if option in self._cg.options(section='DATA'):
            value = self._cg.get('DATA', option=option)
            self._log.info('成功获取关联参数:{} = {}'.format(option, value))
            return value
        else:
            self._log.error('不存在关联参数:{}'.format(option))

    def write(self, option, value):
        """
        把传入的数据写入到文件
        :param option: key
        :param value: value
        :return: None
        """
        self._cg.set('DATA', option, value)
        with open(constant.context_data_path, 'w') as fp:
            self._cg.write(fp)
        self._log.info('成功写回关联参数:{} = {}'.format(option, value))
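
# Usage sketch, assuming constant.context_data_path points at an INI file
# that already contains a [DATA] section:
ctx = Context()
ctx.write('token', 'abc123')         # persists under [DATA]
assert ctx.get('token') == 'abc123'  # reads it back and logs the hit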
Example 6
    def __init__(self):
        """
        Manager server connection object
        """
        self.server = settings.config["tool"]["address"].encode("utf8")
        self.port = settings.config["tool"]["port"]
        self.token = settings.config["tool"]["token"].encode("utf8")
        self.uri_base = "/rest_api/v2"
        # Manager server connection object
        if settings.config["tool"]["proxy_connect"] == "1":
            self.manager_server = NetworkConnection(
                settings.config["tool"]["address"],
                settings.config["tool"]["port"],
                is_https=settings.config["tool"]["using_https"],
                proxies=settings.config["tool"]["http_proxy"])
        else:
            self.manager_server = NetworkConnection(
                settings.config["tool"]["address"],
                settings.config["tool"]["port"],
                is_https=settings.config["tool"]["using_https"])

        self.headers = {
            "token": self.token,
            "Content-Type": "application/json"
        }
        if self.check_service() is True:
            self.client = MsfRpcClient(self,
                                       server=self.server,
                                       port=self.port,
                                       token=self.token)
            Logger.info("Metasploit service ready!")
        else:
            Logger.error(
                ">> MetasploitAPI >> CAN NOT LOGIN TO METASPLOIT SERVICE")
            sys.exit()
def set_default_and_na_edge_noises(graph: ig.Graph, data_extent: Polygon,
                                   log: Logger) -> None:
    """Sets noise attributes of edges to their default values and None outside the extent of the noise data.
    """

    # first set noise attributes of all edges as nodata
    graph.es[E.noises.value] = None
    graph.es[E.noise_source.value] = None

    edge_gdf = ig_utils.get_edge_gdf(graph, attrs=[E.id_ig])
    data_extent_gdf = gpd.GeoDataFrame(data=[{
        'has_noise_data': 1
    }],
                                       geometry=[data_extent],
                                       crs=CRS.from_epsg(3879))
    joined = gpd.sjoin(edge_gdf, data_extent_gdf, how='left',
                       op='within').drop(['index_right'], axis=1)
    edges_within = joined[joined['has_noise_data'] == 1]

    real_edge_count = len([
        geom for geom in list(edge_gdf['geometry'])
        if isinstance(geom, LineString)
    ])
    log.info(
        f'found {real_edge_count - len(edges_within)} edges of {real_edge_count} outside noise data extent'
    )

    # set noise attributes of edges within the data extent to default values (no noise)
    for edge in edges_within.itertuples():
        graph.es[getattr(edge, E.id_ig.name)][E.noises.value] = {}
        graph.es[getattr(edge, E.id_ig.name)][E.noise_source.value] = ''
def start_export_process(args):
    args.body = read_request_body()
    export_filename = get_filename(args.index)
    line_number = 0
    csv_writer = None
    first_line = True
    with open(export_filename, 'wb') as json_file:
        if args.out == 'csv':
            csv_writer = csv.writer(json_file)
        for source_data in export_data_from_elasticsearch(
                args.http, args.index, args.body, args.size, args.skip,
                args.limit):
            if source_data:
                line_number += 1
                if args.out == 'json':
                    json_file.write(json.dumps(source_data) + '\n')
                elif args.out == 'csv':
                    if first_line:
                        first_line = False
                        csv_writer.writerow(tuple(source_data.keys()))

                    try:
                        csv_writer.writerow(tuple(source_data.values()))
                    except Exception as error:
                        Logger.error(u"CSV文件写入出错%s" % error)
                print u"当前导出数据量%s\r" % line_number,
        Logger.info(u"导出数据量%d" % (line_number))
    out_filename = 'data_list'
    if args.out == 'csv':
        out_filename = "%s.csv" % export_filename
    elif args.out == 'json':
        out_filename = "%s.json" % export_filename
    if rename_to_outfile(export_filename, out_filename):
        os.rename(export_filename, out_filename)
        Logger.info(u"文件生成成功 %s" % out_filename)
Example 9
    def save_file(json_dict, json_file):
        dir_name = os.path.dirname(json_file)
        if not os.path.exists(dir_name):
            Log.info('Json dir {} does not exist.'.format(dir_name))
            os.makedirs(dir_name)

        with open(json_file, 'w') as write_stream:
            write_stream.write(json.dumps(json_dict))
Example 10
    def json2xml(json_file, xml_file):
        if not os.path.exists(json_file):
            Log.error('Json file {} does not exist.'.format(json_file))
            exit(1)

        xml_dir_name = os.path.dirname(xml_file)
        if not os.path.exists(xml_dir_name):
            Log.info('Xml dir {} does not exist.'.format(xml_dir_name))
            os.makedirs(xml_dir_name)
Example 11
    def xml2json(xml_file, json_file):
        if not os.path.exists(xml_file):
            Log.error('Xml file {} does not exist.'.format(xml_file))
            exit(1)

        json_dir_name = os.path.dirname(json_file)
        if not os.path.exists(json_dir_name):
            Log.info('Json dir {} does not exist.'.format(json_dir_name))
            os.makedirs(json_dir_name)
Example 12
class OperateConfig:
    """
    配置文件读写
    :param file_path: 配置文件路径
    """
    def __init__(self, file_path):
        self._file_name_path = file_path
        self._cg = configparser.ConfigParser()
        self._cg.read(self._file_name_path, encoding='utf-8')
        self._log = Logger('读取配置').get_logger()

    def get_str(self, section, option):
        """
        读取字符型参数
        :param section: 类名
        :param option: key
        :return: value
        """
        value = self._cg.get(section, option)
        self._log.debug('获取到:{} = {}'.format(option, value))
        return value

    def get_int(self, section, option):
        """
        读取数值型参数
        :param section: 类名
        :param option: key
        :return: value
        """
        value = self._cg.getint(section, option)
        self._log.debug('获取到:{} = {}'.format(option, value))
        return value

    def get_bool(self, section, option):
        """
        读取布尔型参数
        :param section: 类名
        :param option: key
        :return: value
        """
        value = self._cg.getboolean(section, option)
        self._log.debug('获取到:{} = {}'.format(option, value))
        return value

    def write_data(self, section, option, value):
        """
        把传入的参数写入到文件
        :param section: 类名
        :param option: key
        :param value: value
        :return: None
        """
        self._cg.set(section, option, value)
        with open(self._file_name_path, 'w') as fp:
            self._cg.write(fp)
        self._log.info('成功写入参数:{} = {}'.format(option, value))
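
# Usage sketch with a hypothetical config.ini containing a [server] section
# with host, port and debug keys:
cfg = OperateConfig('config.ini')
host = cfg.get_str('server', 'host')
port = cfg.get_int('server', 'port')
debug = cfg.get_bool('server', 'debug')
cfg.write_data('server', 'host', '127.0.0.1')  # values must be written as strings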
Example 13
def add_unique_geom_id(point_gdf: gpd.GeoDataFrame,
                       log: Logger = None) -> gpd.GeoDataFrame:
    """Adds an unique identifier (string) to GeoDataFrame of points based on point locations (x/y). 
    """
    point_gdf[S.xy_id] = [
        f'{str(round(geom.x, 1))}_{str(round(geom.y, 1))}'
        for geom in point_gdf[S.geometry]
    ]
    unique_count = point_gdf[S.xy_id].nunique()
    unique_share = round(100 * unique_count / len(point_gdf.index), 2)
    if log is not None:
        log.info(f'found {unique_count} unique sampling points ({unique_share} %)')
    return point_gdf
Example 14
    def get_ckpt(self):
        ckpt_dir = glob.glob(
            os.path.join(self.get_backup_path(), self.net_info['ckpt'] + '.*'))
        if not ckpt_dir:
            log.error("{} is not found".format(self.net_info['ckpt']))
            sys.exit()
        if len(ckpt_dir) != 1:
            log.error("File {} conflict!".format(self.net_info['ckpt']))
            sys.exit()
        ckpt_dir = ckpt_dir[0]
        log.info(">>> loading ckpt from '{}'".format(ckpt_dir))
        return ckpt_dir
Example 15
class PortManager:

    # Port Manager: calls necessary managers and utilities to generate parameters for sql.
    # List of valid ports it can receive is taken from the Configuration setup.
    #
    validPortNumbers = ()

    def __init__(self):
        self.g_config = GlobalConfig()
        self.validPortNumbers = self.g_config.get_ports()
        self.date_time_field = self.g_config.get_db_datetime_name()
        self.log = Logger().get('reportserver.manager.PortManager.PortManager')


    def isPortValid(self, port_number):
        return port_number in self.validPortNumbers

    def getPort(self, port_number, uom, unit):
        self.log.info("Retrieving port:" + str(port_number) + "uom:" + uom + " size: " + str(unit))

        items = []

        if self.isPortValid(port_number):
            results = DatabaseHandler().get_json_by_time(port_number, uom, unit)
            items = utilities.process_data(results)

        port_json = {
            'port': str(port_number),
            'timespan': uom + "=" + str(unit),
            'items':items
        }

        return port_json


    def get_port_attack_count(self, tablename, unit, uom):
        fromDate = dateTimeUtility.get_begin_date_iso(unit, uom)

        sql = "select count(distinct session) as total_attacks from %s where %s >= '%s' " %(tablename, self.date_time_field, fromDate)
        self.log.debug("sql is:" + sql)
        result = DatabaseHandler().query_db(sql)[0]
        return int(result['total_attacks'])

    def get_unique_ips(self, tablename, unit, uom):
        fromDate = dateTimeUtility.get_begin_date_iso(unit, uom)
        sql = "select count(distinct peerAddress) as unique_ips from %s where %s >= '%s' " % (tablename, self.date_time_field, fromDate)
        self.log.debug("sql is:" + sql)
        result = DatabaseHandler().query_db(sql)[0]
        return int(result['unique_ips'])
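
# Usage sketch (2222 is a hypothetical honeypot port assumed to be in the
# configured valid port list):
pm = PortManager()
port_json = pm.getPort(2222, 'days', 7)   # JSON summary for port 2222 over the last 7 days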
Example 16
class Dictionary(customdict_trait):
    def __init__(self, default_params_filename='params.yaml', *args, **kwargs):

        # Extend the dictionary with the values passed in arguments.
        # Call the Dictionary constructor once the parameters file is set.
        arguments = Arguments(args, kwargs)
        if arguments.args.config_file is not None:
            parameters_file = arguments.args.config_file[0]
        else:
            parameters_file = default_params_filename
        super().__init__(parameters_file, *args)

        for possible_action in arguments.possible_actions:
            setattr(self, possible_action, False)
        setattr(self, arguments.args.action, True)

        # setattr(self, 'do_plot', arguments.args.plot)
        self.do_plot = arguments.args.plot
        self.save_predictions = arguments.args.save
        self.input_file = arguments.args.file[0]
        if arguments.args.window is not None:
            self.window_size = arguments.args.window[0]
        else:
            self.window_size = 10
        if arguments.args.epochs is not None:
            self.epochs = arguments.args.epochs[0]
        else:
            self.epochs = 1

        # Output filename specified
        if arguments.args.output is not None:
            self.output = arguments.args.output[0]
        else:
            self.output = None

        #
        # Extend the dictionary with custom meta-parameters
        #
        self.ohlc_tags = list(list(self.csv_dict.keys())[1:])

        #
        # Set log_level and start the logger
        #
        setattr(self, 'log_level',
                arguments.args.debug[0] if arguments.args.debug is not None \
                    else 3)
        if 'log_level' not in self:
            self.log_level = 3  # default value = WARNING
        self.log = Logger(self.log_level)

        self.log.info(
            'Using configuration parameters from: {}'.format(parameters_file))
Example 17
class IpsServiceHandler:
    def __init__(self):
        self.log = Logger().get("reportserver.manager.IpsServiceHandler.py")

    def process(self, rqst, path_tokens, query_tokens):
        uom = None
        units = None
        self.log.info("processing ipaddress request:" + str(path_tokens) + str(query_tokens))

        try:
            time_period = utilities.validate_time_period(query_tokens)
            uom = time_period[0]
            units = time_period[1]
        except ValueError:
            rqst.badRequest(units)
            return

        if len(path_tokens) == 5:
            ipaddress = path_tokens[4].strip()
            self.log.debug("requested: " + str(ipaddress))
            if ipaddress:
                try:
                    ipaddress = utilities.validate_ipaddress(ipaddress)
                    self.get_ips_data_by_time(rqst, ipaddress, uom, units)
                except ValueError:
                    rqst.badRequest(badIpAddress)
                    return
            else:
                self.get_ips_data_by_time(rqst, "", uom, units)
        elif len(path_tokens) == 4:
            self.get_ips_list_json(rqst, uom, units)
        else:
            rqst.badRequest()
            return

    def get_ips_data_by_time(self, rqst, ipaddress, uom, units):

        ips_manager = IpsManager()
        addressjsondata = ips_manager.get_data(ipaddress, uom, units)
        if addressjsondata is not None:
            # send response:
            rqst.sendJsonResponse(addressjsondata, 200)
        else:
            rqst.notFound()

    def get_ips_list_json(self, rqst, uom, units):
        response = "{not implemented yet.}"
        rqst.sendJsonResponse(response, 200)
Example 18
class DatabaseHandler:

    def __init__(self):
        self.global_config = GlobalConfig()
        self.db_path = self.global_config['Database']['path']
        self.log = Logger().get('reportserver.dao.DatabaseHandler.DatabaseHandler')

    # Connect to given database.
    # Defaults to the honeypot db, but another path can be passed in (mainly for testing).
    # Database needs to exist first.
    def connect(self, database_name):
        if database_name is None:
            database_name = self.db_path

        if not os.path.exists(database_name):
            self.log.error("Database does not exist in path: " + database_name)
            return None
        try:
            conn = sqlite3.connect(database_name)
        except sqlite3.OperationalError as oe:
            self.log.error("****Problem connecting to database*** at: " + database_name)
            self.log.error(oe)
        else:
            return conn

    # Query DB and return JSON
    def query_db(self, query, args=(), one=False, db=None):
        #print ("#debug args are: " +str(args))
        cur = self.connect(db).cursor()
        cur.execute(query, args)
        r = [dict((cur.description[i][0], value) \
                for i, value in enumerate(row)) for row in cur.fetchall()]
        cur.connection.close()
        return (r[0] if r else None) if one else r

    # Unit of Measure could be "weeks", "days", "hours", "minutes".
    # Return all data from the DB within that measure of time as JSON.
    def get_json_by_time(self, portnumber, uom, units):
        begin_date_iso = dateTimeUtility.get_begin_date_iso(uom, units)
        tableName = self.global_config.get_plugin_config(portnumber)['table']
        date_time_field = self.global_config.get_db_datetime_name()

        queryString = "SELECT * FROM %s where %s >= '%s' order by id, %s" % (tableName, date_time_field, begin_date_iso, date_time_field)
        self.log.info("queryString is: " + str(queryString))
        results = self.query_db(queryString)
        self.log.debug("results: " + str(results))

        return results
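
# Usage sketch (assumes the honeypot database and plugin config exist;
# 2222 is a hypothetical configured port):
handler = DatabaseHandler()
rows = handler.get_json_by_time(2222, 'days', 7)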
Example 19
def create_test_suite(directory: str):
    """
    构建测试集
    :param directory: 测试用例目录
    :return: testSuite
    """
    # 引用日志类
    log = Logger('构建测试集').get_logger()
    log.info('递归 {} 目录下所有测试用例'.format(directory))
    discover = unittest.defaultTestLoader.discover(
        directory,
        pattern='test_*.py',
        top_level_dir=constant.all_case_top_path
    )
    log.info('搜索完毕,共加载测试用例计{}个'.format(discover.countTestCases()))
    return discover
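
# Usage sketch (a hypothetical ./testcase directory under
# constant.all_case_top_path):
suite = create_test_suite('./testcase')
unittest.TextTestRunner(verbosity=2).run(suite)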
Example 20
class IpsManager:

    # Ips Manager: calls necessary managers and utilities to generate parameters for sql.
    #
    validPortNumbers = ()

    def __init__(self):
        self.g_config = GlobalConfig()
        self.valid_port_numbers = self.g_config.get_ports()
        self.date_time_field = self.g_config.get_db_datetime_name()
        self.log = Logger().get('reportserver.manager.IpsManager.py')


    def get_data(self, ipaddress, uom, unit):
        self.log.info("Retrieving ipaddress data: " + str(ipaddress) + "  uom:  " + uom + " size: " + str(unit))

        port_data = []

        for port in self.valid_port_numbers:
            results = self.get_json_by_ip(port, ipaddress, uom, unit)
            items = utilities.process_data(results)
            port_data.append({port:items})

        port_json = {
            'ipaddress': str(ipaddress),
            'timespan': uom + "=" + str(unit),
            'ports':port_data
        }
        return port_json


    def get_json_by_ip(self, portnumber, ipaddress, uom, units):
        begin_date_iso = dateTimeUtility.get_begin_date_iso(uom, units)
        table_name = self.g_config.get_plugin_config(portnumber)['table']
        date_time_field = self.g_config.get_db_datetime_name()

        queryString = "SELECT * FROM %s where %s >= '%s' and peerAddress = '%s' order by id, %s" % (
            table_name, date_time_field, begin_date_iso, ipaddress, date_time_field)
        self.log.info("queryString is: " + str(queryString))
        results = DatabaseHandler().query_db(queryString)
        self.log.debug("results: " + str(results))

        return results
Example 21
def add_inside_nodata_zone_column(gdf,
                                  nodata_zone: gpd.GeoDataFrame,
                                  log: Logger = None) -> gpd.GeoDataFrame:
    """Adds boolean column (nodata_zone) indicating whether the points in the gdf are within the given nodata_zone polygon.

    Args:
        gdf: A GeoDataFrame object of sampling points. 
        nodata_zone: A GeoDataFrame object with one feature in it. It must have one attribute (nodata_zone) with value 1. 
    """
    joined = gpd.sjoin(gdf, nodata_zone, how='left',
                       op='within').drop(['index_right'], axis=1)
    if log is not None:
        nodata_zone_count = len(joined[joined[S.nodata_zone] == 1])
        nodata_zone_share = round(100 * nodata_zone_count / len(gdf.index), 2)
        log.info(
            f'found {nodata_zone_count} ({nodata_zone_share} %) sampling points inside potential nodata zone'
        )
    return joined
def noise_graph_update(graph: ig.Graph, noise_csv_dir: str,
                       log: Logger) -> None:
    """Updates attributes noises and noise_source to graph.
    """

    noise_csvs = os.listdir(noise_csv_dir)

    for csv_file in noise_csvs:
        edge_noises = pd.read_csv(noise_csv_dir + csv_file)
        edge_noises[E.noise_source.name] = edge_noises[
            E.noise_source.name].replace({np.nan: ''})
        log.info(f'updating {len(edge_noises)} edge noises from ' + csv_file)
        for edge in edge_noises.itertuples():
            graph.es[getattr(edge, E.id_ig.name)][E.noises.value] = getattr(
                edge, E.noises.name)
            graph.es[getattr(edge,
                             E.id_ig.name)][E.noise_source.value] = getattr(
                                 edge, E.noise_source.name)
def export_data_from_elasticsearch(http,
                                   index='*',
                                   json_body='',
                                   size_data=100,
                                   from_data=0,
                                   limit=10000000):
    elastic_search_connect = Elasticsearch(http,
                                           retry_on_timeout=True,
                                           max_retries=3,
                                           timeout=3600000)
    try:
        body = json.loads(json_body)
    except Exception as error:
        Logger.error(u"查询请求body出错%s" % error)
        body = {}
    if limit < size_data:
        size_data = limit
    Logger.info(u'Query body: %s' % json.dumps(body))
    total_number = elastic_search_connect.count(index=index, body=body)
    Logger.info(u"Total record count: %d" % int(total_number['count']))
    if 'size' not in body:
        body['size'] = size_data
    if 'from' not in body:
        body['from'] = from_data
    else:
        body['from'] = 0
    data_number = 0
    while True:
        response = elastic_search_connect.search(index=index, body=body)
        if len(response['hits']['hits']) > 0:
            data = response['hits']['hits']
            for _source in data:
                yield _source['_source']
                data_number += 1
                if data_number == limit:
                    break
            else:
                body['from'] += size_data
                continue
            break
        else:
            break
    yield None
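
# Usage sketch (hypothetical host and index; the generator yields None once
# paging is exhausted, so stop on it):
for doc in export_data_from_elasticsearch('http://localhost:9200',
                                          index='logs-*',
                                          json_body='{}',
                                          size_data=500):
    if doc is None:
        break
    print(doc)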
Example 24
    def test_UpdateLogin(self):
        u"""修改登录密码"""
        print("*************执行修改登录密码用例**************")
        params = Params()
        self.driver.find_element_by_id("iv_head").click()    #点击进入个人中心页
        self.driver.find_element_by_id("rl_pwd_manage").click()   #点击密码管理
        try:
            self.driver.find_element_by_id("rl_reset_loginpwd").click()   #点击修改登录密码
            #异常情况
            self.driver.find_element_by_id("tv_confimchange").click()   #不输入,点击确认修改
            time.sleep(2)

            self.driver.find_element_by_id("et_originpwd").send_keys(params['loginPWD'])   # #输入原密码
            self.driver.find_element_by_id("et_pwd").send_keys(params['loginPWD'])    #输入新密码
            self.driver.find_element_by_id("et_queren_pwd").send_keys("111111")   #再次输入 不一致新密码
            self.driver.find_element_by_id("tv_confimchange").click()   #点击确认修改
            time.sleep(2)
            #正常情况
            self.driver.find_element_by_id("et_originpwd").send_keys(params['loginPWD'])   # #输入原密码
            time.sleep(2)
            self.driver.find_element_by_id("et_pwd").send_keys(params['loginPWD'])    #输入新密码
            time.sleep(2)
            self.driver.find_element_by_id("et_queren_pwd").send_keys(params['loginPWD'])   #再次输入新密码
            self.driver.find_element_by_id("tv_confimchange").click()   #点击确认修改
            time.sleep(2)

            exist = False
            try:
                self.driver.find_element_by_id("tv_confimchange").click()   #点击确认修改
                exist = True
            except:
                pass
        except:
            exist = True
            pic_name = get_current_function_name()
            #调用截图方法
            getScreen(self,pic_name)
            #写入日志
            logger=Logger(logname='log.txt',loglevel="INFO",logger=pic_name).getlog()
            logger.info("*************执行修改登录密码用例**************")
            logger.error(traceback.format_exc())
        self.assertEqual(exist,False)         #已跳转,不在修改页
Example 25
class LogMain():
    def __init__(self, title):
        self.flag = conf.logger_config
        print(self.flag)
        print(title)
        self.logger = Logger(logger=title).getlog()

    def run_info(self, message):
        """Logs the message when the flag is True.
        Usage:
        run_info('message')
        """
        if self.flag:
            self.logger.info(message)
Example 26
def get_mean_gsv_gvi_by_way_id(log: Logger,
                               gvi_list_by_way_id: Dict[int, List[float]],
                               edge_gdf: GeoDataFrame) -> Dict[int, float]:
    """Calculate mean GSV GVI for edges. Only edges (way IDs) for which enough
    GSV GVI point samples are found are included in the returned dictionary. 
    """
    edge_length_by_way_id = get_col_by_col_dict(edge_gdf, E.id_way.name,
                                                E.length.name)

    mean_gsv_gvi_by_way_id = {
        way_id: get_mean_edge_gsv_gvi(edge_length_by_way_id[way_id],
                                      gvi_list_by_way_id[way_id])
        for way_id in gvi_list_by_way_id
    }
    na_filtered = {
        way_id: gvi
        for way_id, gvi in mean_gsv_gvi_by_way_id.items() if gvi is not None
    }
    log.info(
        f'Got mean point GVI for {sample_ratio(len(edge_gdf), len(na_filtered))} % edges'
    )
    return na_filtered
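
# get_mean_edge_gsv_gvi is not shown in this example; judging from its use it
# returns the mean of the GVI samples when there are enough of them for the
# edge length, else None. A rough sketch under that assumption (the
# one-sample-per-30-m threshold is made up here, echoing the 30 m buffer used
# earlier):
def get_mean_edge_gsv_gvi(edge_length: float, gvi_list: List[float]):
    required = max(1, round(edge_length / 30))
    if len(gvi_list) < required:
        return None
    return sum(gvi_list) / len(gvi_list)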
Example 27
    def test_UpdatePay(self):
        u"""Change the payment password."""
        print("************* Running the change-payment-password case **************")
        params = Params()
        self.driver.find_element_by_id("iv_head").click()
        self.driver.find_element_by_id("rl_pwd_manage").click()     # tap password management
        try:
            self.driver.find_element_by_id('tv_changeTranPwd').click()  # tap change payment password
            self.driver.find_element_by_id('tv_getAuthCode').click()     # tap to request the SMS code
            zf_pwd = input("Enter the SMS verification code: ")    # entered manually
            time.sleep(8)
            self.driver.find_element_by_id('et_authcode').send_keys(zf_pwd)
            self.driver.find_element_by_id('tv_next').click()  # tap next
            time.sleep(2)
            try:
                # enter the payment password
                inputText(self, params['applyPWD'])    # delayed input
                time.sleep(2)
                inputText(self, params['applyPWD'])    # enter again to confirm
                time.sleep(2)
            except:
                pass
            exist = False
            try:
                self.driver.find_element_by_id("inputView").click()    # is the input box hidden?
                exist = True
            except:
                print("The two password entries did not match")
                pass
        except:
            exist = True
            pic_name = get_current_function_name()   # get the method name
            getScreen(self, pic_name)   # take a screenshot
            # write to the log
            logger = Logger(logname='log.txt', loglevel="INFO", logger=pic_name).getlog()
            logger.info("************* Running the change-payment-password case **************")
            logger.error(traceback.format_exc())
        self.assertEqual(exist, False)         # already navigated away from the edit page
Example 28
    def login(self):
        data = {
            "email": settings.config["user"]["email"],
            "password": settings.config["user"]["password"]
        }
        try:
            r = self.manager_server.connect("POST",
                                            "/auth/login/",
                                            data=json.dumps(data))
            Logger.info("Before login")
            if r.status_code == 200:
                user_info = r.json()
                if "one-token" in user_info:
                    self.token = user_info["one-token"]
                    self.headers = {
                        "one-token": self.token,
                        "Content-Type": "application/json"
                    }
                    Logger.info("Login Successful!")
                return r

        except Exception as ex:
            Logger.error(" web_service_api >>  login >> error " + str(ex))
Example 29
class KeyWordConsumer:
    def __init__(self, log_file, log_name):
        self.logger = Logger(log_file, log_name, 10*1024*1024, 2)
        self._db_conn = None

    def start(self):
        rabbit_topic = RabbitTopic.init_rabbitmq_consumer(EXCHANGE_NAME, QUEUE_NAME, QUEUE_LIMIT,
                                                          [ROUTING_KEY], self.logger)
        if not rabbit_topic:
            self.logger.debug('Construct key word consumer error')
            return

        self._conn_db()
        if not self._db_conn:
            self.logger.exception('Connect to database error')
            return

        while 1:
            try:
                rabbit_topic.start_consuming(self._callback, QUEUE_NAME)
            except ConnectionClosed:
                self.logger.debug('Connection to rabbitmq server closed, re-connecting...')
                rabbit_topic = RabbitTopic.init_rabbitmq_consumer(EXCHANGE_NAME, QUEUE_NAME, QUEUE_LIMIT,
                                                                  [ROUTING_KEY], self.logger)

    def _callback(self, channel, method, properties, key_word):
        self.logger.info(os.linesep)
        self.logger.info('----> Get body message %s and start searching this key word...<----' % key_word)
        try:
            url = 'https://play.google.com/store/search?q=%s&c=apps' % key_word
            search_web_driver = SearchWebDriver(url, self._db_conn, self.logger)
            search_web_driver.search()
        except Exception:
            self.logger.exception('Search key word %s error' % key_word)

        channel.basic_ack(delivery_tag=method.delivery_tag)

        self.logger.info('Set key word %s as consumed' % key_word)
        self._set_key_word_consumed(key_word)

    def _conn_db(self):
        try:
            self._db_conn = util.conn_mysql_db()
        except Exception:
            self.logger.exception('Connect to database error')

    def _set_key_word_consumed(self, key_word):
        query = 'UPDATE key_word SET status=%s WHERE key_word=%s'
        try:
            MySQLDBUtil.update(query, (CONSUMED, key_word), self._db_conn)
        except Exception:
            self.logger.exception('Set key word %s as consumed error' % key_word)
Example 30
def exec_test_to_generate_report(present_title: str, test_suite_or_case):
    """
    执行测试,生成测试报告
    :param present_title: 测试主题
    :param test_suite_or_case: 测试套件或用例
    :return: None
    """
    # 引用日志类
    log = Logger('执行测试,生成测试报告').get_logger()
    repo_name, repo_path_name = _create_report_path_and_name(present_title)

    # 调用第三方模块HTMLTestRunner执行测试,生成报告
    fp = open(repo_path_name, 'wb')
    runner = HTMLTestReportCN(stream=fp,
                              title=present_title,
                              description='用例执行情况:')
    log.info('开始执行测试')
    runner.run(test_suite_or_case)
    fp.close()
    log.info('测试执行完毕.')
    log.info('测试报告文件:%s' % repo_name)
    log.info('测试报告路径:%s' % repo_path_name)
Example 31
def encrypt_by_hashlib(en_string: str, en_mode: str):
    """
    封装hashlib哈希算法
    :param en_string: 需要加密的字符串
    :param en_mode: 加密算法名
    :return: 加密后的值
    """
    log = Logger('哈希算法加密').get_logger()
    mode = str(en_mode).lower().strip()
    log.info('执行{}加密:{}'.format(mode, en_string))
    if mode == 'md5':
        m = hashlib.md5()
        m.update(str(en_string).encode('utf-8'))
        value = m.hexdigest()
    elif mode == 'sha1':
        m = hashlib.sha1()
        m.update(str(en_string).encode('utf-8'))
        value = m.hexdigest()
    elif mode == 'sha224':
        m = hashlib.sha224()
        m.update(str(en_string).encode('utf-8'))
        value = m.hexdigest()
    elif mode == 'sha256':
        m = hashlib.sha256()
        m.update(str(en_string).encode('utf-8'))
        value = m.hexdigest()
    elif mode == 'sha384':
        m = hashlib.sha384()
        m.update(str(en_string).encode('utf-8'))
        value = m.hexdigest()
    elif mode == 'sha512':
        m = hashlib.sha512()
        m.update(str(en_string).encode('utf-8'))
        value = m.hexdigest()
    else:
        log.info('此函数目前还不支持{}算法,赶快去动手加入它吧!'.format(mode))
        sys.exit()
    log.info('{}加密成功,值为:{}'.format(mode, value))
    return value
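
# Usage sketch; the mode argument is case-insensitive:
digest = encrypt_by_hashlib('123456', 'MD5')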
Example 32
class Database:
    def __init__(self):
        self.global_config = GlobalConfig()
        self.log = Logger().get('database.database.Database')

    def create_default_database(self):
        """
        Calls methods needed to create the database.
        """
        self.create_db_dir()
        self.create_db()

        # Execute scripts BEFORE updating schema
        run_db_scripts(self.global_config)

        self.update_schema()
    
    def create_db_dir(self):
        """
        Creates the database directory if it doesn't already exist.
        """

        # if database directory does not exist create it
        db_path = self.global_config['Database']['path']
        (db_dir, db_name) = ntpath.split(db_path)
        if not os.path.isdir(db_dir):
            self.log.info("Database directory not found, "
                          "creating database directory...")
            os.mkdir(db_dir)
    
    def create_db(self):
        """
        Creates the database if it doesn't already exist.
        """

        # if database file does not exist in the directory, create it
        (db_dir, db_name) = ntpath.split(self.global_config['Database']['path'])
        if not os.path.exists(self.global_config['Database']['path']):
            self.log.info("Database file not found, creating database file...")

            # this actually creates the database file
            connection = sqlite3.connect(self.global_config['Database']['path'])
            connection.close()
    
    def update_schema(self):
        """
        Updates the database when columns have been added to, or
        removed from, the schema.
        """

        # Create any new tables that have been added to the plugin
        # config schema.
        db_tables = DataValidator().get_tables()
        cfg_tables = get_config_table_list(
            self.global_config.get_ports(),
            self.global_config.get_plugin_dictionary())
        table_diff = list(set(cfg_tables) - set(db_tables))
        self.create_non_exist_tables(table_diff)

        # Populate the newly created tables with their column
        # definitions.
        DataValidator().update_tables_and_schema()
        self.update_table_structure()

    def create_non_exist_tables(self, table_diff):
        """
        create tables that do not exist from the table difference between the current database and the configuration
        """
        if len(table_diff) > 0:
            for table in table_diff:
                Table_Init.create_table(table, self.global_config)
            self.log.info('Updated database schema, table names now match configuration.')
        else:
            self.log.info('Database Schema and Configuration table names already match.')
    
    def create_dict_config_column_list(self):
        """
        get a dictionary of tables and corresponding columns from the config
        """
        config_column_lists = {}
        for port in self.global_config.get_ports():
            value = self.global_config.get_plugin_dictionary().get(port)
            config_column_lists[value.get('table')] = value.get('tableColumns')
        return config_column_lists

    def create_dict_transformed_column_list(self, database_column_lists):
        """
        returns only custom plugin defined columns from database schema i.e.
        ignores default columns
        """
        transformed_db_column_list = {}
        for table in database_column_lists:
            col_list = database_column_lists[table]
            transformed_db_column_list[table] = []
            # default column ids to ignore
            default_list = []
            for default in default_columns:
                default_list.append(default[0])
            for column in col_list:
                # ignores the default columns
                if column[1] in default_list:
                    continue
                transformed_db_column_list[table].append([column[0],column[1],column[2]])
        return transformed_db_column_list
    
    def update_table_structure(self):
        cfg_schema = self.create_dict_config_column_list()
        db_schema = DataValidator().get_schema()
        db_schema_sans_defaults = self.create_dict_transformed_column_list(db_schema)

        for table in cfg_schema:
            if not [(x[1], x[2]) for x in cfg_schema[table]] == \
                   [(x[1], x[2]) for x in db_schema_sans_defaults[table]]:
                Table_Init.change_table_structure(
                    table, cfg_schema[table], db_schema[table],
                    self.global_config)
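
# Usage sketch (creates the directory, file and schema described above):
db = Database()
db.create_default_database()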
Example 33
    start_time = time.time()
    _datas, _labels = data_gen.__next__()
    r_lab, r_log, _alfa, _, _dis_total_loss, _lr, _acc = sess.run(
        [
            real_labels, real_logits, alfa, dis_optimize_ops, dis_total_loss,
            lr, accuracy
        ],
        feed_dict={
            real_datas: _datas,
            real_labels: _labels
        })
    logger.tick(iter)
    logger.info('klr', _lr * 1000)
    logger.info('acc', _acc * 100)
    logger.info('time', time.time() - start_time)

    logger.info('loss_dis_gan', _dis_total_loss)

    if (iter + 1) % 1000 == 0:
        logger.save()
        save_model(saver, sess, sCheckpointDir, step=iter)
        last_save_time = time.time()
        logger.log('Model Saved\n\n')

    logger.flush()
    last_log_time = time.time()
Example 34
from common.logger import Logger
from bs4 import BeautifulSoup
from time import sleep
import sys
import os
import unittest
sys.path.append(os.getcwd())

HOST = 'localhost'
PORT = 3306
USER = '******'
PASS = '******'
DBNAME = 'selenium'
CHARSET = 'utf8'
# global db
logger = Logger(logger="test_by12306.py").getlog()
logger.info("初始化日志模块完成")


class TestCase(unittest.TestCase):
    """Test case module"""
    def setUp(self):
        """unittest setUp fixture"""
        pass

    def tearDown(self):
        """unittest tearDown fixture"""
        logger.info("About to quit chromedriver")

    # def dbinit(self):
    def insert_data_sql(self, a, b):
        """Insert the scraped data into the database"""
Example 35
class AndroidEngine:
    """
    An Android engine: starts the appium server automatically, connects devices
    automatically (real devices can switch to a WiFi connection) and returns a driver.
    1. Before instantiating, make sure the simulator is running or the real device
       is connected to the computer with debugging mode enabled.
    2. Only a single device is supported for now; if several devices are connected,
       the first device in the connected-device list is used by default.
    """
    def __init__(self):
        self._log = Logger('Android engine').get_logger()
        # read the configuration
        self._reader = OperateConfig(constant.config_pro_app)
        self._if_wifi = self._reader.get_bool('server', 'if_wifi')
        self._android_mode = self._reader.get_str('server', 'android_mode')

    def get_driver(self):
        """
        Get a driver according to the configuration.
        :return: driver object
        """
        self._start_server()
        devices = self._get_device_names()
        version = self._get_android_version(devices[0])
        app_path = publicFunctions.get_apk_path()
        ports = eval(self._reader.get_str('connected', 'server_ports'))
        if self._android_mode == 'simulator':
            desired_caps = DesiredCaps.caps_android_simulator
            desired_caps['platformVersion'] = version
            desired_caps['deviceName'] = devices[0]
            desired_caps['app'] = app_path
            driver = self._get_driver(desired_caps, ports[0])
            return driver
        elif self._android_mode == 'machine':
            desired_caps = DesiredCaps.caps_android_machine
            desired_caps['platformVersion'] = version
            desired_caps['deviceName'] = devices[0]
            desired_caps['app'] = app_path
            driver = self._get_driver(desired_caps, ports[0])
            return driver
        else:
            self._log.error('Invalid launch mode configured, please check: {}'.format(self._android_mode))
            self._kill_server()
            sys.exit()

    def quit_driver(self, driver):
        """
        Quit the driver, disconnect the simulators and kill the node process.
        :param driver: driver object
        :return: None
        """
        if self._android_mode == 'simulator':
            self._disconnect_simulators()
        driver.quit()
        self._kill_server()
        sleep(3)
        self._log.info('Driver has quit')

    def _start_server(self):
        """
        Start the appium server automatically from the command line.
        :return: None
        """
        if self._android_mode == 'simulator':
            self._connect_simulators()
        devices = self._get_device_names()
        if self._if_wifi is True and self._android_mode == 'machine':
            self._switch_to_wifi()
            devices = [device for device in self._get_device_names() if ':' in device]
        commands = self._create_appium_commands(devices)
        for cmd in commands:
            cmd = r"start {}".format(cmd)
            os.system(cmd)
            sleep(3)
            self._log.info('appium server started: {}'.format(cmd))

    def _get_driver(self, desired_caps: dict, port: str):
        """
        Get a driver.
        :param desired_caps: connection capabilities
        :param port: server port
        :return: driver object
        """
        try:
            driver = webdriver.Remote(command_executor='http://127.0.0.1:{}/wd/hub'.format(port),
                                      desired_capabilities=desired_caps)
            sleep(1)
            self._log.info('appium server connected')
            return driver
        except WebDriverException as e:
            self._log.error('appium server connection failed: {}'.format(e))
            sys.exit()

    def _connect_simulators(self):
        """
        For simulators: call this after they start to connect them to the computer automatically.
        :return: None
        """
        simulators = self._reader.get_str('server', 'simulator').split(';')
        for simulator in simulators:
            cmd = 'adb connect {}'.format(simulator)
            os.system(cmd)
            self._log.debug('Simulator ({}) connected'.format(simulator))

    def _disconnect_simulators(self):
        """
        Disconnect all connected simulator devices.
        :return: None
        """
        devices = self._reader.get_str('server', 'simulator').split(';')
        for device in devices:
            cmd = 'adb disconnect {}'.format(device)
            os.system(cmd)
            self._log.debug('Device ({}) disconnected'.format(device))

    def _switch_to_wifi(self):
        """
        For real devices that should use WiFi mode: connect the phone to the computer over USB
        with debugging enabled, then call this method to switch to a WiFi connection.
        :return: None
        """
        devices = self._get_device_names()
        simulators = self._reader.get_str('server', 'simulator').split(';')
        machines = list(set(devices) - set(simulators))
        ports = self._create_useful_ports(5555, machines)
        for machine, port in zip(machines, ports):
            if str(port) in '|'.join(self._get_device_names()):
                cmd_1 = 'adb -s {} shell ip -f inet addr show wlan0'.format(machine)
                result_1 = self._execute_command(cmd_1)
                ip = re.search(r"inet\s(\d+\.\d+\.\d+\.\d+)", result_1).group(1)
                cmd_2 = 'adb -s {} tcpip {}'.format(machine, port)
                os.system(cmd_2)
                cmd_3 = 'adb connect {}:{}'.format(ip, port)
                result_2 = self._execute_command(cmd_3)
                if 'connected' in result_2:
                    self._log.debug('Device ({}) switched to a WiFi connection: {}'.format(machine, result_2.strip()))
                    self._log.warning('Please unplug the USB cable of device ({})!!'.format(machine))
                else:
                    self._log.error('Device ({}) failed to switch to a WiFi connection: {}'.format(machine, result_2.strip()))

    def _get_device_names(self):
        """
        Get the names of the connected Android devices.
        :return: list of Android device names
        """
        cmd = 'adb devices'
        result = self._execute_command(cmd)
        devices = re.findall(r"(.*[^\s])\s*device", result)
        devices.pop(0)
        if devices:
            self._log.debug('Connected device list: {}'.format(devices))
            return devices
        else:
            self._log.error('No Android device detected.')
            sys.exit()

    def _get_android_version(self, device: str):
        """
        Get the Android version of a connected device.
        :param device: device name
        :return: version number
        """
        cmd = f'adb -s {device} shell getprop ro.build.version.release'
        result = self._execute_command(cmd)
        self._log.debug('Device version: {}'.format(result))
        return result.strip()

    def _get_package_and_activity(self, apk_path=publicFunctions.get_apk_path()):
        """
        Get appPackage and appActivity automatically via the 'aapt' command.
        :param apk_path: path to the apk
        :return: appPackage and appActivity
        """
        sdk_path = self._get_sdk_path()
        adb_disk = sdk_path.split(':')[0]
        build_tools_path = os.path.join(sdk_path, 'build-tools')
        aapt_path = os.path.join(build_tools_path, os.listdir(build_tools_path)[0])
        cmd = f'{adb_disk}:&cd {aapt_path}&aapt dump badging {apk_path}'
        result = self._execute_command(cmd)
        package = re.search(r"package: name='([\w\\.]+)'", result).group(1)
        activity = re.search(r"launch.*activity: name='([\w\\.]+)'", result).group(1)
        return package, activity

    def _get_sdk_path(self):
        """
        Extract the Android SDK path from the PATH environment variable.
        :return: Android SDK path
        """
        path_env = os.environ['PATH']
        sdk_search = re.search(r'(.+?)\\platform-tools', path_env)
        if sdk_search:
            sdk_path = sdk_search.group(1).split(';')[-1]
            if '%' in sdk_path:
                sdk_path = os.environ[sdk_path.strip('%')]
            return sdk_path
        else:
            self._log.error('The Android SDK environment variable is not configured!!')
            exit()

    @staticmethod
    def _execute_command(cmd: str):
        """
        Execute a shell command.
        :param cmd: the command
        :return: the command-line output
        """
        with os.popen(cmd) as f:
            result = f.read()
        return result

    def _kill_server(self):
        """
        Kill the node process after each run.
        :return: None
        """
        cmd1 = 'tasklist | find "node.exe"'
        if self._execute_command(cmd1):
            cmd2 = 'taskkill -F -PID node.exe'
            self._execute_command(cmd2)
            self._log.info('Killed the appium server process')

    def _create_appium_commands(self, devices_list: list):
        """
        Build the appium command-line startup commands.
        :param devices_list: list of device names
        :return: list of cmd commands
        """
        p_port_list = self._create_useful_ports(4723, devices_list)
        bp_port_list = self._create_useful_ports(4900, devices_list)
        self._reader.write_data('connected', 'server_ports', str(p_port_list))
        cmd_list = ['appium -a 127.0.0.1 -p {} -bp {}'.format(
            p_port_list[i], bp_port_list[i]
            ) for i in range(len(devices_list))
        ]
        self._log.debug('Startup commands generated: {}'.format(cmd_list))
        return cmd_list

    def _create_useful_ports(self, start_port: int, devices_list: list):
        """
        Create as many available ports as there are connected devices.
        :param start_port: starting port
        :param devices_list: device list obtained from the command line
        :return: list of available ports
        """
        port_list = []
        while len(port_list) != len(devices_list):
            cmd = 'netstat -ano | findstr {}'.format(start_port)
            if not self._execute_command(cmd):
                port_list.append(start_port)
            start_port += 1
        self._log.debug('Available ports generated: {}'.format(port_list))
        return port_list
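
# Usage sketch (prerequisites as in the class docstring: simulator running or
# real device connected with debugging enabled):
engine = AndroidEngine()
driver = engine.get_driver()
try:
    pass  # drive the app under test here
finally:
    engine.quit_driver(driver)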
Example 36
class WorldmapServiceHandler():
    def __init__(self):
        self.log = Logger().get('reportserver.manager.WorldmapServiceManager.py')
        self.global_config = GlobalConfig()
        self.global_config.read_plugin_config()
        self.global_config.read_global_config()

    def process(self, rqst, path_tokens, query_tokens):
        global have_basemap
        if not have_basemap:
            err_msg = \
                ('<html><head><title>WorldMap</title></head><body>'
                'To enable WorldMap generation, please visit '
                '<a href="https://recce7.github.io/">the documentation</a> and '
                'follow the directions for installing the Basemap library.'
                '</body></html>')
            rqst.send_response(200)

            #todo make this configurable for allow-origin
            rqst.send_header("Access-Control-Allow-Origin","http://localhost:8000")
            rqst.send_header('Content-Type', 'text/html')
            rqst.send_header('Content-Length', len(err_msg))
            rqst.end_headers()
            rqst.flush_headers()

            rqst.wfile.write(bytes(err_msg, "utf-8"))

            rqst.wfile.flush()

            return

        uom = None
        units = None
        self.log.info("processing ipaddress request:" + str(path_tokens) + str(query_tokens))


        try:
            time_period = utilities.validate_time_period(query_tokens)
            uom = time_period[0]
            units = time_period[1]
        except ValueError:
            rqst.badRequest(units)
            return


        if len(path_tokens) >= 5:
            rqst.badRequest()
            return
        else:
            self.construct_worldmap(rqst, uom, units)

    def construct_worldmap(self, rqst, uom, units):
        #call to construct port list
        #find unique ips by port
        #merge the results together
        #build the map
        #probably want to look at the PortsServiceHandler.py or IpsServiceHandler.py to follow those patterns.
        ip_map = pickle.loads(pickle_bytes)

        pts = self.get_point_list(uom, units)
        for pt in pts:
            srclat, srclong = pt
            x, y = ip_map(srclong, srclat)
            plt.plot(x, y, 'o', color='#ff0000', ms=2.7, markeredgewidth=1.0)

        plt.savefig('reportserver/worldmap.png', dpi=600)

        img = Image.open('reportserver/worldmap.png')
        draw = ImageDraw.Draw(img)

        font = ImageFont.truetype(
            "/usr/share/fonts/truetype/dejavu/DejaVuSans-Bold.ttf", 175)
        draw.text((50, 50), "Unique IP addresses: last %s %s" % (units, uom),
                  (0, 0, 0), font=font)

        font = ImageFont.truetype(
            "/usr/share/fonts/truetype/dejavu/DejaVuSans-Bold.ttf", 125)
        draw.text((50, 325), "Total: %s" % (len(pts)),
                  (0, 0, 0), font=font)

        img.save("reportserver/worldmap.png")

        rqst.sendPngResponse("reportserver/worldmap.png", 200)

    def get_point_list(self, uom, units):
        begin_date = dateTimeUtility.get_begin_date_iso(uom, units)
        query_string = ('select lat,long '
                        'from ('
                            'select distinct lat,long,timestamp, ip '
                            'from ipInfo '
                            'where lat is not null '
                            'and long is not null '
                            'and datetime(timestamp) > datetime(\'' + begin_date + '\')'
                            ');')
        connection = sqlite3.connect(self.global_config['Database']['path'])
        cursor = connection.cursor()
        return cursor.execute(query_string).fetchall()
Example 37
class PortsServiceHandler():
    def __init__(self):
        self.log = Logger().get('reportserver.server.PortServiceHandler.PortServiceHandler')

    def process(self, rqst, path_tokens, query_tokens):
        uom = None
        units = None
        self.log.info("processing ports request:" + str(path_tokens) + str(query_tokens))

        if len(query_tokens) > 0:
            try:
                time_period = utilities.validate_time_period(query_tokens)
                uom = time_period[0]
                units = time_period[1]
            except ValueError:
                rqst.badRequest(units)
                return

        # default if we aren't given valid uom and units
        if uom is None or units is None:
            uom = "days"
            units = 1

        if len(path_tokens) == 5:
            portNbr = utilities.validate_port_number(path_tokens[4])
            self.log.debug("requested: " + str(portNbr))
            if portNbr is not None and 0 < portNbr < 9000:
                self.get_port_data_by_time(rqst, portNbr, uom, units)
            elif portNbr is None:
                # no port number parsed; return the summary list instead
                self.get_port_list_json(rqst, uom, units)
            else:
                rqst.badRequest()
                return
        elif len(path_tokens) == 4:
            self.get_port_list_json(rqst, uom, units)
        else:
            rqst.badRequest()
            return


    def get_port_list_json(self, rqst, uom, units):
        jsondata = self.construct_port_summary_list(rqst, uom, units)
        rqst.sendJsonResponse(jsondata, 200)

    def get_port_data_by_time(self, rqst, portnumber, uom, units):
        portmgr = PortManager()
        portjsondata = portmgr.getPort(portnumber, uom, units)
        if portjsondata is not None:
            # send response:
            rqst.sendJsonResponse(portjsondata, 200)
        else:
            rqst.notFound()

    def construct_port_summary_list(self, rqst, uom, units):
        g_config = GlobalConfig()
        plugins_dictionary = g_config.get_plugin_dictionary()

        json_list = []
        for key, val in plugins_dictionary.items():
            json_list.append(self.construct_port_summary(rqst, val['port'], val['table'], uom, units))

        return json_list

    def construct_port_summary(self, rqst, portnumber, tablename, uom, units):
        portmgr = PortManager()
        port_attacks = portmgr.get_port_attack_count(tablename, uom, units)
        unique_ips = portmgr.get_unique_ips(tablename, uom, units)
        timespan = uom+"="+str(units)

        response_json = {
            'port': str(portnumber),
            'total_attacks': str(port_attacks),
            'unique_ipaddresses': str(unique_ips),
            'timespan':timespan,
            'rel_link': rqst.get_full_url_path() + "/ports/" + str(portnumber)+"?" + timespan
        }

        return response_json
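
# A minimal sketch of the payload construct_port_summary_list() assembles.
# The plugin entries, counts, and relative link below are hypothetical
# stand-ins for GlobalConfig, PortManager, and rqst.get_full_url_path().
def _demo_port_summary_list():
    plugins_dictionary = {
        'http': {'port': 8080, 'table': 'http'},
        'telnet': {'port': 2323, 'table': 'telnet'},
    }
    timespan = 'days=1'
    json_list = []
    for val in plugins_dictionary.values():
        json_list.append({
            'port': str(val['port']),
            'total_attacks': '0',       # PortManager.get_port_attack_count(...)
            'unique_ipaddresses': '0',  # PortManager.get_unique_ips(...)
            'timespan': timespan,
            'rel_link': '/ports/' + str(val['port']) + '?' + timespan,
        })
    return json_list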
Example #38
class _Framework:
    def __init__(self, plugin_cfg_path, global_cfg_path):
        self._global_config = GlobalConfig(plugin_cfg_path, global_cfg_path)
        self._plugin_imports = {}
        self._listener_list = {}
        self._running_plugins_list = []
        self._data_manager = None
        self._shutting_down = False
        self._log = None
        self._pid = os.getpid()

    def start(self):
        self.set_shutdown_hook()
        print('Press Ctrl+C to exit.')
        if not self.drop_permissions():
            return

        self._global_config.read_global_config()

        self.start_logging()

        self._global_config.read_plugin_config()
        self._data_manager = DataManager()
        self._data_manager.start()

        self.start_listeners()

    def start_logging(self):
        log_path = self._global_config['Framework']['logName']
        log_level = self._global_config['Framework']['logLevel']
        self._log = Logger(log_path, log_level).get('framework.frmwork.Framework')
        self._log.info('RECCE7 started (PID %d)' % self._pid)

    @staticmethod
    def drop_permissions():
        if os.getuid() != 0:
            return True

        dist_name = os.getenv('RECCE7_OS_DIST')
        users_dict = {
            'centos': ('nobody', 'nobody'),
            'debian': ('nobody', 'nogroup')
        }
        if dist_name not in users_dict:
            print(
                'Unable to lower permission level - not continuing as\n'
                'superuser. Please set the environment variable\n'
                'RECCE7_OS_DIST to one of:\n\tcentos\n\tdebian\n'
                'or rerun as a non-superuser.')
            return False
        lowperm_user = users_dict[dist_name]
        nobody_uid = pwd.getpwnam(lowperm_user[0]).pw_uid
        nogroup_gid = grp.getgrnam(lowperm_user[1]).gr_gid

        os.setgroups([])
        os.setgid(nogroup_gid)
        os.setuid(nobody_uid)
        os.umask(0o077)

        return True

    def create_import_entry(self, port, name, clsname):
        imp = import_module('plugins.' + name)
        self._plugin_imports[port] = getattr(imp, clsname)

    def start_listeners(self):
        ports = self._global_config.get_ports()
        for port in ports:
            plugin_config = self._global_config.get_plugin_config(port)
            module = plugin_config['module']
            clsname = plugin_config['moduleClass']
            self.create_import_entry(port, module, clsname)

            address = self._global_config['Framework']['listeningAddress']
            listener = NetworkListener(address, plugin_config, self)
            listener.start()
            self._listener_list[port] = listener

    def set_shutdown_hook(self):
        signal.signal(signal.SIGINT, self.shutdown)

    def shutdown(self, *args):
        self._shutting_down = True

        self._log.debug('Shutting down network listeners')
        for listener in self._listener_list.values():
            listener.shutdown()

        self._log.debug('Shutting down plugins')
        for plugin in self._running_plugins_list:
            plugin.shutdown()

        self._log.debug('Shutting down data manager')
        self._data_manager.shutdown()

        print('Goodbye!')

    #
    # Framework API
    #

    def get_config(self, port):
        """
        Returns the configuration dictionary for the plugin
        running on the specified port.

        :param port: a port number associated with a loaded plugin
        :return: a plugin configuration dictionary
        """
        return self._global_config.get_plugin_config(port)

    def spawn(self, socket, config):
        """
        Spawns the plugin configured by 'config' with the provided
        (accepted) socket.

        :param socket: an open, accepted socket returned by
                       socket.accept()
        :param config: the plugin configuration dictionary describing
                       the plugin to spawn
        :return: a reference to the plugin that was spawned
        """
        # ToDo Throw exception if plugin class not found
        plugin_class = self._plugin_imports[config['port']]
        plugin = plugin_class(socket, config, self)
        plugin.start()
        self._running_plugins_list.append(plugin)
        return plugin

    def insert_data(self, data):
        """
        Inserts the provided data into the data queue so that it can
        be pushed to the database.

        :param data: data object to add to the database
        """
        self._data_manager.insert_data(data)

    def plugin_stopped(self, plugin):
        """
        Tells the framework that the specified plugin has stopped
        running and doesn't need to be shut down explicitly on program
        exit.

        :param plugin: a reference to a plugin
        """
        if self._shutting_down:
            return

        self._running_plugins_list.remove(plugin)
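
# A minimal, self-contained sketch of the shutdown-hook pattern used by
# set_shutdown_hook()/shutdown() above: a SIGINT handler flips a flag and
# the main loop exits cleanly. Names here are illustrative.
import signal
import time

_shutting_down = False

def _handle_sigint(signum, frame):
    global _shutting_down
    _shutting_down = True

def _demo_shutdown_hook():
    signal.signal(signal.SIGINT, _handle_sigint)
    print('Press Ctrl+C to exit.')
    while not _shutting_down:
        time.sleep(0.1)  # listener/plugin/data-manager cleanup goes here
    print('Goodbye!')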
Example #39
class NetworkListener(Thread):
    def __init__(self, listening_address, config, framework):
        super().__init__()
        self._config = config
        self._listening_address = listening_address
        self._port = config['port']
        self._framework = framework
        self._session_socket = None
        self._lock = Lock()
        self._running = False
        self.__connection_count = 0
        self._logger = Logger().get('framework.networklistener.NetworkListener')

    @property
    def connection_count(self):
        with self._lock:
            return self.__connection_count

    @connection_count.setter
    def connection_count(self, val):
        with self._lock:
            self.__connection_count = val

    # Override
    def run(self):
        self._running = True
        self._logger.info('%s plugin listener started on port %d'
                          % (self._config['moduleClass'], self._port))
        while self._running:
            # a fresh listening socket is created for every accepted
            # connection; SO_REUSEADDR allows immediate rebinding
            self._session_socket = socket.socket(
                socket.AF_INET, socket.SOCK_STREAM)
            self._session_socket.setsockopt(
                socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
            self._session_socket.bind((self._listening_address, self._port))
            self._session_socket.listen(1)
            self.start_listening(self._session_socket)
            self._session_socket.close()
            self.connection_count += 1
        self._logger.info('%s plugin listener on port %d shutting down'
                          % (self._config['moduleClass'], self._port))
        self._session_socket = None

    def start_listening(self, local_socket):
        try:
            (new_socket, addr) = local_socket.accept()
            if self._running:
                self._logger.info('New connection from %s on port %d'
                                  % (addr, self._port))
                self._framework.spawn(new_socket, self._config)
        except ConnectionAbortedError as e:
            if not self._running:
                return
            raise e
        except OSError as e:
            # errno 22 (EINVAL): the socket was shut down mid-accept
            # during listener shutdown
            if e.errno == 22 and not self._running:
                return
            raise e
        except Exception as e:
            self._logger.error('Error on connection: %s' % e)
            raise e

    def shutdown(self):
        self._running = False
        if self._session_socket:
            if platform.system() == 'Linux':
                self._session_socket.shutdown(socket.SHUT_RDWR)
            else:
                self._session_socket.close()
        self.join()
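
# A minimal, self-contained sketch of the accept loop used by run() and
# start_listening() above: bind with SO_REUSEADDR, block in accept(), hand
# the client socket off, and clean up. The port and one-line reply are
# illustrative stand-ins for the framework's plugin spawn.
import socket

def _demo_accept_once(address='127.0.0.1', port=9090):
    server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    server.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    server.bind((address, port))
    server.listen(1)
    client, addr = server.accept()  # blocks until a client connects
    try:
        client.sendall(b'hello\n')  # stand-in for framework.spawn(...)
    finally:
        client.close()
        server.close()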