def logger():
    """Return a logger bound to the configured run-log path."""
    run_log_path = config.log_run_path
    return log_helper.get_logger(run_log_path)
def get_run_logger(self):
    """Return the run-log logger object (backed by ``self.runLogPath``)."""
    run_log_path = self.runLogPath
    return log_helper.get_logger(run_log_path)
def logger():
    """Return a per-collector logger.

    The log file lives under ``config.log_collect_floder`` and its name
    embeds the module-level collector id and ip ("<id>_<ip>py.log").
    """
    # NOTE: '_id' and '_ip' are only read here, so the previous
    # 'global _id, _ip' declaration was unnecessary and has been removed
    # (global is required only for assignment, not reads).
    # NOTE(review): the filename looks like it may be missing a separator
    # before "py.log" (produces "<ip>py.log") -- confirm intended naming
    # before changing it; kept byte-identical here.
    log_path = os.path.join(config.log_collect_floder, str(_id) + '_' + _ip + 'py.log')
    return log_helper.get_logger(log_path)
from typing import Tuple

import numpy as np
from sklearn import datasets
from sklearn.datasets import fetch_openml

from helper.log_helper import get_logger

logger = get_logger(__name__)

# Dataset sizes used to clamp the requested sample count.
_MNIST_SIZE = 70000
_IRIS_SIZE = 150


def get_mnist_data(n: int = 70000) -> Tuple[np.ndarray, np.ndarray]:
    """Fetch the MNIST dataset from openml.org via sklearn.

    :param n: maximum number of samples to return (clamped to 70000).
    :return: data as x and target as y, each truncated to n samples.
    """
    logger.info("collecting data...")
    mnist = fetch_openml('mnist_784')
    x, y = mnist["data"], mnist["target"]
    logger.info("data collected.")
    n = min(n, _MNIST_SIZE)
    return x[:n], y[:n]


def get_iris_data(n: int = 150) -> Tuple[np.ndarray, np.ndarray]:
    """Load the iris dataset via sklearn.

    :param n: maximum number of samples to return (clamped to 150).
    :return: data as x and target as y, each truncated to n samples.
    """
    logger.info("collecting data...")
    iris = datasets.load_iris()
    x, y = iris["data"], iris["target"]
    logger.info("data collected.")
    # BUG FIX: the clamp was min(n, 70000), copy-pasted from the MNIST
    # helper; the iris dataset has only 150 samples.
    n = min(n, _IRIS_SIZE)
    return x[:n], y[:n]
def get_logger():
    """Return the module-level logger, creating it lazily on first call.

    Initialises the module-global ``_logger`` from ``config.log_path``
    the first time it is requested, then returns the cached instance.
    """
    # BUG FIX: the original assigned '_logger' without declaring it
    # global, which made the name local to the function -- the read in
    # the 'if' then raised UnboundLocalError on every call.
    global _logger
    if _logger is None:  # 'is None' instead of 'None == ...'
        _logger = log_helper.get_logger(config.log_path)
    return _logger
# --- Facebook comments fetcher: module-level setup (runs at import time) ---
import facebook
import pika
import configparser
import json
import arrow
import pprint
from helper import facebook_helper as fb
from helper import log_helper
from helper import rabbitmq_helper as rabbit
from helper import mongo_helper as mongodb
from pymongo.errors import DuplicateKeyError

# Named logger for this fetcher process.
logger = log_helper.get_logger('facebook-comments-fetcher')

# Runtime configuration loaded from the production INI file.
config = configparser.ConfigParser()
config.read('config/production.ini')

# MongoDB collections for top-level comments and their replies.
mongo_client = mongodb.get_mongo_client(config['mongodb']['uri'])
comments_collection = mongo_client.guess_what_facebook.comments
reply_comments_collection = mongo_client.guess_what_facebook.reply_comments

# RabbitMQ channel for the fetch-job queue.
# NOTE(review): opening the channel at import time assumes the broker is
# reachable whenever this module is imported -- confirm that is intended.
queue_channel = rabbit.get_rabbit_channel(
    user=config['rabbitmq']['user'],
    password=config['rabbitmq']['pass'],
    host=config['rabbitmq']['host'],
    port=int(config['rabbitmq']['port']),
)

# Facebook Graph API client authenticated with the configured user token.
graph = fb.get_facebook_graph(access_token=config['facebook']['user_token'])
def get_alarm_logger(self):
    """Return the logger for collected alarm data (backed by ``self.logPath``)."""
    alarm_log_path = self.logPath
    return log_helper.get_logger(alarm_log_path)
def get_logger():
    """Return the module-level logger, creating it lazily on first call.

    Initialises the module-global ``_logger`` from ``config.log_path``
    the first time it is requested, then returns the cached instance.
    """
    # BUG FIX: the original assigned '_logger' without declaring it
    # global, which made the name local to the function -- the read in
    # the 'if' then raised UnboundLocalError on every call.
    global _logger
    if _logger is None:  # 'is None' instead of 'None == ...'
        _logger = log_helper.get_logger(config.log_path)
    return _logger
import facebook import pika import configparser import json import arrow import pprint from helper import facebook_helper as fb from helper import log_helper from helper import rabbitmq_helper as rabbit from helper import mongo_helper as mongodb from pymongo.errors import DuplicateKeyError logger = log_helper.get_logger('facebook-posts-fetcher') QUEUE_EXCHANGE = "fb:posts" LOWER_POST_DATE = arrow.get('2017-06-30 17:00:00', 'YYYY-MM-DD HH:mm:ss') UPPER_POST_DATE = arrow.get('2017-08-31 17:00:00', 'YYYY-MM-DD HH:mm:ss') # LOWER_POST_DATE = arrow.get('2017-08-31 17:00:00', 'YYYY-MM-DD HH:mm:ss') # UPPER_POST_DATE = arrow.get('2017-10-01 17:00:00', 'YYYY-MM-DD HH:mm:ss') config = configparser.ConfigParser() config.read('config/production.ini') mongo_client = mongodb.get_mongo_client(config['mongodb']['uri']) posts_collection = mongo_client.guess_what_facebook.posts queue_channel = rabbit.get_rabbit_channel( user=config['rabbitmq']['user'], password=config['rabbitmq']['pass'], host=config['rabbitmq']['host'], port=int(config['rabbitmq']['port']),