def main():
    """Entry point: load config, graph and AQI readings, then train a DCRNN.

    Reads the JSON config named by ``FLAGS.config_filename``, loads the sensor
    graph and the CSV of readings, lets command-line flags override config
    values, and runs ``DCRNNSupervisor.train`` inside a TF session.
    """
    # Reads graph data.
    with open(FLAGS.config_filename) as f:
        supervisor_config = json.load(f)
    logger = log_helper.get_logger(supervisor_config.get('base_dir'),
                                   'info.log')

    logger.info('Loading graph from: ' + FLAGS.graph_pkl_filename)
    sensor_ids, sensor_id_to_ind, adj_mx = load_graph_data(
        FLAGS.graph_pkl_filename)
    # Sparsify the adjacency matrix: drop weak edges (weight < 0.1).
    adj_mx[adj_mx < 0.1] = 0

    logger.info('Loading traffic data from: ' + FLAGS.traffic_df_filename)
    traffic_reading_df = pd.read_csv(FLAGS.traffic_df_filename)
    # NOTE(review): this AQI column list is currently unused — the selection
    # below uses the ``sensor_ids`` returned by load_graph_data. Confirm
    # whether it was meant to drive the column selection instead.
    sensors_ids = [
        'time_stamp', 'aqi_W San Gabriel Vly', 'aqi_E San Fernando Vly',
        'aqi_SW Coastal LA', 'aqi_San Gabriel Mts', 'aqi_SW San Bernardino',
        'aqi_Southeast LA CO', 'aqi_South Coastal LA', 'aqi_Central LA CO',
        'aqi_NW Coastal LA', 'aqi_Santa Clarita Vly', 'aqi_W San Fernando Vly',
        'aqi_E San Gabriel V-2'
    ]
    # ``.loc`` replaces the ``.ix`` indexer, which was removed in pandas 1.0.
    traffic_reading_df = traffic_reading_df.loc[:, sensor_ids]

    # Command-line flags override values loaded from the config file.
    supervisor_config['use_cpu_only'] = FLAGS.use_cpu_only
    if FLAGS.log_dir:
        supervisor_config['log_dir'] = FLAGS.log_dir
    if FLAGS.use_curriculum_learning is not None:
        supervisor_config[
            'use_curriculum_learning'] = FLAGS.use_curriculum_learning
    if FLAGS.loss_func:
        supervisor_config['loss_func'] = FLAGS.loss_func
    if FLAGS.filter_type:
        supervisor_config['filter_type'] = FLAGS.filter_type
    # Overwrites space with specified parameters. A negative flag value means
    # "keep whatever the config file says".
    for name in [
            'batch_size', 'cl_decay_steps', 'epochs', 'horizon',
            'learning_rate', 'l1_decay', 'lr_decay', 'lr_decay_epoch',
            'lr_decay_interval', 'min_learning_rate', 'patience', 'seq_len',
            'test_every_n_epochs', 'verbose'
    ]:
        if getattr(FLAGS, name) >= 0:
            supervisor_config[name] = getattr(FLAGS, name)

    tf_config = tf.ConfigProto()
    if FLAGS.use_cpu_only:
        tf_config = tf.ConfigProto(device_count={'GPU': 0})
    tf_config.gpu_options.allow_growth = True
    with tf.Session(config=tf_config) as sess:
        supervisor = DCRNNSupervisor(traffic_reading_df=traffic_reading_df,
                                     adj_mx=adj_mx, config=supervisor_config)
        supervisor.train(sess=sess)
def _init_logging(self):
    """Prepare the run directory, the logger and the summary writer.

    When ``log_dir`` is present in the config it is reused as-is and the
    run id is taken from the directory's final path component; otherwise a
    fresh run id is generated and a directory is created under ``base_dir``.
    """
    base_dir = self._get_config('base_dir')
    log_dir = self._get_config('log_dir')
    if log_dir is not None:
        # Reuse the caller-supplied directory; derive the run id from its name.
        run_id = os.path.basename(os.path.normpath(log_dir))
    else:
        # No directory given: generate one under base_dir from the config.
        run_id = self._generate_run_id(self._config)
        log_dir = os.path.join(base_dir, run_id)
        if not os.path.exists(log_dir):
            os.makedirs(log_dir)
    self._log_dir = log_dir
    self._logger = log_helper.get_logger(self._log_dir, run_id)
    self._writer = tf.summary.FileWriter(self._log_dir)
def main():
    """Entry point: load config, graph and traffic readings, then train a DCRNN.

    Reads the YAML config named by ``FLAGS.config_filename``, loads the sensor
    graph and the HDF5 traffic DataFrame, lets command-line flags override
    config values, and runs ``DCRNNSupervisor.train`` inside a TF session.
    """
    # Reads graph data. SafeLoader avoids arbitrary object construction and
    # the PyYAML "calling yaml.load() without Loader" deprecation warning.
    with open(FLAGS.config_filename) as f:
        supervisor_config = yaml.load(f, Loader=yaml.SafeLoader)
    logger = log_helper.get_logger(supervisor_config.get('base_dir'),
                                   'info.log')

    logger.info('Loading graph from: ' + FLAGS.graph_pkl_filename)
    sensor_ids, sensor_id_to_ind, adj_mx = load_graph_data(
        FLAGS.graph_pkl_filename)
    # Sparsify the adjacency matrix: drop weak edges (weight < 0.1).
    adj_mx[adj_mx < 0.1] = 0

    logger.info('Loading traffic data from: ' + FLAGS.traffic_df_filename)
    traffic_reading_df = pd.read_hdf(FLAGS.traffic_df_filename)
    # ``.loc`` replaces the ``.ix`` indexer, which was removed in pandas 1.0.
    traffic_reading_df = traffic_reading_df.loc[:, sensor_ids]

    # Command-line flags override values loaded from the config file.
    supervisor_config['use_cpu_only'] = FLAGS.use_cpu_only
    if FLAGS.log_dir:
        supervisor_config['log_dir'] = FLAGS.log_dir
    if FLAGS.use_curriculum_learning is not None:
        supervisor_config[
            'use_curriculum_learning'] = FLAGS.use_curriculum_learning
    if FLAGS.loss_func:
        supervisor_config['loss_func'] = FLAGS.loss_func
    if FLAGS.filter_type:
        supervisor_config['filter_type'] = FLAGS.filter_type
    # Overwrites space with specified parameters. A negative flag value means
    # "keep whatever the config file says".
    for name in [
            'batch_size', 'cl_decay_steps', 'epochs', 'horizon',
            'learning_rate', 'l1_decay', 'lr_decay', 'lr_decay_epoch',
            'lr_decay_interval', 'min_learning_rate', 'patience', 'seq_len',
            'test_every_n_epochs', 'verbose'
    ]:
        if getattr(FLAGS, name) >= 0:
            supervisor_config[name] = getattr(FLAGS, name)

    tf_config = tf.ConfigProto()
    if FLAGS.use_cpu_only:
        tf_config = tf.ConfigProto(device_count={'GPU': 0})
    tf_config.gpu_options.allow_growth = True
    with tf.Session(config=tf_config) as sess:
        supervisor = DCRNNSupervisor(traffic_reading_df=traffic_reading_df,
                                     adj_mx=adj_mx, config=supervisor_config)
        supervisor.train(sess=sess)
from etc.config import ( API_TIMEOUT, EXIT_WHEN_REQUEST_EXCEPTION, EXIT_WHEN_WHEN_SUCCESS, NORMAL_SLEEPING_TIME, SLEEPING_TIME_WHEN_SUCCESS, TICKETS_LEFT_API, TLS_VERIFY, TLS_WARNING, ) from etc.rule_settings import SETTINGS from lib.log_helper import get_logger from lib.parser import parse_response from lib.rule import Rule, filter_rule_settings LOG = get_logger("daemon") if not TLS_WARNING: urllib3.disable_warnings() def main(): rule_settings = filter_rule_settings(SETTINGS) rule = Rule(rule_settings) while True: try: res = requests.get(TICKETS_LEFT_API, verify=TLS_VERIFY, timeout=API_TIMEOUT) except RequestException as e:
# -*- coding:utf-8 -*- import smtplib from email.mime.text import MIMEText from etc.config import ( MAIL_HOST, MAIL_PORT, MAIL_SENDER_USERNAME, MAIL_SENDER_AUTH_KEY, MAIL_RECEIVER_USERNAME, ) from lib.log_helper import get_logger LOG = get_logger("email") def send_email(content): """ 默认使用126smtp, etc/config中修改 :param content: email content :return: """ msg = MIMEText(content) msg['Subject'] = "[抢票器] 发现剩余号源" # title msg['From'] = MAIL_SENDER_USERNAME msg['To'] = MAIL_RECEIVER_USERNAME smtp = None try:
# coding: utf-8 import sys from lib.log_helper import get_logger LOG = get_logger("task") def filter_rule_settings(rule_settings): """过滤掉SETTINGS中的空白字段 :type rule_settings: dict :return: rule_settings without blank fields :type: dict """ try: return dict( filter(lambda key_value: len(key_value[1]) > 0, rule_settings.items())) except Exception as e: LOG.error( "[lib task] wrong field type in task_conf.py, please check the config! error: {}" .format(e)) return dict() class Rule(object): field_func_mapping_table = { "sorts": "positive_selection",
# coding: utf-8 import sys from bs4 import BeautifulSoup from etc.config import EXIT_WHEN_PARSE_EXCEPTION from lib.email_helper import send_email from lib.log_helper import get_logger LOG = get_logger("html_parser") def parse_response(html_text, rule): """ :param html_text: text/html (bytes or unicode) :param rule: instance of lib.rule.Rule :rtype: bool """ try: soup = BeautifulSoup(html_text, "html.parser") except Exception as e: LOG.error("[html parser] error: {}".format(e)) if EXIT_WHEN_PARSE_EXCEPTION: sys.exit() return False # 根据class标签匹配所有剩余票号 element_set = soup.find_all( "div", attrs={'class': ['rbk-listitem', 'rbk-table-view-cell']})