def make_output_dir(args):
    assert args.run_mode == 'train', 'output files are only saved in train mode'
    output_dir = pathlib.Path(args.output_dir)
    assert output_dir.is_dir()
    time_str = datetime.now().strftime('_%Y-%m-%d-%H-%M-%S')
    output_dir = output_dir / (pathlib.Path(args.config_file).stem + time_str)
    if output_dir.exists():
        raise RuntimeError(f'{output_dir} exists! (maybe file or dir)')
    else:
        output_dir.mkdir()
        # copy the config file into the output dir so config and results stay consistent
        shutil.copyfile(args.config_file,
                        str(output_dir / pathlib.Path(args.config_file).name))
        # copy the graph vocab file into the output dir:
        shutil.copyfile(
            args.graph_vocab_file,
            str(output_dir / pathlib.Path(args.graph_vocab_file).name))
        (output_dir / 'model').mkdir()
        args.output_dir = str(output_dir)
        args.dev_output_path = str(output_dir / 'dev_output_conllu.txt')
        args.dev_result_path = str(output_dir / 'dev_best_metrics.txt')
        args.test_output_path = str(output_dir / 'test_output_conllu.txt')
        args.test_result_path = str(output_dir / 'test_metrics.txt')
        args.output_model_dir = str(output_dir / 'model')
        args.summary_dir = str(output_dir / 'summary')
        init_logger(args.log_name, str(output_dir / 'parser.log'))
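
# A minimal usage sketch, not from the original project: it assumes init_logger,
# pathlib, shutil and datetime are imported as above, and the config/vocab paths
# below are hypothetical placeholders used purely for illustration.
import argparse

example_args = argparse.Namespace(
    run_mode='train',
    output_dir='outputs',                      # must already exist
    config_file='configs/parser_config.json',  # hypothetical path
    graph_vocab_file='data/graph_vocab.txt',   # hypothetical path
    log_name='parser',
)
# make_output_dir(example_args)  # would create outputs/parser_config_<timestamp>/ with model/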
Example #2
def main():
    delay = 15
    logger.init_logger(f'logs/{settings.NAME}.log')
    s = requests.Session()
    software_names = [SoftwareName.CHROME.value]
    operating_systems = [
        OperatingSystem.WINDOWS.value, OperatingSystem.LINUX.value
    ]

    user_agent_rotator = UserAgent(software_names=software_names,
                                   operating_systems=operating_systems,
                                   limit=100)
    # user_agent = 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/85.0.4183.102 ' \
    #              'Safari/537.36 '
    user_agent = user_agent_rotator.get_random_user_agent()
    s.headers.update({'User-Agent': user_agent})
    proxy = settings.PROXIES if settings.USE_PROXY else None
    try:
        with open('assets/cookie', 'rb') as f:
            s.cookies.update(pickle.load(f))
    except Exception as e:
        logging.warning(str(e))

    films = []
    r = re.compile(r'\d+')
    page_num = 1
    while True:

        page = s.get(
            f'https://www.kinopoisk.ru/popular/films/2018/?page={page_num}&quick_filters=films&tab=all',
            proxies=proxy)
        tree = html.fromstring(page.text)
        films_buf = [
            r.findall(film.attrib['href'])[0]
            for film in tree.find_class('selection-film-item-meta__link')
        ]
        logging.debug(f'Added {len(films_buf)} films')
        if len(films_buf) == 0:
            break
        films.extend(films_buf)
        page_num += 1
        time.sleep(delay)

    for i in range(len(films)):
        try:
            data = get_film_data(s, films[i], delay, proxy)
            insert_film(conn.conn, data)
            logging.debug(
                f'Film {data["title"]} processed {i}/{len(films)}')
        except Exception:
            logging.debug(f'Film not processed, error! {i}/{len(films)}')

        time.sleep(delay)
    with open('assets/cookie', 'wb') as f:
        pickle.dump(s.cookies, f)
Example #3
def main():
    logger.init_logger(f'logs/{settings.NAME}.log')
    updater = Updater(BOT_TOKEN, use_context=True)
    dispatcher = updater.dispatcher

    load_handlers(updater.dispatcher)

    # # dispatcher.add_handler(CommandHandler(GET_NOTIFICATIONS, get_notifications_callback))

    if settings.WEBHOOK:
        # signal.signal(signal.SIGINT, graceful_exit)
        updater.start_webhook(**settings.WEBHOOK_OPTIONS)
        # updater.bot.set_webhook(url=settings.WEBHOOK_URL)
        updater.bot.set_webhook(os.environ.get("APP_NAME") + BOT_TOKEN)
    else:
        updater.start_polling()

    # Run the bot until you press Ctrl-C or the process receives SIGINT,
    # SIGTERM or SIGABRT. This should be used most of the time, since
    # start_polling() is non-blocking and will stop the bot gracefully.
    updater.idle()
Example #4
def eval(args):
    logger = init_logger(args.log_dir, args)

    if args.phase == 'protoeval':
        learner = ProtoLearner(args, mode='test')
    elif args.phase == 'mptieval':
        learner = MPTILearner(args, mode='test')
    else:
        raise ValueError('Unknown eval phase: %s' % args.phase)

    #Init dataset, dataloader
    TEST_DATASET = MyTestDataset(args.data_path, args.dataset, cvfold=args.cvfold,
                                 num_episode_per_comb=args.n_episode_test,
                                 n_way=args.n_way, k_shot=args.k_shot, n_queries=args.n_queries,
                                 num_point=args.pc_npts, pc_attribs=args.pc_attribs,  mode='test')
    TEST_CLASSES = list(TEST_DATASET.classes)
    TEST_LOADER = DataLoader(TEST_DATASET, batch_size=1, shuffle=False, collate_fn=batch_test_task_collate)

    test_loss, mean_IoU = test_few_shot(TEST_LOADER, learner, logger, TEST_CLASSES)

    logger.cprint('\n=====[TEST] Loss: %.4f | Mean IoU: %f =====\n' %(test_loss, mean_IoU))
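
# The evaluation snippet above calls logger.cprint(...); the project's init_logger is not
# shown in this excerpt, so the class below is only a guess at its shape: an object that
# echoes a message to stdout and appends it to a log file inside log_dir.
import os


class CPrintLogger:
    def __init__(self, log_dir, filename='train_log.txt'):
        os.makedirs(log_dir, exist_ok=True)
        self.log_file = open(os.path.join(log_dir, filename), 'a')

    def cprint(self, text):
        print(text)                        # echo to console
        self.log_file.write(text + '\n')   # and persist to the log file
        self.log_file.flush()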
Example #5
class CitiesController(object):
    """
    :type dao: cities.interfaces.IDao
    :type downloader: cities.interfaces.IDownloader
    """
    DOWNLOAD_URL = 'http://bulk.openweathermap.org/sample/city.list.json.gz'
    SEARCH_MIN_CHARS = 3
    SEARCH_LIMIT = 8

    adapter = OpenWeatherMongoAdapter()
    async_controller = AsyncController()
    dao = NotImplemented
    downloader = NotImplemented
    logger = init_logger('cities_updater')

    def update_cities_asynchronously(self):
        from cities.tasks import update_cities
        async_result = update_cities.delay()
        return self.async_controller.get_async_task_status_by_result(
            async_result)

    def update_cities(self):
        cities_json = self.downloader.download(self.DOWNLOAD_URL)
        self._update_db(cities_json)

    def _update_db(self, cities_json):
        timestamp = time()
        cities_json = self.adapter.prepare_to_mongo(cities_json, timestamp)
        # TODO: Create the Cities model in the future, since we will probably need it in more places
        for city in cities_json:
            try:
                self.dao.upsert_city(city)
            except Exception as e:
                self.logger.error('Error upserting {} - {}: {}'.format(
                    city['_id'], e.__class__.__name__, e))

        self.dao.delete_old_records(timestamp)

    def search_cities(self, query):
        if len(query) < self.SEARCH_MIN_CHARS:
            return []
        cities = self.dao.get_by_partial_name(query, self.SEARCH_LIMIT)
        return self.adapter.prepare_to_open_weather(cities)
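
# A sketch of a downloader compatible with CitiesController.update_cities, assuming the
# cities.interfaces.IDownloader contract only needs a download(url) method returning the
# parsed JSON (the OpenWeather dump at DOWNLOAD_URL is a gzipped JSON file).
import gzip
import json

import requests


class GzipJsonDownloader:
    def download(self, url):
        response = requests.get(url, timeout=60)
        response.raise_for_status()
        return json.loads(gzip.decompress(response.content).decode('utf-8'))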
Example #6
    def __init__(self):
        self.cfg = self.init_config()
        print('Using config:')
        pprint.pprint(self.cfg)

        self.logDir = Path(self.cfg.root).joinpath('experiment/log').joinpath(self.cfg.name)
        self.logDir.mkdir(parents=True, exist_ok=True)
        self.logger = init_logger(self.cfg, self.logDir)

        self.best_prec1 = 0

        # model
        self.model = get_model(self.cfg, num_classes=self.cfg.nbr_class, pool_size=self.cfg.Trans.crop_size // 32)
        # todo: model parallel

        # data
        self.train_loader, self.val_loader = get_dataloader(self.cfg)

        # optimizer
        self.optimizer = paddle.optimizer.Momentum(learning_rate=self.cfg.Train.lr,
                                                   parameters=self.model.parameters(),
                                                   weight_decay=self.cfg.Train.weight_decay,
                                                   momentum=self.cfg.Train.momentum)

        # self.scheduler = paddle.optimizer.lr.StepDecay(learning_rate=self.cfg.Train.lr, step_size=30,
        #                                                gamma=self.cfg.Train.step_ratio, verbose=True)

        self.scheduler = paddle.optimizer.lr.NaturalExpDecay(learning_rate=self.cfg.Train.lr, gamma=0.95, verbose=True)
        # criterion
        self.criterion = paddle.nn.CrossEntropyLoss()

        if self.cfg.Train.resume is not None:
            # load
            layer_state_dict = paddle.load(self.cfg.Train.resume[0])
            opt_state_dict = paddle.load(self.cfg.Train.resume[1])

            self.model.set_state_dict(layer_state_dict)
            self.optimizer.set_state_dict(opt_state_dict)

        # log files
        shutil.copytree('../dla/', self.logDir, dirs_exist_ok=True)
Example #7
def main():
    args = get_args()
    logger = init_logger()
    redis = StrictRedis(REDIS_HOST)

    while True:
        imgs = redis.lrange(IMAGE_QUEUE, 0, args.batch_size - 1)

        if not imgs:
            time.sleep(args.sleep)
            continue

        redis.ltrim(IMAGE_QUEUE, len(imgs), -1)
        img_ids, img_list = [], []

        for img in imgs:
            img = json.loads(img.decode('utf-8'))
            img_dec = image.base64_decode(img['b64'])
            img_list.append(img_dec)
            img_ids.append(img['id'])
            logger.info(f'received image {img["id"]}')

        res_lists = predictor.predict(img_list, TOP_LABEL_NUM)

        for img_id, res_list in zip(img_ids, res_lists):
            results = {
                'predictions': [],
                'predicted_at': time.strftime('%Y%m%d_%H%M%S')
            }

            for _, label, proba in res_list:
                pred = {'label': label, 'proba': float(proba)}
                results['predictions'].append(pred)

            redis.set(img_id, json.dumps(results))
            logger.info(f'store results for image {img_id}')
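
# A client-side sketch matching the queue contract implied above: each entry pushed onto
# IMAGE_QUEUE is a JSON object with a unique 'id' and a base64-encoded image under 'b64',
# and the worker later stores the predictions under that id. The queue name and polling
# interval used here are assumptions, not part of the original module.
import base64
import json
import time
import uuid


def submit_image(redis_client, image_path, queue_name='image_queue', poll_interval=0.2):
    img_id = str(uuid.uuid4())
    with open(image_path, 'rb') as f:
        payload = {'id': img_id, 'b64': base64.b64encode(f.read()).decode('ascii')}
    redis_client.rpush(queue_name, json.dumps(payload))  # enqueue for the worker
    while True:                                          # poll until the worker answers
        raw = redis_client.get(img_id)
        if raw is not None:
            return json.loads(raw)
        time.sleep(poll_interval)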
Example #8
    # init parameters
    parser = argparse.ArgumentParser()
    parser.add_argument('-c', '--config', default='configs/sky_timelapse.json')
    args = parser.parse_args()
    with open(args.config) as f:
        cfg = EasyDict(json.load(f))
    # init seed
    init_seed(cfg.train.seed)

    # init logger
    if cfg.save_root == '':
        curr_time = datetime.datetime.now().strftime("%y%m%d%H%M%S")
        cfg.save_root = Path(
            cfg.train.checkpoints) / cfg.train.name / curr_time
        cfg.save_root.makedirs_p()
    logger = init_logger(log_dir=cfg.save_root)
    logger.info('=> training: will save everything to {}'.format(
        cfg.save_root))

    # show configurations
    cfg_str = pprint.pformat(cfg)
    logger.info('=> configurations \n ' + cfg_str)

    # create datasets
    train_loader, valid_loader = get_dataset(cfg)

    # train
    TrainFramework = get_trainer(cfg.trainer)
    trainer = TrainFramework(cfg, train_loader, valid_loader, logger)
    trainer.train()
Example #9
import time
import boto3
from utils.config import ENDPOINT_URL, AWS_CONF
from utils.logger import init_logger

log = init_logger(__name__)

def queue_ready(queue_name):
    """
    function that checks if the sqs queue exists in localstack
    """
    sqs = boto3.client('sqs', config=AWS_CONF, endpoint_url=ENDPOINT_URL, verify=False)
    try:
        sqs.get_queue_url(QueueName=queue_name)
        log.debug("Queue is alive, at long last!")
        return True
    except Exception:
        return False

def stream_ready(stream_name):
    """
    function that checks if the kinesis stream exists in localstack
    """
    kinesis = boto3.client('kinesis', config=AWS_CONF, endpoint_url=ENDPOINT_URL, verify=False)
    try:
        kinesis.describe_stream(StreamName=stream_name)
        log.debug("Stream is alive, at long last!")
        return True
    except Exception:
        return False
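
# A small usage sketch built on the two checks above (not part of the original module):
# block start-up until both the SQS queue and the Kinesis stream exist in localstack,
# retrying with a fixed delay; `time` is already imported at the top of this module.
def wait_for_localstack(queue_name, stream_name, delay=2.0, attempts=30):
    for _ in range(attempts):
        if queue_ready(queue_name) and stream_ready(stream_name):
            return True
        time.sleep(delay)
    return False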
Example #10
import itertools
import pickle as pk
from multiprocessing import Lock, Process, Queue, cpu_count

import cvxopt as opt
import yaml
from cvxopt import blas, solvers
from scipy.special import comb
from sqlalchemy import create_engine

from utils import logger as lg
from utils.decorator import *
from utils.utils import *

solvers.options['show_progress'] = False  # Turn off progress printing
logger = lg.init_logger('./log/3-genPort_%s.log' %
                        dt.now().strftime('%Y-%m-%d'))


class HarryMarkowitz:
    def __init__(self, cfg_path='config.yaml'):
        config = yaml.safe_load(open(cfg_path))

        # global params
        global risk_free, fit_frequency, portfolio_nbr, proc_nbr, lock_in, lock_out
        [self.risk_free, self.fit_frequency, self.portfolio_nbr,
         self.ytg] = [*config['Portfolio'].values()]
        self.proc_nbr = int(input('Number of process: '))
        self.lock_in = Lock()
        self.lock_out = Lock()

        # local params
Example #11
import sys
import threading
import time
from datetime import datetime as dt

import pandas as pd
import requests
import urllib3
import yaml
from bs4 import BeautifulSoup
from fake_useragent import UserAgent

from utils import PyMySQL, logger, utils
from utils.utils import exportQuery

logger = logger.init_logger('./log/1-scrapeData_%s.log' %
                            dt.now().strftime('%Y-%m-%d'))
cfp = yaml.safe_load(open('./config.yaml', 'r'))
fund_file_path = "./dep/1-fundCode&Name.csv"
db = [*cfp['MySQL'].values()]  # unpack dict to get dict value


class FundSpiders():
    def getCurrentTime(self):
        """获取当前时间"""
        return time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(time.time()))

    def updateFundCodesFromCsv(self):
        # enumerate all possible fund codes
        allCode = []
        for i in range(0, 1000000):
            len_ = len(str(i))
Example #12
@author: JON7390
Trading phase 1 (fund) data processing
"""

import itertools
import pickle as pk

import yaml
from scipy.special import comb
from sqlalchemy import create_engine

from utils import logger as lg
from utils.decorator import *
from utils.utils import *

logger = lg.init_logger('./log/2-dataProcess_%s.log' %
                        dt.now().strftime('%Y-%m-%d'))


@try_exception
def processManaHis(mana_his):
    logger.info('\nProcessing manager history...')

    # clean up weird name cases
    mana_his = replaceManaName(mana_his, 'manager_name')

    # handle open-ended records: replace '至今' (to date) with the updated_date
    mana_his.loc[mana_his['end_date'] == '至今',
                 'end_date'] = mana_his.loc[mana_his['end_date'] == '至今',
                                            'updated_date']
    mana_his['end_date'] = mana_his['end_date'].astype('datetime64[D]')
Example #13
import argparse
import os
import torch
from utils.logger import init_logger
from transformers import BertConfig, BertTokenizer
from data_prepare import CoNLL2003NerProcessor
from bert_ner_nodel import BertNer
from train import fit
from data_prepare import convert_examples_to_features, prepare_data_loader

logger = init_logger(__name__, os.getcwd())


def main():
    parser = argparse.ArgumentParser()

    # Required parameters
    parser.add_argument(
        "--data_dir",
        default=None,
        type=str,
        required=True,
        help=
        "The input data dir. Should contain the .tsv files (or other data files) for the task."
    )
    parser.add_argument(
        "--bert_model",
        default=None,
        type=str,
        required=True,
        help="Bert pre-trained model selected in the list: bert-base-uncased, "
Example #14
# -*- coding: utf-8 -*-
import os
from importlib import import_module
import inspect

from ask_sdk_core.skill_builder import SkillBuilder
from ask_sdk_core.dispatch_components import AbstractExceptionHandler
from ask_sdk_core.dispatch_components import AbstractRequestInterceptor
from ask_sdk_core.dispatch_components import AbstractRequestHandler

from utils import logger

logger.init_logger()

sb = SkillBuilder()

logger.get_logger().info('SkillBuilder created')

base_path = os.path.join(os.path.dirname(__file__), 'handlers')
files = os.listdir(base_path)

for file_name in files:
    handler, _ = os.path.splitext(file_name)

    HandlerClass = getattr(import_module('handlers.{}'.format(handler)),
                           handler)
    handler_parent_classes = inspect.getmro(HandlerClass)

    logger.get_logger().info('Adding {} to SkillBuilder'.format(handler))

    if AbstractRequestHandler in handler_parent_classes:
Example #15
def run(logdir, config, port, public):
    web_port = port
    logger.init_logger(logdir)

    if not os.path.exists(logdir):
        logging.warning(f'logdir {logdir} not found. creating...')
        os.makedirs(logdir)
    if not os.path.exists(config):
        raise ValueError("Invalid config file.")

    config = load_config(config)

    # In hours, 0 for no limit
    experiment_time_limit = config.experiment_time_limit
    initial_tf_port = config.initial_tf_port
    # We could run up to one tf server per device on one worker, so we need to
    # have that many ports
    tf_ports = list(range(initial_tf_port, initial_tf_port + 7))
    hosts = config.host_addresses

    workers = {
        host: worker_interface.WorkerInterface(
            host=host,
            tf_ports=tf_ports,
            logdir=logdir,
            resource_folder=config.resource_folder,
            docker_resource_folder=config.docker_resource_folder,
        )
        for host in hosts
    }

    experiment_scheduler = scheduler.Scheduler(
        workers=workers,
        logdir=logdir,
        experiment_time_limit=experiment_time_limit,
    )
    experiment_scheduler.load_session()

    # Register shutdown callback
    atexit.register(experiment_scheduler.shutdown)

    web_interface = wi.WebInterface(
        scheduler_ref=experiment_scheduler,
        resource_folder=config.resource_folder,
        docker_resource_folder=config.docker_resource_folder,
    )

    # Start web server thread
    web_thread = threading.Thread(
        target=web_interface.run,
        args=(public, web_port),
        daemon=True,
    )
    web_thread.start()
    logging.info('Web Interface Thread started.')

    # Specify updates per second
    ups = config.ups
    frame_time = 1.0 / ups
    t0 = time.time()
    t_accumulated = 0
    while True:
        t1 = time.time()
        t_accumulated += (t1 - t0)
        t0 = t1

        while t_accumulated > frame_time:
            t_accumulated -= frame_time
            experiment_scheduler.update()

        time.sleep(frame_time - t_accumulated)
Example #16
def train(args):
    logger = init_logger(args.log_dir, args)

    # os.system('cp models/mpti_learner.py %s' % (args.log_dir))
    # os.system('cp models/mpti.py %s' % (args.log_dir))
    # os.system('cp models/dgcnn.py %s' % (args.log_dir))

    # init model and optimizer
    MPTI = MPTILearner(args)

    #Init datasets, dataloaders, and writer
    PC_AUGMENT_CONFIG = {
        'scale': args.pc_augm_scale,
        'rot': args.pc_augm_rot,
        'mirror_prob': args.pc_augm_mirror_prob,
        'jitter': args.pc_augm_jitter
    }

    TRAIN_DATASET = MyDataset(args.data_path,
                              args.dataset,
                              cvfold=args.cvfold,
                              num_episode=args.n_iters,
                              n_way=args.n_way,
                              k_shot=args.k_shot,
                              n_queries=args.n_queries,
                              phase=args.phase,
                              mode='train',
                              num_point=args.pc_npts,
                              pc_attribs=args.pc_attribs,
                              pc_augm=args.pc_augm,
                              pc_augm_config=PC_AUGMENT_CONFIG)

    VALID_DATASET = MyTestDataset(args.data_path,
                                  args.dataset,
                                  cvfold=args.cvfold,
                                  num_episode_per_comb=args.n_episode_test,
                                  n_way=args.n_way,
                                  k_shot=args.k_shot,
                                  n_queries=args.n_queries,
                                  num_point=args.pc_npts,
                                  pc_attribs=args.pc_attribs)
    VALID_CLASSES = list(VALID_DATASET.classes)

    TRAIN_LOADER = DataLoader(TRAIN_DATASET,
                              batch_size=1,
                              collate_fn=batch_test_task_collate)
    VALID_LOADER = DataLoader(VALID_DATASET,
                              batch_size=1,
                              collate_fn=batch_test_task_collate)

    WRITER = SummaryWriter(log_dir=args.log_dir)

    # train
    best_iou = 0
    for batch_idx, (data, sampled_classes) in enumerate(TRAIN_LOADER):

        if torch.cuda.is_available():
            data = cast_cuda(data)

        loss, accuracy = MPTI.train(data)

        logger.cprint('==[Train] Iter: %d | Loss: %.4f |  Accuracy: %f  ==' %
                      (batch_idx, loss, accuracy))
        WRITER.add_scalar('Train/loss', loss, batch_idx)
        WRITER.add_scalar('Train/accuracy', accuracy, batch_idx)

        if (batch_idx + 1) % args.eval_interval == 0:

            valid_loss, mean_IoU = test_few_shot(VALID_LOADER, MPTI, logger,
                                                 VALID_CLASSES)
            logger.cprint('\n=====[VALID] Loss: %.4f | Mean IoU: %f  =====\n' %
                          (valid_loss, mean_IoU))
            WRITER.add_scalar('Valid/loss', valid_loss, batch_idx)
            WRITER.add_scalar('Valid/meanIoU', mean_IoU, batch_idx)
            if mean_IoU > best_iou:
                best_iou = mean_IoU
                logger.cprint(
                    '*******************Model Saved*******************')
                save_dict = {
                    'iteration': batch_idx + 1,
                    'model_state_dict': MPTI.model.state_dict(),
                    'optimizer_state_dict': MPTI.optimizer.state_dict(),
                    'loss': valid_loss,
                    'IoU': best_iou
                }
                torch.save(save_dict,
                           os.path.join(args.log_dir, 'checkpoint.tar'))

    WRITER.close()
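
# The training loop above relies on a cast_cuda helper that is not shown in this excerpt;
# a plausible minimal version (an assumption, not the original implementation) recursively
# moves tensors inside nested lists/tuples onto the GPU and leaves everything else alone.
def cast_cuda(data):
    if isinstance(data, (list, tuple)):
        return type(data)(cast_cuda(item) for item in data)
    return data.cuda() if torch.is_tensor(data) else data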
Example #17
            inference(args, seq2seq, test_iter, tgt_vocab_size, fields.get('src', None), fields['tgt'])
        model_helper.write_results(args, file_prefix, raw_queries, raw_responses, output_scores, output_generations,
                                   raw_fields=raw_fields)

    logger.info('[STD Evaluation] Evaluating the test generations...')
    res_dict = std_eval_with_args(args, file_prefix, 'test')
    score_manager.update_group(generation_name, res_dict)

    if args.beam_width == 0 and args.skip_infer is False:
        test_ppl = math.exp(test_loss)
        score_manager.update('infer_b%d_test_loss' % args.beam_width, test_loss)
        score_manager.update('infer_b%d_test_ppl' % args.beam_width, test_ppl)
        score_manager.update('infer_b%d_test_macro_ppl' % args.beam_width, test_ppl_macro)


if __name__ == "__main__":
    try:
        init_logger()
        args = parse_arguments()
        random_seed = 12345
        torch.manual_seed(random_seed)
        if args.mode == 'train':
            train(args)
            eval(args)
        elif args.mode == 'eval':
            eval(args)
        elif args.mode == 'infer':
            infer(args)
    except KeyboardInterrupt as e:
        print("[STOP]", e)
Example #18
SEED = 2323
FOLD_ID = 2

TEST_PATH = "/input/input.txt"
OUTPUT_PATH = "/output/output.txt"
# TEST_PATH = "datasets/input.txt"  #
# TEST_PATH = "datasets/SCM_5k.json"
TEST_PATH = f"datasets/bigfolds/fold{FOLD_ID}_valid.txt"  #
OUTPUT_PATH = "output/output.txt"  #
LOG_DIR = "output/logs"

MAX_SEQ_LENGTH = 445
BATCH_SIZE = 16

seed_everything(SEED)
logger = init_logger(log_name=ARCH, log_dir=LOG_DIR)
device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")

print("---------- Bert Eval ... ----------")
start_time = timer()

# bert_config.json, pytorch_model.bin and vocab.txt live in the ckpts dir
BERT_MODEL_PATH = "output/ckpts6920"
BERT_VOCAB_PATH = "output/ckpts6920/vocab.txt"

test_dataset = CAILDataset(
    data_path=TEST_PATH,
    max_seq_len=MAX_SEQ_LENGTH,
    vocab_path=BERT_VOCAB_PATH,
    seed=SEED,
    mode="test",
Example #19
    base_path = os.path.join(os.path.dirname(__file__), "bot")
    files = os.listdir(base_path)

    for file_name in files:
        handler_module, _ = os.path.splitext(file_name)

        module = import_module(f".{handler_module}", "bot")
        module.init(dispatcher)


def graceful_exit(*_, **__):
    """Provide a graceful exit from a webhook server."""
    if updater is not None:
        updater.bot.delete_webhook()

    sys.exit(0)


if __name__ == "__main__":
    logger.init_logger(f"logs/{settings.NAME}.log")

    updater = Updater(token=settings.TOKEN)

    load_handlers(updater.dispatcher)

    if settings.WEBHOOK:
        signal.signal(signal.SIGINT, graceful_exit)
        updater.start_webhook(**settings.WEBHOOK_OPTIONS)
    else:
        updater.start_polling()
Example #20
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Created by spider3 on 2018/3/9
# Copyright (c) 2018 spider3. All rights reserved.
import json
import requests
import time
from pymongo import MongoClient
from utils.logger import init_logger
from datetime import datetime
"""高德地图poi: http://lbs.amap.com/api/webservice/guide/api/search/"""
# addr_ids = [440301,440303,440304,440305,440306,440307,440308,440309,440310]
# addr_ids = [110000, 110100, ..., 820008, 900000]  # full nationwide district-code list, commented out
addr_ids = [
    110000,
    110100,
    110101,
    110102,
    110105,
    110106,
    110107,
    110108,
    110109,
    110111,
    110112,
    110113,
    110114,
    110115,
    110116,
    110117,
Example #21
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Created by spider3 on 2018/3/9
# Copyright (c) 2018 spider3. All rights reserved.
"""高德地图poi: http://lbs.amap.com/api/webservice/guide/api/search/"""
import json
import requests
import time
from pymongo import MongoClient
from retry import retry
from requests.exceptions import ConnectionError
from utils.logger import init_logger
from queue import Queue
from threading import Thread
# all district ids
addr_ids = [110100, 110101, 110102, 110105, 110106, 110107, 110108, 110109, 110111, 110112, 110113, 110114,
            110115, 110116, 110117, 110118, 110119, 120100, 120101, 120102, 120103, 120104, 120105, 120106,
            120110, 120111, 120112, 120113, 120114, 120115, 120116, 120117, 120118, 120119, 130100, 130101, 130102,
            130104, 130105, 130107, 130108, 130109, 130110, 130111, 130121, 130123, 130125, 130126, 130127, 130128,
            130129, 130130, 130131, 130132, 130133, 130181, 130183, 130184, 130200, 130201, 130202, 130203, 130204,
            130205, 130207, 130208, 130209, 130223, 130224, 130225, 130227, 130229, 130281, 130283, 130300, 130301,
            130302, 130303, 130304, 130321, 130322, 130306, 130324, 130400, 130401, 130402, 130403, 130404, 130406,
            130423, 130424, 130425, 130426, 130427, 130407, 130408, 130430, 130431, 130432, 130433, 130434, 130435,
            130481, 130500, 130501, 130502, 130503, 130521, 130522, 130523, 130524, 130525, 130526, 130527, 130528,
            130529, 130530, 130531, 130532, 130533, 130534, 130535, 130581, 130582, 130600, 130601, 130602, 130606,
            130607, 130608, 130623, 130624, 130609, 130626, 130627, 130628, 130629, 130630, 130631, 130632, 130633,
            130634, 130635, 130636, 130637, 130638, 130681, 130682, 130683, 130684, 130700, 130701, 130702, 130703,
            130705, 130706, 130722, 130723, 130724, 130725, 130726, 130727, 130728, 130708, 130730, 130731, 130732,
            130709, 130800, 130801, 130802, 130803, 130804, 130821, 130822, 130881, 130824, 130825, 130826, 130827,
            130828, 130900, 130901, 130902, 130903, 130921, 130922, 130923, 130924, 130925, 130926, 130927, 130928,
Example #22
        self.docker_api.client.images.remove(image.short_id, force=True)


if __name__ == '__main__':
    started_time = timeit.default_timer()
    exit_code = ExitCode.success
    try:
        product_name = 'Intel(R) Distribution of OpenVINO(TM) toolkit'
        des = f'DockerHub CI framework for {product_name}'
        args = parse_args(name=os.path.basename(__file__), description=des)
        logdir: pathlib.Path = pathlib.Path(os.path.realpath(
            __file__), ).parent / 'logs' / args.tags[0].replace(
                '/', '_').replace(':', '_')
        if not logdir.parent.exists():
            logdir.parent.mkdir()
        logfile = logger.init_logger(logdir)
        if hasattr(args, 'image_json_path') and not args.image_json_path:
            args.image_json_path = logdir / 'image_data.json'
        launcher = Launcher(product_name, args, logdir)

        log.info(logger.LINE_DOUBLE)
        log.info(f'{des} v{__version__}')
        log.info(logger.LINE_DOUBLE)
        log.info(f'Log:         {logfile}')
        log.info(f'Command:     {" ".join(sys.argv)}')
        log.info(f'Machine:     {platform.node()}')
        log.info(
            f'System:      {platform.system().lower()}; {platform.release()}; {platform.machine()}'
        )
        log.info(f'Python:      "{sys.executable}" {sys.version}')
        log.info(logger.LINE_DOUBLE)
Exemple #23
0
            module = import_module(f'.{handler_module}', 'views')
            module.init(dispatcher)


def graceful_exit(*args, **kwargs):
    """Provide a graceful exit from a webhook server."""
    if updater is not None:
        updater.bot.delete_webhook()

    sys.exit(1)


if __name__ == "__main__":
    # set logger
    global updater
    logger.init_logger(f'logs/{settings.NAME}.log', logging.WARNING)
    # Make the bot persistence
    persistence = PicklePersistence(filename='assets/data.pickle')

    updater = Updater(token=settings.TOKEN, persistence=persistence, use_context=True)
    # Set of notifiable users
    if updater.dispatcher.bot_data.get('notify_set', None) is None:
        updater.dispatcher.bot_data['notify_set'] = set()
    #
    schedule = pd.read_csv('assets/schedule.csv').astype(
        {'name': 'string', 'teacher': 'string', 'start': np.datetime64, 'end': np.datetime64, 'period': np.uint8})
    updater.dispatcher.bot_data['schedule'] = schedule
    # Load views from /views/*
    load_views(updater.dispatcher)
    # Create db
    create_db('connectors/ddl.sql')
Exemple #24
0
#!/usr/bin/python3

from collections      import OrderedDict

from utils.test_utils import *
from utils.cmd_args   import args
from utils.cli_wallet import CliWallet
from utils.logger     import log, init_logger

if __name__ == "__main__":
    try:
        init_logger(__file__)
        error = False
        wallet = CliWallet( args.path,
                            args.server_rpc_endpoint,
                            args.cert_auth,
                            #args.rpc_endpoint,
                            args.rpc_tls_endpoint,
                            args.rpc_tls_cert,
                            args.rpc_http_endpoint,
                            args.deamon, 
                            args.rpc_allowip,
                            args.wallet_file,
                            args.chain_id  )
        wallet.set_and_run_wallet()

        active          = ["active", "inactive", "all"]
        order_by        = ["creator", "start_date", "end_date", "total_votes"]
        order_direction = ["asc", "desc"]

        for by in order_by:
Exemple #25
0
        command, _ = os.path.splitext(file_name)

        module = import_module('.%s' % (command, ), 'bot')

        module.main(dispatcher)


def graceful_exit(signum, frame):
    if (updater is not None):
        updater.bot.delete_webhook()

    sys.exit(1)


if __name__ == "__main__":
    logger.init_logger('logs/%s.log' % (bot_config.NAME, ))

    updater = Updater(token=bot_config.TOKEN)

    dispatcher = updater.dispatcher

    load_commands(dispatcher)

    if (bot_config.WEBHOOK):
        signal.signal(signal.SIGINT, graceful_exit)
        updater.start_webhook(listen=bot_config.IP,
                              port=bot_config.PORT,
                              url_path=bot_config.URL_PATH)
        updater.bot.set_webhook(url=bot_config.WEBHOOK_URL)
    else:
        updater.start_polling()
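Several of the bot snippets call logger.init_logger with just a log path (and optionally a level). As an assumption, not the actual utils.logger code of any of these projects, a helper with that shape could look roughly like this:

import logging
import os


def init_logger(log_path, level=logging.INFO):
    """Minimal sketch: route the root logger to the console and to log_path."""
    os.makedirs(os.path.dirname(log_path) or '.', exist_ok=True)
    formatter = logging.Formatter('%(asctime)s %(name)s %(levelname)s: %(message)s')
    file_handler = logging.FileHandler(log_path, encoding='utf-8')
    file_handler.setFormatter(formatter)
    console_handler = logging.StreamHandler()
    console_handler.setFormatter(formatter)
    root = logging.getLogger()
    root.setLevel(level)
    root.addHandler(file_handler)
    root.addHandler(console_handler)

Other snippets pass a logger name, a directory, or extra flags instead, so each project's real helper differs from this sketch.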
Exemple #26
0
import torch
import torch.nn as nn
from config import Config
from attention import SourceTargetAttention, SelfAttention
from ffn import FFN
from utils.logger import init_logger, logger_var

logger = init_logger(__name__, './data/weight.log')


def build_decoder(num_layer=6, heads=8, d_model=512, d_ff=2048, drop_rate=0.1):
    decoder_layers = [DecoderLayer(heads, d_model, d_ff, drop_rate) for _ in range(num_layer)]
    decoder = Decoder(nn.ModuleList(decoder_layers), d_model)
    return decoder


class Decoder(nn.Module):

    def __init__(self, layers, d_model):
        super(Decoder, self).__init__()
        # decoder layers
        self.layers = layers
        self.norm = nn.LayerNorm(d_model)
        self.utterance_attention = LocalAttention(Config.dim_model)
        self.token_attention = LocalAttention(Config.dim_model)
        if Config.pointer_gen:
            self.p_gen_linear = nn.Linear(2 * Config.dim_model, 1, bias=True)

        self.out1 = nn.Linear(2 * Config.dim_model, Config.dim_model)
        self.out2 = nn.Linear(Config.dim_model, Config.vocab_size)
Exemple #27
0
'''
main.py

Created by Ale Sanchez on 2018-07-23

Copyright (c) 2018. All rights reserved.
'''

from telegram.ext import Updater, CommandHandler

import utils.logger as logger
import configurations.bot_config as bot_config
from commands import commands

if __name__ == "__main__":
    logger.init_logger(f'logs/{bot_config.NAME}.log')

    updater = Updater(token=bot_config.TOKEN)

    dispatcher = updater.dispatcher

    for command in commands:
        command_handler = CommandHandler(command['command'],
                                         command['function'])

        dispatcher.add_handler(command_handler)
    if (bot_config.WEBHOOK):
        updater.start_webhook(listen=bot_config.IP,
                              port=bot_config.PORT,
                              url_path=bot_config.URL_PATH)
        updater.bot.set_webhook(webhook_url=bot_config.WEBHOOK_ULR,
Exemple #28
0
#!/usr/bin/python3

from utils.test_utils import *
from utils.cmd_args import args
from utils.cli_wallet import CliWallet
from utils.logger import log, init_logger

if __name__ == "__main__":
    try:
        init_logger(__file__)
        error = False
        wallet = CliWallet(args.path, args.server_rpc_endpoint, args.cert_auth,
                           args.rpc_tls_endpoint, args.rpc_tls_cert,
                           args.rpc_http_endpoint, args.deamon,
                           args.rpc_allowip, args.wallet_file, args.chain_id)
        wallet.set_and_run_wallet()

        error_msg_x = "The value `x` for `_last_id` argument is invalid, it should be integer type."
        resp_error_x = wallet.list_voter_proposals(args.creator, "creator",
                                                   "asc", 50, "all", "x")
        log.info(resp_error_x)
        if resp_error_x.find(error_msg_x) == -1:
            raise ArgsCheckException(
                "Assertion `{0}` is required.".format(error_msg_x))

        error_msg_y = "The value `y` for `_last_id` argument is invalid, it should be integer type."
        resp_error_y = wallet.list_voter_proposals(args.creator, "creator",
                                                   "asc", 50, "all", "y")
        log.info(resp_error_y)
        if resp_error_y.find(error_msg_y) == -1:
            raise ArgsCheckException(
Exemple #29
0
@app.route('/<path:path>')
def proxy(path):
    conf = Config()
    if conf.params['enabled'] and conf.params['direction'] != "outbound":
        if not inspect_request():
            return render_template("403.html")
    resp = get(f'{request.base_url}{path}').content
    if conf.params['enabled'] and conf.params['direction'] != "inbound":
        if not inspect_response():
            return render_template("403.html")
    return resp


def inspect_request():
    pl_mgr = PluginManager()
    return pl_mgr.filter(request, True)


def inspect_response():
    pl_mgr = PluginManager()
    return pl_mgr.filter(request, True)


if __name__ == '__main__':
    # TODO set logs relative to current folder
    init_logger("", "/home/shai/repos/web-filter/logs/filter.log")
    logger = Logger("main")
    logger.info("Filter Started")
    init_plugins()
    app.run(host='0.0.0.0', port=8080)
Exemple #30
0
    parser.add_argument("--load_weights_dir", type=str)
    parser.add_argument("--op_check", action="store_true")

    args = parser.parse_args()

    # --------------------------
    # Load and update settings
    # --------------------------
    merge_config_from_toml(f"./config/{args.config_name}.toml")

    if args.lr:
        S.optim.lr = args.lr
    S.load_weights_dir = args.load_weights_dir

    begin_time = datetime.now().strftime("%y%m%d_%H%M%S")
    if args.op_check:
        begin_time = "TEST_" + begin_time
    S.agent.log_dir = os.path.join(ROOT_DIR, "Logs", args.config_name, begin_time)

    # --------------------
    # Setting Root Logger
    # --------------------
    init_logger(S.agent.log_dir, no_file_logging=args.op_check)

    logging.info(S)  # summarize settings

    # -------
    # Run
    # -------
    main(args.op_check)
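merge_config_from_toml is called above with only a file path and presumably mutates the module-level settings object S; that helper is not part of this excerpt. Below is a minimal sketch, assuming S is a nested namespace-like object, written here with an explicit settings argument for clarity rather than a global:

import tomllib  # Python 3.11+; the third-party 'toml' package covers older versions


def merge_config_from_toml(path, settings):
    """Copy values from a TOML file onto a nested settings object, section by section."""
    with open(path, 'rb') as f:
        overrides = tomllib.load(f)
    for section, values in overrides.items():
        target = getattr(settings, section, None)
        if target is None or not isinstance(values, dict):
            setattr(settings, section, values)      # plain top-level value
            continue
        for key, value in values.items():           # e.g. [optim] lr = 3e-4
            setattr(target, key, value)
    return settings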
Exemple #31
0
def pretrain(args):
    logger = init_logger(args.log_dir, args)

    # Init datasets, dataloaders, and writer
    PC_AUGMENT_CONFIG = {
        'scale': args.pc_augm_scale,
        'rot': args.pc_augm_rot,
        'mirror_prob': args.pc_augm_mirror_prob,
        'jitter': args.pc_augm_jitter
    }

    if args.dataset == 's3dis':
        from dataloaders.s3dis import S3DISDataset
        DATASET = S3DISDataset(args.cvfold, args.data_path)
    elif args.dataset == 'scannet':
        from dataloaders.scannet import ScanNetDataset
        DATASET = ScanNetDataset(args.cvfold, args.data_path)
    else:
        raise NotImplementedError('Unknown dataset %s!' % args.dataset)

    CLASSES = DATASET.train_classes
    NUM_CLASSES = len(CLASSES) + 1
    CLASS2SCANS = {c: DATASET.class2scans[c] for c in CLASSES}

    TRAIN_DATASET = MyPretrainDataset(args.data_path,
                                      CLASSES,
                                      CLASS2SCANS,
                                      mode='train',
                                      num_point=args.pc_npts,
                                      pc_attribs=args.pc_attribs,
                                      pc_augm=args.pc_augm,
                                      pc_augm_config=PC_AUGMENT_CONFIG)

    VALID_DATASET = MyPretrainDataset(args.data_path,
                                      CLASSES,
                                      CLASS2SCANS,
                                      mode='test',
                                      num_point=args.pc_npts,
                                      pc_attribs=args.pc_attribs,
                                      pc_augm=args.pc_augm,
                                      pc_augm_config=PC_AUGMENT_CONFIG)

    logger.cprint(
        '=== Pre-train Dataset (classes: {0}) | Train: {1} blocks | Valid: {2} blocks ==='
        .format(CLASSES, len(TRAIN_DATASET), len(VALID_DATASET)))

    TRAIN_LOADER = DataLoader(TRAIN_DATASET,
                              batch_size=args.batch_size,
                              num_workers=args.n_workers,
                              shuffle=True,
                              drop_last=True)

    VALID_LOADER = DataLoader(VALID_DATASET,
                              batch_size=args.batch_size,
                              num_workers=args.n_workers,
                              shuffle=False,
                              drop_last=True)

    WRITER = SummaryWriter(log_dir=args.log_dir)

    # Init model and optimizer
    model = DGCNNSeg(args, num_classes=NUM_CLASSES)
    print(model)
    if torch.cuda.is_available():
        model.cuda()

    optimizer = optim.Adam(
        [{'params': model.encoder.parameters(), 'lr': args.pretrain_lr},
         {'params': model.segmenter.parameters(), 'lr': args.pretrain_lr}],
        weight_decay=args.pretrain_weight_decay)
    # Set learning rate scheduler
    lr_scheduler = optim.lr_scheduler.StepLR(optimizer,
                                             step_size=args.pretrain_step_size,
                                             gamma=args.pretrain_gamma)

    # train
    best_iou = 0
    global_iter = 0
    for epoch in range(args.n_iters):
        model.train()
        for batch_idx, (ptclouds, labels) in enumerate(TRAIN_LOADER):
            if torch.cuda.is_available():
                ptclouds = ptclouds.cuda()
                labels = labels.cuda()

            logits = model(ptclouds)
            loss = F.cross_entropy(logits, labels)

            # Loss backwards and optimizer updates
            optimizer.zero_grad()
            loss.backward()
            optimizer.step()

            WRITER.add_scalar('Train/loss', loss, global_iter)
            logger.cprint(
                '=====[Train] Epoch: %d | Iter: %d | Loss: %.4f =====' %
                (epoch, batch_idx, loss.item()))
            global_iter += 1

        lr_scheduler.step()

        if (epoch + 1) % args.eval_interval == 0:
            pred_total = []
            gt_total = []
            model.eval()
            with torch.no_grad():
                for i, (ptclouds, labels) in enumerate(VALID_LOADER):
                    gt_total.append(labels.detach())

                    if torch.cuda.is_available():
                        ptclouds = ptclouds.cuda()
                        labels = labels.cuda()

                    logits = model(ptclouds)
                    loss = F.cross_entropy(logits, labels)

                    #  Compute predictions
                    _, preds = torch.max(logits.detach(), dim=1, keepdim=False)
                    pred_total.append(preds.cpu().detach())

                    WRITER.add_scalar('Valid/loss', loss, global_iter)
                    logger.cprint(
                        '=====[Valid] Epoch: %d | Iter: %d | Loss: %.4f ====='
                        % (epoch, i, loss.item()))

            pred_total = torch.stack(pred_total, dim=0).view(-1, args.pc_npts)
            gt_total = torch.stack(gt_total, dim=0).view(-1, args.pc_npts)
            accuracy, mIoU, iou_perclass = metric_evaluate(
                pred_total, gt_total, NUM_CLASSES)
            logger.cprint('===== EPOCH [%d]: Accuracy: %f | mIoU: %f =====\n' %
                          (epoch, accuracy, mIoU))
            WRITER.add_scalar('Valid/overall_accuracy', accuracy, global_iter)
            WRITER.add_scalar('Valid/meanIoU', mIoU, global_iter)

            if mIoU > best_iou:
                best_iou = mIoU
                logger.cprint(
                    '*******************Model Saved*******************')
                save_pretrain_checkpoint(model, args.log_dir)

    WRITER.close()
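For illustration, a hypothetical way to invoke pretrain(): the field names below are exactly the ones the function reads above, while every value is a placeholder, and DGCNNSeg may expect additional fields on args that this excerpt does not show.

from argparse import Namespace

args = Namespace(
    # logging / output
    log_dir='log_pretrain/',
    # dataset selection
    dataset='s3dis', cvfold=0, data_path='./datasets/S3DIS/blocks',
    # point-cloud sampling and augmentation
    pc_npts=2048, pc_attribs='xyzrgbXYZ', pc_augm=True,
    pc_augm_scale=0.0, pc_augm_rot=1, pc_augm_mirror_prob=0.0, pc_augm_jitter=1,
    # data loaders
    batch_size=16, n_workers=4,
    # optimization
    pretrain_lr=0.001, pretrain_weight_decay=0.0001,
    pretrain_step_size=50, pretrain_gamma=0.5,
    # training schedule
    n_iters=100, eval_interval=5,
)
pretrain(args)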