class Respondent(Resource):
    """REST resource exposing respondent records stored in MongoDB."""
    #method_decorators = [auth] # If you want apply to some method use: {'post': [auth],'put': [auth]}

    def __init__(self):
        self.log = Logger()
        self.db = DB().client

    def get(self, name=None):
        """Return one respondent by first name, or every respondent.

        Responds with a JSON payload on success, or (None, 400) when
        nothing matches.
        """
        if name:
            type_get = "GET ONE"
            # find_one() returns the matching document or None.
            respondents = self.db.respondents.find_one({"firstName": name})
        else:
            type_get = "GET ALL"
            # Materialize the cursor: Cursor.count() was deprecated in
            # PyMongo 3.7 and removed in 4.0, so the original
            # `respondents.count() > 0` check breaks on modern PyMongo.
            respondents = list(self.db.respondents.find({}))

        # A found document (GET ONE) or a non-empty list (GET ALL) both
        # count as success; None / [] fall through to the 400.
        if respondents:
            return jsonify(code=200, type=type_get, data=dumps(respondents))
        return None, 400

    def post(self):
        """Log the submitted form at every level and echo it back."""
        self.log.info('This a example info')
        self.log.debug('This a example debug')
        self.log.silly(request.form)
        self.log.warn('This is a example warn')
        self.log.error('This is a example error')
        return request.form
Example #2
0
def onClose(sig):
    """Console-close handler: persist progress and log download totals.

    Registered via win32api.SetConsoleCtrlHandler; must return True so
    Windows treats the event as handled.
    """
    # The globals (MAIN_PATH, END_NAME, DOWN_NUM, FAIL_NUM) are only
    # read here, so no `global` declaration is needed.
    os.chdir(MAIN_PATH)
    # Remove the partially downloaded title so it is re-fetched next run.
    # os.path.join instead of hand-built '\\' concatenation.
    last_title = os.path.join(MAIN_PATH, 'Download', purifyName(END_NAME))
    rmFile(last_title)
    # Remember where we stopped so the next run can resume from here.
    with open('last.txt', 'w') as f:
        f.write(END_NAME)
    output_log = Logger(log_name='Output.log',
                        logger_name='output').get_logger()
    output_log.info('下载完成%d张' % DOWN_NUM)
    output_log.info('下载失败%d张' % FAIL_NUM)
    return True
Example #3
0
def execute_output():
    """Drain the output queue and log each result until the queue errors out."""
    Logger()  # Ensure the logging singleton is initialised.
    Logger.debug('Start Output Process')
    while True:
        try:
            result = Queues.get_output()
        except Exception:
            # The original bare `except:` also swallowed SystemExit and
            # KeyboardInterrupt, making the process hard to stop; catch
            # Exception instead and keep the try body minimal.
            break
        Logger.info(result)
        #Your output logic go there
    Logger.debug('End Output Process')
Example #4
0
def execute_output():
    """Consume results from the output queue and log them until exhausted."""
    Logger()
    Logger.debug('Start Output Process')
    keep_running = True
    while keep_running:
        try:
            Logger.info(Queues.get_output())
            #Your output logic go there
        except:
            # Any failure (typically an empty/closed queue) ends the loop.
            keep_running = False
    Logger.debug('End Output Process')
Example #5
0
def execute(module, name):
    """Run the nested crawl: spawn collect and crawl processes and wait."""
    Logger.info('Start Execute')
    # The module must provide a collect() entry point.
    if not hasattr(module, 'collect'):
        raise NotImplementedError('You must implement collect()')

    # Shared queue feeding URLs from the collector to the crawler.
    url_queue = Queue()

    workers = []
    for target, suffix in ((collect, '_CollectProcess'),
                           (crawl, '_CrawlProcess')):
        proc = Process(target=target, args=(module, url_queue),
                       name=name + suffix)
        proc.start()
        workers.append(proc)

    #Both collect and crawl are safe
    for proc in workers:
        proc.join()
    Logger.info('Execute done')
Example #6
0
class Respondent(Resource):
    """REST resource exposing respondent records stored in MongoDB."""
    #method_decorators = [auth] # If you want apply to some method use: {'post': [auth],'put': [auth]}

    def __init__(self):
        self.log = Logger()
        self.db = DB().client

    def get(self, name=None):
        """Return respondents, optionally filtered by first name.

        Responds with a JSON payload, or (None, 400) when no document
        matches the filter.
        """
        self.log.info('example info')
        self.log.debug('example debug')
        self.log.silly(name)
        self.log.warn('example warn')
        self.log.error('example error')
        match = {"firstName": name} if name else {}

        # Materialize the cursor: a PyMongo Cursor does not support
        # truth-testing for emptiness, so the original `if respondents:`
        # could never reach the 400 branch.
        respondents = list(self.db.respondents.find(match))
        if respondents:
            return jsonify(code=200, data=dumps(respondents))
        return None, 400
Example #7
0
def crawl(module, url_queue):
    """Crawl process: pull URLs off the queue and spawn one thread per URL."""
    # Local import so this block is self-contained; Queue.get(timeout=...)
    # raises queue.Empty when the timeout elapses.
    from queue import Empty

    Logger.info('Start Crawler')
    while True:
        try:
            url = url_queue.get(timeout=config.NESTED_CRAWL_TIMEOUT)
        except Empty:
            # Narrowed from a bare `except:`, which also hid real errors
            # (and SystemExit); only a queue timeout triggers the shutdown
            # check below.
            #If all threads are done then break the loop, Otherwise continue.
            #Why 2 ? because its need to deduct by the main thread and queue thread,
            # You can comment out the enumerate() line to see what is going on
            #Logger.debug(str(enumerate()))
            if activeCount() <= 2:
                Logger.info('Break crawl')
                break
            Logger.debug('There are ' + str(activeCount() - 2) +
                         ' threads left')
            continue

        #Spawn a new threads immediate after get the url
        thread = Thread(target=crawl_thread,
                        args=(url, module),
                        name='CrawlThread')
        thread.start()

    Logger.info('Crawl done')
Example #8
0
def execute(module, name):
    """Launch the collect and crawl processes for *module* and block until both finish."""
    Logger.info('Start Execute')
    if not hasattr(module, 'collect'):
        raise NotImplementedError('You must implement collect()')

    # Queue shared between the collector (producer) and crawler (consumer).
    url_queue = Queue()

    collector = Process(target=collect,
                        args=(module, url_queue),
                        name=name + '_CollectProcess')
    crawler = Process(target=crawl,
                      args=(module, url_queue),
                      name=name + '_CrawlProcess')

    collector.start()
    crawler.start()

    #Both collect and crawl are safe
    collector.join()
    crawler.join()
    Logger.info('Execute done')
Example #9
0
def crawl(module, url_queue):
    """Crawl process: consume URLs from the queue, one worker thread per URL."""
    Logger.info('Start Crawler')
    running = True
    while running:
        try:
            url = url_queue.get(timeout=config.NESTED_CRAWL_TIMEOUT)
        except:
            #If all threads are done then break the loop, Otherwise continue.
            #Why 2 ? because its need to deduct by the main thread and queue thread,
            # You can comment out the enumerate() line to see what is going on
            #Logger.debug(str(enumerate()))
            if activeCount() <= 2:
                Logger.info('Break crawl')
                running = False
            else:
                Logger.debug('There are ' + str(activeCount() - 2) + ' threads left')
            continue

        #Spawn a new threads immediate after get the url
        Thread(target=crawl_thread, args=(url, module), name='CrawlThread').start()

    Logger.info('Crawl done')
from flask import Flask, request
from flask_restful import Api
from resources.respondent import Respondent
from libs.logger import Logger
from config.config import general as config
# Application wiring: Flask app, Flask-RESTful API, and shared logger.
app = Flask(__name__)
api = Api(app)
log = Logger()

# Routes
# Respondent serves both the collection (/respondent) and a single
# record looked up by name (/respondent/<name>).
api.add_resource(Respondent, '/respondent', '/respondent/<name>')
#-------
if __name__ == '__main__':
    log.info('Server listen in %s:%s' % (config['host'], config['port']))
    # NOTE(review): debug=True enables the Werkzeug reloader/debugger —
    # confirm this is not deployed to production as-is.
    app.run(debug=True, host=config['host'], port=config['port'])
Example #11
0
            img_title = purifyName(tit[0])
            win32api.SetConsoleCtrlHandler(onClose, True)
            downnum, failnum = download(title=img_title,
                                        links=links[index][0],
                                        names=names[index][0],
                                        downnum=DOWN_NUM,
                                        failnum=FAIL_NUM,
                                        delta=delta,
                                        done=done)
            index += 1
            DOWN_NUM += downnum
            FAIL_NUM += failnum
    except (BaseException, Exception) as e:
        os.chdir(MAIN_PATH)
        lastTitle = MAIN_PATH + '\\Download\\' + purifyName(END_NAME)
        rmFile(lastTitle)
        with open('last.txt', 'w') as f:
            f.write(END_NAME)
        err_log = Logger(log_name='Error.log', logger_name='err').get_logger()
        err_log.error(e)
    finally:
        if os.getcwd() != MAIN_PATH:
            os.chdir(MAIN_PATH)
        output_log = Logger(log_name='Output.log',
                            logger_name='output').get_logger()
        output_log.info('下载完成%d张' % DOWN_NUM)
        output_log.info('下载失败%d张' % FAIL_NUM)
        print('>>>The log file in %s.<<<' % MAIN_PATH)
    print('Done!')
    input('\n\nPress any key to quit.')
Example #12
0
def collect(module, url_queue):
    # Execute the collect process: delegate URL production to the module.
    Logger.info('Start Collector')
    # Send the url queue to the bot via args; the module is expected to
    # fill url_queue with URLs for the crawl process to consume.
    module.collect(url_queue)
    Logger.info('Collect done')
Example #13
0
def collect(module, url_queue):
    # Execute the collect process: delegate URL production to the module.
    Logger.info('Start Collector')
    # Send the url queue to the bot via args; the module is expected to
    # fill url_queue with URLs for the crawl process to consume.
    module.collect(url_queue)
    Logger.info('Collect done')