コード例 #1
0
def create_database():
    """Create the SpiderMan database and its tables, then seed an admin user.

    Relies on module-level names ``models``, ``sum_sql``, ``input_user``
    and ``User`` defined elsewhere in this file.
    """
    # Create the database itself through the server-level ('mysql') connection.
    models.get_datebase('mysql').execute_sql(
        "CREATE DATABASE {}".format('SpiderMan'))
    # Create each table. A plain loop replaces the original list
    # comprehension, which built a throwaway list purely for side effects.
    for sql in sum_sql:
        models.get_datebase().execute_sql(sql)
    # Prompt interactively for the initial administrator credentials.
    username, password, email = input_user()
    User.create(username=username,
                password=password,
                email=email,
                isadmin=True).save()
コード例 #2
0
ファイル: migrate.py プロジェクト: perrorteston2e/SpiderMan
def create_database(username=None, password=None, email="*****@*****.**"):
    """Create the SpiderMan database and tables and seed an admin user.

    Args:
        username: Admin username; prompted for via ``input_user`` when missing.
        password: Admin password; prompted for when missing.
        email: Admin e-mail address (only used when both credentials are given).

    ``peewee.ProgrammingError`` (e.g. database already exists) is deliberately
    swallowed so the migration stays idempotent / best-effort.
    """
    try:
        # Create the database through the server-level ('mysql') connection.
        models.get_datebase('mysql').execute_sql(
            "CREATE DATABASE {}".format('SpiderMan'))
        # Create the tables; plain loop instead of a side-effect-only
        # list comprehension.
        for sql in sum_sql:
            models.get_datebase().execute_sql(sql)
        # Fall back to interactive input when credentials were not supplied.
        if not username or not password:
            username, password, email = input_user()
        User.create(username=username,
                    password=password,
                    email=email,
                    isadmin=True).save()
    except peewee.ProgrammingError:
        # Database/tables already exist — keep the original best-effort
        # behavior rather than failing the migration.
        pass
コード例 #3
0
ファイル: main.py プロジェクト: pythonerdd/SpiderMan
def main(**kwargs):
    """Start the SpiderMan tornado web server.

    Host/port come from ``kwargs['dom']`` ("host:port") when given,
    otherwise from ``SpiderManConf``. Also starts a 5-second periodic
    callback (``timing``) on the IOLoop. Blocks in ``IOLoop.start()``.
    """
    if not kwargs.get('dom'):
        kwargs['host'] = SpiderManConf.HOST
        kwargs['port'] = SpiderManConf.PORT
    else:
        # Split "host:port" once instead of twice (same semantics as the
        # original two separate split(':') calls).
        parts = kwargs['dom'].split(':')
        kwargs['host'] = parts[0]
        kwargs['port'] = parts[1]
    # NOTE(review): the startup banner is logged at ERROR level so it shows
    # regardless of log configuration — confirm this is intentional.
    app_log.error(hello.format(host=kwargs['host'], port=kwargs['port']))
    app = url_conf()
    app.objects = MyManager(get_datebase())
    http_server = tornado.httpserver.HTTPServer(app)
    http_server.listen(kwargs['port'], address=kwargs['host'])
    tornado.ioloop.PeriodicCallback(timing, 5000).start()
    tornado.ioloop.IOLoop.instance().start()
コード例 #4
0
# -*- coding:utf-8 -*-
import time

from tornado.log import app_log
from SpiderMan.utils.DataBase import MyManager
from SpiderMan.server.web.models import Timing, Host, get_datebase
from SpiderMan.Scrapyd_api.client import ScrapyApi

# Module-level peewee async manager bound to the project database connection.
app = MyManager(get_datebase())


def scrapyd_object(host_info, ismodels=False, timeout=10):
    """Build a ``ScrapyApi`` client for *host_info*, or ``None`` when falsy.

    Args:
        host_info: Object exposing ``.host`` and ``.port`` (a ``Host`` row).
        ismodels: Kept for interface compatibility; in the original both the
            ``ismodels`` branch and the fallthrough constructed an identical
            client, so the duplicated branch has been removed.
        timeout: Request timeout passed to the client.

    NOTE(review): the original docstring mentioned a 60-second cache, but no
    time-based caching is visible here — the module-level ``cache`` dict
    below simply stores clients per host id; confirm intended behavior.
    """
    if not host_info:
        return None
    return ScrapyApi(target="http://{}:{}".format(host_info.host,
                                                  host_info.port),
                     timeout=timeout)


cache = {}
try:
    for i in Host.select():
        cache[i.id_] = scrapyd_object(i)
except: