Example #1
from flask import request, jsonify
from scrapyd_api import ScrapydAPI

# scrapyd_utils is a project-internal helper module; its import is
# assumed here so that format_version() below resolves
import scrapyd_utils


def list_projects():
    """
    Show the projects on a Scrapyd server, together with their versions.
    """
    server_host = request.args.get("server_host")
    server_name = request.args.get("server_name")

    scrapyd = ScrapydAPI(server_host)
    projects = scrapyd.list_projects()

    project_versions = []
    for project in projects:
        versions = scrapyd.list_versions(project)
        for version in versions:
            project_versions.append({
                "project_name": project,
                # format_version renders the raw version string in a
                # human-readable form (project-internal helper)
                "human_version": scrapyd_utils.format_version(version),
                "version": version,
            })

    data = {
        "server_name": server_name,
        "server_host": server_host,
        "projects": project_versions,
    }

    return jsonify(data)
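A minimal registration sketch, assuming the view lives in a Flask app (the request/jsonify usage suggests Flask); the app object and the "/projects" URL are illustrative, not taken from the original project:

from flask import Flask

app = Flask(__name__)

# Expose the view; the caller picks the target server via the query
# string, e.g. GET /projects?server_host=http://127.0.0.1:6800&server_name=local
app.add_url_rule("/projects", view_func=list_projects)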
Example #2
from collections import defaultdict
from scrapyd_api import ScrapydAPI


def get_server_status(server_list):
    """
    Get the status of each server. Scrapyd versions may differ:
    this code targets scrapyd==1.2.0, while a server still running
    scrapyd==1.1.0 has no daemon_status endpoint.

    :param server_list: list of dicts with "server_name" and "server_host" keys
    :return: list of per-server status dicts
    """
    servers = []

    for index, item in enumerate(server_list, start=1):
        server_name = item["server_name"]
        server_host = item["server_host"]

        scrapyd = ScrapydAPI(server_host)
        server_status = scrapyd.daemon_status()

        # Fall back for older Scrapyd servers that lack the
        # daemon_status endpoint
        if server_status.get("status") == "error":

            projects = scrapyd.list_projects()
            print("{}: {}".format(server_host, projects))

            # Report "error" when the server exposes no projects,
            # otherwise "ok"
            if len(projects) == 0:
                status = "error"
            else:
                status = "ok"

            server_status = {
                "status": status,
            }

            # Aggregate job counts per state ("pending", "running",
            # "finished") across all projects
            job_counts = defaultdict(int)
            for project in set(projects):
                jobs = scrapyd.list_jobs(project)

                for key, value in jobs.items():
                    job_counts[key] += len(value)

            server_status.update(job_counts)

        servers.append({
            "index": index,
            "server_name": server_name,
            "server_host": server_host,
            "server_status": server_status,
        })

    return servers
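A minimal call sketch; both server entries are illustrative placeholders:

status_list = get_server_status([
    {"server_name": "local", "server_host": "http://127.0.0.1:6800"},
    {"server_name": "worker-1", "server_host": "http://10.0.0.2:6800"},
])
for entry in status_list:
    print(entry["index"], entry["server_name"], entry["server_status"].get("status"))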
Example #3
from scrapyd_api import ScrapydAPI


def cancel_all_spider(server):
    """
    Cancel all spider jobs on the server.

    :param server: URL of the Scrapyd server
    :return:
    """
    scrapyd = ScrapydAPI(server)
    projects = scrapyd.list_projects()
    for project in projects:
        jobs = scrapyd.list_jobs(project)
        # list_jobs returns a dict mapping each state ("pending",
        # "running", "finished") to a list of job dicts
        for state, job_list in jobs.items():
            print(state, job_list)
            for job in job_list:
                uid = job.get("id")
                print("{}: {}".format(project, uid))

                scrapyd.cancel(project, uid)
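A minimal usage sketch; the URL is an assumed placeholder for a local Scrapyd instance:

# Cancel every job the server reports across all of its projects
cancel_all_spider("http://127.0.0.1:6800")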