from scrapyd_api import ScrapydAPI


def cancel_all_spider(server):
    """
    Cancel all spider jobs on the given Scrapyd server.
    :param server: URL of the Scrapyd server, e.g. "http://localhost:6800"
    :return: None
    """
    scrapyd = ScrapydAPI(server)
    projects = scrapyd.list_projects()
    for project in projects:
        # list_jobs() returns a dict keyed by job state
        # ("pending", "running", "finished"), each mapping to a list of job dicts.
        jobs = scrapyd.list_jobs(project)
        for state, job_list in jobs.items():
            print(state, job_list)
            for job in job_list:
                uid = job.get("id")
                print("{}: {}".format(project, uid))
                # Cancelling a job that has already finished is a harmless no-op.
                scrapyd.cancel(project, uid)
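
A minimal usage sketch; the URL below is a placeholder assuming a local Scrapyd instance on the default port:

# Hypothetical call: cancel every job across all projects on a local
# Scrapyd server (URL assumed; 6800 is Scrapyd's default port).
cancel_all_spider("http://localhost:6800")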
Example #2
from flask import jsonify, request
from scrapyd_api import ScrapydAPI


def cancel():
    """
    Cancel a running spider job.
    """
    server_host = request.args.get("server_host")
    server_name = request.args.get("server_name")  # read but unused below
    project_name = request.args.get("project_name")
    job_id = request.args.get("job_id")

    scrapyd = ScrapydAPI(server_host)
    # cancel() returns the job's previous state, e.g. "running".
    result = scrapyd.cancel(project_name, job_id)

    return jsonify({"message": result})
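
A sketch of how this view might be registered and exercised; the Flask app, the "/cancel" route path, and all query-string values are assumptions, since the original snippet omits the route decorator:

from flask import Flask

app = Flask(__name__)
# Route path assumed; the original snippet does not include a decorator.
app.add_url_rule("/cancel", view_func=cancel)

if __name__ == "__main__":
    with app.test_client() as client:
        # All query-string values here are placeholders.
        resp = client.get(
            "/cancel",
            query_string={
                "server_host": "http://localhost:6800",
                "project_name": "myproject",
                "job_id": "6487ec79947edab326d6db28a2d86511e8247444",
            },
        )
        print(resp.get_json())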