コード例 #1
0
ファイル: serializers.py プロジェクト: yangyu1987/umbrella
    def get_jobs(self, server, crawler):
        """Return every scrapyd job (pending/running/finished) for *crawler*.

        Args:
            server: primary key of the ``Server`` record hosting scrapyd.
            crawler: scrapyd project name whose jobs are listed.

        Returns:
            dict with code '1' and the flattened job list on success, or
            code '0' with an empty result when scrapyd cannot be reached.
        """
        client = Server.objects.get(id=server)
        scrapyd = get_scrapyd(client)
        try:
            result = scrapyd.list_jobs(crawler)
            jobs = []
            # Flatten the per-status lists into one list, tagging each job
            # with the status bucket it came from.
            for state in ('pending', 'running', 'finished'):
                # .get(state, []) guards against a missing key; the original
                # .get(state) would hand None to the inner loop -> TypeError.
                for job in result.get(state, []):
                    job['status'] = state
                    jobs.append(job)
            return {
                "code": '1',
                "message": '请求成功',
                "result": {
                    'jobs': jobs
                }
            }
        except ConnectionError:
            # scrapyd host unreachable / timed out
            return {
                "code": '0',
                "message": '服务器响应超时',
                "result": {}
            }
コード例 #2
0
ファイル: views.py プロジェクト: yangyu1987/umbrella
    def create(self, request, *args, **kwargs):
        """Deploy a crawler's egg to a scrapyd server, then persist the record.

        Expects ``request.data`` to carry the 'crawler' and 'server' primary
        keys. Reads the packaged egg from the project folder, pushes it to the
        target scrapyd host, then validates and saves via the serializer.
        """
        # Resolve the related records from the posted primary keys.
        crawler = Crawler.objects.get(id=request.data['crawler'])
        server = Server.objects.get(id=request.data['server'])
        crawlerName = crawler.crawlerName

        # Locate the project folder holding the packaged egg.
        # (CEAWLER_PROJECTS_FOLDER is a constant defined elsewhere; its
        # misspelling of "CRAWLER" is preserved to keep the reference valid.)
        path = os.path.abspath(join(os.getcwd(), CEAWLER_PROJECTS_FOLDER))
        crawler_path = join(path, crawlerName)

        # Find the built egg file for this crawler.
        egg = find_egg(crawler_path)
        if not egg:
            # NOTE(review): 201 Created looks wrong for a failure path --
            # preserved for backward compatibility; consider 404/400.
            return Response({'message': 'egg not found'},
                            status=status.HTTP_201_CREATED)

        # Deploy the egg to the target host, versioned by unix timestamp.
        # 'with' guarantees the handle is closed (the original leaked it).
        with open(join(crawler_path, egg), 'rb') as egg_file:
            scrapyd = get_scrapyd(server)
            scrapyd.add_version(crawlerName, int(time.time()), egg_file.read())

        # Validate and save the deployment record via the serializer.
        serializer = self.get_serializer(data=request.data)
        serializer.is_valid(raise_exception=True)
        self.perform_create(serializer)
        headers = self.get_success_headers(serializer.data)
        return Response(serializer.data,
                        status=status.HTTP_201_CREATED,
                        headers=headers)
コード例 #3
0
ファイル: serializers.py プロジェクト: yangyu1987/umbrella
 def get_spiders(self, server, crawler):
     """List the spiders registered under *crawler* on the given server.

     Returns a dict with code '1' and an enumerated spider list on success,
     or code '0' with an empty list when scrapyd cannot be reached.
     """
     target = Server.objects.get(id=server)
     scrapyd = get_scrapyd(target)
     try:
         names = scrapyd.list_spiders(crawler)
         # Attach a 1-based id to each spider name for the client side.
         listing = []
         for position, name in enumerate(names, start=1):
             listing.append({'spider': name, 'id': position})
         return {
             "code": '1',
             "message": '请求成功',
             "result": listing
         }
     except ConnectionError:
         return {
             "code": '0',
             "message": '服务器响应超时',
             "result": []
         }
コード例 #4
0
ファイル: serializers.py プロジェクト: yangyu1987/umbrella
    def spiders_start(self, server, crawler, spider):
        """Schedule *spider* of project *crawler* on the chosen server.

        Returns a dict carrying the scrapyd job handle under result['job']
        on success, or code '0' with an empty result when the host times out.
        """
        host = Server.objects.get(id=server)
        scrapyd = get_scrapyd(host)
        try:
            job_handle = scrapyd.schedule(crawler, spider)
        except ConnectionError:
            # Early exit: the scrapyd host did not answer in time.
            return {
                "code": '0',
                "message": '服务器响应超时',
                "result": {}
            }
        return {
            "code": '1',
            "message": '请求成功',
            "result": {
                'job': job_handle
            }
        }