Ejemplo n.º 1
0
def project_versions(request, id, project):
    """Return the deploy time (and description, if any) of *project* on a client.

    :param request: HTTP request; only GET is handled
    :param id: primary key of the Client row
    :param project: project name (also the Scrapyd project name)
    :return: HttpResponse with JSON {'datetime': ..., 'description': ...}
             when a Deploy record exists, otherwise {'datetime': ...} derived
             from the newest Scrapyd version (None if no versions exist)
    """
    if request.method == 'GET':
        client_model = Client.objects.get(id=id)
        project_model = Project.objects.get(name=project)
        scrapyd = ScrapydAPI(scrapyd_url(client_model.ip, client_model.port))
        # Single query instead of filter(...) truthiness check + a second get().
        deploy = Deploy.objects.filter(client=client_model,
                                       project=project_model).first()
        if deploy is not None:
            localtime = time.localtime(deploy.deployed_at.timestamp())
            # Renamed from `datetime` to avoid shadowing the stdlib module name.
            deployed_time = time.strftime('%Y-%m-%d %H:%M:%S', localtime)
            return HttpResponse(
                json.dumps({
                    'datetime': deployed_time,
                    'description': deploy.description
                }))
        # No Deploy record: fall back to the newest version known to Scrapyd.
        versions = scrapyd.list_versions(project)
        if versions:
            # Version names are epoch timestamps when deployed by scrapyd-client.
            localtime = time.localtime(int(versions[-1]))
            deployed_time = time.strftime('%Y-%m-%d %H:%M:%S', localtime)
        else:
            deployed_time = None
        return HttpResponse(json.dumps({'datetime': deployed_time}))
Ejemplo n.º 2
0
def project_version(request, client_id, project_name):
    """
    get project deploy version
    :param request: request object; only GET is handled
    :param client_id: client id
    :param project_name: project name
    :return: JsonResponse with the Deploy record as a dict, or a 500
             JSON error when the Scrapyd server is unreachable
    """
    if request.method == 'GET':
        # get client and project model
        client = Client.objects.get(id=client_id)
        project = Project.objects.get(name=project_name)
        scrapyd = ScrapydAPI(scrapyd_url(client.ip, client.port))
        # Single query instead of filter(...) truthiness check + a second get().
        deploy = Deploy.objects.filter(client=client, project=project).first()
        # if deploy info does not exist in db, create it from Scrapyd's versions
        if deploy is None:
            try:
                versions = scrapyd.list_versions(project_name)
            except ConnectionError:
                return JsonResponse({'message': 'Connect Error'}, status=500)
            if versions:
                # Version names are epoch timestamps when deployed by scrapyd-client.
                deployed_at = timezone.datetime.fromtimestamp(
                    int(versions[-1]), tz=pytz.timezone(TIME_ZONE))
            else:
                deployed_at = None
            # BUG FIX: deployed_at must go in `defaults`, not the lookup —
            # otherwise an existing (client, project) row is never matched/updated
            # and a duplicate row is created on every timestamp change.
            deploy, _created = Deploy.objects.update_or_create(
                client=client, project=project,
                defaults={'deployed_at': deployed_at})
        # return deploy json info
        return JsonResponse(model_to_dict(deploy))
Ejemplo n.º 3
0
def version(client, project):
    """Return the newest Scrapyd version name for *project* on *client*.

    Falls back to an empty string when the client is unreachable, the URL
    is malformed, or no versions are deployed.
    """
    base_url = 'http://{ip}:{port}'.format(ip=client.ip, port=client.port)
    try:
        api = ScrapydAPI(base_url)
        available = api.list_versions(project)
    except (ConnectionError, InvalidURL, UnicodeError):
        return ''
    return available[-1] if available else ''
Ejemplo n.º 4
0
def get_project_version(request, project, client_id):
    """Return all Scrapyd versions of *project* on a node as a JSON list.

    :param request: HTTP request; only GET is handled
    :param project: project name
    :param client_id: primary key of the Node row
    :return: JsonResponse list of {'name': version, 'id': 1-based index},
             or a 500 JSON error when the Scrapyd server is unreachable
    """
    if request.method == 'GET':
        client = Node.objects.get(id=client_id)
        scrapyd = ScrapydAPI(scrapyd_url(client.ip, client.port))
        try:
            # Keep only the network call inside the try block.
            versions = scrapyd.list_versions(project)
        except ConnectionError:
            return JsonResponse({'message': 'Connect Error'}, status=500)
        data = [{'name': version, 'id': index + 1}
                for index, version in enumerate(versions)]
        # BUG FIX: JsonResponse raises TypeError on non-dict payloads
        # unless safe=False is passed.
        return JsonResponse(data, safe=False)
Ejemplo n.º 5
0
class Scrapyd_Control(object):
    """Interactive command-line wrapper around a Scrapyd server.

    Prompts (kept in Chinese, as they are user-facing runtime strings) ask
    for the Scrapyd URL and project name once, then each method maps to one
    Scrapyd API call.
    """

    def __init__(self):
        scrapyd_url = input('请输入scrapyd地址: ')
        project = input('请输入项目名称: ')
        self.project = project
        self.scrapyd = ScrapydAPI(scrapyd_url)

    # Start a spider and report its job id.
    def schedule(self):
        spider = input('请输入爬虫名称: ')
        return {
            'project': self.project,
            'spider': spider,
            'jobid': self.scrapyd.schedule(self.project, spider)
        }

    # Aliases so users can type start/run as advertised by help().
    start, run = schedule, schedule

    # Cancel a running spider by job id.
    def cancel(self):
        jobid = input('请粘贴要取消的爬虫jobid: ')
        return self.scrapyd.cancel(self.project, jobid)

    # List all projects on the server.
    def listprojects(self):
        return self.scrapyd.list_projects()

    # List the spiders of the current project.
    def listspiders(self):
        return self.scrapyd.list_spiders(self.project)

    # List all jobs of the current project.
    def listjobs(self):
        return self.scrapyd.list_jobs(self.project)

    # Show the status of one job.
    def jobstatus(self):
        jobid = input('请粘贴要查看的jobid: ')
        return self.scrapyd.job_status(self.project, jobid)

    # List deployed versions of the current project.
    def listversions(self):
        return self.scrapyd.list_versions(self.project)

    # Delete one version after a yes/no confirmation; returns None on skip.
    def delversion(self):
        version_name = input('请粘贴要删除的版本: ')
        yes = input('是否确认删除该版本{},请输yes否则回车跳过删除\n'.format(version_name))
        if yes == 'yes':
            return self.scrapyd.delete_version(self.project, version_name)

    # Delete the whole project after a yes/no confirmation; returns None on skip.
    def delproject(self):
        yes = input('是否确认删除该项目{},请输yes否则回车跳过删除\n'.format(self.project))
        if yes == 'yes':
            return self.scrapyd.delete_project(self.project)

    # Print the list of available commands.
    def help(self):
        # BUG FIX: the advertised command was "deleproject", which does not
        # exist — the method is delproject.
        print("""
        启动爬虫 schedule|start|run
        取消爬虫 cancel
        查看项目 listprojects
        查看爬虫 listspiders
        列出所有jobs listjobs 
        查看job状态 jobstatus
        查看版本 listversions
        删除版本 delversion
        删除项目 delproject
        列出所有命令 help
        """)