Example #1
def index_status(request):
    """
    index statistics
    :param request: request object
    :return: json
    """
    if request.method == 'GET':
        clients = Client.objects.all()
        data = {
            'success': 0,
            'error': 0,
            'project': 0,
        }
        # clients info
        for client in clients:
            try:
                requests.get(scrapyd_url(client.ip, client.port), timeout=1)
                data['success'] += 1
            except ConnectionError:
                data['error'] += 1
        path = os.path.abspath(join(os.getcwd(), PROJECTS_FOLDER))
        files = os.listdir(path)
        # projects info
        for file in files:
            if os.path.isdir(join(path, file)) and file not in IGNORES:
                data['project'] += 1
        return JsonResponse(data)
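
All of these views build the Scrapyd endpoint with a scrapyd_url helper that is not shown in the examples. As a point of reference, here is a minimal sketch of such a helper, assuming it does nothing more than format the base URL from host and port (the body below is an assumption, not the actual implementation):

# Hypothetical sketch of the scrapyd_url helper assumed by the examples;
# the real implementation may differ.
def scrapyd_url(ip, port):
    return 'http://{ip}:{port}'.format(ip=ip, port=port)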
Example #2
def project_deploy(request, client_id, project_name):
    """
    deploy project operation
    :param request: request object
    :param client_id: client id
    :param project_name: project name
    :return: json of deploy result
    """
    if request.method == 'POST':
        # get project folder
        path = os.path.abspath(join(os.getcwd(), PROJECTS_FOLDER))
        project_path = join(path, project_name)
        # find egg file
        egg = find_egg(project_path)
        # read egg file content and close the handle promptly
        with open(join(project_path, egg), 'rb') as egg_file:
            egg_data = egg_file.read()
        # get client and project model
        client = Client.objects.get(id=client_id)
        project = Project.objects.get(name=project_name)
        # execute deploy operation
        scrapyd = ScrapydAPI(scrapyd_url(client.ip, client.port))
        try:
            scrapyd.add_version(project_name, int(time.time()), egg_data)
            # update deploy info
            deployed_at = timezone.now()
            Deploy.objects.filter(client=client, project=project).delete()
            deploy, result = Deploy.objects.update_or_create(client=client, project=project, deployed_at=deployed_at,
                                                             description=project.description)
            return JsonResponse(model_to_dict(deploy))
        except Exception:
            return JsonResponse({'message': get_traceback()}, status=500)
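
The deploy views (this example and Example #9) call a find_egg helper to locate the packaged egg inside the project folder. A minimal sketch, assuming it simply returns the file name of the first *.egg it finds in the directory (an assumption; the real helper may behave differently):

# Hypothetical sketch of find_egg as used by the deploy views;
# returns the name of the first .egg file in the given directory, if any.
import os

def find_egg(path):
    for name in os.listdir(path):
        if name.endswith('.egg'):
            return name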
Example #3
def project_version(request, client_id, project_name):
    """
    get project deploy version
    :param request: request object
    :param client_id: client id
    :param project_name: project name
    :return: deploy version of project
    """
    if request.method == 'GET':
        # get client and project model
        client = Client.objects.get(id=client_id)
        project = Project.objects.get(name=project_name)
        scrapyd = ScrapydAPI(scrapyd_url(client.ip, client.port))
        # if deploy info exists in db, return it
        if Deploy.objects.filter(client=client, project=project).exists():
            deploy = Deploy.objects.get(client=client, project=project)
        # if deploy info does not exist in db, create it
        else:
            try:
                versions = scrapyd.list_versions(project_name)
            except ConnectionError:
                return JsonResponse({'message': 'Connect Error'}, status=500)
            if len(versions) > 0:
                version = versions[-1]
                deployed_at = timezone.datetime.fromtimestamp(int(version), tz=pytz.timezone(TIME_ZONE))
            else:
                deployed_at = None
            deploy, result = Deploy.objects.update_or_create(client=client, project=project, deployed_at=deployed_at)
        # return deploy json info
        return JsonResponse(model_to_dict(deploy))
Example #4
def project_list(request, client_id):
    """
    project deployed list on one client
    :param request: request object
    :param client_id: client id
    :return: json
    """
    if request.method == 'GET':
        client = Client.objects.get(id=client_id)
        scrapyd = ScrapydAPI(scrapyd_url(client.ip, client.port))
        try:
            projects = scrapyd.list_projects()
            return JsonResponse(projects)
        except ConnectionError:
            return JsonResponse({'message': 'Connect Error'}, status=500)
Example #5
def client_status(request, client_id):
    """
    get client status
    :param request: request object
    :param client_id: client id
    :return: json
    """
    if request.method == 'GET':
        # get client object
        client = Client.objects.get(id=client_id)
        try:
            requests.get(scrapyd_url(client.ip, client.port), timeout=3)
            return JsonResponse({'result': '1'})
        except ConnectionError:
            return JsonResponse({'message': 'Connect Error'}, status=500)
Example #6
def job_cancel(request, client_id, project_name, job_id):
    """
    cancel a job
    :param request: request object
    :param client_id: client id
    :param project_name: project name
    :param job_id: job id
    :return: json of cancel
    """
    if request.method == 'GET':
        client = Client.objects.get(id=client_id)
        try:
            scrapyd = ScrapydAPI(scrapyd_url(client.ip, client.port))
            result = scrapyd.cancel(project_name, job_id)
            return JsonResponse(result)
        except ConnectionError:
            return JsonResponse({'message': 'Connect Error'}, status=500)
Example #7
def spider_start(request, client_id, project_name, spider_name):
    """
    start a spider
    :param request: request object
    :param client_id: client id
    :param project_name: project name
    :param spider_name: spider name
    :return: json
    """
    if request.method == 'GET':
        client = Client.objects.get(id=client_id)
        scrapyd = ScrapydAPI(scrapyd_url(client.ip, client.port))
        try:
            job = scrapyd.schedule(project_name, spider_name)
            return JsonResponse({'job': job})
        except ConnectionError:
            return JsonResponse({'message': 'Connect Error'}, status=500)
Example #8
def spider_list(request, client_id, project_name):
    """
    get spider list from one client
    :param request: request Object
    :param client_id: client id
    :param project_name: project name
    :return: json
    """
    if request.method == 'GET':
        client = Client.objects.get(id=client_id)
        scrapyd = ScrapydAPI(scrapyd_url(client.ip, client.port))
        try:
            spiders = scrapyd.list_spiders(project_name)
            spiders = [{'name': spider, 'id': index + 1} for index, spider in enumerate(spiders)]
            return JsonResponse(spiders, safe=False)
        except ConnectionError:
            return JsonResponse({'message': 'Connect Error'}, status=500)
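
Note that Django's JsonResponse serializes dicts by default; because the payload here is a list (and likewise the job list in Example #12), it has to be passed with safe=False.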
Example #9
def project_deploy(request, id, project):
    """
    deploy project operation
    :param request: request object
    :param id: client id
    :param project: project name
    :return: deploy result
    """
    if request.method == 'GET':
        path = os.path.abspath(merge(os.getcwd(), PROJECTS_FOLDER))
        project_path = merge(path, project)
        egg = find_egg(project_path)
        # read egg file content and close the handle promptly
        with open(merge(project_path, egg), 'rb') as egg_file:
            egg_data = egg_file.read()
        deploy_version = time.time()

        client_model = Client.objects.get(id=id)
        project_model = Project.objects.get(name=project)
        Deploy.objects.filter(client=client_model,
                              project=project_model).delete()
        deploy = Deploy.objects.update_or_create(
            client=client_model,
            project=project_model,
            description=project_model.description)
        scrapyd = ScrapydAPI(scrapyd_url(client_model.ip, client_model.port))
        result = scrapyd.add_version(project, int(deploy_version), egg_data)
        return HttpResponse(result)
Example #10
def index_status(request):
    """
    index statistics
    :param request: request object
    :return: json
    """
    if request.method == 'GET':
        clients = Client.objects.all()
        data = {
            'success': 0,
            'error': 0,
            'project': 0,
        }
        for client in clients:
            try:
                requests.get(scrapyd_url(client.ip, client.port), timeout=1)
                data['success'] += 1
            except ConnectionError:
                data['error'] += 1
        path = os.path.abspath(merge(os.getcwd(), PROJECTS_FOLDER))
        files = os.listdir(path)
        for file in files:
            if os.path.isdir(merge(path, file)) and file not in IGNORES:
                data['project'] += 1
        return HttpResponse(json.dumps(data))
Example #11
def project_versions(request, id, project):
    """
    get project deploy version info
    :param request: request object
    :param id: client id
    :param project: project name
    :return: json of deploy datetime
    """
    if request.method == 'GET':
        client_model = Client.objects.get(id=id)
        project_model = Project.objects.get(name=project)
        scrapyd = ScrapydAPI(scrapyd_url(client_model.ip, client_model.port))
        if Deploy.objects.filter(client=client_model, project=project_model):
            deploy = Deploy.objects.get(client=client_model, project=project_model)
            timestamp = deploy.deployed_at.timestamp()
            localtime = time.localtime(timestamp)
            datetime = time.strftime('%Y-%m-%d %H:%M:%S', localtime)
            return HttpResponse(json.dumps({'datetime': datetime, 'description': deploy.description}))
        else:
            versions = scrapyd.list_versions(project)
            if len(versions) > 0:
                version = versions[-1]
                localtime = time.localtime(int(version))
                datetime = time.strftime('%Y-%m-%d %H:%M:%S', localtime)
            else:
                datetime = None
        return HttpResponse(json.dumps({'datetime': datetime}))
Example #12
def job_list(request, client_id, project_name):
    """
    get job list of project from one client
    :param request: request object
    :param client_id: client id
    :param project_name: project name
    :return: list of jobs
    """
    if request.method == 'GET':
        client = Client.objects.get(id=client_id)
        scrapyd = ScrapydAPI(scrapyd_url(client.ip, client.port))
        try:
            result = scrapyd.list_jobs(project_name)
            jobs = []
            statuses = ['pending', 'running', 'finished']
            for status in statuses:
                for job in result.get(status):
                    job['status'] = status
                    jobs.append(job)
            return JsonResponse(jobs, safe=False)
        except ConnectionError:
            return JsonResponse({'message': 'Connect Error'}, status=500)
Example #13
def scheduler_job():
    """
    check scheduled tasks every minute
    :return:
    """
    models = Task.objects.all()
    for model in models:
        scheduler_at = model.scheduler_at
        updated_at = model.updated_at
        scheduler_at_time_stamp = scheduler_at * 60
        updated_at_time_stamp = time.mktime(updated_at.timetuple())
        if time.time() - updated_at_time_stamp > scheduler_at_time_stamp:
            client_id = model.client_id
            project_name = model.project_name
            spider_name = model.spider_name
            client = Client.objects.get(id=client_id)
            scrapyd = ScrapydAPI(scrapyd_url(client.ip, client.port))
            try:
                job = scrapyd.schedule(project_name, spider_name)
                model.success = 1
            except ConnectionError:
                model.success = 0
            finally:
                model.save()
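
The docstring says this check runs every minute, but the examples do not show how scheduler_job is actually wired up. Purely as an illustration, one way to run it on a one-minute interval is with APScheduler (the choice of library and the setup below are assumptions, not taken from the examples):

# Illustrative only: run scheduler_job every minute with APScheduler.
# The actual scheduling mechanism is not shown in the examples.
from apscheduler.schedulers.background import BackgroundScheduler

scheduler = BackgroundScheduler()
scheduler.add_job(scheduler_job, 'interval', minutes=1)
scheduler.start()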
Example #14
def work_func(client, project, spider):
    """schedule a spider run on the given client"""
    ip_port = Client.objects.get(id=client)
    scrapyd = ScrapydAPI(scrapyd_url(ip_port.ip, ip_port.port))
    scrapyd.schedule(project, spider)
Example #15
def job_cancel(request, id, project, job):
    """cancel a job on the given client; return json of the cancel result"""
    if request.method == 'GET':
        client = Client.objects.get(id=id)
        scrapyd = ScrapydAPI(scrapyd_url(client.ip, client.port))
        result = scrapyd.cancel(project, job)
        return HttpResponse(json.dumps(result))
Example #16
def project_list(request, id):
    """list projects deployed on the given client"""
    if request.method == 'GET':
        client = Client.objects.get(id=id)
        scrapyd = ScrapydAPI(scrapyd_url(client.ip, client.port))
        projects = scrapyd.list_projects()
        return HttpResponse(json.dumps(projects))
Example #17
def spider_start(request, id, project, spider):
    """start a spider on the given client; return json with the job id"""
    if request.method == 'GET':
        client = Client.objects.get(id=id)
        scrapyd = ScrapydAPI(scrapyd_url(client.ip, client.port))
        result = scrapyd.schedule(project, spider)
        return HttpResponse(json.dumps({'job': result}))
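
None of the examples include the URL configuration that exposes these views. A minimal, hypothetical urls.py sketch using Django's path() routing (the route patterns and the views module name are assumptions, for illustration only):

# Hypothetical urls.py wiring for a few of the views above;
# the actual route patterns are not part of the examples.
from django.urls import path

from . import views

urlpatterns = [
    path('index/status/', views.index_status),
    path('client/<int:client_id>/status/', views.client_status),
    path('client/<int:client_id>/project/<str:project_name>/spider/<str:spider_name>/start/',
         views.spider_start),
]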