def project_deploy(request, client_id, project_name):
    """
    Deploy a project's egg package to one scrapyd client.
    :param request: request object
    :param client_id: client id
    :param project_name: project name
    :return: json of deploy result
    """
    if request.method == 'POST':
        # get project folder
        path = os.path.abspath(join(os.getcwd(), PROJECTS_FOLDER))
        project_path = join(path, project_name)
        # find egg file
        egg = find_egg(project_path)
        if not egg:
            return JsonResponse({'message': 'egg not found'}, status=500)
        # read the egg bytes; `with` guarantees the handle is closed
        # (the original leaked the open file object)
        with open(join(project_path, egg), 'rb') as egg_file:
            egg_data = egg_file.read()
        # get client and project model
        client = Client.objects.get(id=client_id)
        project = Project.objects.get(name=project_name)
        # execute deploy operation
        scrapyd = get_scrapyd(client)
        scrapyd.add_version(project_name, int(time.time()), egg_data)
        # update deploy info: keep exactly one Deploy row per (client, project);
        # `defaults=` updates the mutable fields instead of delete-then-create
        deployed_at = timezone.now()
        deploy, result = Deploy.objects.update_or_create(
            client=client, project=project,
            defaults={'deployed_at': deployed_at,
                      'description': project.description})
        return JsonResponse(model_to_dict(deploy))
def project_version(request, client_id, project_name):
    """
    Get the deploy version info of a project on one client.
    :param request: request object
    :param client_id: client id
    :param project_name: project name
    :return: deploy version of project
    """
    if request.method == 'GET':
        # resolve the client and project records
        client = Client.objects.get(id=client_id)
        project = Project.objects.get(name=project_name)
        scrapyd = get_scrapyd(client)
        # prefer deploy info cached in the db
        cached = Deploy.objects.filter(client=client, project=project)
        if cached:
            deploy = Deploy.objects.get(client=client, project=project)
        else:
            # no record yet: ask scrapyd for its version list and build one
            try:
                versions = scrapyd.list_versions(project_name)
            except ConnectionError:
                return JsonResponse({'message': 'Connect Error'}, status=500)
            deployed_at = None
            if versions:
                # the newest version name is a unix timestamp
                latest = versions[-1]
                deployed_at = timezone.datetime.fromtimestamp(
                    int(latest), tz=pytz.timezone(TIME_ZONE))
            deploy, result = Deploy.objects.update_or_create(
                client=client, project=project, deployed_at=deployed_at)
        # return deploy json info
        return JsonResponse(model_to_dict(deploy))
def del_project(request, client_id, project):
    """
    Delete a deployed project from one scrapyd client.
    :param request: request object
    :param client_id: client id
    :param project: project name
    :return: json of delete result
    """
    if request.method == 'GET':
        client = Client.objects.get(id=client_id)
        try:
            result = get_scrapyd(client).delete_project(project=project)
            return JsonResponse(result)
        except ConnectionError:
            return JsonResponse({'message': 'Connect Error'})
def work_func(client, project, spider):
    """
    Schedule one spider run on the scrapyd server of the given client.
    :param client: client id (primary key of Client)
    :param project: project name
    :param spider: spider name
    :return: None
    """
    ip_port = Client.objects.get(id=client)
    scrapyd = get_scrapyd(ip_port)
    logger.warning(f"Run {project}: {spider} on server{ip_port.ip}")
    try:
        jobs = scrapyd.schedule(project, spider)
        logger.warning(f"运行{project}-{spider}成功;作业ID为:{jobs}")
    except Exception as err:
        # include the actual exception so failures are diagnosable
        # (the original dropped `err` entirely)
        logger.error(f"Please deploy the project to:{ip_port.ip}, error: {err}")
def work_func(client, project, spider):
    """
    Schedule one spider run on the scrapyd server of the given client.
    :param client: client id (primary key of Client)
    :param project: project name
    :param spider: spider name
    :return: None
    """
    ip_port = Client.objects.get(id=client)
    scrapyd = get_scrapyd(ip_port)
    logger.warning("Run {}: {} on server{}".format(project, spider, ip_port.ip))
    try:
        jobs = scrapyd.schedule(project, spider)
        logger.warning("{}: {};Jobs:{}".format(project, spider, jobs))
    except Exception as err:
        # include the actual exception so failures are diagnosable
        # (the original dropped `err` entirely)
        logger.error("Please deploy the project to:{}, error: {}".format(
            ip_port.ip, err))
def project_list(request, client_id):
    """
    Project deployed list on one client.
    :param request: request object
    :param client_id: client id
    :return: json
    """
    if request.method == 'GET':
        client = Client.objects.get(id=client_id)
        scrapyd = get_scrapyd(client)
        # scrapyd may be unreachable; mirror the error handling used by the
        # other client-facing views instead of letting the request 500 raw
        try:
            projects = scrapyd.list_projects()
        except ConnectionError:
            return JsonResponse({'message': 'Connect Error'}, status=500)
        # safe=False: the project list is a JSON array, not an object
        return JsonResponse(projects, safe=False)
def job_cancel(request, client_id, project_name, job_id):
    """
    Cancel a job running on one client.
    :param request: request object
    :param client_id: client id
    :param project_name: project name
    :param job_id: job id
    :return: json of cancel
    """
    if request.method == 'GET':
        client = Client.objects.get(id=client_id)
        scrapyd = get_scrapyd(client)
        # scrapyd may be unreachable; report it instead of raising
        try:
            result = scrapyd.cancel(project_name, job_id)
            return JsonResponse(result)
        except ConnectionError:
            return JsonResponse({'message': 'Connect Error'}, status=500)
def spider_start(request, client_id, project_name, spider_name):
    """
    Start a spider on one client.
    :param request: request object
    :param client_id: client id
    :param project_name: project name
    :param spider_name: spider name
    :return: json
    """
    if request.method == 'GET':
        client = Client.objects.get(id=client_id)
        scrapyd = get_scrapyd(client)
        # scrapyd may be unreachable; report it instead of raising
        try:
            job = scrapyd.schedule(project_name, spider_name)
            return JsonResponse({'job': job})
        except ConnectionError:
            return JsonResponse({'message': 'Connect Error'}, status=500)
def execute(client, project_name, spider_name):
    """
    Schedule a deployed spider task to run.
    :param client: client object
    :param project_name: project name
    :param spider_name: spider name
    :return: None
    """
    logger.info('execute job of client %s, project %s, spider %s',
                client.name, project_name, spider_name)
    # deliberately no try/except: apscheduler records the traceback itself
    target = Client.objects.get(id=client.id)
    get_scrapyd(target).schedule(project_name, spider_name)
def execute(client, project_name, spider_name):
    """
    Schedule a deployed spider task to run.
    :param client: client object
    :param project_name: project name
    :param spider_name: spider name
    :return: None
    """
    # use the module logger rather than print, consistent with the sibling
    # implementation of execute elsewhere in this codebase
    logger.info('execute job of client %s, project %s, spider %s',
                client.name, project_name, spider_name)
    # deliberately no try/except: apscheduler records the traceback itself
    ip_port = Client.objects.get(id=client.id)
    scrapyd = get_scrapyd(ip_port)
    scrapyd.schedule(project_name, spider_name)
def project_list(request, client_id):
    """
    Project deployed list on one client.
    :param request: request object
    :param client_id: client id
    :return: json
    """
    if request.method == 'GET':
        client = Client.objects.get(id=client_id)
        scrapyd = get_scrapyd(client)
        try:
            projects = scrapyd.list_projects()
            # safe=False: the project list is a JSON array, not an object,
            # and JsonResponse rejects non-dict payloads otherwise
            return JsonResponse(projects, safe=False)
        except ConnectionError:
            return JsonResponse({'message': 'Connect Error'}, status=500)
def job_cancel(request, client_id, project_name, job_id):
    """
    Cancel a job running on one client.
    :param request: request object
    :param client_id: client id
    :param project_name: project name
    :param job_id: job id
    :return: json of cancel
    """
    if request.method == 'GET':
        client = Client.objects.get(id=client_id)
        try:
            scrapyd = get_scrapyd(client)
            result = scrapyd.cancel(project_name, job_id)
            return JsonResponse(result)
        except ConnectionError:
            # status=500 so callers see a failure, matching the other views
            # (the original returned HTTP 200 with an error body)
            return JsonResponse({'message': 'Connect Error'}, status=500)
def spider_start(request, client_id, project_name, spider_name):
    """
    Start a spider on one client.
    :param request: request object
    :param client_id: client id
    :param project_name: project name
    :param spider_name: spider name
    :return: json
    """
    if request.method == 'GET':
        scrapyd = get_scrapyd(Client.objects.get(id=client_id))
        try:
            # scrapyd returns the id of the scheduled job
            job = scrapyd.schedule(project_name, spider_name)
        except ConnectionError:
            return JsonResponse({'message': 'Connect Error'}, status=500)
        return JsonResponse({'job': job})
def spider_list(request, client_id, project_name):
    """
    Get spider list of a project from one client.
    :param request: request Object
    :param client_id: client id
    :param project_name: project name
    :return: json
    """
    if request.method == 'GET':
        client = Client.objects.get(id=client_id)
        scrapyd = get_scrapyd(client)
        spiders = scrapyd.list_spiders(project_name)
        # attach a 1-based id to each spider name for the frontend
        spiders = [{'name': spider, 'id': index + 1}
                   for index, spider in enumerate(spiders)]
        # safe=False: payload is a JSON array; JsonResponse raises TypeError
        # for non-dict payloads without it
        return JsonResponse(spiders, safe=False)
def job_list(request, client_id, project_name):
    """
    Get job list of a project from one client.
    :param request: request object
    :param client_id: client id
    :param project_name: project name
    :return: list of jobs
    """
    if request.method == 'GET':
        client = Client.objects.get(id=client_id)
        scrapyd = get_scrapyd(client)
        result = scrapyd.list_jobs(project_name)
        # flatten the per-status lists into one list tagged with its status
        jobs = []
        statuses = ['pending', 'running', 'finished']
        for status in statuses:
            # default to [] so a missing status key doesn't raise TypeError
            for job in result.get(status, []):
                job['status'] = status
                jobs.append(job)
        # safe=False: payload is a JSON array; JsonResponse raises TypeError
        # for non-dict payloads without it
        return JsonResponse(jobs, safe=False)
def del_project(request, client_id, project):
    """
    Delete a deployed project from one scrapyd client.
    :param request: request object
    :param client_id: client id
    :param project: project name
    :return: json of delete result
    """
    if request.method == 'GET':
        client = Client.objects.get(id=client_id)
        # scrapyd may be unreachable; report it instead of raising,
        # consistent with the other del_project implementation
        try:
            scrapyd = get_scrapyd(client)
            result = scrapyd.delete_project(project=project)
            return JsonResponse(result)
        except ConnectionError:
            return JsonResponse({'message': 'Connect Error'})