# coding:utf-8 from Pinbot.settings import OTHER_DATABASE from celery.schedules import crontab address = OTHER_DATABASE.get('rabbitmq').get('host') username = OTHER_DATABASE.get('rabbitmq').get('user') password = OTHER_DATABASE.get('rabbitmq').get('password') BROKER_URL = 'amqp://%s:%s@%s:5672//' % (username, password, address) CELERY_IMPORTS = ( "pin_celery.tasks", "pin_utils.email.send_mail", "pin_utils.email.mailgun", "pin_utils.email.django_mail", "app.vip.tasks", "app.partner.tasks", "app.sendemail.tasks", "app.activity.tasks", "pin_utils.sms.sms_code", "pin_utils.user_log", "pin_utils.spider_utils", "app.task_system.tasks", "app.resume.tasks", "pin_utils.sms.sms_utils", ) CELERY_RESULT_BACKEND = "amqp" CELERYBEAT_SCHEDULE = {
# coding:utf-8
# RabbitMQ bootstrap: opens one blocking connection using credentials from
# project settings and declares the durable queues this process publishes to.
import thread
import pika
import sys
import json
from Pinbot.settings import OTHER_DATABASE
import threading

# Fetch the rabbitmq config mapping once instead of re-reading the settings
# dict for every individual key below.
_rabbitmq_conf = OTHER_DATABASE.get('rabbitmq')

credentials = pika.PlainCredentials(_rabbitmq_conf.get('user'),
                                    _rabbitmq_conf.get('password'))
connection = pika.BlockingConnection(
    pika.ConnectionParameters(credentials=credentials,
                              host=_rabbitmq_conf.get('host')))

# Channel + durable queue for HTML resumes (durable: messages survive a
# broker restart).
channel = connection.channel()
channel.queue_declare(queue=_rabbitmq_conf.get('html_resume_queue'),
                      durable=True)

# RabbitMQ message queue for resume purchases.
buy_resume_channel = connection.channel()
buy_resume_channel.queue_declare(queue=_rabbitmq_conf.get('buy_resume_queue'),
                                 durable=True)

# NOTE(review): presumably serializes channel use across threads (pika
# channels are not thread-safe) — confirm against the publishing call sites.
mutex = threading.Lock()
)  # closes an import list that begins before this chunk
from app.vip.vip_utils import MissionUtils
from app.special_feed.feed_utils import FeedUtils
from app.partner.partner_utils import PartnerCoinUtils, UploadResumeUtils
from Brick.App.job_hunting.job_utils import (
    JobUtils,
)
from pin_utils.django_deco import (
    pin_login_required,
)

# Standard Django logger; `logging` is imported before this chunk.
django_log = logging.getLogger('django')

# channel.queue_declare(queue="plugin_resume_queue", durable=True)
# Name of the RabbitMQ queue that carries parsed HTML resumes.
queue_name = OTHER_DATABASE.get('rabbitmq').get('html_resume_queue')


# NOTE(review): LOGIN_URL, PINBOT_ANALYSE, and the decorators are imported
# before this chunk.  The function body continues past the end of this chunk.
@login_required(login_url=LOGIN_URL)
@csrf_exempt
@page_access_counter_dec(page_type_id=PINBOT_ANALYSE)
def analyse_resumes(request):
    """
    @summary: Resume analysis.
    """
    # Copy POST so the QueryDict is mutable; 'data' defaults to an empty dict.
    p = request.POST.copy()
    data = p.get('data', {})
    is_refresh = False
    if data:
# coding:utf-8
# RabbitMQ bootstrap module: one blocking connection, two durable queues,
# and a lock shared by the publishing code.
import thread
import pika
import sys
import json
from Pinbot.settings import OTHER_DATABASE
import threading

# Read the 'rabbitmq' settings mapping a single time rather than chaining
# OTHER_DATABASE.get('rabbitmq') for each credential/queue key.
_mq_settings = OTHER_DATABASE.get('rabbitmq')

credentials = pika.PlainCredentials(
    _mq_settings.get('user'),
    _mq_settings.get('password'))
connection = pika.BlockingConnection(
    pika.ConnectionParameters(credentials=credentials,
                              host=_mq_settings.get('host')))

# Durable queue for parsed HTML resumes.
channel = connection.channel()
channel.queue_declare(
    queue=_mq_settings.get('html_resume_queue'),
    durable=True)

# RabbitMQ message queue for resume purchases.
buy_resume_channel = connection.channel()
buy_resume_channel.queue_declare(
    queue=_mq_settings.get('buy_resume_queue'),
    durable=True)

# NOTE(review): presumably guards concurrent use of the shared channels —
# confirm at the call sites that acquire it.
mutex = threading.Lock()
from Pinbot.settings import (
    PROJECT_ROOT,
    STATIC_URL,
)
from app.vip.vip_utils import MissionUtils
from app.special_feed.feed_utils import FeedUtils
from app.partner.partner_utils import PartnerCoinUtils, UploadResumeUtils
from Brick.App.job_hunting.job_utils import (
    JobUtils,
)
from pin_utils.django_deco import (
    pin_login_required,
)

# Standard Django logger; `logging` is imported before this chunk.
django_log = logging.getLogger('django')

# channel.queue_declare(queue="plugin_resume_queue", durable=True)
# RabbitMQ queue name for parsed HTML resumes; OTHER_DATABASE is imported
# before this chunk.
queue_name = OTHER_DATABASE.get('rabbitmq').get('html_resume_queue')


# NOTE(review): decorators and their constants (LOGIN_URL, PINBOT_ANALYSE)
# come from imports outside this chunk.  The body continues past its end.
@login_required(login_url=LOGIN_URL)
@csrf_exempt
@page_access_counter_dec(page_type_id=PINBOT_ANALYSE)
def analyse_resumes(request):
    """
    @summary: Resume analysis.
    """
    # Mutable copy of the POST data; 'data' defaults to an empty dict.
    p = request.POST.copy()
    data = p.get('data', {})
    is_refresh = False
    if data: