def asset_operator(asset_list, status, username, proxy=None):
    """Reboot, power off, or rebuild (reinstall) assets through a proxy.

    Filters *asset_list* down to the assets managed by *proxy*, POSTs the
    requested action to the proxy's ``/v1.0/system/action`` endpoint, then
    records the remote task in the ``Task`` table and on ``task_queue``.

    :param asset_list: assets to operate on; must be non-empty (the first
        asset's ``profile`` is used when rebuilding)
    :param status: ``'rebuild'`` or a power action understood by the proxy
    :param username: operator name recorded on the saved task
    :param proxy: proxy providing url/credentials; must not be None despite
        the default
    :raises ServerError: wraps any failure during the operation
    """
    # NOTE(review): the lock is created fresh on every call, so it cannot
    # actually serialize concurrent invocations; a module-level lock would
    # be required for that.
    g_lock = threading.Lock()
    try:
        g_lock.acquire()
        # Only assets that belong to this proxy can be driven through it.
        proxy_asset = Asset.objects.filter(proxy__proxy_name=proxy.proxy_name)
        need_delete_asset = set(asset_list) & set(proxy_asset)
        systems = [item.name for item in need_delete_asset]
        profile = asset_list[0].profile  # IndexError if asset_list is empty
        if status == 'rebuild':
            data = {'rebuild': 'true', 'profile': profile, 'systems': systems}
        else:
            data = {'power': status, 'systems': systems}
        data = json.dumps(data)
        api = APIRequest('{0}/v1.0/system/action'.format(proxy.url),
                         proxy.username,
                         CRYPTOR.decrypt(proxy.password))
        result, codes = api.req_post(data)
        logger.debug(u"操作结果result:%s codes:%s" % (result, codes))
        # Persist the remote task so its progress can be looked up later.
        task = Task()
        task.task_name = result['task_name']
        task.username = username
        task.status = result['messege']  # 'messege' is the API's own (misspelled) key
        task.start_time = datetime.datetime.now()
        task.url = '{0}/v1.0/system/action'.format(proxy.url)
        task.save()
        task_queue.put(
            dict(task_name=result['task_name'],
                 task_user=username,
                 task_proxy=proxy.proxy_name))
    except Exception as e:
        raise ServerError(e)
    finally:
        g_lock.release()
def duplicate_task(request, task_id):
    """Clone an existing task (directory + DB row) for the requesting user.

    Copies the template task's directory to a path owned by the current
    user, bumping a ``_copy_<i>`` suffix until the destination is free,
    then inserts a new Task row pointing at the copy and returns its id
    as JSON.
    """
    template = get_object_or_404(Task, pk=task_id)
    # Re-root the path under the requesting user: any teacher username that
    # appears in the template's path is replaced with the current username.
    new_path = template.dir()
    for user in User.objects.filter(groups__name='teachers'):
        new_path = new_path.replace(user.username, request.user.username)
    # Find a free destination by (re)placing the trailing _copy_<i> suffix.
    i = 1
    while os.path.exists(new_path):
        new_path = re.sub(r"(_copy_[0-9]+$|$)", f'_copy_{i}', new_path, count=1)
        i += 1
    copytree(template.dir(), new_path, ignore=ignore_patterns('.taskid'))
    # Django idiom: clearing the pk and saving inserts a brand-new row.
    copied_task = template
    copied_task.id = None
    copied_task.code = Task.path_to_code(new_path)
    copied_task.save()
    return JsonResponse({
        'id': copied_task.id,
    })
def get_invite_cookie(request, response):
    """Consume the ``add_me_link`` invite cookie.

    Resolves the invite code to a profile, registers a pending friendship
    for the current user, queues a notification mail, and always clears
    the cookie (by setting it to 0) once it has been processed.
    """
    cookie_value = request.COOKIES.get('add_me_link')
    if cookie_value is None or cookie_value == '0':
        return
    try:
        invite_code = request.COOKIES.get('add_me_link')
    except ValueError:
        logging.info('Wrong add_me link')
        response.set_cookie('add_me_link', 0)
        return
    profile = UserProfile.objects.get_by_invite_key(invite_code)
    if profile is None:
        logging.info('No profile associated with code:%s' % invite_code)
        response.set_cookie('add_me_link', 0)
        return
    if profile.user == request.engine.user:
        logging.info('Self-invite')
        response.set_cookie('add_me_link', 0)
        return
    friends = Friend.objects.get_by_user(user=request.engine.user.user)
    friends.engine = request.engine
    friends.add_pending(str(profile.user.id))
    response.set_cookie('add_me_link', 0)
    # Queue the notification e-mail.
    Task(user=request.engine.user.user,
         task='mail',
         source='friend_request',
         comment=json.dumps({
             'receiver': profile.user_id,
             'sender': request.engine.user.user_id
         })).save()
def push_role_to_asset(asset_list, role, username, proxy=None):
    """Push a system user (role) to remote hosts through *proxy*.

    Creates the user on every asset managed by the proxy, pushes its SSH
    public key (password login is intentionally not offered), optionally
    pushes the sudo configuration, then queues the resulting events and
    records one Task row per event.

    :param asset_list: candidate assets; intersected with the proxy's assets
    :param role: PermRole instance to push
    :param username: operator recorded on queued events and Task rows
    :param proxy: proxy the assets belong to; must not be None
    :raises ServerError: wraps any failure
    """
    try:
        proxy_assets = Asset.objects.filter(proxy__proxy_name=proxy.proxy_name)
        need_push_assets = list(set(asset_list) & set(proxy_assets))
        push_resource = gen_resource(need_push_assets)
        # Ansible push targets.
        host_list = [asset.networking.all()[0].ip_address
                     for asset in need_push_assets]
        host_names = [asset.name for asset in need_push_assets]
        if host_list:
            task = MyTask(push_resource, host_list)
            ret = {}
            # The user must exist before its key can be pushed.
            # 1. Push the role using its SSH key.
            role_proxy = get_one_or_all('PermRole', proxy, role.uuid_id)
            ret["pass_push"] = task.add_user(role.name, proxy,
                                             role.system_groups, username)
            time.sleep(1)  # give user creation a second to finish before pushing the key
            ret["key_push"] = task.push_key(
                role.name,
                os.path.join(role_proxy['key_path'], 'id_rsa.pub'),
                proxy, username)
            # 2. Password push intentionally omitted: key-only authentication.
            # 3. Push the sudo configuration, if the role has sudo aliases.
            sudo_list = list(role.sudo.all())
            if sudo_list:
                sudo_uuids = [sudo.uuid_id for sudo in sudo_list]
                ret['sudo'] = task.push_sudo(role, sudo_uuids, proxy, username)
            logger.info('推送用户结果ret:%s'%ret)
            # Collect the event names to track. Bug fix: dict.has_key() was
            # removed in Python 3 (and deprecated in 2.x) — use `in`.
            event_task_names = []
            if 'pass_push' in ret:
                event_task_names.append(ret['pass_push']['task_name'])
            if 'key_push' in ret:
                event_task_names.append(ret['key_push']['task_name'])
            if 'sudo' in ret and 'task_name' in ret['sudo']:
                event_task_names.append(ret['sudo']['task_name'])
            event = dict(push_assets=host_names,
                         role_name=role.name,
                         password_push=False,
                         key_push=True,
                         task_proxy=proxy.proxy_name)
            event['tasks'] = event_task_names
            event['username'] = username
            task_queue.put(event)
            # Record one Task row per queued event.
            for item in event['tasks']:
                tk = Task()
                tk.task_name = item
                tk.status = 'running'
                tk.start_time = datetime.datetime.now()
                tk.username = username
                tk.save()
    except Exception as e:
        raise ServerError(e)
def _pushed(self, platform_rows, success):
    """Route stored rows after a push attempt.

    Rebuilds the Task embedded in each row; on success hands it to
    self._push_success, otherwise re-queues it on PublicQueues.STORAGE
    together with its storage info.
    """
    for rows in platform_rows.values():
        for row in rows.values():
            info = row.get(self.FAMILY)
            restored = Task(**json.loads(row.get('task').get('task')))
            if success:
                self._push_success(restored, info)
            else:
                PublicQueues.STORAGE.put((restored, info))
def form_valid(self, form):
    """Save the form via the parent view, then record an audit Task row
    referencing the newly created VPC object."""
    saved = super().form_valid(form)
    audit = Task()
    audit.service = ''
    audit.model = VPC.__name__
    audit.identity = saved.id
    audit.save()
    return saved
def task(**kwargs):
    """Return a saved task. Requires a users fixture if no creator is provided.
    """
    creator = kwargs['creator'] if 'creator' in kwargs else User.objects.all()[0]
    attrs = {'text': 'Some text', 'creator': creator, 'num_followers': 1}
    attrs.update(kwargs)
    saved = Task(**attrs)
    saved.save()
    # The creator automatically follows the new task.
    TaskFollowing(task=saved, user=creator).save()
    return saved
def _make_detail_extra_task(self, data):
    """Queue a follow-up crawl task for a bid's extra borrower details,
    reusing the current task's headers and hash parameter."""
    borrower_uid = data.get('uid')
    bid_hash = self._task.url.split('hash=')[1].split('&')[0]
    template = ('https://www.weidai.com.cn/bid/queryBiddingExtraDetails'
                '?hash={hash}&borrowerUid={uid}')
    extra = Task()
    extra.url = template.format(hash=bid_hash, uid=borrower_uid)
    extra.platform = self.platform
    extra.feature = 'weidai.bid_detail_extra'
    extra.headers = self._task.headers
    self._md5_mk.update(extra.url)
    extra.row_key = self._md5_mk.hexdigest()
    self.tasks.append(extra)
def _push_parse_task(self):
    """Pump crawled tasks from the PARSE queue to the parse topic.

    Runs forever: takes ``(task, status)`` pairs off CrawlerQueues.PARSE,
    publishes a copy to CrawlerSite.PARSE_TOPIC and, on success, schedules
    the status-tracking entry for removal and bumps the success counters.
    """
    TDDCLogging.info('--->Parse Task Producer Was Ready.')
    while True:
        task, status = CrawlerQueues.PARSE.get()
        # Copy is taken BEFORE the status update below, so the published
        # copy still carries the pre-update status.
        # NOTE(review): CRAWL_SUCCESS is set on `task`, which is then
        # discarded, not on the published `tmp` — confirm this is intended.
        tmp = Task(**task.__dict__)
        task.status = Task.Status.CRAWL_SUCCESS
        # NOTE(review): this guard runs after task.__dict__ was already
        # accessed, so a non-Task would have crashed earlier; the empty
        # error message also logs nothing useful.
        if not isinstance(task, Task):
            TDDCLogging.error('')
            continue
        if not self._push_task(CrawlerSite.PARSE_TOPIC, tmp):
            TDDCLogging.error('')
        else:
            CrawlerQueues.TASK_STATUS_REMOVE.put(tmp)
            TDDCLogging.debug('[%s:%s] Crawled Successed(%d).' %
                              (task.platform, task.row_key, status))
            self._successed_num += 1
            self._successed_pre_min += 1
def main():
    """Feed 100 synthetic parse tasks into the parser, then idle forever."""
    from common.models import Task
    Parser()
    remaining = 100
    gevent.sleep(3)
    while True:
        if remaining > 0:
            stub = Task(parse_info_dict={
                'id': '%d' % remaining,
                'status': 3,
                'body': 'hello'
            })
            ParserQueues.WAITING_PARSE.put(stub)
            remaining -= 1
            if remaining == 0:
                print('Done')
        gevent.sleep(0.01)
def handle_noargs(self, **options):
    """Queue a 'missing you' mail task for every profile that has been
    inactive for between 10 and 40 days, remembering the oldest news item
    published since the profile's last activity (0 if none)."""
    minus10 = datetime.datetime.now() - datetime.timedelta(days=10)
    minus40 = datetime.datetime.now() - datetime.timedelta(days=40)
    stale_profiles = UserProfile.objects.filter(updated_at__lte=minus10,
                                                updated_at__gte=minus40)
    for profile in stale_profiles:
        latest = News.objects.filter(
            created_at__gte=profile.updated_at).order_by('created_at')[:1]
        last_news_id = latest[0].id if latest else 0
        reminder = Task()
        reminder.user = profile.user
        reminder.task = 'mail'
        reminder.source = 'missing_you'
        reminder.comment = json.dumps({'recipient': profile.id,
                                       'last_news': last_news_id})
        reminder.save()
def _make_want_buy_list_urls(self):
    """Queue one list-page crawl task per pagination page in the document.

    Reads the last page number from the pager, then builds a task for every
    page (cache-busted with the current time in ms) and flushes them into
    ``self.tasks`` in batches of 100.
    """
    page_numbers = self._doc.xpath('//*[@class="num"]/text()')
    if len(page_numbers):
        # Bug fix: int() on a bytes object raises TypeError on Python 3,
        # and int() accepts the text node directly — do not .encode() first.
        last_page_number = int(page_numbers[-1])
        cur_time = time.time() * 1000  # ms timestamp used only as cache-buster
        # cur_time = 1494926880 * 1000
        base_url = 'http://www.cheok.com/interfaces/0/0/0/0/cp_%d?bust=%d'
        batch = list()
        for page_number in range(1, last_page_number + 1):
            url = base_url % (page_number, cur_time)
            task = Task()
            task.url = url
            task.platform = self.platform
            task.feature = 'cheok.want_buy_list'
            task.cookie = 'JSESSIONID=3A32AF91FE59B1F06A61954C280DFC12'
            task.headers = {'Referer': 'http://www.cheok.com/car/cp_'
                            + str(page_number - 1)}
            self._md5_mk.update(url)
            task.row_key = self._md5_mk.hexdigest()
            batch.append(task)
            # Flush every 100 pages, plus whatever remains on the last page.
            if page_number % 100 == 0 or page_number == last_page_number:
                self.tasks.extend(batch)
                batch = list()
def asset_operator(asset_list, status, username, proxy=None):
    """Reboot, power off, or rebuild (reinstall) assets through a proxy.

    Filters *asset_list* down to the assets managed by *proxy*, POSTs the
    requested action to the proxy's ``/v1.0/system/action`` endpoint, then
    records the remote task in the ``Task`` table and on ``task_queue``.

    :param asset_list: assets to operate on; must be non-empty (the first
        asset's ``profile`` is used when rebuilding)
    :param status: ``'rebuild'`` or a power action understood by the proxy
    :param username: operator name recorded on the saved task
    :param proxy: proxy providing url/credentials; must not be None despite
        the default
    :raises ServerError: wraps any failure during the operation
    """
    # NOTE(review): the lock is created fresh on every call, so it cannot
    # actually serialize concurrent invocations; a module-level lock would
    # be required for that.
    g_lock = threading.Lock()
    try:
        g_lock.acquire()
        # Only assets that belong to this proxy can be driven through it.
        proxy_asset = Asset.objects.filter(proxy__proxy_name=proxy.proxy_name)
        need_delete_asset = set(asset_list) & set(proxy_asset)
        systems = [item.name for item in need_delete_asset]
        profile = asset_list[0].profile  # IndexError if asset_list is empty
        if status == 'rebuild':
            data = {
                'rebuild': 'true',
                'profile': profile,
                'systems': systems
            }
        else:
            data = {
                'power': status,
                'systems': systems
            }
        data = json.dumps(data)
        api = APIRequest('{0}/v1.0/system/action'.format(proxy.url),
                         proxy.username,
                         CRYPTOR.decrypt(proxy.password))
        result, codes = api.req_post(data)
        logger.debug(u"操作结果result:%s codes:%s"%(result, codes))
        # Persist the remote task so its progress can be looked up later.
        task = Task()
        task.task_name = result['task_name']
        task.username = username
        task.status = result['messege']  # 'messege' is the API's own (misspelled) key
        task.start_time = datetime.datetime.now()
        task.url = '{0}/v1.0/system/action'.format(proxy.url)
        task.save()
        task_queue.put(dict(task_name=result['task_name'],
                            task_user=username,
                            task_proxy=proxy.proxy_name))
    except Exception as e:
        raise ServerError(e)
    finally:
        g_lock.release()
def _make_detail_task(self, data):
    """Queue a bid-detail crawl task for every entry in *data*.

    Aborts the entire batch (early return, not a per-item skip) as soon as
    an entry without a hash is encountered.
    """
    referer_base_url = 'https://www.weidai.com.cn/bid/showBidDetail?hash={hash}'
    base_url = 'https://www.weidai.com.cn/bid/bidDetail?hash={hash}&bid='
    for detail_info in data:
        bid_hash = detail_info.get('hash')
        if not bid_hash:
            TDDCLogging.warning('Path Is None.')
            return
        detail = Task()
        detail.url = base_url.format(hash=bid_hash)
        detail.platform = self.platform
        detail.feature = 'weidai.bid_detail'
        detail.headers = {
            'Referer': referer_base_url.format(hash=bid_hash),
            'X-Requested-With': 'XMLHttpRequest'
        }
        self._md5_mk.update(detail.url)
        detail.row_key = self._md5_mk.hexdigest()
        self.tasks.append(detail)
def _want_buy_list(self):
    """Turn every entry of the want-buy JSON list into a detail crawl task."""
    if self._json_dict.get('code') != 1:
        print('(%s)Error. Response Code:%d.'
              % (self.__class__, self._json_dict.get('code', -1000)))
        return
    entries = self._json_dict.get('object')
    if not (entries and len(entries)):
        return
    base_url = 'http://www.cheok.com/{cityAcronym}/sn/{carSourceNo}.html'
    # The referer is the list page we are currently parsing.
    referer = ('http://www.cheok.com/car/cp_'
               + str(self._json_dict.get('page').get('currentPage')))
    for info in entries:
        detail = Task()
        detail.url = base_url.format(cityAcronym=info.get('cityAcronym'),
                                     carSourceNo=info.get('carSourceNo'))
        detail.platform = self.platform
        detail.feature = 'cheok.want_buy_detail'
        detail.cookie = 'JSESSIONID=3A32AF91FE59B1F06A61954C280DFC12'
        detail.headers = {'Referer': referer}
        self._md5_mk.update(detail.url)
        detail.row_key = self._md5_mk.hexdigest()
        self.tasks.append(detail)
def start(self):
    """Queue a 'new auction' mail for every active user whose wishlist
    matches an auction that started exactly 71 days ago."""
    day = datetime.datetime.now() - datetime.timedelta(days=71)
    auctions = AuctionModel.objects.filter(start_at__year=day.year,
                                           start_at__month=day.month,
                                           start_at__day=day.day)
    for profile in UserProfile.objects.filter(is_active=True):
        wishlist = WishList.objects.get_by_user(user=profile.user)
        interesting = [auction.id for auction in auctions
                       if str(auction.car_id) in wishlist.items]
        if not interesting:
            continue
        # Queue the notification e-mail.
        Task(user=profile.user,
             task='mail',
             source='new_auction',
             comment=json.dumps({
                 'recipient': profile.user_id,
                 'auctions': interesting
             })).save()
def _send_to_user(self, sender, user, txt):
    """Store a message from *sender* to *user*; queue a mail notification
    unless the sender is messaging themselves. Always returns True."""
    msg = Msg()
    msg.sender = sender
    msg.receiver = user
    # A leading '@' marks the message as not-to-self.
    msg.is_to_self = not txt.startswith('@')
    msg.content = txt
    msg.save()
    # Queue the notification e-mail.
    if sender != user:
        Task(user=user,
             task='mail',
             source='new_message',
             comment=json.dumps({
                 'recipient': user.id,
                 'sender': sender.id
             })).save()
    logging.info('sent message from %s to %s' % (sender, user))
    return True
def _make_bid_list_tasks(self, data):
    """Queue crawl tasks for bid-list pages 2..N based on the item count.

    ``data['count']`` is the total number of items; with 10 rows per page
    the pages to fetch are 2 through count//10 + 1. Tasks are flushed into
    ``self.tasks`` in batches of 100.
    """
    total_page_number = data.get('count')
    if not total_page_number:
        return
    base_url = ('https://www.weidai.com.cn/list/bidList'
                '?type=0&periodType=0&sort=0&page={page_index}&rows=10')
    tmp = []
    # Bug fix: use floor division — `count / 10` is a float on Python 3 and
    # range() would raise TypeError.
    for page_index in range(2, total_page_number // 10 + 2):
        url = base_url.format(page_index=page_index)
        task = Task()
        task.url = url
        task.platform = self.platform
        task.feature = self.feature
        task.headers = {
            'Referer': 'https://www.weidai.com.cn/list/showBidList',
            'X-Requested-With': 'XMLHttpRequest'
        }
        self._md5_mk.update(url)
        task.row_key = self._md5_mk.hexdigest()
        tmp.append(task)
        if page_index % 100 == 0:
            self.tasks.extend(tmp)
            tmp = []
    # Bug fix: the old flush condition compared page_index against the item
    # count (almost never true), so the final partial batch was dropped.
    if tmp:
        self.tasks.extend(tmp)
def task_detail(request, task_id=None):
    """Create/update (POST), delete (DELETE) or describe (GET) a task.

    POST: validates the requested path, creates or moves the task directory
    (including per-assignment submit directories), writes a ``.taskid``
    marker, and (de)assigns classes. DELETE: removes the task after several
    safety checks. All methods fall through to returning a JSON description
    of the task: its file tree, class assignments and links.
    """
    errors = []
    if request.method == 'POST':
        data = json.loads(request.body.decode('utf-8'))
        # Reject path traversal and absolute paths before touching the disk.
        if '..' in data['path'] or data['path'][0] == '/':
            return JsonResponse(
                {
                    'errors': ['Path should not contain .. or start with /'],
                },
                status=400)
        data['path'] = os.path.normpath(data['path'])
        new_path = os.path.join("tasks", data['path'])

        def set_subject(task):
            # The first path component is the subject abbreviation.
            subj = data['path'].split('/')[0]
            try:
                task.subject = Subject.objects.get(abbr=subj)
                return None
            except Subject.DoesNotExist:
                return JsonResponse(
                    {
                        'errors': [
                            f'Subject "{subj}" does not exist! Please set correct subject abbr in the path.'
                        ],
                    },
                    status=400)

        if not task_id:
            # Creating a brand-new task: the code and directory must be free.
            if Task.objects.filter(code=data['path']).count() != 0:
                return JsonResponse(
                    {
                        'errors': [
                            f'The task with path "{data["path"]}" already exists.'
                        ],
                    },
                    status=400)
            task = Task()
            err = set_subject(task)
            if err:
                return err
            paths = [str(p.parent) for p in Path(new_path).rglob(".taskid")]
            if len(paths) != 0:
                return JsonResponse(
                    {
                        'errors': [
                            f'Cannot create task in the directory "{data["path"]}", because there already exists these tasks:\n{chr(10).join(paths)}'
                        ],
                    },
                    status=400)
        else:
            # Updating an existing task; a changed path means a move.
            task = Task.objects.get(id=task_id)
            err = set_subject(task)
            if err:
                return err
            if task.code != data['path']:
                paths = [
                    str(p.parent) for p in Path(new_path).rglob(".taskid")
                ]
                if len(paths) != 0:
                    return JsonResponse(
                        {
                            'errors': [
                                f'Cannot move task to the directory "{data["path"]}", because there already exists these tasks:\n{chr(10).join(paths)}'
                            ],
                        },
                        status=400)
                try:
                    os.renames(os.path.join("tasks", task.code), new_path)
                except FileNotFoundError as e:
                    logger.warn(e)
                # Move the per-assignment submit directories along with it;
                # missing directories are tolerated (best effort).
                for assignment in AssignedTask.objects.filter(task_id=task.id):
                    try:
                        os.renames(
                            os.path.join("submits",
                                         *submit_assignment_path(assignment),
                                         task.code),
                            os.path.join("submits",
                                         *submit_assignment_path(assignment),
                                         data['path']),
                        )
                    except FileNotFoundError as e:
                        logger.warn(e)
                task.code = data['path']
        os.makedirs(task.dir(), exist_ok=True)
        if not task.name:
            task.name = task.code
        task.save()
        # Write the .taskid marker so the directory can be mapped back to
        # the DB row; only created once.
        taskid_path = os.path.join(task.dir(), '.taskid')
        if not os.path.exists(taskid_path):
            with open(taskid_path, "w") as f:
                f.write(str(task.id))
        # Synchronize class assignments with the submitted list.
        for cl in data['classes']:
            if cl.get('assigned', None):
                AssignedTask.objects.update_or_create(
                    task_id=task.id,
                    clazz_id=cl['id'],
                    defaults={
                        'assigned': parse_datetime(cl['assigned']),
                        'deadline': parse_datetime(cl['deadline']) if cl.get(
                            'deadline', None) else None,
                        'max_points': cl.get('max_points', None),
                    })
            else:
                # Only deassign when the class has no submits for this task.
                submits = Submit.objects.filter(
                    assignment__task_id=task.id,
                    assignment__clazz_id=cl['id']).count()
                if submits == 0:
                    AssignedTask.objects.filter(task__id=task.id,
                                                clazz_id=cl['id']).delete()
                else:
                    clazz = Class.objects.get(id=cl['id'])
                    errors.append(
                        f"Could not deassign from class {str(clazz)}, because it already contains {submits} submits"
                    )
    else:
        task = Task.objects.get(id=task_id)

    if request.method == 'DELETE':
        if AssignedTask.objects.filter(task_id=task_id).count():
            return JsonResponse({
                'errors': ['Cannot delete task - there are assigned classess']
            })
        # Refuse to delete when the directory holds more than this one task.
        tasks_in_path = [
            str(p.parent) for p in Path(task.dir()).rglob('.taskid')
        ]
        if len(tasks_in_path) != 1:
            return JsonResponse({
                'errors': [
                    f'Cannot delete task - there are multiple taskids:\n{chr(10).join(tasks_in_path)}'
                ]
            })
        try:
            with open(os.path.join(task.dir(), ".taskid")) as f:
                task_id_in_file = int(f.read().strip())
                # assumes task_id arrives as an int (URL converter) — TODO confirm
                if task_id != task_id_in_file:
                    return JsonResponse({
                        'errors': [
                            f'Cannot delete task - task ID ({task_id}) doesn\'t match value {task_id_in_file} in the file.'
                        ]
                    })
        except FileNotFoundError:
            return JsonResponse(
                {'errors': ['Cannot delete task - .taskid could not be read']})
        task.delete()
        shutil.rmtree(task.dir())
        return JsonResponse({
            "success": True,
        })

    result = {
        'id': task.id,
        'subject_abbr': task.subject.abbr,
        'path': task.code,
        'classes': [],
        'files': {},
        'files_uri': request.build_absolute_uri(
            reverse('task_asset', kwargs={
                'task_name': task.code,
                'path': '_'
            })).rstrip('_'),
        'errors': errors,
        'task_link': reverse('teacher_task', kwargs={'task_id': task.id}),
    }

    # Build the nested file tree, skipping VCS/bookkeeping entries.
    ignore_list = [r'\.git', r'^\.taskid$', r'^\.$', r'__pycache__', r'\.pyc$']
    for root, subdirs, files in os.walk(task.dir()):
        rel = os.path.normpath(os.path.relpath(root, task.dir()))

        def is_allowed(path):
            path = os.path.normpath(path)
            for pattern in ignore_list:
                if re.search(pattern, path):
                    return False
            return True

        if not is_allowed(root):
            continue
        # Walk down (creating as needed) to the dict node for this directory.
        node = result['files']
        if rel != '.':
            for path in rel.split('/'):
                if path not in node:
                    node[path] = {
                        'type': 'dir',
                        'files': {},
                    }
                node = node[path]['files']
        for f in files:
            if is_allowed(os.path.join(rel, f)):
                node[f] = {
                    'type': 'file',
                }

    # Describe this semester's classes and their assignment state.
    classes = Class.objects.filter(
        subject__abbr=task.subject.abbr,
        **current_semester_conds(),
    )
    assigned_count = 0
    for clazz in classes:
        item = {
            'id': clazz.id,
            'code': clazz.code,
            'timeslot': clazz.timeslot,
            'week_offset': clazz.week_offset,
            'teacher': clazz.teacher.username,
        }
        assigned = AssignedTask.objects.filter(task_id=task.id,
                                               clazz_id=clazz.id).first()
        if assigned:
            assigned_count += 1
            item['assignment_id'] = assigned.id
            item['assigned'] = assigned.assigned
            item['deadline'] = assigned.deadline
            item['max_points'] = assigned.max_points
        result['classes'].append(item)
    result['can_delete'] = assigned_count == 0
    return JsonResponse(result)
def main():
    """Parse a locally saved detail page through CheokWantBuyDetail."""
    from common.models import Task
    stub = Task()
    with open('cheok_detail.html', 'r') as page:
        html = page.read()
    stub.__dict__ = {'body': html}
    CheokWantBuyDetail(stub)
def main():
    """Parse a locally saved homepage and report how many tasks it yields."""
    stub = Task()
    with open('cheok.html', 'r') as page:
        html = page.read()
    stub.__dict__ = {'body': html}
    parsed = CheokHomepage(stub)
    print(len(parsed.tasks))
def friend_send(request, car_id):
    """Gift a car from the current user's garage to a selected friend.

    POST: validates the selected friend and car, moves the car from the
    sender's garage into the receiver's pending gifts, queues a
    notification mail and redirects. Otherwise returns the context for the
    paginated friend-selection page.
    """
    car = Car.objects.get_by_id(car_id)
    if request.method == 'POST':
        # Bug fix: dict.has_key() is Python-2-only/deprecated — use `in`.
        if 'friend' not in request.POST:
            request.engine.log.message(message="You forgot to select a friend")
            return HttpResponseRedirect(reverse('friend_send', args=[car.id]))
        try:
            receiver = UserProfile.objects.get_by_id(request.POST['friend'])
        except UserProfile.DoesNotExist:
            request.engine.log.message(message="Incorrect friend selected")
            return HttpResponseRedirect(reverse('friend_send', args=[car.id]))
        garage = Garage.objects.get_by_user(user=request.engine.user.user)
        if not garage.has_car(car.id):
            request.engine.log.message(message="Incorrect car selected")
            return HttpResponseRedirect(reverse('friend_send', args=[car.id]))
        # Add the gift to the receiver's pending list...
        g = UserGift.objects.get_by_user(user=receiver.user)
        g.engine = request.engine
        g.add_pending(car.id, request.engine.user.user.id)
        # ...and remove the car from the sender's garage.
        garage.engine = request.engine
        garage.remove_car(car.id)
        # Queue the notification e-mail.
        Task(user=request.engine.user.user,
             task='mail',
             source='gift_received',
             comment=json.dumps({
                 'sender': request.engine.user.profile.user_id,
                 'recipient': receiver.user_id,
                 'car': car_id
             })).save()
        request.engine.log.message(message="Car sent to %s" % receiver)
        return HttpResponseRedirect(reverse('garage'))
    friend = Friend.objects.get_by_user(user=request.engine.user.user)
    page_no = request.GET.get('page_no') or 1
    selected = friend.friends[
        (int(page_no) - 1) * settings.DEFAULT_FRIENDS_PER_GIFT_PAGE:
        int(page_no) * settings.DEFAULT_FRIENDS_PER_GIFT_PAGE]
    paginator = Paginator(friend.friends,
                          settings.DEFAULT_FRIENDS_PER_GIFT_PAGE,
                          body=8,
                          padding=2)
    try:
        current_page = paginator.page(page_no)
    except Exception:
        # Any invalid page number redirects back to page 1. (Was a bare
        # `except:`, which also swallowed SystemExit/KeyboardInterrupt.)
        return HttpResponseRedirect(reverse('friend_send', args=[car_id]))
    return {
        'car': car,
        'friends': UserProfile.objects.get_many_by_user_ids(selected),
        'page_no': int(page_no),
        'page': current_page,
        'total': len(friend.friends),
    }
def role_proxy_operator(user_name, obj_name, data, proxy=None, obj_uuid='all', action='add'):
    """Add/update/delete a PermRole or PermSudo object on *proxy* and record
    the outcome in the Task table.

    :param user_name: operator recorded on the queue event and Task row
    :param obj_name: 'PermRole' or 'PermSudo'
    :param data: JSON-encoded payload; must contain a 'name' key
    :param proxy: target proxy (url + credentials); must not be None
    :param obj_uuid: object uuid, or 'all'
    :param action: 'add', 'update' or 'delete'
    :return: the proxy API response (or '' if *action* was unrecognized)
    """
    result = res_info = msg_name = ''
    # NOTE(review): this lock is function-local, so it cannot serialize
    # concurrent calls; a module-level lock would be needed for that.
    g_lock = threading.Lock()
    if obj_name == 'PermRole':
        msg_name = u'系统用户'
    elif obj_name == 'PermSudo':
        msg_name = u'SUDO别名'
    g_url = '{0}/v1.0/permission/{1}/{2}'.format(proxy.url, obj_name, obj_uuid)
    try:
        g_lock.acquire()
        api = APIRequest(g_url, proxy.username, CRYPTOR.decrypt(proxy.password))
        # Parse the payload once up front (the old code re-parsed it in every
        # branch); malformed JSON now fails before hitting the proxy API.
        pdata = json.loads(data)
        if action == 'add':
            result, codes = api.req_post(data)
            res_info = u'添加{0}{1} {2}'.format(msg_name, pdata['name'], result['messege'])
        elif action == 'update':
            result, codes = api.req_put(data)
            res_info = u'编辑{0}{1} {2}'.format(msg_name, pdata['name'], result['messege'])
        elif action == 'delete':
            result, codes = api.req_del(data)
            res_info = u'删除{0}{1} {2}'.format(msg_name, pdata['name'], result['messege'])
        logger.info('role_proxy_%s:%s'%(action, result['messege']))
        # Bug fix: the old check was `'name' not in json.dumps(data)` — a
        # substring test on the re-encoded JSON text; test the parsed key.
        if 'name' not in pdata:
            raise ValueError('role_proxy_operator: data["name"]不存在')
        # Unique event name used to look the execution result up later.
        task_name = pdata['name'] + '_' + uuid.uuid4().hex
        task_queue.put({'server': task_name, 'username': user_name})
        # Persist the execution result.
        role_task = Task()
        role_task.task_name = task_name
        role_task.proxy_name = proxy.proxy_name
        role_task.role_name = pdata['name']
        role_task.username = user_name
        role_task.status = 'complete'
        role_task.content = res_info
        role_task.url = g_url
        role_task.start_time = datetime.datetime.now()
        role_task.action = action
        role_task.role_uuid = obj_uuid
        role_task.role_data = data
        role_task.result = result['messege']
        role_task.save()
    except Exception as e:
        logger.error("[role_proxy_operator] %s"%e)
    finally:
        g_lock.release()
    return result
partition_records = self._crawl_task_consumer.poll(2000, 16) if not len(partition_records): gevent.sleep(1) continue for _, records in partition_records.items(): for record in records: self._record_proc(record) def _record_proc(self, record): try: item = json.loads(record.value) except Exception, e: self._consume_msg_exp('CRAWL_TASK_JSON_ERR', record.value, e) else: if item and isinstance(item, dict) and item.get('url', None): task = Task(**item) task.status = Task.Status.WAIT_CRAWL CrawlerQueues.CRAWL.put(task) CrawlerQueues.TASK_STATUS.put(task) else: self._consume_msg_exp('CRAWL_TASK_ERR', item) def _push_parse_task(self): TDDCLogging.info('--->Parse Task Producer Was Ready.') while True: task, status = CrawlerQueues.PARSE.get() tmp = Task(**task.__dict__) task.status = Task.Status.CRAWL_SUCCESS if not isinstance(task, Task): TDDCLogging.error('') continue
def _parse(self):
    """Queue crawl tasks from an invest list page.

    On page 1, additionally enqueues one task per remaining list page
    (batched 100 at a time). Always enqueues a detail task for every
    investment link found on the current page.
    """
    cur_page = self._doc.xpath('//*[@class="pager-content"]/span/text()')[0]
    base = 'http://www.huichedai.com/invest/index{page_number}.html'
    if cur_page == '1':
        pager_text = self._doc.xpath('//*[@class="pager-content"]/text()')[0]
        page_cnt = int(pager_text.split('/')[1][:-2])
        batch = []
        for page_no in range(2, page_cnt + 1):
            list_url = base.format(page_number=page_no)
            list_task = Task()
            list_task.url = list_url
            list_task.platform = self.platform
            list_task.feature = self.feature
            list_task.headers = {
                'Referer': base.format(
                    page_number=page_no + 1 if page_no == 2 else page_no - 1)
            }
            self._md5_mk.update(list_url)
            list_task.row_key = self._md5_mk.hexdigest()
            batch.append(list_task)
            if page_no % 100 == 0 or page_no == page_cnt:
                self.tasks.extend(batch)
                batch = []
    detail_base = 'http://www.huichedai.com{page}'
    referer = base.format(page_number='' if cur_page == '1' else cur_page)
    for href in self._doc.xpath('//*[@class="em strong"]/@href'):
        detail_url = detail_base.format(page=href)
        detail_task = Task()
        detail_task.url = detail_url
        detail_task.platform = self.platform
        detail_task.feature = 'huichedai.invest_detail'
        detail_task.headers = {'Referer': referer}
        self._md5_mk.update(detail_url)
        detail_task.row_key = self._md5_mk.hexdigest()
        self.tasks.append(detail_task)
def push_role_to_asset(asset_list, role, username, proxy=None):
    """Push a system user (role) to remote hosts through *proxy*.

    Creates the user on every asset managed by the proxy, pushes its SSH
    public key (password login is intentionally not offered), optionally
    pushes the sudo configuration, then queues the resulting events and
    records one Task row per event.

    :param asset_list: candidate assets; intersected with the proxy's assets
    :param role: PermRole instance to push
    :param username: operator recorded on queued events and Task rows
    :param proxy: proxy the assets belong to; must not be None
    :raises ServerError: wraps any failure
    """
    try:
        proxy_assets = Asset.objects.filter(proxy__proxy_name=proxy.proxy_name)
        need_push_assets = list(set(asset_list) & set(proxy_assets))
        push_resource = gen_resource(need_push_assets)
        # Ansible push targets.
        host_list = [
            asset.networking.all()[0].ip_address
            for asset in need_push_assets
        ]
        host_names = [asset.name for asset in need_push_assets]
        if host_list:
            task = MyTask(push_resource, host_list)
            ret = {}
            # The user must exist before its key can be pushed.
            # 1. Push the role using its SSH key.
            role_proxy = get_one_or_all('PermRole', proxy, role.uuid_id)
            ret["pass_push"] = task.add_user(role.name, proxy,
                                             role.system_groups, username)
            time.sleep(1)  # give user creation a second to finish before pushing the key
            ret["key_push"] = task.push_key(
                role.name,
                os.path.join(role_proxy['key_path'], 'id_rsa.pub'),
                proxy, username)
            # 2. Password push intentionally omitted: key-only authentication.
            # 3. Push the sudo configuration, if the role has sudo aliases.
            sudo_list = list(role.sudo.all())
            if sudo_list:
                sudo_uuids = [sudo.uuid_id for sudo in sudo_list]
                ret['sudo'] = task.push_sudo(role, sudo_uuids, proxy, username)
            logger.info('推送用户结果ret:%s' % ret)
            # Collect the event names to track. Bug fix: dict.has_key() was
            # removed in Python 3 (and deprecated in 2.x) — use `in`.
            event_task_names = []
            if 'pass_push' in ret:
                event_task_names.append(ret['pass_push']['task_name'])
            if 'key_push' in ret:
                event_task_names.append(ret['key_push']['task_name'])
            if 'sudo' in ret and 'task_name' in ret['sudo']:
                event_task_names.append(ret['sudo']['task_name'])
            event = dict(push_assets=host_names,
                         role_name=role.name,
                         password_push=False,
                         key_push=True,
                         task_proxy=proxy.proxy_name)
            event['tasks'] = event_task_names
            event['username'] = username
            task_queue.put(event)
            # Record one Task row per queued event.
            for item in event['tasks']:
                tk = Task()
                tk.task_name = item
                tk.status = 'running'
                tk.start_time = datetime.datetime.now()
                tk.username = username
                tk.save()
    except Exception as e:
        raise ServerError(e)
def role_proxy_operator(user_name, obj_name, data, proxy=None, obj_uuid='all', action='add'):
    """Add/update/delete a PermRole or PermSudo object on *proxy* and record
    the outcome in the Task table.

    :param user_name: operator recorded on the queue event and Task row
    :param obj_name: 'PermRole' or 'PermSudo'
    :param data: JSON-encoded payload; must contain a 'name' key
    :param proxy: target proxy (url + credentials); must not be None
    :param obj_uuid: object uuid, or 'all'
    :param action: 'add', 'update' or 'delete'
    :return: the proxy API response (or '' if *action* was unrecognized)
    """
    result = res_info = msg_name = ''
    # NOTE(review): this lock is function-local, so it cannot serialize
    # concurrent calls; a module-level lock would be needed for that.
    g_lock = threading.Lock()
    if obj_name == 'PermRole':
        msg_name = u'系统用户'
    elif obj_name == 'PermSudo':
        msg_name = u'SUDO别名'
    g_url = '{0}/v1.0/permission/{1}/{2}'.format(proxy.url, obj_name, obj_uuid)
    try:
        g_lock.acquire()
        api = APIRequest(g_url, proxy.username, CRYPTOR.decrypt(proxy.password))
        # Parse the payload once up front (the old code re-parsed it in every
        # branch); malformed JSON now fails before hitting the proxy API.
        pdata = json.loads(data)
        if action == 'add':
            result, codes = api.req_post(data)
            res_info = u'添加{0}{1} {2}'.format(msg_name, pdata['name'], result['messege'])
        elif action == 'update':
            result, codes = api.req_put(data)
            res_info = u'编辑{0}{1} {2}'.format(msg_name, pdata['name'], result['messege'])
        elif action == 'delete':
            result, codes = api.req_del(data)
            res_info = u'删除{0}{1} {2}'.format(msg_name, pdata['name'], result['messege'])
        logger.info('role_proxy_%s:%s' % (action, result['messege']))
        # Bug fix: the old check was `'name' not in json.dumps(data)` — a
        # substring test on the re-encoded JSON text; test the parsed key.
        if 'name' not in pdata:
            raise ValueError('role_proxy_operator: data["name"]不存在')
        # Unique event name used to look the execution result up later.
        task_name = pdata['name'] + '_' + uuid.uuid4().hex
        task_queue.put({'server': task_name, 'username': user_name})
        # Persist the execution result.
        role_task = Task()
        role_task.task_name = task_name
        role_task.proxy_name = proxy.proxy_name
        role_task.role_name = pdata['name']
        role_task.username = user_name
        role_task.status = 'complete'
        role_task.content = res_info
        role_task.url = g_url
        role_task.start_time = datetime.datetime.now()
        role_task.action = action
        role_task.role_uuid = obj_uuid
        role_task.role_data = data
        role_task.result = result['messege']
        role_task.save()
    except Exception as e:
        logger.error("[role_proxy_operator] %s" % e)
    finally:
        g_lock.release()
    return result
def main():
    """Parse a locally saved list page through CheokWantBuyList."""
    stub = Task()
    with open('cheok_list.html', 'r') as page:
        html = page.read()
    stub.__dict__ = {'body': html}
    CheokWantBuyList(stub)
partition_records = self._consumer.poll(2000, 16) if not len(partition_records): gevent.sleep(1) continue for _, records in partition_records.items(): for record in records: self._record_proc(record) def _record_proc(self, record): try: item = json.loads(record.value) except Exception, e: self._consume_msg_exp('PARSE_TASK_JSON_ERR', record.value, e) else: if item and isinstance(item, dict) and item.get('url', None): task = Task(**item) task.status = Task.Status.WAIT_PARSE ParserQueues.PARSE.put(task) ParserQueues.TASK_STATUS.put(task) else: self._consume_msg_exp('PARSE_TASK_ERR', item) def _push_new_crawl_task(self): TDDCLogging.info('--->Parser Task Producer Was Ready.') while True: task = ParserQueues.CRAWL.get() # if not self._filter.setget(task.url): # TDDCLogging.debug('New Task [%s:%s] Was Filter.' % (task.platform, task.url)) # continue msg = json.dumps(task.__dict__) if msg: