def process_request(self, type_link, data):
    show_debug('processing with params ...')
    print(data)
    if type_link not in self.__plugins:
        return None
    p = self.__plugins[type_link]
    return p.process_request(data)
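# Example usage (sketch, not from the original code base; assumes the plugin
# registry self.__plugins maps a type key such as 'YT' or 'FB' to a plugin
# instance exposing process_request(data); the class name and link id below
# are hypothetical):
#
#   provider = DetectLinkProvider()
#   res = provider.process_request('YT', {'link_id': 'abc123'})
#   if res is None:
#       show_warning('No plugin registered for this link type')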
def listen(self):
    while True:
        try:
            data = _recev(self.client)
            if not data:
                show_warning('No data received')
                break
            if "action" in data:
                if data['action'] == 'notify' and data['ref'] == 'subscribed':
                    show_notify('Subscribed successfully')
                if data['action'] == 'assign':
                    show_text('=== NEW TASK ASSIGN ===')
                    show_debug('Received assign task with link %s'
                               % get_master_attr('params.link_id', data, None))
                    self.do_assign(data['params'])
                if data['action'] == 'live':
                    _send(self.client, {'action': 'live', 'status': True})
        except ConnectionError as err:
            show_warning("Connection error: {0}".format(err))
        except Exception as err1:
            show_warning("Unexpected error: {0}".format(err1))
            self.client.close()
            break
def process_thread_screenshot(self, post_type, post_id):
    try:
        # show_debug('Process take screenshot ...' + post_id)
        link = MongodbClient.get_instance().get_link_collection().find_one(
            {'link_id': post_id})
        if link:
            data = {'processing_screenshot': 0}
            screenshot = self.selenium_types[post_type].screen_post(self, post_id)
            if screenshot:
                data['screenshot'] = screenshot
            MongodbClient.get_instance().get_link_collection().update_one(
                {'_id': link['_id']}, {'$set': data})
            # build the hook payload; use the freshly captured screenshot when
            # available, since the link document was fetched before the update
            data = {
                'link_id': get_master_attr('link_id', link, None),
                'type': get_master_attr('type', link, None),
                'screenshot': screenshot if screenshot else get_master_attr('screenshot', link, None)
            }
            hook_url = link['hook_url']
            requests.post(hook_url, data)
        else:
            show_debug('Link not found')
    except Exception as e:
        print('error code: #117228')
        print(format(e))
def process_response(self, result):
    show_debug('processing response ...')
    link = self.mongodb.get_link_collection().find_one(
        {'link_id': result['data']['link_id']})
    collection_history = self.mongodb.get_link_history_collection()
    if link:
        item = {
            'likes': result['data']['likes'],
            'comments': result['data']['comments'],
            'views': result['data']['views'],
            'dislikes': result['data']['dislikes'],
            'post_created_time': result['data']['created_time'],
            'updated_at': result['data']['updated_at']
        }
        # screenshot
        Selenium.get_instance().screen_post('YT', result['data']['link_id'])
        item['processing_screenshot'] = 1
        item['screenshot'] = None
        res = self.mongodb.get_link_collection().update_one(
            {'_id': link['_id']}, {'$set': item})
        item['link_id'] = result['data']['link_id']
        collection_history.insert_one(item)
        if res:
            return 1
        return 0
    return -1
def start_schedule():
    year = int(get_utc_time('%Y'))
    month = int(get_utc_time('%m'))
    day = int(get_utc_time('%d'))
    condition = {
        "status": 1,
        "camp_start": {"$lte": datetime.datetime(year, month, day)},
        "deadline": {"$gte": datetime.datetime(year, month, day)},
        "timeline": '%s:00' % get_utc_time('%H')
    }
    show_debug('Execute schedule task ...')
    print(condition)
    data_tasks = client.get_link_collection().find(condition)
    s = sched.scheduler(time.time, time.sleep)
    tasks = list(data_tasks)
    data_crawler = []
    for row in tasks:
        del row['_id']
        option = {'link_id': row['link_id'], 'type': row['type']}
        data_crawler.append(option)
    show_debug('%s tasks waiting to execute' % len(data_crawler))
    process_crawler_thread(data_crawler)
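# Example of a link document that matches the condition above at 14:00 UTC
# (sketch; the document shape is inferred from the fields used in this file
# and the values are hypothetical):
#
#   {
#       "status": 1,
#       "camp_start": datetime.datetime(2024, 1, 1),
#       "deadline": datetime.datetime(2030, 1, 1),
#       "timeline": "14:00",
#       "link_id": "abc123",
#       "type": "YT"
#   }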
def build(selenium_type):
    show_debug(selenium_type)
    driver = None
    if selenium_type == 'firefox':
        driver = FirefoxBuilder.build()
    if selenium_type == 'chrome':
        driver = ChromeBuilder.build()
    return driver
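# Example usage (sketch; assumes FirefoxBuilder/ChromeBuilder return a ready
# selenium WebDriver and that build() is importable from this module; the
# target URL is hypothetical):
#
#   driver = build('chrome')
#   if driver is None:
#       show_warning('Unsupported selenium type')
#   else:
#       driver.get('https://www.youtube.com')
#       driver.quit()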
def process_attachment(self, connection, request_info):
    show_debug('Process download attachment ...')
    self.init_result()
    if request_info['query_params'][2] is not None:
        self.result = process_download_attachment(
            request_info['query_params'][2])
        show_notify('Result')
        send_http_result(connection, self.result, content_type='image/png')
    else:
        # no attachment id supplied; placeholder debug output
        print(1)
def process_request(self, data):
    result = {
        'error': True,
        'msg': None,
        'data': None,
        'ref': YoutubeLink.get_name()
    }
    url = 'https://www.googleapis.com/youtube/v3/videos?part=statistics&id=%s&key=%s' \
          % (data['link_id'], ServerConfig.API_YTB_KEY.value)
    proxy = get_master_attr('proxy', data, None)
    s = requests.Session()
    if proxy:
        s.proxies = {"https": proxy, "http": proxy}
    try:
        show_debug('Call request: %s' % url)
        response = s.get(url, timeout=10)
    except requests.ConnectionError as err:
        show_warning(format(err))
        result['type'] = 'requests'
        result['msg'] = str(err)
    except requests.HTTPError as err:
        show_warning(format(err))
    else:
        d = response.json()
        if 'error' not in d:
            result['error'] = False
            result['data'] = {
                'link_id': data['link_id'],
                'likes': get_master_attr('items.0.statistics.likeCount', d, None),
                'dislikes': get_master_attr('items.0.statistics.dislikeCount', d, None),
                'views': get_master_attr('items.0.statistics.viewCount', d, None),
                'comments': get_master_attr('items.0.statistics.commentCount', d, None),
                'created_time': None,
                'updated_at': str(datetime.datetime.utcnow())
            }
        else:
            result['msg'] = get_master_attr('error.errors.0.message', d, 'Error from YouTube API')
            if get_master_attr('error.code', d, None) == 400:
                if get_master_attr('error.errors.0.reason', d, None) == 'keyInvalid':
                    result['type'] = 'api_key'
                    result['msg'] = 'API key error'
                else:
                    result['type'] = 'link_id'
                    result['msg'] = 'Link id error'
            else:
                result['type'] = 'youtube_error'
    return result
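# Example call (sketch; YoutubeLink is taken from the 'ref' field above, the
# link_id value and proxy are hypothetical):
#
#   plugin = YoutubeLink()
#   result = plugin.process_request({'link_id': 'dQw4w9WgXcQ', 'proxy': None})
#   if not result['error']:
#       print(result['data']['views'], result['data']['likes'])
#   else:
#       show_warning(result['msg'])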
def process_request(self, data):
    result = {
        'error': True,
        'msg': None,
        'data': None,
        'ref': 'FB',
        'type': None
    }
    link_id = get_master_attr('link_id', data, '')
    token = get_master_attr('token', data, '')
    url = ('https://graph.facebook.com/' + link_id
           + '?fields=reactions.summary(true),comments.summary(true),shares,likes&access_token=' + token)
    proxy = get_master_attr('proxy', data, None)
    s = requests.Session()
    if proxy:
        s.proxies = {"https": proxy, "http": proxy}
    try:
        show_debug('Call request: %s' % url)
        response = s.get(url, timeout=10)
    except requests.ConnectionError as err:
        show_warning(format(err))
        result['type'] = 'requests'
        result['msg'] = str(err)
    else:
        d = response.json()
        if get_master_attr('error', d, None) is None:
            result['error'] = False
            result['data'] = {
                'link_id': data['link_id'],
                'likes': get_master_attr('likes.count', d, None),
                'shares': get_master_attr('shares.count', d, None),
                'comments': get_master_attr('comments.count', d, None),
                'reactions': get_master_attr('reactions.summary.total_count', d, None),
                'created_time': get_master_attr('created_time', d, None),
                'updated_at': str(datetime.datetime.utcnow())
            }
        else:
            show_warning('Error fetching Facebook API')
            print(d)
            result['type'] = 'api_fb_error'
            result['msg'] = get_master_attr('error.message', d, 'Error connecting to Facebook API')
            code = get_master_attr('error.code', d, None)
            if code == 190:
                result['type'] = 'token'
            elif code == 100:
                result['type'] = 'link_id'
    return result
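# Example call (sketch; the Graph API request above needs both the post id and
# an access token, so the data dict must carry 'link_id' and 'token' — the
# class name and values below are hypothetical):
#
#   plugin = FacebookLink()
#   result = plugin.process_request({'link_id': '1234567890_9876543210',
#                                    'token': 'EAAB...'})
#   if result['error'] and result['type'] == 'token':
#       show_warning('Access token expired or invalid')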
def process_result_callback(link_id):
    link = client.get_link_collection().find_one({"link_id": link_id})
    if not link:
        print('Link not found')
        return None
    hook_url = get_master_attr('hook_url', link, None)
    if hook_url:
        data = get_data_hook(link_id, link)
        try:
            requests.post(hook_url, data)
        except requests.exceptions.ConnectionError as e1:
            show_warning(format(e1))
        except Exception as e:
            show_warning(format(e))
        show_debug('Hook request %s' % link_id)
        print(data)
def process_response(self, result):
    show_debug('processing response ...')
    link_id = get_master_attr('data.link_id', result, None)
    # get user id from a link id of the form "<user_id>_<post_id>"
    matches = re.findall(r'(.*)_(.*)', link_id)
    user_id = None
    if len(matches):
        user_id = matches[0][0]
    link = self.mongodb.get_link_collection().find_one({'link_id': link_id})
    collection_history = self.mongodb.get_link_history_collection()
    if link:
        item = {
            'profile': {
                'id': user_id,
            },
            'likes': result['data']['likes'],
            'comments': result['data']['comments'],
            'reactions': result['data']['reactions'],
            'shares': result['data']['shares'],
            'post_created_time': result['data']['created_time'],
            'updated_at': result['data']['updated_at']
        }
        Selenium.get_instance().screen_post('FB', link_id)
        item['processing_screenshot'] = 1
        item['screenshot'] = None
        res = self.mongodb.get_link_collection().update_one(
            {'_id': link['_id']}, {'$set': item})
        item['link_id'] = result['data']['link_id']
        collection_history.insert_one(item)
        if res:
            return 1
        return 0
    return -1
def do_assign(self, data):
    res = self.detectLinkProvider.process_request(data['type'], data)
    res['action'] = 'detect-link'
    show_debug('Result response process request ...')
    print(res)
    _send(self.client, res)
def process_links(self, connection, request_info):
    self.init_result()
    method = request_info['method']
    if method == 'GET':
        self.result['error'] = False
        link_id = get_master_attr('query_params.2', request_info, None)
        self.result['data'] = process_take_info_link(link_id)
        send_http_json_result(connection, self.result)
    if method == 'POST':
        # process insert data
        show_debug('Insert link data')
        data = request_info['data']
        show_debug('data body')
        print(data)
        show_debug('Processing save data ...')
        self.result = process_save_data_link(data)
        show_notify('Success!')
        print(self.result)
        send_http_json_result(connection, self.result)
    if method == 'PUT':
        link_id = get_master_attr('query_params.2', request_info, None)
        show_debug('Edit link data: %s' % link_id)
        data = request_info['data']
        print(data)
        show_debug('Processing ... ')
        if link_id:
            result = process_update_link(link_id, data)
            if result:
                self.result['msg'] = 'Updated'
                self.result['error'] = False
        send_http_json_result(connection, self.result)
    if method == 'DELETE':
        link_id = get_master_attr('query_params.2', request_info, None)
        show_debug('DELETE link data: %s' % link_id)
        show_debug('Processing ... ')
        if link_id:
            if process_delete_link(link_id):
                self.result['msg'] = 'Deleted'
                self.result['error'] = False
        send_http_json_result(connection, self.result)