def __parse_dedup_detail(self, bidpools):
    """Extract dedup keys and normalized resume summaries from a bid pool.

    Args:
        bidpools: iterable of bid dicts; each must carry "userId" and may
            carry "updateTime" plus assorted profile fields.

    Returns:
        Tuple of (ids, updatetimes, resume_details), index-aligned per bid:
        user ids, "YYYY-MM-DD" update dates (today's date when absent or
        unparseable), and dicts of the non-empty profile attributes.
    """
    from datetime import date  # hoisted: was re-imported on every loop pass

    ids = []
    updatetimes = []
    resume_details = []
    today = date.today().strftime("%Y-%m-%d")
    for bid in bidpools:
        ids.append(bid["userId"])
        raw_update = bid.get("updateTime", None)
        if raw_update:
            match = re.search(r'\d{4}-\d{2}-\d{2}', str(raw_update))
            # Fall back to today when updateTime exists but has no date part
            # (the original called .group() unguarded -> AttributeError).
            updatetimes.append(match.group() if match else today)
        else:
            updatetimes.append(today)
        detail = {
            "workyear": bid.get('jobYeay'),
            "sex": Keywords().Sex(bid.get('sex')) if bid.get("sex", None) else "",
            "latestcompany": bid.get('latelyCompName'),
            "desworklocation": bid.get('cityName'),
            "latestdegree": Keywords().Education(str(bid.get('education'))) if bid.get("education", None) else "",
            "desindustry": bid.get('jobTitle'),
        }
        # Build a filtered copy instead of popping keys while iterating the
        # dict, which raises RuntimeError on Python 3.
        resume_details.append({k: v for k, v in detail.items() if v != ""})
    return ids, updatetimes, resume_details
def _get_runnable_keywords(self, context, args):
    """Collect the keywords for *args*, skipping names containing variable syntax."""
    runnable = Keywords([])
    for candidate in self._get_keywords(args):
        # Only keywords whose names are free of variable syntax are runnable.
        if not self._variable_syntax_in(candidate.name, context):
            runnable.add_keyword(candidate)
    return runnable
def __init__(self, keyword, libname):
    """Capture a parsed user keyword's data and remember its source library."""
    self._libname = libname
    self.name = keyword.name
    # Expose the documentation both publicly and privately, as callers use both.
    self.doc = self._doc = keyword.doc.value
    self.keywords = Keywords(keyword.steps)
    self.return_value = keyword.return_.value
    self._keyword_args = keyword.args.value
    self._timeout = keyword.timeout
def __init__(self):
    """Set up the Foursquare venue-search client plus category/keyword helpers."""
    self.FOURSQUARE_API_URL = 'https://api.foursquare.com/v2/venues/search'
    client_id = os.environ['FOURSQUARE_CLIENT_ID']
    client_secret = os.environ['FOURSQUARE_CLIENT_SECRET']
    self._foursquare_client = foursquare.Foursquare(
        client_id=client_id,
        client_secret=client_secret,
        lang='ja')
    # Fetch the category tree once at construction time and keep it flattened.
    self._categories = self._flatten_categories(
        self._foursquare_client.venues.categories())
    self._keyword = Keywords()
def __init__(self, tc_data, parent, defaults):
    """Build a runnable test case from parsed data, filling in suite defaults."""
    BaseTestCase.__init__(self, tc_data.name, parent)
    self.doc = tc_data.doc.value
    # Setup/teardown/tags/timeout all fall back to the suite-level defaults.
    self.setup = defaults.get_setup(tc_data.setup)
    self.teardown = defaults.get_teardown(tc_data.teardown)
    self.tags = defaults.get_tags(tc_data.tags)
    self.timeout = defaults.get_timeout(tc_data.timeout)
    # The resolved template decides how the steps are interpreted.
    self.keywords = Keywords(tc_data.steps,
                             defaults.get_template(tc_data.template))
def GET(self):
    """Serve keyword queries: one ad group's keywords, or all targeted ones."""
    web.header('Content-Type', 'text/html;charset=UTF-8')
    form = web.input(email="*****@*****.**", adgroupId=None)
    logger.info("Received user " + form.email + " request to add or update ad keywords")
    https_session = createHttpsSession(form.email)
    words_path = cf.get("apiservices", "keywords")
    handler = Keywords(words_path, https_session, logger)
    # Without an adgroupId the request means "everything targeted by this user".
    if form.adgroupId is None:
        return handler.query_all_targeted_keywords(email=form.email)
    return handler.query_keywords(email=form.email, adgroupId=form.adgroupId)
'kt1s', 'kt2s', 'kp1s', 'kp2s', 'kp3s', 'kbgs', 'kbvs', 'kbds', 'kfcs',
'kbes', 'kbms', 'kros', 'kcas', 'kcwi'
]
# Copy the full server list, then append 'kp1s'..'kp3s' again.
# NOTE(review): the inner loop ignores j, so each 'kp{i}s' string is appended
# 16 extra times -- looks like j was meant to be used; confirm intent.
server = []
for i in allServers:
    server.append(i)
for i in range(1, 4):
    for j in range(1, 17):
        server.append('kp' + str(i) + 's')
#print(server)
#print(binary_keywords)
# Data sources: factory process data plus keyword readers over the servers.
fdata = FactoryData(process_names)
binVals = Keywords(server, binary_keywords)
histKeys = Keywords()
#print(binVals.get_keyword())
external_stylesheets = ['https://codepen.io/chriddyp/pen/bWLwgP.css']
# Dash app; callbacks may be registered for components created at runtime,
# hence suppress_callback_exceptions.
app = dash.Dash(__name__)
app.config.suppress_callback_exceptions = True
# Dash DAQ theme settings (light theme).
theme = {
    'dark': False,
    'detail': '#007439',
    'primary': '#00EA64',
    'secondary': '#6E6E6E'
}
def keywords(self, text, topkey=5):
    """Return the top *topkey* keywords extracted from *text*.

    The underlying Keywords model is created lazily on first use and cached
    on the instance for all subsequent calls.
    """
    if self.keywords_model is None:  # identity check, not `== None` (PEP 8)
        self.keywords_model = Keywords(tol=0.0001, window=2)
    return self.keywords_model.keywords(text, topkey)
def __parse_resume_details(self, response_datas):
    """Normalize a raw resume payload in place via Keywords translations.

    Translates coded field values (sex, job state, education, cities,
    salary, job titles, ...) inside response_datas['resume'] to their
    keyword-mapped forms, then applies the same translations to every entry
    of 'work_experiences' and 'educations' when present.

    Raises:
        Exception: when the payload carries no 'resume' key.

    NOTE(review): uses dict.iteritems(), so this code is Python 2 only.
    """
    resume = response_datas
    yfkeywords = Keywords()
    _resume = {}  # staging dict for the translated top-level fields
    assert resume
    if 'resume' not in resume:
        raise Exception('No Resume Return! Maybe Over 300!')
    # Single-valued coded fields: translate when present, else None.
    _resume["sex"] = yfkeywords.Sex(str(resume["resume"].get("sex"))) if resume["resume"].get("sex", None) else None
    _resume["jobState"] = yfkeywords.JobState(str(resume["resume"].get("jobState"))) if resume["resume"].get(
        "jobState") else None
    _resume["maritalStatus"] = yfkeywords.MaritalStatus(str(resume["resume"].get("maritalStatus"))) if resume[
        "resume"].get("maritalStatus") else None
    _resume["expectWorkType"] = yfkeywords.Worktype(str(resume["resume"].get("expectWorkType"))) if resume[
        "resume"].get("expectWorkType", None) else None
    _resume["education"] = yfkeywords.Education(str(resume["resume"].get("education"))) if resume["resume"].get(
        "education", None) else None
    # City-like fields may be comma-separated lists; translate element-wise.
    for field in ('expectCity', 'city', 'province', 'hukouProvince', 'hukouCity'):
        if "," in str(resume["resume"].get(field)):
            citys = str(resume["resume"].get(field))
            parsed_citys = []
            for i in citys.split(","):
                parsed_citys.append(yfkeywords.Expectcity(str(i)))
            _resume[field] = ",".join(parsed_citys)
        else:
            _resume[field] = yfkeywords.Expectcity(str(resume["resume"].get(field))) if resume["resume"].get(field, None) else None
    _resume["expectSalary"] = yfkeywords.Expectsalary(str(resume["resume"].get("expectSalary"))) if resume[
        "resume"].get("expectSalary", None) else None
    # jobTitle may also be a comma-separated list.
    if "," in str(resume["resume"].get("jobTitle")):
        jobtitles = str(resume["resume"].get("jobTitle"))
        parsed_jobtitles = []
        for i in jobtitles.split(","):
            parsed_jobtitles.append(yfkeywords.Jobtitle(str(i)))
        _resume["jobTitle"] = ",".join(parsed_jobtitles)
    else:
        _resume["jobTitle"] = yfkeywords.Jobtitle(str(resume["resume"].get("jobTitle"))) if resume["resume"].get(
            "jobTitle", None) else None
    # Write the translated values back over the original payload (in place).
    for k, v in _resume.iteritems():
        resume['resume'][k] = v
    # Translate coded fields inside each work/education history entry.
    for field in ['work_experiences', 'educations']:
        if field in resume:
            items = []
            for item in resume[field]:
                if 'salary' in item:
                    item["salary"] = yfkeywords.Expectsalary(str(item.get("salary"))) if item.get("salary", None) else None
                if 'compSize' in item:
                    item["compSize"] = yfkeywords.CompSize(str(item.get("compSize"))) if item.get("compSize", None) else None
                if 'compIndustry' in item:
                    item["compIndustry"] = yfkeywords.Industry(str(item.get("compIndustry"))) if item.get(
                        "compIndustry", None) else None
                if 'compProperty' in item:
                    item["compProperty"] = yfkeywords.CompProperty(str(item.get("compProperty"))) if item.get(
                        "compProperty", None) else None
                if 'education' in item:
                    item["education"] = yfkeywords.Education(str(item.get("education"))) if item.get("education", None) else None
                items.append(item)
            resume[field] = items
    return resume
def __init__(self):
    """Create the lookup clients this instance delegates to."""
    # Keyword extraction helper and venue/spot client, respectively.
    self.keyword_fetcher = Keywords()
    self.spot_client = Spot()
# -*- coding: utf-8 -*- from tweepy.streaming import StreamListener from tweepy import OAuthHandler from tweepy import Stream from tweepy import API import json import pika import time import sys from config import config from keywords import Keywords keywordObj = Keywords(['python', 'javascript']) #Variables that contains the user credentials to access Twitter API access_token = config['access_token'] access_token_secret = config['access_token_secret'] consumer_key = config['consumer_key'] consumer_secret = config['consumer_secret'] class TweetProducer(StreamListener): def __init__(self, api): self.api = api super(StreamListener, self).__init__() #setup rabbitMQ Connection connection = pika.BlockingConnection( pika.ConnectionParameters(host='localhost')) self.channel = connection.channel()
def download_resume(self, id, headers):
    """Fetch and normalize one resume by user id, retrying through proxies.

    POSTs to the yifengjianli getUserResume endpoint with randomized delays
    (up to 6 attempts, aborting with Exception("PROXY_FAIL!") after that),
    translates coded fields via Keywords, and returns the enriched payload
    serialized as a JSON string (ensure_ascii=False).

    Raises:
        Exception: "PROXY_FAIL!" after repeated fetch failures, or
            'No Resume Return! ...' when the payload has no 'resume' key.

    NOTE(review): uses dict.iteritems(), so this code is Python 2 only.
    NOTE(review): parameter `id` shadows the builtin.
    """
    logger.info('headers %s of download resume' % (headers))
    try_times = 0
    url = "http://www.yifengjianli.com/bidme/getUserResume"
    _resume = {}  # staging dict for the translated top-level fields
    yfkeywords = Keywords()
    # Retry loop: sleep a random 3-10s before each attempt to pace requests.
    while True:
        try_times += 1
        try:
            time.sleep(random.uniform(3, 10))
            response = self.session.post(url, data={
                "userId": id,
                "resumeCookie": "",
            }, headers=headers, timeout=30, proxies=self.proxies)
            assert response
            assert response.status_code == 200
            response.encoding = 'utf-8'
        except Exception:
            logger.warning(
                'fetch url %s with %s fail:\n%s' % (url, self.proxies, traceback.format_exc()))
            if try_times > 5:
                raise Exception("PROXY_FAIL!")
            else:
                time.sleep(30)  # back off before the next attempt
        else:
            break
    resume = json.loads(response.text)
    assert resume
    if 'resume' not in resume:
        raise Exception('No Resume Return! Maybe Over 300!')
    # Single-valued coded fields: translate when present, else None.
    _resume["sex"] = yfkeywords.Sex(str(resume["resume"].get("sex"))) if resume["resume"].get("sex", None) else None
    _resume["jobState"] = yfkeywords.JobState(str(resume["resume"].get("jobState"))) if resume["resume"].get("jobState") else None
    _resume["maritalStatus"] = yfkeywords.MaritalStatus(str(resume["resume"].get("maritalStatus"))) if resume["resume"].get("maritalStatus") else None
    _resume["expectWorkType"] = yfkeywords.Worktype(str(resume["resume"].get("expectWorkType"))) if resume["resume"].get("expectWorkType", None) else None
    _resume["education"] = yfkeywords.Education(str(resume["resume"].get("education"))) if resume["resume"].get("education", None) else None
    # City-like fields may be comma-separated lists; translate element-wise.
    for field in ('expectCity', 'city', 'province', 'hukouProvince', 'hukouCity'):
        if "," in str(resume["resume"].get(field)):
            citys = str(resume["resume"].get(field))
            parsed_citys = []
            for i in citys.split(","):
                parsed_citys.append(yfkeywords.Expectcity(str(i)))
            _resume[field] = ",".join(parsed_citys)
        else:
            _resume[field] = yfkeywords.Expectcity(str(resume["resume"].get(field))) if resume["resume"].get(field, None) else None
    _resume["expectSalary"] = yfkeywords.Expectsalary(str(resume["resume"].get("expectSalary"))) if resume["resume"].get("expectSalary", None) else None
    # jobTitle may also be a comma-separated list.
    if "," in str(resume["resume"].get("jobTitle")):
        jobtitles = str(resume["resume"].get("jobTitle"))
        parsed_jobtitles = []
        for i in jobtitles.split(","):
            parsed_jobtitles.append(yfkeywords.Jobtitle(str(i)))
        _resume["jobTitle"] = ",".join(parsed_jobtitles)
    else:
        _resume["jobTitle"] = yfkeywords.Jobtitle(str(resume["resume"].get("jobTitle"))) if resume["resume"].get("jobTitle", None) else None
    # Write the translated values back over the original payload (in place).
    for k, v in _resume.iteritems():
        resume['resume'][k] = v
    # Translate coded fields inside each work/education history entry.
    for field in ['work_experiences', 'educations']:
        if field in resume:
            items = []
            for item in resume[field]:
                if 'salary' in item:
                    item["salary"] = yfkeywords.Expectsalary(str(item.get("salary"))) if item.get("salary", None) else None
                if 'compSize' in item:
                    item["compSize"] = yfkeywords.CompSize(str(item.get("compSize"))) if item.get("compSize", None) else None
                if 'compIndustry' in item:
                    item["compIndustry"] = yfkeywords.Industry(str(item.get("compIndustry"))) if item.get("compIndustry", None) else None
                if 'compProperty' in item:
                    item["compProperty"] = yfkeywords.CompProperty(str(item.get("compProperty"))) if item.get("compProperty", None) else None
                if 'education' in item:
                    item["education"] = yfkeywords.Education(str(item.get("education"))) if item.get("education", None) else None
                items.append(item)
            resume[field] = items
    return json.dumps(resume, ensure_ascii=False)
# NOTE(review): secrets are hard-coded here; they should live in environment
# variables or a secrets store.
VERIFY_TOKEN = "pe:/4H>}]245kph"
PAGE_ACCESS_TOKEN = "EAAOTV8ZBzN5EBAEGGkXKbgl7uCzrgPlZCo2fGSHZBbTnVdixE8oxl3ROtVfZB5wT0nOZCxVz2APPpnxZAMDy48vnPFDKd0gsu41pPSuVtlLvYZASZBDZAMnCfNo5YvCpaZC6RPVDmZCTHIYm3gKCDP8vPmQwfukTJD3QVSQQDLoGQKeMwZDZD"

# Instantiate data object
# In real situations, load data from API / database
dataObj = Data()
luminaria_embutir = dataObj.luminaria_embutir
controladores = dataObj.controladores
eletrofita = dataObj.eletrofita
fita_led = dataObj.fita_led
lampada_led = dataObj.lampada_led
luminaria_led = dataObj.luminaria_led
lustres = dataObj.lustres

# Instantiate keywords
keywords = Keywords()


@app.route('/webhook', methods=['GET', 'POST'])
def index():
    """Facebook Messenger webhook endpoint (GET = verification handshake).

    NOTE(review): the POST/message-handling branch presumably follows below
    this excerpt; only the GET verification path is visible here.
    """
    # Verify webhook: echo the challenge back when the verify token matches.
    if request.method == 'GET':
        mode = request.values['hub.mode']
        token = request.values['hub.verify_token']
        challenge = request.values['hub.challenge']
        if mode is not None and token is not None:
            if mode == 'subscribe' and token == VERIFY_TOKEN:
                print("WEBHOOK_VERIFIED")
                return challenge, 200
def test_keywords_merger(self):
    """Merging KEYWORDS with ADDWORDS and PREWORDS yields 12 entries."""
    merged = Keywords().merge(KEYWORDS, addwords=ADDWORDS, prewords=PREWORDS)
    self.assertEqual(12, len(merged))