def user_account_detail(self, user_id): error_msg = jsontree.jsontree() account_detail = jsontree.jsontree() query = """ SELECT accountId, userId1, userId2, balance FROM account WHERE userId1 = \'%s\' AND confirmed_by_user1 = True AND confirmed_by_user2 = True """ % (user_id) cursor = self.con.cursor() try: cursor.execute(query) result1 = cursor.fetchall() print result1 except Exception, e: error_msg.status = 0 error_msg.message = str(e) sys.stdout.write('USER PHASE I') return json.dumps(error_msg)
def notification(self, user_id):
    """Fetch accounts for *user_id* still awaiting the owner's confirmation.

    Returns a JSON error payload {status: 0, message: ...} on query failure.
    """
    # Get notifications from mid
    # Get notifications from notification
    error_msg = jsontree.jsontree()
    notice = jsontree.jsontree()
    name = jsontree.jsontree()
    name.positive = []
    name.negetive = []  # NOTE(review): spelling kept -- other code may read this key
    name.unread = []
    # Parameterized: user_id is untrusted; also pass False as a real boolean
    # instead of interpolating the string 'False' into the SQL text.
    query = """ SELECT accountId, userId1, userId2, balance
                FROM account
                WHERE userId1 = %s AND confirmed_by_user1 = %s """
    cursor = self.con.cursor()
    try:
        cursor.execute(query, (user_id, False))
        result = cursor.fetchall()
    except Exception as e:
        error_msg.status = 0
        error_msg.message = str(e)
        return json.dumps(error_msg)
def newFixFor(fb, ixProject, sFixFor, fixfortype=objects.fbFixFor,
              dtRelease=None, dtEnd=None, dt=None, dtStart=None,
              sStartNote=None, fAssignable=None):
    """newFixFor(fb, ixProject, sFixFor, fixfortype=objects.fbFixFor, \
    dtRelease=None, dtEnd=None, dt=None, \
    dtStart=None, sStartNote=None, \
    fAssignable=None)
    """
    args = jsontree.jsontree(ixProject=ixProject, sFixFor=sFixFor)
    # dt and dtEnd are accepted as aliases for dtRelease; dtEnd wins over dt.
    if dt:
        dtRelease = dt
    if dtEnd:
        dtRelease = dtEnd
    if dtRelease:
        args.dtRelease = dtRelease
    if sStartNote:
        args.sStartNote = sStartNote
    # Default the start date to today when the caller supplied none.
    if not dtStart:
        dtStart = datetime.datetime.now().date()
    if dtStart:
        args.dtStart = dtStart
    if fAssignable:
        args.fAssignable = 1
    res = fb.newFixFor(**parse.fbargs(args, fixfortype))
    return parse.extract(res, dict(ixFixFor=types.fbint,
                                   sFixFor=types.fbstring,
                                   sStartNote=types.fbstring))
def logout(self):
    """Send a redirect-home payload to the logout endpoint."""
    payload = j.jsontree()
    payload.redirect = self.home_url
    body = json.dumps(payload)
    self.hero.get(self.logout_url, data=body)
def create_balance(self, user):
    """Start creating a balance entry: resolve the fellow user's id by username.

    Returns a JSON error payload {status: 0, message: ...} on lookup failure.
    """
    amount = user.amount
    mod = user.mod
    # True when the balance is in the user's favour.
    positive = (mod == 'positive')
    fellow_username = user.fellow_username
    # Parameterized lookup -- fellow_username is user-supplied form data
    # and must not be spliced into the SQL string.
    query = """ SELECT userId FROM users WHERE username = %s """
    conn = self.connection
    cursor = conn.cursor()
    error_msg = jsontree.jsontree()
    try:
        cursor.execute(query, (fellow_username,))
        result = cursor.fetchone()
    except Exception as e:
        error_msg.status = 0
        error_msg.message = str(e)
        sys.stdout.write('DATABASE PHASE I')
        return json.dumps(error_msg)
def create_address(self, user):
    """Begin address creation: check whether *username* already exists.

    Returns a jsontree {status: 0, message: ...} on query failure.
    """
    username = user['username']
    # Collected for the INSERT that follows this lookup (out of view here).
    firstname = user['firstname']
    lastname = user['lastname']
    phone = user['phone']
    _id = uuid.uuid1()
    # Parameterized -- username comes straight from the signup form.
    query = """ SELECT userId FROM users WHERE username = %s """
    cursor = self.connection.cursor()
    conn = self.connection
    msg = jsontree.jsontree()
    try:
        cursor.execute(query, (username,))
    except Exception as e:
        self.debug_InternalError(e)
        msg.status = 0
        msg.message = str(e)
        return msg
def firstname_lastname(self, user_id):
    """
    firstname:
    lastname:

    Look up the firstname/lastname address record for *user_id*.
    On failure returns a jsontree with status=0 and a string message.
    """
    con = self.con
    user = jsontree.jsontree()
    # Parameterized -- user_id originates from a client cookie.
    query = """ SELECT firstname, lastname FROM address WHERE userId = %s """
    cursor = con.cursor()
    try:
        cursor.execute(query, (user_id,))
        result = cursor.fetchone()
        cursor.close()
    except Exception as e:
        user.status = 0
        # str(e): keep the payload serializable (the old code stored the raw
        # exception object, unlike every sibling handler in this module).
        user.message = str(e)
        return user
def user_detail(self, user_id):
    """Assemble a JSON user profile (names + username) for *user_id*."""
    user = jsontree.jsontree()
    names = self.firstname_lastname(user_id)
    # Guard: a failed name lookup short-circuits with status 0.
    if names.status != 1:
        user.status = 0
        return json.dumps(user)
    user.firstname = names.firstname
    user.lastname = names.lastname
    user.user_id = user_id
    uname = self.username(user_id)
    if uname.status != 1:
        user.status = 0
        return json.dumps(user)
    user.username = uname.username
    user.status = 1
    return json.dumps(user)
def logout(self, user):
    """Delete the session cookie row for *user*, committing on success."""
    user_id = user.user_id
    cookie = user.cookie
    # Parameterized -- the cookie value is client-controlled.
    query = """ DELETE FROM cookie WHERE cookie = %s """
    conn = self.connection
    cursor = conn.cursor()
    msg = jsontree.jsontree()
    try:
        cursor.execute(query, (cookie,))
        conn.commit()
        cursor.close()
        msg.status = 1
        msg.message = "Logged out."
    except Exception as e:
        self.debug_InternalError(e)
        msg.status = 0
        # str(e): write() requires a string -- the old code passed the raw
        # exception object, raising TypeError inside the error handler.
        msg.message = str(e)
        sys.stdout.write(str(e))
def editFixFor(fb, ixFixFor, sFixFor, fixfortype=objects.fbFixFor,
               dtRelease=None, dtEnd=None, dt=None, dtStart=None,
               sStartNote=None, fAssignable=None):
    """editFixFor(fb, ixFixFor, sFixFor, fixfortype=objects.fbFixFor, \
    dtRelease=None, dtEnd=None, dt=None, dtStart=None, sStartNote=None, \
    fAssignable=None
    """
    # dt / dtEnd act as aliases for dtRelease (dtEnd takes precedence).
    release = dtRelease
    if dt:
        release = dt
    if dtEnd:
        release = dtEnd
    args = jsontree.jsontree(ixFixFor=ixFixFor, sFixFor=sFixFor)
    if fAssignable:
        args.fAssignable = 1
    if sStartNote:
        args.sStartNote = sStartNote
    # Missing start date defaults to today.
    start = dtStart or datetime.datetime.now().date()
    if start:
        args.dtStart = start
    if release:
        args.dtRelease = release
    return fb.editFixFor(**parse.fbargs(args))
def modify_user(self, user):
    """First phase of a profile edit: fetch the current username/password.

    Returns a jsontree {status: 0, phase: 1, message} on query failure.
    """
    username = user.username
    firstname = user.firstname
    lastname = user.lastname
    password = user.password
    current_password = user.current_password
    user_id = user.user_id
    # Parameterized -- user_id originates from a client cookie.
    query = """ SELECT username, password FROM users WHERE userId = %s """
    cursor = self.con.cursor()
    msg = jsontree.jsontree()
    try:
        cursor.execute(query, (user_id,))
        result = cursor.fetchone()
        cursor.close()
    except Exception as e:
        msg.status = 0
        msg.phase = 1
        msg.message = str(e)
        return msg
def step2_download():
    """Crawl each Rite Aid store page queued in task.txt and persist details."""
    spider = Crawler()
    TP = Taskplanner()
    TP._load_todo('task.txt')
    base_url = 'https://www.riteaid.com'
    entrance_url = 'https://www.riteaid.com/store-site-map'
    # Resume from the previous run when a result file exists.
    try:
        riteaid = load_jt('riteaid.txt')
    except:
        riteaid = jsontree.jsontree()
    counter = itertools.count(0)
    for store_url in TP.ignore_iter(TP.todo[entrance_url], ['data']):
        ## First handle the reference data passed in alongside the url
        text = TP.todo[entrance_url][store_url]['data']
        storeID, address = text.split(',', 1)
        storeID, address = storeID.strip(), address.strip()
        ## Then process each url's page
        if storeID not in riteaid:
            html = spider.html(store_url)
            if html:
                try:
                    soup = bs4.BeautifulSoup(html)
                    ''' phone number '''
                    # NOTE(review): {'class', 'padding-phone'} is a set literal,
                    # not a dict -- presumably meant {'class': 'padding-phone'};
                    # confirm before changing (same for the 'days' lookup below).
                    for p in soup.findAll('p', attrs = {'class', 'padding-phone'}):
                        phone = p.text.replace(p.strong.text, '').strip().replace(' ', '-')
                    # process Phone
                    ''' hour '''
                    hours = list()
                    for ul in soup.findAll('ul', attrs = {'class', 'days'}):
                        hours.append( ul.text.split() )
                    # process Office Hour
                    ''' additional information '''
                    additional_info = list()
                    for div in soup.findAll('div', attrs = {'id': 'eventListId'}):
                        for li in div.findAll('li'):
                            additional_info.append( li.text )
                    # process Additional Information
                    ''' store detail '''
                    detail = {}
                    for div in soup.findAll('div', attrs = {'class': 'storeDetailsAttributeCategory'}):
                        storeDetailsAttributeCategory = div.strong.text.strip()
                        detail.setdefault(storeDetailsAttributeCategory, list())
                        for subdiv in div.findAll('div', attrs = {'class': 'storeDetailsAttribute'}):
                            detail[storeDetailsAttributeCategory].append(subdiv.text.strip())
                    # process Store Detail
                    ''' validate the information I crawled '''
                    # NOTE(review): `phone` is only bound inside its loop above;
                    # if no matching <p> exists this raises NameError, silently
                    # swallowed by the bare except below (store is dropped).
                    if validate(phone, hours, additional_info, detail):  # <=== validate, sometime error
                        print "CORRECT"
                        riteaid.setdefault(storeID, (address, phone, hours, additional_info, detail) )
                        dump_jt(riteaid, 'riteaid.txt')
                        print storeID, counter.next()  ## only count the successful ones
                    else:
                        print "ERROR!", (phone, hours, additional_info, detail)
                        print "\t%s" % store_url
                except:
                    pass
def define_user_login(self, request):
    """Build a jsontree login payload from the request's JSON body."""
    user = jsontree.jsontree()
    body = request.get_json()
    user.username = body.get('username', '')
    user.password = body.get('password', '')
    # The Authorization header is optional; default to None when absent.
    try:
        user.api = request.headers['Authorization']
    except Exception:
        user.api = None
def logout():
    """Clear the caller's session server-side, then send them home."""
    visitor = jsontree.jsontree()
    visitor.user_id = request.cookies.get('user')
    visitor.cookie = request.cookies.get('tea')
    msg = con.logout(visitor)
    return redirect(url_for('home'))
def add_front():
    """Render the add-balance page, or the landing page if not logged in."""
    user = jsontree.jsontree()
    user.user_id = request.cookies.get('user')
    user.user_cookie = request.cookies.get('tea')
    logged = con.is_logged(user)
    # Anonymous or stale sessions just get the landing page.
    if user.user_id == '' or user.user_cookie == '' or logged == 0:
        return render_template('home.html')
    profile = json.loads(user_db.user_detail(user.user_id))
    return render_template('add.html', user=profile)
def profile(username):
    """Render the profile page for the logged-in visitor."""
    user = jsontree.jsontree()
    user.user_id = request.cookies.get('user')
    user.user_cookie = request.cookies.get('tea')
    logged = con.is_logged(user)
    # Early return instead of if/else: anonymous visitors see the home page.
    if user.user_id == '' or user.user_cookie == '' or logged == 0:
        return render_template('home.html')
    details = json.loads(user_db.user_detail(user.user_id))
    return render_template('profile.html', user=details)
def modify():
    """Handle the profile-edit form: collect fields after an auth check."""
    user = jsontree.jsontree()
    user.user_id = request.cookies.get('user')
    user.user_cookie = request.cookies.get('tea')
    is_logged = con.is_logged(user)
    if(user.user_id == '' or user.user_cookie == '' or is_logged == 0):
        return render_template('home.html')
    error_msg = jsontree.jsontree()
    try:
        user.username = request.form['username']
        user.firstname = request.form['firstname']
        user.lastname = request.form['lastname']
        user.password = request.form['password']
        user.current_password = request.form['current_password']
    except Exception as e:
        error_msg.status = 0
        # str(e): pass a serializable message to the template -- the sibling
        # handlers in this module already do this; the raw exception object
        # was inconsistent.
        error_msg.message = str(e)
        return render_template('error.html', msg=error_msg)
def read():
    """Mark one notification as read, then return to the notification list."""
    visitor = jsontree.jsontree()
    visitor.user_id = request.cookies.get('user')
    visitor.user_cookie = request.cookies.get('tea')
    logged = con.is_logged(visitor)
    # Reject anonymous or expired sessions.
    if visitor.user_id == '' or visitor.user_cookie == '' or logged == 0:
        return redirect(url_for('home'))
    user_db.mark_notification_read(request.form['notice_id'])
    return redirect(url_for('notification'))
def define_user_signup(self, request):
    """Collect signup fields from the request's JSON body into a jsontree."""
    user = jsontree.jsontree()
    payload = request.get_json()
    # Every missing field defaults to the empty string.
    for field in ('username', 'password', 'firstname', 'lastname', 'phone'):
        user[field] = payload.get(field, '')
    # The Authorization header is optional.
    try:
        user.api = request.headers['Authorization']
    except Exception:
        user.api = None
def signup():
    """Handle the signup form POST; GET is rejected with an error page."""
    error_msg = jsontree.jsontree()
    if (request.method == 'GET'):
        error_msg.status = 0
        error_msg.message = "Method Not Allowed !!"
        return render_template("error.html", msg=error_msg)
    user = jsontree.jsontree()
    try:
        user.username = request.form['username']
        user.password = request.form['password']
        user.firstname = request.form['firstname']
        user.lastname = request.form['lastname']
        user.phone = request.form['phone']
    # BUG FIX: the old `except Exception, TypeError` bound the caught
    # exception to the NAME TypeError (shadowing the builtin) rather than
    # catching TypeError; catch Exception plainly to keep the same scope.
    except Exception:
        error_msg.status = 0
        error_msg.message = "Unexpected buffer data encountered !!"
        return render_template('error.html', msg=error_msg)
def _initialize_jtmenu(menu, tree):
    """Recursively mirror *tree* onto *menu*, returning the menu-item tree."""
    result = jsontree.jsontree()
    for key, value in tree.iteritems():
        item = menu[key]
        # Nested jsontree -> recurse into the corresponding submenu.
        if isinstance(value, jsontree.jsontree):
            result[key] = _initialize_jtmenu(item, value)
            continue
        # A list carries per-state titles; anything else is a fixed title.
        if isinstance(value, list):
            item.state_titles = value
        else:
            item.title = value
        result[key] = item
    return result
def ajax_api_getname():
    """AJAX endpoint: return usernames matching the typed prefix."""
    matching_names = {}
    user = jsontree.jsontree()
    user.user_id = request.cookies.get('user')
    user.user_cookie = request.cookies.get('tea')
    is_logged = con.is_logged(user)
    # Unauthenticated callers get an empty result set.
    if(user.user_id == '' or user.user_cookie == '' or is_logged == 0):
        return matching_names
    # BUG FIX: Flask exposes form data as `request.form`; `request.forms`
    # raised AttributeError on every authenticated call.
    name = request.form['fellow_username']
    matching_names = user_db.matching_names(name)
    return matching_names
def notification_accept():
    """Record the user's Accept/decline decision for a pending account."""
    visitor = jsontree.jsontree()
    visitor.user_id = request.cookies.get('user')
    visitor.user_cookie = request.cookies.get('tea')
    logged = con.is_logged(visitor)
    if visitor.user_id == '' or visitor.user_cookie == '' or logged == 0:
        return redirect(url_for('home'))
    visitor.account_id = request.form['account_id']
    visitor.decision = request.form['decision']
    # Only an explicit 'Accept' mutates the account; anything else is a no-op.
    if visitor.decision == 'Accept':
        user_db.mark_accept(visitor)
    return redirect(url_for('notification'))
def s1_state_layer():
    """Seed the task file with one entry per state from the CVS locator page."""
    # Resume an existing task file; otherwise start from an empty tree.
    try:
        task = load_jt(fname)
    except:
        task = jsontree.jsontree()
    entrance_url = 'http://www.cvs.com/stores/cvs-pharmacy-locations'
    html = spider.html(entrance_url, timeout=10)
    if html:
        for line in html.split('\n'):
            if '<a href="/stores/cvs-pharmacy-locations' in line:
                soup = bs4.BeautifulSoup(line.strip())
                href = soup.find('a')['href']
                # The state code is the last path segment of the link.
                state = href.split(r'/').pop()
                task[state] = {'url': homepage + href}
    dump_jt(task, fname)
def get_top_packages(top_n=TOP_N, stored=False):
    """Identify top packages by download count on pypi.

    A friendly person maintains an occasionally updated JSON feed of the top
    pypi packages by download count. By default this does a fresh pull of
    that feed; pass stored=True to read the checked-in snapshot instead.

    Args:
        top_n (int): the number of top packages to retrieve
        stored (bool): whether to use the stored package list

    Returns:
        dict: top packages (name -> 1-based rank)
    """
    if stored:
        # Read the local snapshot instead of hitting the network.
        with open("top_packages_may_2020.json", "r") as f:
            data = json.load(f)
    else:
        top_packages_url = (
            "https://hugovk.github.io/top-pypi-packages/top-pypi-packages-30-days.json"
        )
        # A connectivity failure is fatal for this tool.
        try:
            with urllib.request.urlopen(top_packages_url) as url:  # nosec
                data = json.loads(url.read().decode())
        except urllib.error.URLError as e:
            print("Internet connection issue. Check connection")
            print(e)
            sys.exit(1)
    # jsontree gives attribute-style access into the decoded feed.
    json_data = jsontree.jsontree(data)
    # Rank is 1-based: the most-downloaded package maps to 1.
    return {json_data.rows[i]["project"]: i + 1 for i in range(top_n)}
def insert_into_account_table(self, account):
    """Insert a new account row; assigns account.account_id as a side effect.

    Populates a local jsontree `msg` with status/message (visible portion
    of this snippet does not return it).
    """
    account.account_id = str(uuid.uuid1())
    account_id = account.account_id
    user_id1 = account.user_id1
    user_id2 = account.user_id2
    balance = account.balance
    is_positive = account.is_positive
    confirmed_by_user1 = account.confirmed_by_user1
    confirmed_by_user2 = account.confirmed_by_user2
    date_added = datetime.now()
    # Parameterized INSERT -- several of these values are user-supplied.
    query = """ INSERT INTO account
                ( accountId, userId1, userId2, balance, is_positive,
                  confirmed_by_user1, confirmed_by_user2, date_added )
                VALUES ( %s, %s, %s, %s, %s, %s, %s, %s ) """
    conn = self.connection
    cursor = conn.cursor()
    msg = jsontree.jsontree()
    try:
        cursor.execute(query, (account_id, user_id1, user_id2, balance,
                               is_positive, confirmed_by_user1,
                               confirmed_by_user2, date_added))
        conn.commit()
        cursor.close()
        msg.status = 1
        msg.message = "Inserted into account."
    except Exception as e:
        self.debug_InternalError(e)
        msg.status = 0
        # BUG FIX: the old handler called sys.stdout.write() with NO argument,
        # raising TypeError and masking the real database error; also store
        # str(e) so the message stays serializable.
        msg.message = str(e)
        sys.stdout.write(str(e))
def matching_names(self, name):
    """Return usernames starting with *name* (prefix LIKE search).

    On failure returns a JSON {status: 0, message} payload.
    """
    error_msg = jsontree.jsontree()
    # Parameterized LIKE -- *name* is typed by the user; build the prefix
    # pattern in Python instead of splicing it into the SQL text.
    query = """ SELECT username FROM users WHERE username LIKE %s """
    cursor = self.con.cursor()
    try:
        cursor.execute(query, (name + '%',))
        result = cursor.fetchall()
        cursor.close()
    except Exception as e:
        error_msg.status = 0
        error_msg.message = str(e)
        return json.dumps(error_msg)
def home():
    """Render the account overview, or the landing page when logged out."""
    visitor = jsontree.jsontree()
    visitor.user_id = request.cookies.get('user')
    visitor.user_cookie = request.cookies.get('tea')
    logged = con.is_logged(visitor)
    if visitor.user_id == '' or visitor.user_cookie == '' or logged == 0:
        return render_template('home.html')
    _user = json.loads(user_db.user_detail(visitor.user_id))
    # A failed profile lookup renders the error page without account data.
    if _user['status'] == 0:
        return render_template('error.html', msg=_user)
    _account = json.loads(user_db.user_account_detail(visitor.user_id))
    if _account['status'] == 0:
        return render_template('error.html', msg=_account, user=_user)
    return render_template('konto.html', user=_user, account=_account)
def extract(fbdata, fbtypemap, name_map={}):
    """
    .. code:: python

        res = fb.search(q="1234", cols=keys2cols(fbtypemap, namemap))
        fbobj = extract(res.events.event, fbtypemap, namemap)
        res = fb.edit(**fbargs(fbobj, fbtypemap, namemap))
    """
    import jsontree
    res = jsontree.jsontree()
    deferred = []
    # Converters flagged takes_data need the partially-built result, so they
    # run only after every plain converter has populated `res`.
    for key, conv in fbtypemap.iteritems():
        if getattr(conv, 'takes_data', False):
            deferred.append((key, conv))
        else:
            _convert(res, fbdata, key, conv, fbtypemap, name_map)
    for key, conv in deferred:
        _convert(res, fbdata, key, conv, fbtypemap, name_map)
    return res
def add_back(username=None):
    """Process the add-balance form and bounce back to the home page."""
    visitor = jsontree.jsontree()
    visitor.user_id = request.cookies.get('user')
    visitor.user_cookie = request.cookies.get('tea')
    logged = con.is_logged(visitor)
    if visitor.user_id == '' or visitor.user_cookie == '' or logged == 0:
        return render_template('home.html')
    visitor.fellow_username = request.form['fellow_username']
    visitor.amount = request.form['amount']
    visitor.mod = request.form['sign']
    outcome = json.loads(user_db.create_balance(visitor))
    profile = json.loads(user_db.user_detail(visitor.user_id))
    # Surface creation failures on the error page alongside the profile.
    if outcome['status'] == 0:
        return render_template('error.html', msg=outcome, user=profile)
    return redirect(url_for('home'))
def create_issue(self, group, form_data, **kwargs):
    """Create a Fogbugz case"""
    project = group.project
    fb = fborm.FogBugzORM(
        self.get_option('host_url', project),
        self.get_option('secret_token', project)
    )
    bug = jsontree.jsontree()
    bug.sTitle = form_data['title'].encode('utf8')
    bug.sEvent = form_data['description'].encode('utf8')
    bug.sFormat = u'html'.encode('utf8')
    # Optional per-project routing fields; set only when configured.
    for option_name, field in (('area', 'sArea'),
                               ('category', 'sCategory'),
                               ('project', 'sProject')):
        value = self.get_option(option_name, project)
        if value:
            bug[field] = value
    ixBug = fb.new(bug, bugtype={})
    return ixBug.numerator
def del_user_transaction(self, user):
    """First step of deleting a transaction: fetch the account's owner.

    Returns a JSON error payload {status: 0, message} if the lookup fails.
    """
    error_msg = jsontree.jsontree()
    account_id = user.account_id
    user_id = user.user_id
    # Parameterized -- account_id arrives from a client-submitted form.
    query = """ SELECT userId1 FROM account WHERE accountId = %s """
    con = self.con
    cursor = con.cursor()
    try:
        cursor.execute(query, (account_id,))
        result = cursor.fetchone()
    except Exception as e:
        error_msg.status = 0
        error_msg.message = str(e)
        return json.dumps(error_msg)
def notification():
    """Render the notification list for the logged-in visitor."""
    visitor = jsontree.jsontree()
    visitor.user_id = request.cookies.get('user')
    visitor.user_cookie = request.cookies.get('tea')
    logged = con.is_logged(visitor)
    if visitor.user_id == '' or visitor.user_cookie == '' or logged == 0:
        return redirect(url_for('home'))
    notice = json.loads(user_db.notification(visitor.user_id))
    profile = json.loads(user_db.user_detail(visitor.user_id))
    # Either lookup failing routes to the error page (profile checked first).
    if profile['status'] == 0:
        return render_template('error.html', msg=profile)
    if notice['status'] == 0:
        return render_template('error.html', msg=notice)
    return render_template('notification.html', notice=notice, user=profile)
def s3_store_layer():
    """Fill in per-store id/address/phone/url details for every city page."""
    task = load_jt(fname)
    for state in task:
        for city in task[state]:
            if city != 'url':  # skip the 'url' bookkeeping key
                if len(task[state][city].keys()) == 1:  # only 'url' present -> not crawled yet
                    url = task[state][city]['url']
                    print state, city, url
                    html = spider.html(url, timeout=10)
                    if html:
                        soup = bs4.BeautifulSoup(html)
                        c = itertools.cycle(xrange(4))  # column cycler: 0=id, 1=addr, 2=tel, 3=link
                        for tr in soup.findAll(
                                'tr')[1:]:  # every row after the header is one store record
                            for td in tr.findAll('td'):  # one cell per iteration
                                flag = c.next()
                                if flag == 0:
                                    storeID = td.text.strip()
                                    task[state][city].setdefault(
                                        storeID, jsontree.jsontree())
                                elif flag == 1:
                                    task[state][city][storeID].setdefault(
                                        'addr', td.text.strip())
                                elif flag == 2:
                                    task[state][city][storeID].setdefault(
                                        'tel', td.text.strip())
                                else:
                                    task[state][city][storeID].setdefault(
                                        'url', homepage + td.a['href'])
    dump_jt(task, fname)
def __init__(self, data):
    # Wrap the raw mapping in a jsontree for attribute-style access.
    self.d = jsontree.jsontree(data)
def step1_taskplan():
    '''设定函数内常量'''  # (set up function-local constants)
    spider = Crawler()
    TP = Taskplanner()
    base_url = 'http://www.walgreens.com/'
    entrance_url = 'http://www.walgreens.com/storelocator/find.jsp?tab=store%20locator&requestType=locator'

    '''爬每个州url'''  # (crawl each state's url)
    # The state / city / store sections are meant to be run one at a time,
    # commenting out the others.
    # TP.todo.setdefault(entrance_url, TP.dict_to_json({'data': None} ) ) # pre-allocates space for the next step, once the current page is crawled
    # with open('storelocator_sorted_by_state.txt', 'rb') as f:
    #     html = f.read()
    # # html = spider.html(entrance_url) # start crawling
    #
    # if html:
    #     soup = bs4.BeautifulSoup(html)
    #     for a in soup.findAll('a', href = re.compile(r'/storelistings/storesbycity.jsp\?requestType=locator&state=\D*')):
    #         TP.todo[entrance_url].setdefault( base_url + a['href'],
    #             TP.dict_to_json({'data': a.text.strip()} ) )

    '''爬每个城市url'''  # (crawl each city's url)
    # The state / city / store sections are meant to be run one at a time.
    # for state_url in TP.ignore_iter(TP.todo[entrance_url], ['data']):
    #     html = spider.html(state_url)
    #
    #     if html:
    #         soup = bs4.BeautifulSoup(html)
    #         for a in soup.findAll('a', href = re.compile(r'/storelistings/storesincity.jsp\?requestType=locator&state=[.]*')):
    #             TP.todo[entrance_url][state_url].setdefault( base_url + a['href'],
    #                 TP.dict_to_json({'data': a.text.strip()} ) )
    #             print base_url + a['href'], '---', a.text.strip()

    '''爬每个商店url'''  # (crawl each store's url)
    # The state / city / store sections are meant to be run one at a time.
    # TP._load_todo('task-walgreens.txt')
    # for state_url in TP.ignore_iter(TP.todo[entrance_url], ['data']):
    #     for city_url in TP.ignore_iter(TP.todo[entrance_url][state_url], ['data']):
    #         if len(TP.todo[entrance_url][state_url][city_url]) == 1: # crawl only when 'data' is the single key
    #             html = spider.html(city_url)
    #             if html:
    #                 soup = bs4.BeautifulSoup(html)
    #                 for p in soup.findAll('p', attrs = {'class': 'float-left wid300 nopad'}):
    #                     TP.todo[entrance_url][state_url][city_url].setdefault( base_url + p.a['href'],
    #                         TP.dict_to_json({ } ) )

    '''爬每个商店里的信息'''  # (crawl the info on each store's detail page)
    # Resume the result file from a previous run when it exists.
    try:
        walgreens = load_jt('walgreens.txt')
    except:
        walgreens = jsontree.jsontree()
    pStoreID = r'(?<=id=)\d+'
    TP._load_todo('task-walgreens.txt')
    for state_url in TP.ignore_iter(TP.todo[entrance_url], ['data']):
        for city_url in TP.ignore_iter(TP.todo[entrance_url][state_url], ['data']):
            for store_url in TP.ignore_iter(
                    TP.todo[entrance_url][state_url][city_url], ['data']):
                ID = re.findall(pStoreID, store_url)[0]
                if ID not in walgreens:  # only stores not crawled yet
                    html = spider.html(store_url)
                    if html:
                        soup = bs4.BeautifulSoup(html)
                        street = 'None'
                        # extract street
                        for p in soup.findAll(
                                'p',
                                attrs={
                                    'class': 'mrgRt10px padTop2px padBtm2px nopad',
                                    'itemprop': 'streetAddress'
                                }):
                            street = p.text.strip()
                        city = 'None'
                        # extract city
                        for span in soup.findAll(
                                'span', attrs={'itemprop': 'addressLocality'}):
                            city = span.text.strip()
                        state = 'None'
                        # extract state
                        for span in soup.findAll(
                                'span', attrs={'itemprop': 'addressRegion'}):
                            state = span.text.strip()
                        zipcode = 'None'
                        # extract zipcode
                        for span in soup.findAll(
                                'span', attrs={'itemprop': 'postalCode'}):
                            zipcode = span.text.strip()
                        phone = 'None'
                        # extract telephone
                        for p in soup.findAll('p', attrs={
                                'class': 'nopad',
                                'itemprop': 'telephone'
                        }):
                            phone = p.text.strip()
                        ## ====================== OFFICE HOURS ======================
                        hours = list()
                        # extract office hours
                        for p in soup.findAll('p', attrs={
                                'class': 'nopad wid100 float-left'
                        }):
                            hours.append(p.text.strip())
                        ## exam hours subcategory
                        hours_category = list()
                        for h3 in soup.findAll(
                                'h3'):  # exam if there's store hours
                            for strong in h3.findAll('strong'):
                                if ('Shop' in strong.text) or (
                                        'Photo' in strong.text) or ('Store pickup'
                                                                    in strong.text):
                                    hours_category.append('store')
                        if len(
                                soup.findAll(
                                    'div',
                                    attrs={
                                        'class': 'padBtm5px float-left'
                                    })):  # exam if there's pharmacy hours
                            hours_category.append('pharmacy')
                        if len(
                                soup.findAll(
                                    'div', attrs={'class': 'mrgTop25px padBtm5px'
                                                  })):  # exam if there's clinic hours
                            hours_category.append('clinic')
                        ## process map hours to it's subcategory
                        hours_detail, hours = {}, deque(hours)
                        if 'store' in hours_category:
                            hours_detail['store'] = list()
                            hours_detail['store'].append(hours.popleft())
                            hours_detail['store'].append(hours.popleft())
                            hours_detail['store'].append(hours.popleft())
                        if 'pharmacy' in hours_category:
                            hours_detail['pharmacy'] = list()
                            hours_detail['pharmacy'].append(hours.popleft())
                            hours_detail['pharmacy'].append(hours.popleft())
                            hours_detail['pharmacy'].append(hours.popleft())
                        if 'clinic' in hours_category:
                            hours_detail['clinic'] = list(hours)
                        ## ====================== SERVICE ===========================
                        services = dict()
                        # extract service
                        for div in soup.findAll(
                                'div', attrs={'class': 'padTop5px wid220 float-left'}):
                            for li in div.findAll('li'):
                                services.setdefault('shop', list())
                                services['shop'].append(li.text.strip())
                        for div in soup.findAll(
                                'div', attrs={'class': 'wid220 float-left'}):
                            for li in div.findAll('li'):
                                services.setdefault('pharmacy', list())
                                services['pharmacy'].append(li.text.strip())
                        for div in soup.findAll('div', attrs={
                                'class': 'mrgTop10px mrgBtm20px'
                        }):
                            for a in div.findAll(
                                    'a',
                                    href=re.compile(
                                        r'http://photo.walgreens.com/walgreens/storepage/[\s\S]*'
                                    )):
                                services.setdefault('photo', list())
                                services['photo'].append(a.text.strip())
                        if validate(street, city, state, zipcode, phone,
                                    hours_detail, services,
                                    store_url):  # crawl succeeded
                            # NOTE(review): this stores the leftover `hours`
                            # deque (already partially consumed into
                            # hours_detail above) -- possibly meant to store
                            # hours_detail instead; confirm before changing.
                            walgreens.setdefault(
                                ID, {
                                    'street': street,
                                    'city': city,
                                    'state': state,
                                    'zipcode': zipcode,
                                    'phone': phone,
                                    'hours': hours,
                                    'services': services
                                })
                            print city, state, ID
                            dump_jt(walgreens, 'walgreens.txt')
def __init__(self):
    # Pending and completed crawl targets, keyed by url.
    self.todo = jsontree.jsontree()
    self.finished = jsontree.jsontree()
import os
import sys

import jsontree

import halfcaff.util

# Default option values; monitor_interval is in seconds.
DEFAULTS = jsontree.jsontree(
    vpncli='/opt/cisco/anyconnect/bin/vpn',
    monitor_interval=int(60 * 2.5),  # 2.5min
    monitor_timemachine=True,
    monitor_vpn=True,
    auto_caffeinate_vpn=False,
    auto_caffeinate_timemachine=False)


def load_options(app):
    """Load options.json over the defaults and attach them to *app*."""
    options = jsontree.clone(DEFAULTS)
    try:
        with app.open('options.json') as optfile:
            options.update(jsontree.load(optfile))
    except:
        # Missing/corrupt options file: persist the defaults instead.
        #print sys.exc_info()
        app.options = options
        save_options(app)
    # Re-arm the monitor timer when the configured interval differs.
    if options.monitor_interval != DEFAULTS.monitor_interval:
        timer = halfcaff.util.get_timer(app.monitor)
        if timer:
            timer.interval = options.monitor_interval
    app.options = options
    # Migrate the pre-split 'auto_caffeinate' flag from old versions.
    if 'auto_caffeinate' in options:  # old version
        options['auto_caffeinate_vpn'] = options['auto_caffeinate']
import sys import jsontree docker_image = sys.argv[1] tag = sys.argv[2] with open('describe_taskdefinition.json', 'r') as myfile: data2 = myfile.read().replace('\n', '') je = jsontree.JSONTreeDecoder().decode(data2) je = je.taskDefinition od = jsontree.jsontree() od.containerDefinitions = je.containerDefinitions od.containerDefinitions[0].image = docker_image + ':' + tag print od.containerDefinitions[0].image od.family = je.family od.volumes = je.volumes if isinstance(je.networkMode, basestring): od.networkMode = je.networkMode if isinstance(je.taskRoleArn, basestring): od.taskRoleArn = je.taskRoleArn taskDefinition = je["taskDefinitionArn"][je["taskDefinitionArn"].find("/") + 1:je["taskDefinitionArn"].rfind(":"):] fileName = "describe_taskdefinition_" + taskDefinition + ".json" f = open(fileName, "w") f.write(jsontree.JSONTreeEncoder().encode(od))
def states(self):
    """Return every entity state from the /states endpoint as jsontrees."""
    endpoint = self.url + "/states"
    payload = call_get(endpoint, self.headers)
    return list(map(jsontree.jsontree, payload))
import halfcaff.util
import halfcaff.login
import halfcaff.options
import halfcaff.power

### States:
### VPN Caff Status Button Icon Icon Desc
### 0 0 Not Connected NoVPN (greyed out) Disabled Cup no steam
### 0 1 ILLEGAL -- Error Upside down cup?
### 1 0 Not Caffeinated On Off Cup no steam
### 1 1 Caffeinated Off On Cup steam

# Menu titles: a list value carries per-state titles, a nested jsontree a submenu.
_MENU_TITLES = jsontree.jsontree(
    Control=["Caffeinate", "Decaffeinate", "No VPN or TimeMachine (disabled)"],
    Options=jsontree.jsontree(AutoVPN="Auto Caffeinate on VPN Connection",
                              AutoTM="Auto Caffeinate on Time Machine Backup",
                              Login="******"),
    About="About",
    Quit="Quit")


def _initialize_jtmenu(menu, tree):
    """Mirror *tree* onto *menu* items, recursing into nested jsontrees.

    NOTE(review): this definition appears truncated in this view -- the
    non-list else branch and the final return are cut off.
    """
    menutree = jsontree.jsontree()
    for name, title in tree.iteritems():
        menuitem = menu[name]
        if isinstance(title, jsontree.jsontree):
            subtree = _initialize_jtmenu(menuitem, title)
            menutree[name] = subtree
        else:
            if isinstance(title, list):
                menuitem.state_titles = title
def get_state(self, entity_id):
    """Fetch the state of one entity; *entity_id* must be non-empty."""
    # Guard clause: an empty/None id is a caller error.
    if not entity_id:
        raise ValueError("Entity should not be blank")
    endpoint = self.url + "/states/{}".format(entity_id)
    payload = call_get(endpoint, self.headers)
    return jsontree.jsontree(payload)