def search(self, keyword, start=1, count=10):
    """Search *keyword* and return at least 10 non-"Hire" results.

    :param keyword: search term; spaces are URL-encoded as ``%20``
    :param start: 1-based index of the first result to request
    :param count: number of results requested per page
    :return: list of ``Item`` objects (empty when the API has no items)
    """
    # BUG FIX: str.replace returns a new string -- the original discarded
    # the result, so spaces were never actually encoded in the URL.
    keyword = keyword.replace(" ", "%20")
    # construct search URL
    url = self.SEARCH_BASE + keyword + "&start=" + str(start) + "&count=" + str(count)
    # get http response and convert to json object
    response = get_response(url)
    json_res = json.loads(response)
    if "items" not in json_res:
        return []

    result = []

    def _append_items(items):
        # filter out the freelancer profile posts ("Hire ..." titles)
        for item in items:
            if item['title'].find("Hire") == -1:
                result.append(Item(item["link"], item["title"]))

    _append_items(json_res["items"])
    # return at least 10 search results: keep paging until satisfied
    while len(result) < 10:
        start += count
        url = self.SEARCH_BASE + keyword + "&start=" + str(start) + "&count=" + str(count)
        response = get_response(url)
        json_res = json.loads(response)
        # BUG FIX: the original raised KeyError (or looped forever) once
        # the API ran out of pages; stop when no items come back.
        items = json_res.get("items")
        if not items:
            break
        _append_items(items)
    return result
def search(self, keyword, start=1, count=10):
    """Search *keyword* and return at least 10 non-"Hire" results.

    :param keyword: search term; spaces are URL-encoded as ``%20``
    :param start: 1-based index of the first result to request
    :param count: number of results requested per page
    :return: list of ``Item`` objects (empty when the API has no items)
    """
    # BUG FIX: str.replace returns a new string -- the original discarded
    # the result, so spaces were never actually encoded in the URL.
    keyword = keyword.replace(" ", "%20")
    # construct search URL
    url = self.SEARCH_BASE + keyword + "&start=" + str(start) + "&count=" + str(count)
    # get http response and convert to json object
    response = get_response(url)
    json_res = json.loads(response)
    if "items" not in json_res:
        return []

    result = []

    def _append_items(items):
        # filter out the freelancer profile posts ("Hire ..." titles)
        for item in items:
            if item['title'].find("Hire") == -1:
                result.append(Item(item["link"], item["title"]))

    _append_items(json_res["items"])
    # keep paging until we have at least 10 results
    while len(result) < 10:
        start += count
        url = self.SEARCH_BASE + keyword + "&start=" + str(start) + "&count=" + str(count)
        response = get_response(url)
        json_res = json.loads(response)
        # BUG FIX: the original raised KeyError (or looped forever) once
        # the API ran out of pages; stop when no items come back.
        items = json_res.get("items")
        if not items:
            break
        _append_items(items)
    return result
def proxy(path):
    """Authenticate, rate-limit and forward the request to the backend.

    Applies the caller's usage-plan quota and throttling limits, then
    proxies GET/POST/DELETE requests to the service behind the ALB.
    """
    # ensure authorization header is present
    api_key = authorization_header_exists(request.headers)
    # retrieve usage plan from api key
    usage_plan = get_usage_plan(REDIS_CLIENT, USAGE_PLANS.keys(), api_key)
    # apply usage plan quota and throttling limits
    max_calls, period_in_seconds, throttling_rate = get_usage_plan_info(
        USAGE_PLANS, usage_plan)
    quota_per_seconds(REDIS_CLIENT, api_key, max_calls, period_in_seconds)
    rate_per_second(REDIS_CLIENT, api_key, throttling_rate)

    # Every verb resolves the same backend URL; dispatch on the method
    # instead of repeating the redirect/response logic in each branch.
    redirect_url = get_path(ALB, path)
    if request.method == "GET":
        resp = requests.get(redirect_url)
    elif request.method == "POST":
        resp = requests.post(redirect_url, json=request.get_json())
    elif request.method == "DELETE":
        # BUG FIX: the original passed resp.content (raw bytes) to
        # get_response() for DELETE but the full Response object for
        # GET/POST; handle all verbs uniformly.
        resp = requests.delete(redirect_url)
    else:
        # unsupported verbs fell through in the original and returned None
        return None
    return get_response(resp)
def cache_table():
    """Fetch the report table, cache it as CSV plus the raw HTML page."""
    req_dfs = pd.read_html(REPORT_URL, flavor='lxml')
    if not req_dfs:
        raise Exception("Could not find data table in webpage")
    df = req_dfs[0]
    # The date column's header language varies: try the English "Date"
    # first, then fall back to "Datum".
    try:
        dt = dateutil.parser.parse(df.iloc[0].Date, dayfirst=True)
    except Exception:
        dt = dateutil.parser.parse(df.iloc[0].Datum, dayfirst=True)
    # BUG FIX: the original called logger.info("records cases:\n", df),
    # which treats df as a %-format argument for a message that has no
    # placeholder and makes the logging call report a formatting error.
    logger.info("records cases:\n%s", df)
    df.to_csv(
        f"{CACHE_FOLDER}/{dt.isoformat()}.csv",
        index=False
    )
    with open(
        os.path.join(CACHE_FOLDER, f"{dt.isoformat()}.html"), 'wb'
    ) as f:
        f.write(get_response(REPORT_URL).content)
def getRegionChildren(state, county, city, childtype):
    # Query the Zillow "regionChildren" API for the sub-regions of the
    # given place and return the parsed rows.
    search_params = {
        "city": city,
        "state": state,
        "childtype": childtype,
        'county': county,
        "zws_id": Zillow_API_key
    }
    # NOTE(review): the single parentheses below do NOT create nested
    # tuples -- region_tags is a flat tuple of five strings; confirm that
    # flat shape is what utils.parse_response expects.
    region_tags = (('id'), ('name'), ('zindex'), ('latitude'), ('longitude'))
    region_cols = ['id', 'name', 'zindex', 'latitude', 'longitude']
    ##Get starting home data##
    r = utils.get_response(api='regionChildren', params=search_params)
    home = utils.parse_response(response=r, api='regionChildren',
                                tags=region_tags, cols=region_cols)
    #print(home)
    return home
def get_org_projects_info(org_link):
    """Get organisation's projects information

    :param org_link: Valid link to organisation's info page of a specific year
    :type org_link: str
    :return: A list of dictionaries of each project's title, description and link
    :rtype: list
    """
    response = get_response(org_link)
    if response.ok:
        soup = BeautifulSoup(response.text, 'html.parser')
        # project cards are the <li> elements carrying a "layout" attribute
        projects_li = soup.find_all('li', attrs={'layout': True})
        project_info = []
        for proj_html in projects_li:
            proj_info = {}
            # NOTE(review): .get('aria-label') returns None when the
            # attribute is missing, which would raise AttributeError here.
            proj_title = proj_html.get('aria-label').replace('\n', '')
            proj_desc = proj_html.find('div', attrs={
                'class': 'archive-project-card__content'
            }).text.replace('\t', '')
            proj_relative_link = proj_html.select('a')[0].get('href')
            proj_full_link = HOME_PAGE + proj_relative_link
            proj_info['title'] = proj_title
            proj_info['description'] = proj_desc
            proj_info['link'] = proj_full_link
            project_info.append(proj_info)
        return project_info
    # NOTE(review): implicitly returns None when the HTTP response is not
    # ok -- callers must handle that case.
def cache_content(url, dt, name):
    """Download *url* and store its raw bytes in CACHE_FOLDER as "<dt>__<name>"."""
    payload = get_response(url).content
    target = os.path.join(CACHE_FOLDER, f"{dt}__{name}")
    with open(target, 'wb') as cache_file:
        cache_file.write(payload)
def _extract_total(self):
    """Scrape country totals from COUNTRY_REPORT_URL.

    :return: dict with total cases (int) plus deaths and hospitalized
        counts (kept as strings, matching the original contract)
    :raises IndexError: if a pattern is not found in the page
    """
    req = get_response(COUNTRY_REPORT_URL)
    # BUG FIX: decode the payload once and reuse it -- the original
    # decoded it four times, and the first decode's result was discarded.
    html = req.content.decode("utf-8")
    # Het totaal aantal gemelde patiënten: 6412 (+852)
    re_total_cases = re.compile(
        r"Het totaal aantal gemelde patiënten: (\d+)")
    re_total_deaths = re.compile(
        r"Het totaal aantal gemelde overleden patiënten: (\d+)")
    re_total_hospitalized = re.compile(
        r"Het totaal aantal gemelde patiënten opgenomen \(geweest\) in het ziekenhuis: (\d+)"
    )
    total_cases = re_total_cases.findall(html)[0]
    total_deaths = re_total_deaths.findall(html)[0]
    total_hospitalized = re_total_hospitalized.findall(html)[0]
    # ["Gemeente", "Aantal", "BevAant", "Aantal per 100.000 inwoners"]
    return {
        "Aantal": int(total_cases),
        "deaths": total_deaths,
        "hospitalized": total_hospitalized
    }
def download_and_xlsx(xlsx_url):
    """Download the xlsx report, cache it and write the SI CSV dataset."""
    cached_xlsx = os.path.join(CACHE_FOLDER, "full_data.xlsx")
    with open(cached_xlsx, 'wb') as f:
        f.write(get_response(xlsx_url).content)
    # BUG FIX: parse the file we just cached instead of downloading the
    # workbook a second time from the URL.
    df = pd.read_excel(cached_xlsx)
    # map the source spreadsheet headers onto our canonical column names
    cols = {
        "Date": "datetime",
        "Tested (all)": "tests",
        "Positive (all)": "tests_positive",
        "All hospitalized on certain day": "hospitalized",
        "All persons in intensive care on certain day": "intensive_care",
        "Deaths (all)": "deaths"
    }
    df = df[list(cols.keys())]
    df.rename(columns=cols, inplace=True)
    df["cases"] = df.tests_positive
    df["country"] = "SI"
    df = df[[i for i in _COLUMNS_ORDER if i in df.columns]]
    df["datetime"] = df.datetime.apply(
        lambda x: x.isoformat()
    )
    df.sort_values(by=["datetime", "cases"], inplace=True)
    full_csv = os.path.join("dataset", "covid-19-si.csv")
    df.to_csv(
        full_csv,
        index=False
    )
def get_market_rankings(): "Gets the market rankings data and does basic cleaning" # get the data url = "https://web3api.io/api/v2/market/rankings" querystring = { "sortType": "tradeVolume", "size": config.SIZE } payload = utils.get_response(url, headers, querystring)["data"] # save the results in a dataframe df = pd.DataFrame(payload).drop([ "icon", "maxSupply", "totalSupply", "tokenVelocity", "transactionVolume", "uniqueAddresses", "specifications", "address", "decimals", "circulatingSupply", "rank" ], axis=1) # make sense of the blockchain column df["blockchain"] = df.blockchain.map(lambda x: x["name"]) # non-numeric columns non_num = ["name", "symbol", "blockchain"] # changing numeric columns to float64 df = pd.concat([df[non_num], df.drop(non_num, axis=1).apply(pd.to_numeric, axis=1)], axis=1) return df
def getSearchKeywordsRecommendation(keywords):
    # Python 2 code (print statement). Queries the keyword-recommendation
    # endpoint for the given seed keywords and returns the raw response.
    if not keywords:
        # NOTE(review): warns but does not return -- the request below is
        # still issued with an empty keyword; confirm this is intended.
        print "enter a keyword"
    response = get_response("%(url)s&keywords=%(kw)s" % {
        "url": url("getSearchKeywordsRecommendation"),
        "kw": keywords,
    })
    return response
def test_modify_gtt(kiteconnect):
    """Exercise GTT order modification against a mocked endpoint."""
    route = kiteconnect._routes["gtt.modify"].format(trigger_id=123)
    responses.add(
        responses.PUT,
        kiteconnect.root + route,
        body=utils.get_response("gtt.modify"),
        content_type="application/json",
    )
    order = {
        "transaction_type": kiteconnect.TRANSACTION_TYPE_BUY,
        "quantity": 1,
        "order_type": kiteconnect.ORDER_TYPE_LIMIT,
        "product": kiteconnect.PRODUCT_CNC,
        "price": 1,
    }
    gtts = kiteconnect.modify_gtt(
        trigger_id=123,
        trigger_type=kiteconnect.GTT_TYPE_SINGLE,
        tradingsymbol="INFY",
        exchange="NSE",
        trigger_values=[1],
        last_price=800,
        orders=[order],
    )
    assert gtts["trigger_id"] == 123
def get_news_content(links):
    # Scrape each news article: title, canonical link, date (digits taken
    # from the URL path) and body text. Sleeps 1s between requests to be
    # polite to the server.
    content = []
    for link in links:
        # random User-Agent per request to avoid naive bot blocking
        news_html = get_response(link, headers={'User-Agent': ua.random}).text
        soup = BeautifulSoup(news_html, 'html.parser')
        title = soup.find('div', attrs={
            "class": "article__header"
        }).find('div', attrs={
            "class": "article__header__title"
        }).text.strip()
        article = soup.find('div', class_='article__text')
        body = get_body(article)
        # the date is encoded as numeric path segments, e.g. /2020/01/31/
        path = urlparse(link).path.split('/')
        date = list(filter(lambda e: e.isdigit(), path)) or ''
        content.append({
            "title": title,
            'link': link.split('?', 1)[0],  # drop the query string
            "date": '.'.join(date),
            'body': body,
        })
        time.sleep(1)
    return content
def get_org_projects_info(org_link):
    """Get organisation's projects information

    :param org_link: Valid link to the organisation's info page of a specific year
    :type org_link: str
    :returns: A list of dictionaries of each project's title, descrition and link of project
    :rtype: list
    """
    response = get_response(org_link)
    if response.ok:
        soup = BeautifulSoup(response.text, 'html.parser')
        # each project is a two-line Material-Design list item
        projects_li = soup.find_all('li',
                                    'mdl-list__item mdl-list__item--two-line')
        project_info = []
        for proj_html in projects_li:
            proj_info = {}
            # first anchor holds the title, second span the description
            proj_title = proj_html.select('a')[0].text.replace('\n', '')
            proj_desc = proj_html.select('span')[1].text.replace('\n', '')
            proj_relative_link = proj_html.select('a')[0].get('href')
            proj_full_link = HOME_PAGE + proj_relative_link
            proj_info['title'] = proj_title
            proj_info['description'] = proj_desc
            proj_info['link'] = proj_full_link
            project_info.append(proj_info)
        return project_info
    else:
        # abort the whole run on a failed fetch
        print('Something Went Wrong')
        print(f'Status Code: {response.status_code}')
        sys.exit(1)
def add_post():
    # Create a new post from the JSON body's "text" field.
    if not (request.json and 'text' in request.json):
        # NOTE(review): the return value of invalid_request() is
        # discarded; unless it raises, execution continues into the
        # insert below even for an invalid request.
        utils.invalid_request()
    text = request.json["text"]
    # SECURITY(review): `text` is interpolated directly into the SQL
    # statement -- this is an SQL injection vector. Replace with a
    # parameterized query through the DB driver.
    result = execute_create_query(
        f'INSERT INTO `post`(text) VALUES ("{text}");')
    return utils.get_response(result)
def schedule():
    """Return the commitments of the member identified by the request token."""
    member_email = utils.get_email_from_token(request.args.get('token'))
    query = {'where': json.dumps({'member_email': member_email})}
    results = ParseDriver.make_parse_get_request(
        '/1/classes/Commitments', query)['results']
    if not results:
        return 'no schedule found', 404
    return utils.get_response(results[0]['commitments'])
def get_member_points():
    """Return the member's total points together with the attended events."""
    email = request.args.get('email')
    attendance = get_attendance(email)
    event_ids = [record['event_id'] for record in attendance]
    events = get_events_by_id(event_ids)
    # only events that carry a 'points' key contribute to the total
    total = sum(event['points'] for event in events if 'points' in event)
    return utils.get_response({'points': total, 'attendance': events})
def cache_content(url, save_as):
    """Download *url* and write the raw bytes to *save_as*, creating the
    parent directory when needed."""
    save_as_folder = "/".join(save_as.split("/")[:-1])
    # BUG FIX: exist_ok=True avoids the check-then-create race of the
    # original exists()/makedirs() pair (and the crash when the folder
    # appears between the two calls). Empty folder means a bare filename.
    if save_as_folder:
        os.makedirs(save_as_folder, exist_ok=True)
    req = get_response(url)
    with open(save_as, 'wb') as f:
        f.write(req.content)
def download_and_xlsx(xlsx_url):
    """Download the xlsx report (English or localised headers), normalise
    the columns and write the covid-19-si CSV dataset."""
    cached_xlsx = os.path.join(CACHE_FOLDER, "full_data.xlsx")
    with open(cached_xlsx, 'wb') as f:
        f.write(get_response(xlsx_url).content)
    # BUG FIX: parse the file we just cached instead of downloading the
    # workbook a second time from the URL.
    df = pd.read_excel(cached_xlsx, engine="openpyxl")
    cols_en = {
        "Date": "datetime",
        "Tested (all)": "tests",
        "Positive (all)": "tests_positive",
        "All hospitalized on certain day": "hospitalized",
        "All persons in intensive care on certain day": "intensive_care",
        "Deaths (all)": "deaths"
    }
    cols_si = {
        "Dátum": "datetime",
        "Mintavételek száma (összesen)": "tests",
        "pozitív esetek száma (összesen)": "tests_positive",
        "hospitalizált": "hospitalized",
        "intenzív ellátásra szoruló": "intensive_care",
        "elhunytak száma összesen": "deaths"
    }
    # Pick whichever header set the sheet actually uses. (Dropped the
    # original `except Exception as ee: raise Exception(ee)` clause --
    # it only destroyed the traceback.)
    try:
        df = df[list(cols_en.keys())]
        logger.info("Using english headers in excel data")
        cols = cols_en
    except KeyError:
        df = df[list(cols_si.keys())]
        logger.info("Using SI headers in excel data")
        cols = cols_si
    df.rename(columns=cols, inplace=True)
    df["cases"] = df.tests_positive
    df["country"] = "SI"
    df = df[[i for i in _COLUMNS_ORDER if i in df.columns]]
    df = df.loc[~df.cases.isna()]
    try:
        df["datetime"] = pd.to_datetime(df.datetime)
    except Exception:
        # best effort: keep original values when parsing fails
        # (narrowed from the original bare `except:`)
        pass
    df["datetime"] = df.datetime.apply(
        lambda x: x.isoformat()
    )
    df.sort_values(by=["datetime", "cases"], inplace=True)
    full_csv = os.path.join("dataset", "covid-19-si.csv")
    df.to_csv(
        full_csv,
        index=False
    )
def test_mf_instruments(kiteconnect):
    """MF instrument dump is fetched and parsed into a list."""
    endpoint = kiteconnect.root + kiteconnect._routes["mf.instruments"]
    responses.add(
        responses.GET,
        endpoint,
        body=utils.get_response("mf.instruments"),
        content_type="text/csv",
    )
    instruments = kiteconnect.mf_instruments()
    assert type(instruments) is list
def test_order_history(kiteconnect):
    """Order history for a known order id parses into a list."""
    route = kiteconnect._routes["order.info"].format(order_id="abc123")
    responses.add(
        responses.GET,
        kiteconnect.root + route,
        body=utils.get_response("order.info"),
        content_type="application/json",
    )
    history = kiteconnect.order_history(order_id="abc123")
    assert type(history) is list
def test_profile(kiteconnect):
    """Profile endpoint parses into a dict."""
    endpoint = kiteconnect.root + kiteconnect._routes["user.profile"]
    responses.add(
        responses.GET,
        endpoint,
        body=utils.get_response("user.profile"),
        content_type="application/json",
    )
    profile = kiteconnect.profile()
    assert type(profile) is dict
def test_holdings(kiteconnect):
    """Holdings endpoint parses into a list."""
    endpoint = kiteconnect.root + kiteconnect._routes["portfolio.holdings"]
    responses.add(
        responses.GET,
        endpoint,
        body=utils.get_response("portfolio.holdings"),
        content_type="application/json",
    )
    holdings = kiteconnect.holdings()
    assert type(holdings) is list
def test_mf_individual_sip(kiteconnect):
    """A single MF SIP lookup parses into a dict."""
    route = kiteconnect._routes["mf.sip.info"].format(sip_id="abc123")
    responses.add(
        responses.GET,
        kiteconnect.root + route,
        body=utils.get_response("mf.sip.info"),
        content_type="application/json",
    )
    sip = kiteconnect.mf_sips(sip_id="abc123")
    assert type(sip) is dict
def test_mf_holdings(kiteconnect):
    """MF holdings endpoint parses into a list."""
    endpoint = kiteconnect.root + kiteconnect._routes["mf.holdings"]
    responses.add(
        responses.GET,
        endpoint,
        body=utils.get_response("mf.holdings"),
        content_type="application/json",
    )
    holdings = kiteconnect.mf_holdings()
    assert type(holdings) is list
def test_get_gtts(kiteconnect):
    """Listing all GTTs parses into a list."""
    endpoint = kiteconnect.root + kiteconnect._routes["gtt"]
    responses.add(
        responses.GET,
        endpoint,
        body=utils.get_response("gtt"),
        content_type="application/json",
    )
    gtts = kiteconnect.get_gtts()
    assert type(gtts) is list
def test_orders(kiteconnect):
    """Orders endpoint parses into a list."""
    endpoint = kiteconnect.root + kiteconnect._routes["orders"]
    responses.add(
        responses.GET,
        endpoint,
        body=utils.get_response("orders"),
        content_type="application/json",
    )
    orders = kiteconnect.orders()
    assert type(orders) is list
def recommend_by_customer_id():
    """
    Load information from db and call worker to recommend portfolio for the customer.
    @args:
        customer_id (str): customer id
    @returns:
        recommended portfolio
    """
    customer_id = request.args.get('customer_id')
    if customer_id:
        headers = {
            'Content-type': 'application/json',
        }
        # Prepare data for 3rd party API (Load data from sqlite db).
        logger.info('Start to prepare data')
        time_start = time.time()
        data = get_data_for_recommend(customer_id, logger)
        spent_time = time.time() - time_start
        logger.info('End preparing data, spend %s secs' % str(spent_time))
        try:
            # ------ Call 3rd party API. ----- #
            r = requests.post(WORKER_URL + '/portfolio/recommend',
                              headers=headers, data=json.dumps(data))
            res = r.json()
            # -------------------------------- #
            # Make response according to response from 3rd party API.
            # NOTE(review): the worker signals success with the string
            # status code '1313' -- confirm against the worker's API.
            if res['status_code'] == '1313':
                return get_response('[Success] Finish recommend process.', '200')
            else:
                return get_response('[Error] MLaaS internal error.', '500')
        except requests.exceptions.RequestException as e:
            # network-level failure talking to the worker
            logger.error(e)
            return get_response('[Error] MLaaS internal error.', '500')
    else:
        return get_response('[Error] Customer ID not specified.', '401')
def setUp(self):
    # Parse the cached 2009 commune account fixture; self.data holds the
    # values the parser is expected to extract (the "* 1e3" factors scale
    # figures the page reports in thousands).
    self.response = get_response('test/data/commune_2009_account.html',
                                 encoding='windows-1252')
    self.data = {
        'home_tax_basis': 137402 * 1e3,
        'home_tax_rate': 0.2099,
        'home_tax_value': 28841 * 1e3,
        'home_tax_cuts_on_deliberation': 30475 * 1e3,
        'business_tax_value': 0,
        'business_tax_rate': 0,
    }
def retrieve_data_gen(username, password, template, query_args=None, single_request=False):
    """Yield parsed JSON results from the paginated API named by *template*.

    Retries HTTP 502 up to three times per page (5s apart); stops when a
    page returns fewer than *per_page* items, or after the first page
    when *single_request* is true.

    :param username: API username
    :param password: API password/token
    :param template: URL template passed to construct_request
    :param query_args: optional extra query parameters
    :param single_request: fetch only one page when true
    """
    auth = get_auth(username, password)
    #query_args = get_query_args(query_args)
    per_page = 100
    page = 0
    while True:
        page = page + 1
        request = construct_request(per_page, page, query_args, template, auth)  # noqa
        r, errors = get_response(request, auth, template)
        status_code = int(r.getcode())
        retries = 0
        while retries < 3 and status_code == 502:
            print('API request returned HTTP 502: Bad Gateway. Retrying in 5 seconds')
            retries += 1
            time.sleep(5)
            request = construct_request(per_page, page, query_args, template, auth)  # noqa
            r, errors = get_response(request, auth, template)
            status_code = int(r.getcode())
        if status_code != 200:
            # BUG FIX: the original assigned this format string to
            # `template`, clobbering the URL-template parameter used by
            # every subsequent construct_request call.
            error_template = 'API request returned HTTP {0}: {1}'
            errors.append(error_template.format(status_code, r.reason))
            log_error(errors)
        response = json.loads(r.read().decode('utf-8'))
        if len(errors) == 0:
            if type(response) == list:
                for resp in response:
                    yield resp
                # a short page means we've reached the end
                if len(response) < per_page:
                    break
            elif type(response) == dict and single_request:
                yield response
        if len(errors) > 0:
            log_error(errors)
        if single_request:
            break
def test_margins(kiteconnect):
    """Margins endpoint returns a dict containing both segments."""
    endpoint = kiteconnect.root + kiteconnect._routes["user.margins"]
    responses.add(
        responses.GET,
        endpoint,
        body=utils.get_response("user.margins"),
        content_type="application/json",
    )
    margins = kiteconnect.margins()
    assert type(margins) is dict
    assert kiteconnect.MARGIN_EQUITY in margins
    assert kiteconnect.MARGIN_COMMODITY in margins
def test_positions(kiteconnect):
    """Positions endpoint returns a dict with day and net books."""
    endpoint = kiteconnect.root + kiteconnect._routes["portfolio.positions"]
    responses.add(
        responses.GET,
        endpoint,
        body=utils.get_response("portfolio.positions"),
        content_type="application/json",
    )
    positions = kiteconnect.positions()
    assert type(positions) is dict
    assert "day" in positions
    assert "net" in positions
def test_instruments_exchangewise(kiteconnect):
    """Exchange-scoped instrument dump parses into a list."""
    route = kiteconnect._routes["market.instruments"].format(
        exchange=kiteconnect.EXCHANGE_NSE)
    responses.add(
        responses.GET,
        kiteconnect.root + route,
        body=utils.get_response("market.instruments"),
        content_type="text/csv",
    )
    instruments = kiteconnect.instruments(exchange=kiteconnect.EXCHANGE_NSE)
    assert type(instruments) is list
def test_margins_segmentwise(kiteconnect):
    """Segment-scoped margins endpoint returns a dict."""
    route = kiteconnect._routes["user.margins.segment"].format(
        segment=kiteconnect.MARGIN_COMMODITY)
    responses.add(
        responses.GET,
        kiteconnect.root + route,
        body=utils.get_response("user.margins.segment"),
        content_type="application/json",
    )
    commodity = kiteconnect.margins(segment=kiteconnect.MARGIN_COMMODITY)
    assert type(commodity) is dict
def setUp(self):
    # Parse the cached 2012 department account fixture; self.data holds
    # the values the parser is expected to extract.
    self.response = get_response('test/data/department_2012_account.html',
                                 encoding='windows-1252')
    self.data = {
        'property_tax_basis': 445315000,
        'property_tax_value': 141253000,
        'property_tax_rate': 0.3172,
        'business_profit_contribution_basis': 0,
        'business_profit_contribution_value': 40288000,
        'business_network_tax_value': 974000,
    }
def setUp(self):
    # Parse the cached 2010 EPCI account fixture; self.data holds the
    # values the parser is expected to extract.
    self.response = get_response('test/data/epci_2010_account.html',
                                 encoding='windows-1252')
    self.data = {
        'name': 'GFP : CC MONTAGNE BOURBONNAISE',
        'population': 6843,
        'operating_revenues': 606000,
        'compensation_2010_value': 26000,
        'business_property_contribution_additionnal_value': 8000,
        'business_property_contribution_uniq_value': 0,
        'business_property_contribution_eolien_value': 0,
    }
def setUp(self):
    # Parse the cached 2013 EPCI account fixture; self.data holds the
    # values the parser is expected to extract.
    self.response = get_response('test/data/epci_2013_account.html',
                                 encoding='windows-1252')
    self.data = {
        'name': 'GFP : CC MONTAGNE BOURBONNAISE',
        'population': 6878,
        'operating_revenues': 715000,
        'additionnal_land_property_tax_value': 0,
        'business_property_contribution_additionnal_value': 11000,
        'business_property_contribution_uniq_value': 0,
        'business_property_contribution_eolien_value': 0,
        'business_profit_contribution_value': 4000,
    }
def setUp(self):
    # Parse the cached 2012 region account fixture; self.data holds the
    # values the parser is expected to extract.
    self.response = get_response('test/data/region_2012_account.html',
                                 encoding='windows-1252')
    self.data = {
        'name': 'REGION BASSE-NORMANDIE',
        'population': 1470880,
        'operating_revenues': 572356000,
        'tipp': 113678000,
        'business_profit_contribution_value': 64681000,
        'business_profit_contribution_cuts_on_deliberation': 288000,
        'business_network_tax_value': 13299000,
    }
def setUp(self):
    # Parse the cached 2011 Léognan commune account fixture; self.data
    # holds the expected parsed values (the "* 1e3" factors scale figures
    # the page reports in thousands).
    self.response = get_response('test/data/commune_leognan_2011_account.html',
                                 encoding='windows-1252')
    self.data = {
        'home_tax_basis': 10802 * 1e3,
        'home_tax_rate': 0.1976,
        'home_tax_value': 2134 * 1e3,
        'home_tax_cuts_on_deliberation': 3070 * 1e3,
        'property_tax_value': 1539000,
        'property_tax_rate': 0.1753,
        'land_property_tax_value': 273000.,
        'land_property_tax_rate': 1.2127,
    }
def recent_golinks():
    """Return the most recently created go-links, 100 per page."""
    email = utils.get_email_from_token(request.args.get('token'))
    page = int(request.args.get('page', '0'))
    query = {
        'order': '-createdAt',
        'skip': page * 100,
    }
    response = ParseDriver.make_parse_get_request('/1/classes/ParseGoLink', query)
    results = response['results']
    # default missing click counters to zero
    for link in results:
        link.setdefault('num_clicks', 0)
    return utils.get_response(results)
def setUp(self):
    # Parse the cached 2014 region account fixture; self.data holds the
    # values the parser is expected to extract.
    self.response = get_response('test/data/region_2014_account.html',
                                 encoding='windows-1252')
    self.data = {
        'name': 'REGION BASSE-NORMANDIE',
        'population': 1475684,
        'local_tax': 81584000,
        'operating_revenues': 597792000,
        'tipp': 113935000,
        'business_profit_contribution_value': 68004000,
        'business_profit_contribution_cuts_on_deliberation': 91000,
        'business_network_tax_value': 13609000,
    }
def setUp(self):
    # Parse the cached 2013 region account fixture; self.data holds the
    # values the parser is expected to extract.
    self.response = get_response('test/data/region_2013_account.html',
                                 encoding='windows-1252')
    self.data = {
        'name': 'REGION BASSE-NORMANDIE',
        'population': 1473494,
        'local_tax': 80964000,
        'operating_revenues': 572776000,
        'tipp': 114518000,
        'business_profit_contribution_value': 66810000,
        'business_profit_contribution_cuts_on_deliberation': 140000,
        'business_network_tax_value': 13616000,
    }
def setUp(self):
    # Parse the cached 2015 region account fixture; self.data holds the
    # values the parser is expected to extract.
    self.response = get_response('test/data/region_2015_account.html',
                                 encoding='windows-1252')
    self.data = {
        'name': 'REGION BASSE-NORMANDIE',
        'population': 1477209,
        'local_tax': 82652000,
        'operating_revenues': 571984000,
        'tipp': 117483000,
        'business_profit_contribution_value': 69076000,
        'business_profit_contribution_cuts_on_deliberation': 94000,
        'business_network_tax_value': 13611000,
    }
def popular_golinks():
    """Return the requesting member's go-links ordered by click count."""
    email = utils.get_email_from_token(request.args.get('token'))
    page = int(request.args.get('page', '0'))
    query = {
        'order': '-num_clicks',
        'skip': page * 100,
        'where': json.dumps({'member_email': email}),
    }
    response = ParseDriver.make_parse_get_request('/1/classes/ParseGoLink', query)
    results = response['results']
    # default missing click counters to zero
    for link in results:
        link.setdefault('num_clicks', 0)
    return utils.get_response(results)
def setUp(self):
    # Parse the cached 2010 department account fixture; self.data holds
    # the values the parser is expected to extract.
    self.response = get_response('test/data/department_2010_account.html',
                                 encoding='windows-1252')
    self.data = {
        'population': 537820,
        'operating_revenues': 504060000,
        'operating_real_revenues': 498856000,
        'local_tax': 213518000,
        'refund_tax': 0,
        'other_tax': 113116000,
        'advertisement_tax': 30331000,
        'tipp': 45951000,
        'allocation_and_stake': 160322000,
        'compensation_2010_value': 79465000
    }
def setUp(self):
    # Parse the cached 2009 region account fixture; self.data holds the
    # values the parser is expected to extract.
    self.response = get_response('test/data/region_2009_account.html',
                                 encoding='windows-1252')
    self.data = {
        'tipp': 97982000,
        'operating_costs': 445046000,
        'property_tax_basis': 1201584000,
        'property_tax_cuts_on_deliberation': 42000,
        'property_tax_value': 63566000,
        'property_tax_rate': 0.0529,
        'business_tax_basis': 2777345000,
        'business_tax_cuts_on_deliberation': 40309000,
        'business_tax_value': 88318000,
        'business_tax_rate': 0.0318,
    }
def attendance():
    """Map each committee member's email to the events they attended."""
    requester = utils.get_email_from_token(request.args.get('token'))
    me = scripts.load_pickle_key('member_email_hash')[requester]
    members = scripts.load_pickle_key('committee_members_hash')[me['committee']]
    emails = [member['email'] for member in members]
    query = {
        'limit': sys.maxint,
        'where': json.dumps({'member_email': {'$in': emails}}),
    }
    records = ParseDriver.make_parse_get_request(
        '/1/classes/ParseEventMember', query)['results']
    # group attendance records by member email
    by_email = {}
    for record in records:
        by_email.setdefault(record['member_email'], []).append(
            {'event_id': record['event_id'], 'type': record['type']})
    return utils.get_response(by_email)
def setUp(self):
    # Parse the cached 2009 department account fixture; self.data holds
    # the full set of values the parser is expected to extract.
    self.response = get_response('test/data/department_2009_account.html',
                                 encoding='windows-1252')
    self.data = {
        'population': 537061,
        'operating_revenues': 465068000,
        'operating_real_revenues': 459748000,
        'local_tax': 193093000,
        'refund_tax': 0,
        'other_tax': 99257000,
        'tipp': 39185000,
        'allocation_and_stake': 158439000,
        'allocation': 110390000,
        'realignment': 15679000,
        'operating_costs': 463765000,
        'operating_real_costs': 428409000,
        'staff_costs': 86827000,
        'purchases_and_external_costs': 57954000,
        'subsidies_and_contingents': 272400000,
        'mandatory_contributions_and_stakes': 54939000,
        'subsidies': 16009000,
        'individual_aids': 113380000,
        'pch': 7565000,
        'apa': 45375000,
        'rsa': 28671000,
        'accomodation_costs': 79145000,
        'financial_costs': 10238000,
        'net_profit': 1303000,
        'self_financing_capacity': 31339000,
        'debt_at_end_year': 294726000,
        'debt_annual_costs': 26249000,
        'home_tax_value': 52485000,
        'home_tax_basis': 457175000,
        'home_tax_rate': 0.1148,
        'home_tax_cuts_on_deliberation': 0,
        'property_tax_value': 62591000,
        'property_tax_basis': 403301000,
        'property_tax_rate': 0.1552,
        'property_tax_cuts_on_deliberation': 33000,
        'land_property_tax_value': 596000,
        'land_property_tax_basis': 1775000,
        'land_property_tax_rate': 0.3363,
        'land_property_tax_cuts_on_deliberation': 0,
        'business_tax_value': 75344000,
        'business_tax_basis': 839954000,
        'business_tax_rate': 0.0897,
        'business_tax_cuts_on_deliberation': 3937000,
    }
def setUp(self):
    # Parse the cached 2000 commune (Orléans) account fixture; self.data
    # holds the expected parsed values (the "* 1e3" factors scale figures
    # the page reports in thousands).
    self.response = get_response('test/data/commune_2000_account.html',
                                 encoding='windows-1252')
    self.data = {
        'population': 116559,
        'name': 'ORLEANS',
        'operating_revenues': 154756 * 1e3,
        'local_tax': 72981 * 1e3,
        'other_tax': 4549 * 1e3,
        'allocation': 30959 * 1e3,
        'operating_costs': 125548 * 1e3,
        'staff_costs': 58592 * 1e3,
        'purchases_and_external_costs': 27790 * 1e3,
        'financial_costs': 4756 * 1e3,
        'contingents': 1839 * 1e3,
        'paid_subsidies': 23568 * 1e3,
        'net_profit': 29208 * 1e3,
        'home_tax_value': 19394 * 1e3,
        'home_tax_rate': 0.1756,
        'property_tax_value': 25575 * 1e3,
        'property_tax_rate': 0.2440,
        'land_property_tax_value': 66 * 1e3,
        'land_property_tax_rate': 0.3313,
        'business_tax_value': 26711 * 1e3,
        'business_tax_rate': 0.1703,
        'investment_ressources': 118468 * 1e3,
        'loans': 30969 * 1e3,
        'received_subsidies': 7837 * 1e3,
        'fctva': 3014 * 1e3,
        'returned_properties': 0,
        'investments_usage': 125254 * 1e3,
        'facilities_expenses': 50482 * 1e3,
        'debt_repayments': 25686 * 1e3,
        'costs_to_allocate': 2073 * 1e3,
        'fixed_assets': 30466 * 1e3,
        'residual_financing_capacity': 6786 * 1e3,
        'thirdparty_balance': 1000,
        'financing_capacity': 6787 * 1e3,
        'global_profit': 22421 * 1e3,
        'surplus': 24048 * 1e3,
        'self_financing_capacity': 33096 * 1e3,
        'debt_repayment_capacity': 7410 * 1e3,
        'debt_at_end_year': 96199 * 1e3,
        'debt_annual_costs': 29564 * 1e3,
        'advances_from_treasury': 0,
        'working_capital': 10927 * 1e3,
    }
def setUp(self):
    # Parse the cached 2013 department account fixture; self.data holds
    # the values the parser is expected to extract.
    self.response = get_response('test/data/department_2013_account.html',
                                 encoding='windows-1252')
    self.data = {
        'operating_revenues': 531628000,
        'local_tax': 188257000,
        'other_tax': 140564000,
        'advertisement_tax': 31324000,
        'allocation': 111353000,
        'working_capital': 25320000,
        'property_tax_basis': 458250000,
        'property_tax_value': 145357000,
        'property_tax_rate': 0.3172,
        'business_profit_contribution_basis': 0,
        'business_profit_contribution_value': 40973000,
        'business_network_tax_value': 1004000,
    }
def setUp(self):
    # Parse the cached 2014 EPCI account fixture; self.data holds the
    # values the parser is expected to extract.
    self.response = get_response('test/data/epci_2014_account.html',
                                 encoding='windows-1252')
    self.data = {
        'name': 'GFP : CC MONTAGNE BOURBONNAISE',
        'population': 6897,
        'operating_revenues': 753000,
        'home_tax_basis': 6975000,
        'home_tax_rate': 0.0112,
        'home_tax_value': 78000,
        'additionnal_land_property_tax_value': 0,
        'business_property_contribution_additionnal_value': 11000,
        'business_property_contribution_uniq_value': 0,
        'business_property_contribution_eolien_value': 0,
        'business_profit_contribution_value': 5000,
        'other_tax': 942000,
        'fiscal_repayment': -860000,
    }
def setUp(self):
    # Parse the cached 2008 EPCI account fixture; self.data holds the
    # values the parser is expected to extract.
    self.response = get_response('test/data/epci_2008_account.html',
                                 encoding='windows-1252')
    self.data = {
        'name': 'GFP : CC MONTAGNE BOURBONNAISE',
        'population': 6858,
        'operating_revenues': 593000,
        'local_tax': 80000,
        'home_tax_basis': 5855000,
        'home_tax_value': 24000,
        'home_tax_rate': 0.0041,
        'property_tax_basis': 4456000,
        'property_tax_value': 22000,
        'property_tax_rate': 0.0050,
        'business_tax_basis': 3172000,
        'business_tax_value': 25000,
        'business_tax_rate': 0.0077,
    }
def setUp(self):
    # Parse the cached 2014 department account fixture; self.data holds
    # the values the parser is expected to extract.
    self.response = get_response('test/data/department_2014_account.html',
                                 encoding='windows-1252')
    self.data = {
        'operating_revenues': 543861000,
        'local_tax': 190228000,
        'other_tax': 150642000,
        'advertisement_tax': 42176000,
        'allocation': 108938000,
        'working_capital': 26133000,
        'property_tax_basis': 464872000,
        'property_tax_value': 147459000,
        'property_tax_rate': 0.3172,
        'property_tax_cuts_on_deliberation': 204000,
        'business_profit_contribution_basis': 0,
        'business_profit_contribution_value': 40139000,
        'business_profit_contribution_cuts_on_deliberation': 34000,
        'business_network_tax_value': 1074000,
    }
def setUp(self):
    # Parse the cached 2008 region account fixture; self.data holds the
    # values the parser is expected to extract.
    self.response = get_response('test/data/region_2008_account.html',
                                 encoding='windows-1252')
    self.data = {
        'name': 'REGION BASSE-NORMANDIE',
        'population': 1422193,
        'operating_revenues': 517789000,
        'tipp': 92536000,
        'operating_costs': 411269000,
        'property_tax_basis': 1146012000,
        'property_tax_value': 60623000,
        'property_tax_rate': 0.0529,
        'land_property_tax_basis': 8771000,
        'land_property_tax_value': 631000,
        'land_property_tax_rate': 0.0716,
        'business_tax_basis': 2686771000,
        'business_tax_value': 85439000,
        'business_tax_rate': 0.0318,
    }
tree = parse_response_soup(body, encoding) return tree if __name__ == '__main__': parser = argparse.ArgumentParser( description="Parse the url with lxml and show the diff") parser.add_argument('url') parser.add_argument('--encoding', default='utf-8') parser.add_argument('--nosoup', default=False, action='store_true') parser.add_argument('--soupthreshold', default=0.9, type=float) args = parser.parse_args() response = get_response(args.url, {}) original_body = response.body allow_soup = not args.nosoup tree = parse_response(body=original_body, encoding=args.encoding, allow_soup=allow_soup, soupthreshold=args.soupthreshold) parsed_body = etree.tostring(tree, method='html', encoding=args.encoding) original_body_lines = original_body.decode('utf-8').splitlines() parsed_body_lines = parsed_body.decode('utf-8').splitlines() output_file = '{fn}.diff.html'.format(fn=slug_from_url(args.url)) with(open(output_file, 'w')) as diff_file:
def setUp(self):
    # Parse the cached 2014 commune (Orléans) account fixture; self.data
    # holds the full set of values the parser is expected to extract.
    self.response = get_response('test/data/commune_2014_account.html',
                                 encoding='windows-1252')
    self.data = {
        'name': 'ORLEANS',
        'population': 117988,
        'operating_revenues': 177973000,
        'local_tax': 78960000,
        'other_tax': 6837000,
        'allocation': 34364000,
        'operating_costs': 164278000,
        'staff_costs': 78955000,
        'purchases_and_external_costs': 32426000,
        'financial_costs': 2294000,
        'contingents': 222000,
        'paid_subsidies': 25526000,
        'net_profit': 13695000,
        'investment_ressources': 64543000,
        'loans': 9249000,
        'received_subsidies': 9744000,
        'fctva': 6704000,
        'returned_properties': 0,
        'investments_usage': 59005000,
        'facilities_expenses': 45118000,
        'debt_repayments': 12608000,
        'costs_to_allocate': 0,
        'fixed_assets': 0,
        'residual_financing_capacity': -5537000,
        'thirdparty_balance': 0,
        'financing_capacity': -5537000,
        'global_profit': 19233000,
        'surplus': 32519000,
        'self_financing_capacity': 29644000,
        'debt_repayment_capacity': 17036000,
        'debt_at_end_year': 97863000,
        'debt_annual_costs': 14780000,
        'working_capital': 5198000,
        'home_tax_basis': 154364000,
        'home_tax_cuts_on_deliberation': 33849000,
        'property_tax_basis': 154047000,
        'property_tax_cuts_on_deliberation': 0.,
        'land_property_tax_basis': 206000,
        'land_property_tax_cuts_on_deliberation': 0.,
        'additionnal_land_property_tax_basis': 0.,
        'additionnal_land_property_tax_cuts_on_deliberation': 0.,
        'business_property_contribution_basis': 0.,
        'business_property_contribution_cuts_on_deliberation': 0.,
        'home_tax_value': 32401000,
        'home_tax_rate': 0.2099,
        'property_tax_value': 45922000,
        'property_tax_rate': 0.2981,
        'land_property_tax_value': 82000,
        'land_property_tax_rate': 0.3960,
        'additionnal_land_property_tax_value': 0.,
        'additionnal_land_property_tax_rate': 0.,
        'business_property_contribution_value': 0.,
        'business_property_contribution_rate': 0.,
        'business_profit_contribution_value': 0.,
        'business_profit_contribution_cuts_on_deliberation': 0.,
        'business_network_tax_value': 0.,
        'business_network_tax_cuts_on_deliberation': 0.,
        'retail_land_tax_value': 0.,
        'retail_land_tax_cuts_on_deliberation': 0.,
    }
def dummy_view():
    """Echo the processed request back to the caller as JSON."""
    payload = get_response(request)
    return jsonify(payload)
def setUp(self):
    # Parse the cached 2008 department account fixture; self.data holds
    # the full set of values the parser is expected to extract.
    self.response = get_response('test/data/department_2008_account.html',
                                 encoding='windows-1252')
    self.data = {
        'population': 535489,
        'operating_revenues': 455303000,
        'operating_real_revenues': 453230000,
        'local_tax': 183583000,
        'refund_tax': 0,
        'other_tax': 99211000,
        'advertisement_tax': 34398000,
        'tipp': 31453000,
        'allocation_and_stake': 158788000,
        'allocation': 109209000,
        'realignment': 17887000,
        'operating_costs': 426105000,
        'operating_real_costs': 392785000,
        'staff_costs': 77310000,
        'purchases_and_external_costs': 57394000,
        'subsidies_and_contingents': 247743000,
        'mandatory_contributions_and_stakes': 52527000,
        'subsidies': 17595000,
        'individual_aids': 174671000,
        'pch': 5510000,
        'apa': 40781000,
        'rsa': 0,
        'accomodation_costs': 0,
        'financial_costs': 9761000,
        'net_profit': 29198000,
        'self_financing_capacity': 60444000,
        'investment_ressources': 155286000,
        'fctva': 7913000,
        'received_subsidies': 20379000,
        'sold_fixed_assets': 704000,
        'loans': 45000000,
        'investments_usage': 129789000,
        'investments_direct_costs': 68893000,
        'paid_subsidies': 44227000,
        'debt_repayments': 14897000,
        'residual_financing_capacity': -25497000,
        'thirdparty_balance': 323000,
        'financing_capacity': -25173000,
        'global_profit': 54372000,
        'debt_at_end_year': 260957000,
        'debt_annual_costs': 24298000,
        'home_tax_value': 50719000,
        'home_tax_basis': 441803000,
        'home_tax_rate': 0.1148,
        'home_tax_cuts_on_deliberation': 1146000,
        'property_tax_value': 59808000,
        'property_tax_basis': 385356000,
        'property_tax_rate': 0.1552,
        'property_tax_cuts_on_deliberation': 953000,
        'land_property_tax_value': 587000,
        'land_property_tax_basis': 1745000,
        'land_property_tax_rate': 0.3363,
        'land_property_tax_cuts_on_deliberation': 4000,
        'business_tax_value': 73789000,
        'business_tax_basis': 822619000,
        'business_tax_rate': 0.0897,
        'business_tax_cuts_on_deliberation': 1637000,
    }