def path_to_root(dhis2_id):
    """Return the ids of every ancestor org unit of *dhis2_id*, child-to-root order."""
    chain = []
    node = api.get('organisationUnits/' + dhis2_id, json=True)
    # Walk up until a node has no parent (the root).
    while node.get('parent') is not None:
        parent_id = node['parent']['id']
        chain.append(parent_id)
        node = api.get('organisationUnits/' + parent_id, json=True)
    return chain
def get(self, deck_id=None):
    """Write a full dump of cards, decks and deckcards (for user 1) to the response."""
    payload = {
        "object": "full",
        "timestamp": int(time.time() * 1000),  # milliseconds since epoch
        "cards": {},
        "decks": {},
        "deckcards": {},
    }
    for card in db.get_cards(1):
        payload["cards"][card["id"]] = {
            "id": card["id"],
            "front": card["front"],
            "back": card["back"],
        }
    for deck in api.get(api.decks, 1):
        payload["decks"][deck["id"]] = deck
    for deckcard in api.get(api.deckcards, 1):
        payload["deckcards"][deckcard["id"]] = deckcard
    self.write(payload)
def get(gene_id, **kwargs):
    """
    Given a gene ID (as per the Mygene documentation) retrieve the given gene.

    :returns gene: a single gene object.
    """
    # NOTE(review): the inner `get` presumably resolves to an HTTP helper
    # imported under the same name, not to this function — confirm.
    url = endpoints['get-gene'].format(gene_id=gene_id)
    return Gene(get(url, params=kwargs))
def put_property_if_not_exist(resource, obj, w, property_name, prop):
    """Ensure `prop` appears in the `property_name` list of the item matching `w`.

    If no item matches the filter, POST `obj` seeded with the property; otherwise
    merge any stored non-meta fields the caller omitted into `obj`. Either way,
    append `prop` to the property list (if absent) and PUT the object back.
    """
    r = api.get(resource, where=w)
    if len(r['_items']) == 0:
        # No existing item: create one with the property list pre-seeded.
        obj[property_name] = [prop]
        r = api.post(resource, obj)
        obj_old = r['_items'][0]
    else:
        obj_old = r['_items'][0]
        # Carry over stored non-meta fields ('_'-prefixed keys are Eve metadata)
        # that the caller did not supply; replaces the old bare try/except.
        for key in obj_old:
            if not key.startswith('_') and key not in obj:
                obj[key] = obj_old[key]
    # Existing property list, or empty if the item never had one.
    properties = r['_items'][0].get(property_name, [])
    if prop not in properties:
        properties.append(prop)
    obj[property_name] = properties
    r = api.put(resource + "/" + r["_items"][0]["_id"], obj)
def get(variant_id, **kwargs):
    """
    Given a variant ID (as per the MyVariant documentation) retrieve the
    given variant.

    :returns variant: a single variant object.
    """
    # NOTE(review): the inner `get` presumably resolves to an HTTP helper
    # imported under the same name, not to this function — confirm.
    url = endpoints['get-variant'].format(variant_id=variant_id)
    return Variant(get(url, params=kwargs))
def list (): api.getCredentials() log.debug ("Command: List.") url = "/gists" gists = api.get(url) public_count = 0 private_count = 0 table = Texttable(max_width=defaults.max_width) table.set_deco(Texttable.HEADER | Texttable.HLINES) table.set_cols_align(["l", "l", "l", "l", "l"]) table.set_cols_width([4, 30, 6, 20, 30]) table.header( ["","Files","Public", "Gist ID", "Description"] ) for (i, gist) in enumerate(gists): private = False file_list = '' for (file, data) in gist['files'].items(): file_list += "'" + file + "' " if gist['public']: public_count += 1 else: private_count += 1 table.add_row( [i+1, file_list, str(gist['public']), gist['id'], gist['description']] ) print table.draw() print '' print "You have %i Gists. (%i Private)" % (len(gists), private_count)
def _get_gist(id):
    """Fetch a single gist record by its id."""
    api.getCredentials()
    log.debug("Internal: _get_gist: " + id)
    return api.get("/gists/" + id)
def setupStops():
    """Populate the module-level `stops` list from the transit API."""
    stops.extend(
        {"name": s['name'], "stop_id": s['stop_id'], "routes": s['routes']}
        for s in api.get('stops')['data']
    )
def post_if_not_exist(resource, obj, w):
    """POST `obj` to `resource` unless an item matching filter `w` already exists.

    Best-effort by design: failures inspecting the response or posting are
    swallowed so callers can treat this as idempotent fire-and-forget.
    """
    r = api.get(resource, where=w)
    try:
        if len(r['_items']) == 0:
            api.post(resource, obj)
    except Exception:
        # Was a bare `except:`; Exception keeps the best-effort contract
        # without trapping SystemExit/KeyboardInterrupt.
        pass
def update_leaders(db, user, follower_id):
    # Refresh the stored set of accounts `follower_id` follows ("leaders") by
    # paging through the Twitter friends/ids API; skips work when refreshed
    # within UPDATE_PERIOD.
    # only update leaders if they haven't been updated recently:
    with db, db.cursor() as cursor:
        twitter = database.get_twitter(cursor, follower_id)
        log(user, 'maybe updating leaders for %s updated at %s', twitter.screen_name, twitter.leaders_updated_time)
        if twitter.leaders_updated_time and twitter.leaders_updated_time > now() - UPDATE_PERIOD:
            # log() presumably returns None, making this a falsy "did nothing" — confirm.
            return log(user, 'updated too recently')
    start_time = now()
    api_cursor = -1  # cursor=-1 requests first page
    while api_cursor:  # cursor=0 means no more pages
        log(user, 'getting cursor=%s', api_cursor)
        data = api.get(user, 'friends/ids', user_id=twitter.api_id, cursor=api_cursor)
        api_cursor = data['next_cursor']
        log(user, 'got %d leaders, next_cursor=%s', len(data['ids']), api_cursor)
        # One transaction per page: map raw API ids to local rows, then record edges.
        with db, db.cursor() as cursor:
            leader_ids = database.add_twitter_api_ids(cursor, data['ids'])
            database.update_twitter_leaders(cursor, follower_id, leader_ids)
    # delete leaders who weren't seen again:
    with db, db.cursor() as cursor:
        database.delete_old_twitter_leaders(cursor, follower_id, start_time)
        database.update_twitter_leaders_updated_time(cursor, follower_id, start_time)
    return True
def setupRoutes():
    """Populate the module-level `routes` list for the configured agency."""
    routes.extend(
        {"name": r['long_name'], "route_id": r['route_id'], "stops": r['stops']}
        for r in api.get('routes')['data'][api.AGENCY]
    )
def update_outsiders(db, user, outsider_ids, retry=True):
    # Sync the private 'fllow-outsiders' Twitter list so it matches
    # `outsider_ids` exactly, creating the list on first use (single retry).
    try:
        data = api.get(user, 'lists/members', slug='fllow-outsiders', owner_screen_name=user.screen_name, count=5000, skip_status=True)
    except requests.exceptions.HTTPError as e:
        warn(user, 'fllow-outsiders list not found')
        # 404 means the list doesn't exist yet: create it once, then retry.
        if e.response.status_code == 404 and retry:
            api.post(user, 'lists/create', name='fllow outsiders', mode='private', description="users you manually followed / fllow didn't automatically follow")
            return update_outsiders(db, user, outsider_ids, retry=False)
        raise e
    # Set-comprehension scope: the `user` loop variable does not clobber the parameter.
    current_api_ids = {user['id'] for user in data['users']}
    with db, db.cursor() as cursor:
        api_ids = database.get_twitter_api_ids(cursor, outsider_ids)
    added_api_ids = list(api_ids - current_api_ids)
    log(user, 'adding %d outsiders', len(added_api_ids))
    # Twitter caps list-membership mutations at 100 members per call.
    for i in range(0, len(added_api_ids), 100):
        api.post(user, 'lists/members/create_all', slug='fllow-outsiders', owner_screen_name=user.screen_name, user_id=','.join(str(api_id) for api_id in added_api_ids[i:i+100]))
    removed_api_ids = list(current_api_ids - api_ids)
    log(user, 'removing %d outsiders', len(removed_api_ids))
    for i in range(0, len(removed_api_ids), 100):
        api.post(user, 'lists/members/destroy_all', slug='fllow-outsiders', owner_screen_name=user.screen_name, user_id=','.join(str(api_id) for api_id in removed_api_ids[i:i+100]))
def adding(result, area_id, data, parents, election_id):
    # Recursively roll the summary/count numbers in `result` up into every
    # ancestor area of `area_id` for this election, memoising parent lookups
    # in `parents` and aggregation buckets in `data`.
    try:
        parents[area_id]
    except:
        # Parent not cached yet: resolve it via the areas resource.
        areas = api.get('areas', where={"id": area_id, "parents.election_id": election_id})
        if len(areas["_items"]) > 0:
            k = 0
            # Select the parent entry belonging to this election.
            for parent in areas["_items"][0]["parents"]:
                if parent["election_id"] == election_id:
                    par = k
                k = k + 1
            parents[area_id] = areas["_items"][0]["parents"][par]['area_id']
    try:
        data[parents[area_id]]
    except:
        # First sight of this parent: seed its aggregation buckets.
        parent_areas = api.get('areas', where={"id": parents[area_id], "parents.election_id": election_id})
        data[parents[area_id]] = {'summary': {}, 'counts': {}, 'classification': parent_areas["_items"][0]['classification']}
    try:
        parent_id = parents[area_id]
        #print(area_id,parent_id)
        try:
            # Accumulate summary values by name.
            for s in result['summary']:
                try:
                    data[parent_id]['summary'][s['name']] = data[parent_id]['summary'][s['name']] + int(s['value'])
                except:
                    data[parent_id]['summary'][s['name']] = int(s['value'])
            # Accumulate vote counts by option identifier.
            for s in result['counts']:
                try:
                    data[parent_id]['counts'][s['option_identifier']] = data[parent_id]['counts'][s['option_identifier']] + int(s['votes'])
                except:
                    data[parent_id]['counts'][s['option_identifier']] = int(s['votes'])
            # Recurse so the numbers propagate to the root area.
            data = adding(result, parent_id, data, parents, election_id)
        except:
            nothing = 0
    except:
        nothing = 0
        print('x', area_id)
    #raise(Exception)
    return data
def sendtext(content):
    """Post `content` as a status update unless the API rate limit is exhausted."""
    response = api.get('account/rate_limit_status')
    tree = etree.fromstring(response)
    remaining = tree[1].text
    if int(remaining) == 0:
        return
    code = api.post('statuses/update', status=content)
    logbook.info("sent!")
def _is_swearword(w):
    """Return True if `w` (case-insensitive) matches any configured swearword regex."""
    lowered = w.lower()
    return any(
        re.match(pattern, lowered) is not None
        for pattern in api.get('swearwords')
    )
def get_all_items(resource, **kwargs):
    """Fetch every item of a paginated (Eve-style) resource.

    Reads `_meta.total` / `_meta.max_results` from the first response to
    compute the page count, fetches each page, and concatenates `_items`.
    Returns [] (with a diagnostic print) when the response has no `_meta`.
    """
    resp = api.get(resource, **kwargs)
    out = []
    try:
        meta = resp['_meta']
        npages = math.ceil(int(meta['total']) / int(meta['max_results']))
    except (KeyError, TypeError):  # was a bare except; no/odd metadata
        print("no meta:", resource)
        print(kwargs)
        return out
    for page in range(1, npages + 1):
        # Use a fresh dict per request: the old code aliased `kwargs`,
        # mutating the caller-visible mapping on every iteration.
        page_kwargs = dict(kwargs, page=str(page))
        r = api.get(resource, **page_kwargs)
        out += r.get('_items', [])
    return out
def search(key):
    """Search discoverable groups matching `key` and wrap them as Group models."""
    r = api.get('groups', 'discover', 'search', params={'q': key})
    found = []
    for payload in deserialize(r.content)['groups']:
        group = models.Group()
        group.import_from_api(payload)
        found.append(group)
    return found
def find_by(**kwargs):
    """
    Given a set of key-value pairs, or kwargs, search for the desired gene(s).

    :returns genes: list of matches for the query provided.
    """
    results = get(endpoints['get-query'], params=kwargs)
    return [Gene(hit) for hit in results.get('hits')]
def strategy():
    """Dump the current game state to stdout and return the fixed move."""
    info = api.get()
    print('=======================')
    # Labeled state dump, one line per entity.
    for label, value in (
        ('radius', api.getRadius()),
        ('me ', api.getMe()),
        ('friend', api.getFriend()),
        ('enemy1', api.getEnemy1()),
        ('enemy2', api.getEnemy2()),
    ):
        print(label, value)
    return [1000, 1000, 'gogo']
def resolve_attribute(article, attr):
    """
    Look into the 'attribute-resolvers' list of functions and return the first
    actual (non-None) result.
    """
    for resolver in api.get("attribute-resolvers", function=True):
        result = resolver(None, article, attr)
        if result is not None:
            return result
    return None
def df(*args, **kwargs):
    """Gate the wrapped view `f` behind login, suspension and rules-agreement checks."""
    if not g.get("my"):
        return redirect('/login', request.full_path)
    my = g.my
    if my["isSuspended"]:
        return redirect("/suspend")
    # Only hit the rules-agreement API when the user actually needs to agree.
    if rulesAgree and not my.get("rulesAgree", False):
        if api.get("web/rules_agree_period")["result"]:
            return redirect("/rules_agree")
    return f(*args, **kwargs)
def test_decks(db):
    """Deck listing returns only the requested user's decks, compared by name."""
    for uid in (1, 2):
        db.engine.execute(_db.users.insert(), id=uid, name="Jim", email="", password="")
    db.engine.execute(_db.decks.insert(), [
        {'name': "First", 'owner_id': 1, 'student': ""},
        {'name': "Second", 'owner_id': 1, 'student': ""},
        {'name': "Second", 'owner_id': 2, 'student': ""},
    ])
    decks = api.get(api.decks, user_id=1)
    list_equals(["First", "Second"], decks, "name")
def sendtext():
    """Post the next queued message as a status update, respecting the rate limit."""
    xml = api.get('account/rate_limit_status')
    xml = etree.fromstring(xml)
    limit_num = xml[1].text
    # int() replaces string.atoi, which was deprecated for years and removed
    # in Python 3; behavior is identical for decimal strings.
    if int(limit_num) == 0:
        return
    id, content = message.get_text()
    if id == 0:
        return  # nothing queued
    code = api.post('statuses/update', status=content)
    #if code == 1:
    message.over(id)
def data_from_DB(preset_selectie, filter_selectie, category):
    # Aggregate OHW/pOHW time series, donut counts and the detail table from
    # the dashboard API across every (preset, filter) key combination.
    # Returns (OHW, pOHW, donut, df_table); all None/{} when either selection is empty.
    if (not preset_selectie == []) & (not filter_selectie == []):
        keys = []
        for key1 in preset_selectie:
            for key2 in filter_selectie:
                # NOTE(review): str('NL' not in preset_selectie) prefixes the key
                # with the literal "True"/"False" — looks like a deliberate
                # namespace flag, confirm against the API's key scheme.
                keys += [str('NL' not in preset_selectie) + key1.replace(' ', '_') + key2 + category]
        OHW = None
        pOHW = None
        donut = {}
        df_table = None
        count = 0
        url_s = '/dashboard_geulen?'
        for f in keys:
            url_s += 'filters=' + f + '&'
        docs = api.get(url_s[0:-1])  # [0:-1] strips the trailing '&'
        for doc in docs:
            if count == 0:
                # First document seeds the accumulators.
                OHW = pd.read_json(doc['OHW'], orient='records').set_index('Datum')
                pOHW = pd.read_json(doc['pOHW'], orient='records').set_index('Datum')
                donut = doc['donut']
                df_table = pd.read_json(doc['df_table'], orient='records')
            else:
                # Subsequent documents are summed / appended into the accumulators.
                OHW1 = pd.read_json(doc['OHW'], orient='records').set_index('Datum')
                OHW = OHW.add(OHW1, fill_value=0)
                pOHW1 = pd.read_json(doc['pOHW'], orient='records').set_index('Datum')
                pOHW = pOHW.add(pOHW1, fill_value=0)
                if doc['donut'] is not None:
                    for key in doc['donut']:
                        if key in donut:
                            donut[key] = donut[key] + doc['donut'][key]
                        else:
                            donut[key] = doc['donut'][key]
                df_table = df_table.append(pd.read_json(doc['df_table'], orient='records'), sort=True)
            count += 1
        # Column selection differs per view: 'global' omits the category detail columns.
        if category == 'global':
            df_table = df_table[config.columns_g].sort_values(by=['OHW'])
        else:
            col = ['Beschrijving categorie', 'Oplosactie']
            df_table = df_table[config.columns_g + col].sort_values(by=['OHW'])
        OHW = OHW.reset_index()
        pOHW = pOHW.reset_index()
    else:
        OHW = None
        pOHW = None
        donut = {}
        df_table = None
    return OHW, pOHW, donut, df_table
def df(*args, **kwargs):
    """Wrapper enforcing login, suspension and rules-agreement before invoking f."""
    if not g.get("my"):
        return redirect('/login', request.full_path)
    my = g.my
    if my["isSuspended"]:
        return redirect("/suspend")
    needs_agreement = rulesAgree and not my.get("rulesAgree", False)
    # Short-circuit keeps the API call out of the common (already-agreed) path.
    if needs_agreement and api.get("web/rules_agree_period")["result"]:
        return redirect("/rules_agree")
    return f(*args, **kwargs)
def main():
    """Register a single 'design pattern' artefact with the repository API."""
    record = {
        "id": "",
        "sourceCode": api.get('source-codes', 1),
        "status": "PRIVATE",
        "tag": "design pattern",
        "xmlDescriptor": "descriptor",
        "author": "",
        "license": "",
    }
    api.request("Design Pattern", 'design-patterns', record)
def update_text(data1, data2):
    """Build the five dashboard info strings, including the last-update date."""
    date_u = api.get('/Hulplijsten?id=update_date')[0]['date']
    if date_u is None:
        date_u = ' '  # keep the parenthesised suffix well-formed
    lines = [
        data1.get('0') + " projecten",
        data1.get('1') + " meter",
        data2.get('0') + " projecten",
        data2.get('1') + " meter",
        "(Laatste nieuwe data: " + date_u + ")",
    ]
    return lines
def main():
    """Interactively authorize a Twitter account via PIN and store its credentials."""
    request_token = api.get_request_token()
    authorize_url = api.get_authorize_url(request_token['oauth_token'])
    pin = input('Go to {} and enter the PIN here: '.format(authorize_url))
    access_token = api.get_access_token(request_token['oauth_token'], pin)
    user = User(access_token['oauth_token'], access_token['oauth_token_secret'])
    user_data = api.get(user, 'account/verify_credentials')
    logging.info('adding user %s', user_data['screen_name'])
    with database.connect() as db, db.cursor() as cursor:
        twitter_id, = database.update_twitters(cursor, [user_data])
        database.update_user(cursor, twitter_id, user.access_token, user.access_token_secret)
def directmsg():
    # Drain the direct-message inbox: persist each message, then delete it,
    # retrying the delete until the API reports success (code 1).
    xml = api.get('direct_messages/inbox')
    if xml:
        xml = etree.fromstring(xml)
        num = len(xml)
        if num > 0:
            for i in range(num):
                id = xml[i][0].text   # message id
                msg = xml[i][1].text  # message body
                message.save(msg, 2)  # 2 presumably tags the source/type — confirm
                code = api.post('direct_messages/destroy', id=id)
                # NOTE(review): the retry rebinds `xml`, so later iterations
                # index into the retry response, not the inbox — confirm intended.
                while code != 1:
                    code, xml = api.fanfou('direct_messages/destroy', {'id': id})
def arrivalsAtRouteId(routeID):
    """Return [{'stop', 'time_left'}] for the next arrival at each stop on a route."""
    estimates = api.get('arrival-estimates', params={'routes': routeID})['data']
    if len(estimates) < 1:
        return []
    # Only the first (soonest) arrival per stop estimate is reported.
    return [
        {
            'stop': stopIDToName(est['stop_id']),
            'time_left': time_left(est['arrivals'][0]['arrival_at']),
        }
        for est in estimates
    ]
def sets_played_by_player(bracket_id, tag):
    """Return the sets in bracket `bracket_id` involving player `tag` (case-insensitive)."""
    try:
        tag = str(tag).lower()
    except Exception:
        # Was a bare except; Exception still covers any conversion failure
        # without trapping SystemExit/KeyboardInterrupt.
        msg = "Given player tag is not and cannot be converted into a string"
        raise exceptions.ValidationError(msg)
    uri = BRACKET_URL + str(bracket_id)
    response = api.get(uri, VALID_BRACKET_PARAMS)
    return _filter_sets_given_player(response, tag)
def _get_ride_details(self):
    """Fetch this ride from the API and cache its fields on the instance."""
    resp = api.get(api.RIDES + '/' + self.id)
    data = resp['ride']
    self._athlete = athlete.StravaAthlete(data['athlete']['id'])
    # Mirror the remaining API fields onto underscore-prefixed attributes.
    for field in ('elapsedTime', 'startDate', 'name', 'distance',
                  'movingTime', 'bike', 'location'):
        setattr(self, '_' + field, data[field])
def get(self, key):
    """Fill in and return the cached category whose slug equals `key`."""
    error = 'Invalid group slug given.'
    response = api.get('groups', 'discover', 'category', key, error=error)
    groups = deserialize(response.content)['groups']
    # Locate the matching category in the cache (raises ValueError if absent).
    index = [category.slug for category in self.data].index(key)
    self.data[index].groups = groups
    return self.data[index]
def arrivalsAtRouteId(routeID):
    """Return next-arrival info (stop name + time left) for each stop on `routeID`."""
    estimates = api.get('arrival-estimates', params={'routes': routeID})['data']
    if len(estimates) < 1:
        return []
    out = []
    for est in estimates:
        soonest = est['arrivals'][0]  # only the first arrival per stop is reported
        out.append({
            'stop': stopIDToName(est['stop_id']),
            'time_left': time_left(soonest['arrival_at']),
        })
    return out
def static_resolvers(fetcher, article, attribute):
    """
    Dispatch `attribute` through the 'static-attribute-resolvers' function table.
    Returns the resolver's result, or None when no resolver is registered.
    """
    resolvers = api.get("static-attribute-resolvers", function=True)
    if attribute not in resolvers:
        return None
    return resolvers[attribute](article)
def _discover_group(self, *cats):
    # Fetch discoverable groups for the given category path, wrap them in
    # Group models, and cache any unseen groups in the root container.
    _groups = []
    r = api.get('groups', 'discover', *cats)
    for group in deserialize(r.content)['groups']:
        _group = models.Group()
        _group.import_from_api(group)
        _groups.append(_group)
        #store into groups
        # NOTE(review): membership is tested on self.parent.parent but the
        # append targets self.parent.parent.data — confirm the container
        # implements __contains__ over group ids, otherwise this dedup
        # check never matches.
        if not _group.id in self.parent.parent:
            self.parent.parent.data.append(_group)
    return _groups
def show(tournament_name, params=None, filter_response=True):
    """Retrieve a single tournament record by `tournament name`.

    `params` is an optional list of query parameters; a fresh list is created
    per call (the old `params=[]` default was a shared mutable — any mutation
    by validation/filtering would leak across calls).
    """
    if params is None:
        params = []
    utils._validate_query_params(params=params, valid_params=VALID_PARAMS, route_type='tournament')
    uri = TOURNAMENT_PREFIX + tournament_name
    response = api.get(uri, params)
    if filter_response:
        response = _filter_tournament_response(response, params)
    return response
def arrivalsAtStopID(stopID):
    """Return [{'route', 'time_left'}] for every upcoming arrival at `stopID`."""
    data = api.get('arrival-estimates', params={'stops': stopID})['data']
    if len(data) < 1:
        return []
    return [
        {
            'route': routeIDToName(a['route_id']),
            'time_left': time_left(a['arrival_at']),
        }
        for a in data[0]['arrivals']
    ]
def download_excel2():
    """Serve the combined extra-work purchase tables as a dated .xlsx download."""
    doc1 = api.get('/dashboard_geulen?id=ExtraWerk1')
    doc2 = api.get('/dashboard_geulen?id=ExtraWerk2')
    inkoop = pd.read_json(doc1[0]['df_table1'], orient='records')
    inkoop = inkoop.append(pd.read_json(doc2[0]['df_table2'], orient='records')).reset_index(drop=True)
    # Render the DataFrame into an in-memory Excel workbook.
    buffer = io.BytesIO()
    excel_writer = pd.ExcelWriter(buffer, engine="xlsxwriter")
    inkoop.to_excel(excel_writer, sheet_name="sheet1", index=False)
    excel_writer.save()
    buffer.getvalue()
    buffer.seek(0)
    # Name download file
    filename = 'Info_inkooporder_meerwerk_' + dt.datetime.now().strftime('%d-%m-%Y') + '.xlsx'
    return send_file(buffer, attachment_filename=filename, as_attachment=True)
def main(user, mentors):
    """Look up mentor accounts on Twitter and attach them to `user` in the DB."""
    db = database.connect()
    with db, db.cursor() as cursor:
        user = database.get_user(cursor, user)
    mentor_data = api.get(user, 'users/lookup', screen_name=','.join(mentors))
    # Warn about requested names Twitter did not return.
    found = {m['screen_name'].lower() for m in mentor_data}
    unknown = {m.lower() for m in mentors} - found
    if unknown:
        logging.warning('unknown screen names: %s', unknown)
    with db, db.cursor() as cursor:
        mentor_ids = database.update_twitters(cursor, mentor_data)
        database.add_user_mentors(cursor, user.id, mentor_ids)
def arrivalsAtStopID(stopID):
    """Return route name and remaining time for all upcoming arrivals at `stopID`."""
    payload = api.get('arrival-estimates', params={'stops': stopID})['data']
    if len(payload) < 1:
        return []
    result = []
    for est in payload[0]['arrivals']:
        result.append({
            'route': routeIDToName(est['route_id']),
            'time_left': time_left(est['arrival_at']),
        })
    return result
def get_access_token(client_id, scope):
    """Open the VK OAuth implicit-flow authorize page for `client_id`/`scope`.

    The access token is delivered in the redirect URL fragment of the opened
    browser page; this helper only launches the page and prints the initial
    response headers for debugging.
    """
    assert isinstance(client_id, int), 'client_id must be positive integer'
    assert isinstance(scope, str), 'scope must be string'
    assert client_id > 0, 'client_id must be positive integer'
    # Fixes vs. previous version: redirect_uri was misspelled 'blank.hmtl'
    # and a doubled '&&' preceded response_type; assert messages said 'clinet_id'.
    url = (
        'https://oauth.vk.com/authorize?client_id={client_id}'
        '&redirect_uri=https://oauth.vk.com/blank.html'
        '&scope={scope}'
        '&response_type=token'
        '&display=page'
    ).format(client_id=client_id, scope=scope)
    webbrowser.open_new_tab(url)
    response = get(url)
    print(response.headers)
def get_teams_by_country(session, conf_url):
    """Scrape a confederation wiki page into {country_name: [teams]}."""
    source = api.get(session, conf_url)
    page = BeautifulSoup(source, 'html.parser')
    teams_by_country = {}
    for table in page.find_all('table', attrs={'class': 'wikitable'}):
        try:
            # The country name lives in the <span> of the nearest preceding <h2>.
            name = table.find_previous_sibling('h2').findNext('span').text
            teams_by_country[name] = _extract_teams(table.find_all('tr')[1:])
        except AttributeError:
            # Tables without a recognisable country heading are skipped.
            continue
    return teams_by_country
def main():
    """Register every test-case file found under each dev's testing directory."""
    for dev_num in range(1, 7):
        root = api.path + '/dev{}/testing'.format(dev_num)
        if not os.path.isdir(root):
            continue
        for name in os.listdir(root):
            if not os.path.isfile(os.path.join(root, name)):
                continue
            record = {
                "developer": api.get('developers', dev_num),
                "id": "",
                "softwareSystem": api.get('software-systems', checkSystem(name)),
                "status": "PRIVATE",
                "tag": name,
                "uri": "http://opendata.soccerlab.polymtl.ca/test-cases/" + name,
                "author": "",
                "license": "",
            }
            api.request("Test case", 'test-cases', record)
def username_to_skills(username, cur):
    """Return the skillset derived from a GitHub user's 100 most recent repos.

    A 404 response (user gone) removes the stale portfolio row; any other
    failure is logged and re-raised.
    """
    url = f'https://api.github.com/users/{username}/repos?per_page=100&page=1&sort=pushed'
    res = None  # lets the handler tell "request never completed" from "bad status"
    try:
        res = get(url)
        res.raise_for_status()
    except Exception:
        if res is None:
            # get() itself failed: the old code dereferenced the unbound `res`
            # here, raising a NameError that masked the real error.
            raise
        if res.status_code == 404:
            cur.execute("DELETE FROM portfolios WHERE user = %(user)s", {'user': username})
        else:
            pprint(res.json())
            print('status_code', res.status_code)
            raise
    return calc_skillset(res.json())
def main():
    """Register each top-level project directory as a developer record."""
    root = api.path
    for entry in os.listdir(root):
        if not os.path.isdir(os.path.join(root, entry)):
            continue
        record = {
            "id": "",
            "name": entry,
            "study": api.get('studies', 1),
            "license": "",
        }
        api.request(entry, 'developers', record)
def fetch_users():
    """Fetch all cypress test users (data-entry + superAdmin) from the server."""
    query = {
        'fields': '*',
        'filter': [
            'userCredentials.username:$like:cypress-de-',
            'userCredentials.username:$like:cypress-superAdmin',
        ],
        'rootJunction': 'OR',  # match either username pattern
    }
    users = api.get('users.json', query)['users']
    print('Fetched', len(users), 'users')
    return users
def main():
    """OAuth PIN flow: authorize a Twitter account and persist its tokens."""
    request_token = api.get_request_token()
    prompt = 'Go to {} and enter the PIN here: '.format(
        api.get_authorize_url(request_token['oauth_token']))
    pin = input(prompt)
    access_token = api.get_access_token(request_token['oauth_token'], pin)
    user = User(access_token['oauth_token'], access_token['oauth_token_secret'])
    user_data = api.get(user, 'account/verify_credentials')
    logging.info('adding user %s', user_data['screen_name'])
    with database.connect() as db, db.cursor() as cursor:
        twitter_id, = database.update_twitters(cursor, [user_data])
        database.update_user(cursor, twitter_id, user.access_token, user.access_token_secret)
def main(user, path, params):
    """Page through a cursored Twitter API endpoint and print all pages as JSON."""
    db = database.connect()
    with db, db.cursor() as db_cursor:
        user = database.get_user(db_cursor, user)
    pages = []
    page_cursor = -1  # cursor=-1 requests first page
    while page_cursor:  # cursor=0 means no more pages
        logging.info('loading cursor=%d', page_cursor)
        data = api.get(user, path, cursor=page_cursor, **params)
        page_cursor = data['next_cursor']
        pages.append(data)
    print(json.dumps(pages, indent=2))
def main():
    """Register every .avi think-aloud recording found under each dev's folder."""
    for dev_num in range(1, 7):
        dev = 'dev{}'.format(dev_num)
        root = api.path + '/%s/thinkalouds/' % dev
        for name in os.listdir(root):
            if not os.path.isfile(os.path.join(root, name)):
                continue
            if not name.endswith('.avi'):
                continue
            date = name[16:26].replace("_", "-")  # date embedded in the filename
            record = {
                "description": "Think-aloud from {} on {}".format(dev, date),
                "developer": api.get('developers', dev_num),
                "id": "",
                "registred": date,
                "softwareSystem": api.get('software-systems', checkSystem(name)),
                "tag": "Think aloud on " + date,
                "author": "",
                "license": "",
            }
            api.request("Think Aloud", 'think-alouds', record)
def main():
    """Attach note records to interviews (.rtf files) and think-alouds (.doc/.xls files)."""
    for a in range(1, 7):
        dev = '{}{}'.format("dev", a)
        interviews_dir = api.path + '/%s/interviews/' % dev
        thinkalouds_dir = api.path + '/%s/thinkalouds/' % dev
        interview_files = [item for item in os.listdir(interviews_dir)
                           if os.path.isfile(os.path.join(interviews_dir, item))]
        thinkaloud_files = [item for item in os.listdir(thinkalouds_dir)
                            if os.path.isfile(os.path.join(thinkalouds_dir, item))]
        for i in interview_files:
            if i.endswith('.rtf'):
                data_note = {
                    # i[3] is the system digit embedded in the filename.
                    "description": "Note from {} about System {}".format(dev, i[3]),
                    "id": "",
                    "interview": api.get('interviews', a),
                    "status": "PRIVATE",
                    "tag": i,
                    "thinkaloud": None,
                    "uri": "uri",
                    "author": "",
                    "license": "",
                }
                api.request("Note", 'notes', data_note)
        for i in thinkaloud_files:
            # BUG FIX: `i.endswith('.doc' or '.xls')` evaluated to
            # `i.endswith('.doc')`, silently skipping .xls notes;
            # endswith accepts a tuple of suffixes.
            if i.endswith(('.doc', '.xls')):
                data_note = {
                    "description": "Enter a description",
                    "id": "",
                    "interview": None,
                    "status": "PRIVATE",
                    "tag": i,
                    "thinkaloud": api.get('think-alouds', a),
                    "uri": "uri",
                    "author": "",
                    "license": "",
                }
                api.request("Note", 'notes', data_note)
def main(): total = 0 #LINK WITH TASK for a in range(1,7): dev = '{}{}'.format("dev", a) root = api.path + '/%s/diaries' % dev if os.path.isdir(root): dirlist = [ item for item in os.listdir(root) if os.path.isfile(os.path.join(root, item)) ] for i in dirlist: data_diary = { "developer": api.get('developers', a), "id": "", "registred": date, "softwareSystem": api.get('software-systems', 1), "status": "PRIVATE", "task": None, #??? "uri": "uri", "author": "", "license": "" } api.request("Diary", 'diaries', data_diary) total = total + 1
def directmsg():
    # Drain the direct-message inbox: log and echo each message back via
    # sendtext(), then delete it, retrying until the API reports success (1).
    xml = api.get('direct_messages/inbox')
    if xml:
        xml = etree.fromstring(xml)
        num = len(xml)
        if num > 0:
            for i in range(num):
                id = xml[i][0].text   # message id
                msg = xml[i][1].text  # message body
                logbook.info(msg.encode("utf8"))
                sendtext(msg.encode("utf8"))
                code = api.post('direct_messages/destroy', id=id)
                logbook.info("destory")
                # NOTE(review): the retry rebinds `xml`, so later iterations
                # index into the retry response, not the inbox — confirm intended.
                while code != 1:
                    code, xml = api.fanfou('direct_messages/destroy', {'id': id})
def get_player_info(self, session):
    """Scrape the player's wiki infobox and populate biographical fields.

    Any scraping failure marks the record incomplete (missing_required)
    instead of propagating; pages without an infobox are skipped silently.
    """
    source = api.get(session, self.url)
    page = BeautifulSoup(source, 'html.parser')
    player_card = page.find('table', class_='infobox vcard')
    if player_card:
        try:
            self._get_birthdate(player_card)
            self._get_birthplace(player_card)
            self._get_height(player_card)
            self._get_number(player_card)
            self._get_position(player_card)
            self._get_current_team(player_card)
            self._get_national_team(player_card)
            self._check_required_fields()
        except Exception:
            # Was a bare except; Exception keeps the best-effort behavior
            # without trapping SystemExit/KeyboardInterrupt.
            self.missing_required = True
def nearby_swearwords(fetcher, article, attribute):
    """
    Get nearby swearwords from attribute: check the words surrounding
    `attribute` in the article against the configured swearword regexps.
    """
    words = _wordlist(article)
    dist = api.get('nearby-word-distance')
    try:
        position = words.index(attribute)
    except ValueError:
        return None  # attribute not present in the article text
    # It is advised to use the configuration whenever possible
    window_start = max(0, position - dist)
    window_end = min(position + dist, len(words))
    return api.domaincall("static-attribute-resolvers", "swearwords",
                          words[window_start:window_end])
def download(id):
    # Download all segments of a (possibly live) video into a directory named
    # after the id; for live videos, poll every 5 minutes for new segments
    # until the stream ends, then hand everything to process().
    try:
        video = Video(id)
        print(datetime.now(), "Downloading video", id)
    except Exception as e:
        print(datetime.now(), "Fetching video failed")
        print(datetime.now(), str(e))
        return
    segments = []     # filenames written, in download order
    finished = set()  # segment ids already downloaded (skipped on re-poll)
    os.makedirs(id, exist_ok=True)
    while True:
        for segment in video.segments:
            if segment.id in finished:
                continue
            try:
                response = api.get(segment.uri, stream=True)
                filename = "{}/{}".format(video.id, segment.filename)
                with open(filename, "wb") as data:
                    for chunk in response.iter_content(chunk_size=128):
                        data.write(chunk)
                segments.append(filename)
                finished.add(segment.id)
            except Exception as e:
                # Best-effort: a failed segment is retried on the next poll.
                print(datetime.now(), "Failed downloading segment", segment.id)
                print(datetime.now(), str(e))
        if not video.live:
            break
        sleep(300)  # live stream: wait 5 minutes before polling again
        try:
            video.update()
        except Exception as e:
            print(datetime.now(), "Failed updating video")
            print(datetime.now(), str(e))
            break
    process(video, segments)