def insert_lando(lando, token):
    # Insert a country ("lando") scraped from an HTML fragment into the API,
    # returning its id. If a country with the same landkodo already exists,
    # return the existing id instead of inserting.
    #
    # :param lando: a parsed HTML element (BeautifulSoup-style: has .img and
    #               .contents) holding the country's flag image and data cells
    #               -- assumed, TODO confirm against the caller
    # :param token: credential passed through to util.get_token for auth
    # :return: the country's id (existing row) or the new insertId
    radikoEO = lando.img['title']
    # Country code is the flag image's file name, e.g. .../xx.png -> 'xx'
    landkodo = lando.img['src'].replace('.png', '').split('/')[3]
    valuto = lando.contents[4].text
    # Check whether this country already exists
    response = requests.get(api_url + '/landoj?landkodo=' + landkodo)
    if len(response.json()) > 0:
        if 'id' in response.json()[0]:
            print response.json()[0]['id']
            return response.json()[0]['id']
    data = {
        'radikoEo': radikoEO,
        'finajxoEo': '',
        'landkodo': landkodo,
        'valuto': valuto
    }
    token = util.get_token(token)
    headers = {'x-access-token': token}
    request = requests.post(api_url + '/landoj', headers=headers, data=data)
    # 400/403 is treated as an expired/invalid token: refresh and retry once
    if (request.status_code == 400) or (request.status_code == 403):
        token = ''
        token = util.get_token(token)
        headers = {'x-access-token': token}
        request = requests.post(api_url + '/landoj', headers=headers, data=data)
    return request.json()['insertId']
def request_token(agol_creds):
    '''
    Generate an ArcGIS Online token for the stored credentials.

    :param agol_creds: the name of the local file with the user/pass
                       (read via util.get_token; rows are (key, value) pairs)
    :return: the generated token string
    :raises Exception: if the generateToken endpoint returns an error payload
    '''
    user = util.get_token(agol_creds)[0][1]
    password = util.get_token(agol_creds)[1][1]

    d = {
        "username": user,
        "password": password,
        "referer": "http://www.arcgis.com",
        "f": "json"
    }

    url = "https://www.arcgis.com/sharing/rest/generateToken"
    r = requests.post(url, data=d)
    response = json.loads(r.content)

    # BUG FIX: the error check must run BEFORE reading 'token' -- on failure
    # the payload has no 'token' key, so the old order raised a KeyError that
    # masked the real error. Per the ArcGIS REST API, error details are nested
    # under the 'error' key.
    if 'error' in response.keys():
        raise Exception(response['error']['message'], response['error']['details'])

    return response['token']
def insert_kategorioj(token):
    # Insert every group from the module-level `kategorioj` list into the API,
    # then attach each new group to its category ('tipo') and, if flagged, to
    # the youth-groups category.
    #
    # :param token: credential passed through util.get_token for auth
    token = util.get_token(token)
    id_juna = get_config("idJunajGrupoj")
    headers = {'x-access-token': token}
    for data in kategorioj:
        request = requests.post(api_url + '/grupoj', headers=headers, data=data)
        # 400/403 is treated as an expired token: refresh and retry once
        if (request.status_code == 400) or (request.status_code == 403):
            token = ''
            token = util.get_token(token)
            headers = {'x-access-token': token}
            request = requests.post(api_url + '/grupoj', headers=headers, data=data)
        # Server error: dump the offending record for debugging, keep going
        if request.status_code == 500:
            print data
        if('insertId' in request.json()):
            id_grupo = str(request.json()['insertId'])
            print id_grupo
            # Link the new group to its category, when one is given
            if(data['tipo']):
                url = api_url + '/grupoj/kategorioj/' + data['tipo'] + '/sub/' + id_grupo
                requests.post(url, headers=headers)
            # Also link to the youth-groups category when flagged
            if(data['juna']):
                url = api_url + '/grupoj/kategorioj/' + id_juna + '/sub/' + id_grupo
                requests.post(url, headers=headers)
def test_get_token():
    """Fetch a token for each test user and stash it in the shared env."""
    print()
    for env_key in ("USER_1_TOKEN", "USER_2_TOKEN", "USER_3_TOKEN"):
        env.env_var[env_key] = util.get_token(username="******", password="******")
def get_uzantoj(token):
    """Fetch the list of users from the API.

    On 400/403 (expired/invalid token) the token is refreshed and the call is
    retried once via recursion.

    :param token: credential passed through util.get_token for auth
    :return: the decoded JSON user list, or None on other error statuses
    """
    headers = {'x-access-token': util.get_token(token)}
    request = requests.get(api_url + '/uzantoj', headers=headers)
    if request.status_code == 200:
        return request.json()
    elif (request.status_code == 400) or (request.status_code == 403):
        # Force a token refresh, then retry.
        token = ''
        token = util.get_token(token)
        # BUG FIX: the retry's result was discarded (no `return`), so the
        # caller always received None after a token refresh.
        return get_uzantoj(token)
def post_perantoj(data, token):
    # Insert one intermediary ("peranto") record into the API.
    #
    # :param data: dict of peranto fields; must contain 'publikaNomo'
    # :param token: credential passed through util.get_token for auth
    print "Enmetante datumojn de peranto: " + data['publikaNomo']
    headers = {'x-access-token': util.get_token(token)}
    request = requests.post(api_url + '/perantoj', headers=headers, data=data)
    if request.status_code == 201:
        print "Sukcese enmetita"
    # 400/403 is treated as an expired token: refresh and retry once
    elif (request.status_code == 400) or (request.status_code == 403):
        token = ''
        token = util.get_token(token)
        headers = {'x-access-token': token}
        request = requests.post(api_url + '/perantoj', headers=headers, data=data)
def test_get_token():
    """Fetch a token for each test user (and karnak) into the shared env."""
    #env.initialize()
    print()
    user_keys = ("USER_1_TOKEN", "USER_2_TOKEN", "USER_3_TOKEN",
                 "USER_KARNAK_TOKEN")
    for env_key in user_keys:
        env.env_var[env_key] = util.get_token(username="******", password="******")
def cartodb_append(sqlite_db_path, out_cartodb_name, gfw_env, where_clause=None):
    """
    Append a local FC to a cartoDB dataset
    :param sqlite_db_path: path to local sqlite db
    :param out_cartodb_name: cartoDB table
    :param gfw_env: gfw_env
    :param where_clause: where_clause to apply to the dataset
    :return:
    """
    api_key = util.get_token(settings.get_settings(gfw_env)['cartodb']['token'])
    account = get_account_name(gfw_env)

    # ogr2ogr reference: http://www.gdal.org/ogr2ogr.html
    # '-dim 2' strips Z/M values so only two-dimensional data is written
    ogr_cmd = ['ogr2ogr', '--config', 'CARTODB_API_KEY', api_key,
               '-append', '-skipfailures', '-t_srs', 'EPSG:4326',
               '-f', 'CartoDB', '-nln', out_cartodb_name, '-dim', '2',
               'CartoDB:{0}'.format(account)]

    ogr_cmd = add_fc_to_ogr2ogr_cmd(sqlite_db_path, ogr_cmd)
    ogr_cmd = add_where_clause_to_ogr2ogr_cmd(where_clause, ogr_cmd)

    util.run_subprocess(ogr_cmd)
def __init__(self, slack_name, slackbot_token=None, slackbot_token_file=None):
    """Initialize the slackbot client.

    :param slack_name: short name of the slack (preceding '.slack.com')
    :param slackbot_token: a slackbot token, or None to load from file
    :param slackbot_token_file: file containing the token, used when
        slackbot_token is None (resolution handled by util.get_token)
    """
    self.slack_name = slack_name
    self.sb_token = util.get_token(slackbot_token, slackbot_token_file)
    # Base URL for slackbot posts (depends on slack_name and sb_token)
    self.url = self.sb_url()
def send_email(body_text):
    """
    Send an email given a body text
    :param body_text: text to include in the email
    :return:
    """
    username = '******'
    fromaddr = "{0}@gmail.com".format(username)

    toaddrs = ["*****@*****.**", "*****@*****.**"]

    server = smtplib.SMTP('smtp.gmail.com', 587)
    server.ehlo()
    server.starttls()
    server.login(fromaddr, util.get_token(username))

    # BUG FIX: previously one MIMEMultipart was reused and `msg['To']` was
    # assigned inside the loop. email.Message.__setitem__ APPENDS headers, so
    # the second recipient's mail carried BOTH To: headers. Build a fresh
    # message per recipient instead.
    for toaddr in toaddrs:
        msg = MIMEMultipart()
        msg['From'] = fromaddr
        msg['To'] = toaddr
        msg['Subject'] = "gfw-sync2 results"
        msg.attach(MIMEText(body_text, 'html'))
        server.sendmail(fromaddr, toaddr, msg.as_string())

    server.quit()
def setUpClass(cls):
    """Build the shared KBase test context: auth token, MethodContext with
    provenance, the service implementation under test, and a pair of
    paired-end reads uploaded for use by the tests."""
    token = util.get_token()
    cls.cfg = util.get_config()
    # Getting username from Auth profile for token
    authServiceUrl = cls.cfg['auth-service-url']
    auth_client = _KBaseAuth(authServiceUrl)
    user_id = auth_client.get_user(token)
    # WARNING: don't call any logging methods on the context object,
    # it'll result in a NoneType error
    cls.ctx = MethodContext(None)
    cls.ctx.update({
        'token': token,
        'user_id': user_id,
        'provenance': [{
            'service': 'jgi_mg_assembly',
            'method': 'please_never_use_it_in_production',
            'method_params': []
        }],
        'authenticated': 1
    })
    cls.serviceImpl = jgi_mg_assembly(cls.cfg)
    cls.scratch = cls.cfg['scratch']
    cls.callback_url = os.environ['SDK_CALLBACK_URL']
    # Upload the small test read pair once for the whole test class
    cls.reads_upa = util.load_pe_reads(
        os.path.join("data", "small.forward.fq"),
        os.path.join("data", "small.reverse.fq"))
def insert_kotizo(id_lando, kotizo, id_grupo, token):
    """Insert a membership fee ("kotizo") for a country into a group.

    No-op when id_grupo is falsy.

    :param id_lando: country id
    :param kotizo: fee amount
    :param id_grupo: group id the fee belongs to
    :param token: credential passed through util.get_token for auth
    """
    if (id_grupo):
        data = {'idLando': id_lando, 'prezo': kotizo, 'junaRabato': 0}
        url = api_url + "/grupoj/" + str(id_grupo) + "/kotizoj"
        token = util.get_token(token)
        headers = {'x-access-token': token}
        request = requests.post(url, headers=headers, data=data)
        # 400/403 is treated as an expired token: refresh and retry once
        if (request.status_code == 400) or (request.status_code == 403):
            token = ''
            token = util.get_token(token)
            headers = {'x-access-token': token}
            # BUG FIX: the retry previously posted to '/landoj' (the countries
            # endpoint) instead of re-posting the kotizo to the same URL.
            request = requests.post(url, headers=headers, data=data)
def get_api_key_and_url(gfw_env):
    """Return the (api key, SQL API url, synchronization API url) triple for
    the cartodb config of the given environment."""
    cartodb_key = util.get_token(settings.get_settings(gfw_env)['cartodb']['token'])
    sql_api_url = settings.get_settings(gfw_env)["cartodb"]["sql_api"]
    sync_api_url = settings.get_settings(gfw_env)["cartodb"]["synchronization_api"]
    return cartodb_key, sql_api_url, sync_api_url
def set_service_status(service, action):
    """Apply `action` (e.g. start/stop) to an ArcGIS Server service by
    shelling out to the server's manageservice.py admin tool."""
    logging.debug("starting to execute {0} on service {1}".format(service, action))

    admin_pass = util.get_token('arcgis_server_pass')
    admin_dir = r"C:\Program Files\ArcGIS\Server\tools\admin"
    manage_cmd = ['python', "manageservice.py", '-u', 'astrong', '-p', admin_pass,
                  '-s', 'http://gis-gfw.wri.org/arcgis/admin',
                  '-n', service, '-o', action]

    # check_call so it will crash if the subprocess fails
    subprocess.check_call(manage_cmd, cwd=admin_dir)

    logging.debug("{0} on service {1} complete".format(service, action))
def main() -> None:
    """Configure logging, then run the bot's polling loop until interrupted."""
    bot_token = get_token()
    logging.basicConfig(
        format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
        level=logging.INFO)

    poller = Updater(bot_token)
    try:
        poller.start_polling()
        # Reply to every text message with say_hello
        poller.dispatcher.add_handler(MessageHandler(Filters.text, say_hello))
        #sleep(10)
        #poller.stop()
    except KeyboardInterrupt:
        poller.stop()
def kickoff(proc_name, regions, years, gfw_env):
    # Run the forest-change tile + point pipelines on the remote machine via
    # fabric, optionally followed by the places-to-watch (ptw) update.
    #
    # :param proc_name: 'umd_landsat_alerts' or 'terrai'
    # :param regions: semicolon-delimited region list, e.g. 'a;b'
    # :param years: semicolon-delimited year list
    # :param gfw_env: environment name; 'DEV' adds --staging flags
    token_info = util.get_token('s3_read_write.config')
    aws_access_key = token_info[0][1]
    aws_secret_key = token_info[1][1]
    lkp_proc_name = {'umd_landsat_alerts': 'glad', 'terrai': 'terrai'}
    tile_layer_name = lkp_proc_name[proc_name]
    # CLI args are space-delimited; incoming lists use ';'
    region_str = ' '.join(regions.split(';'))
    year_str = ' '.join(years.split(';'))
    # Generate the mapnik tiles and push to s3
    tile_cmd = 'python /home/ubuntu/mapnik-forest-change-tiles/generate-tiles.py'
    tile_cmd += ' -l {0} -r {1} -y {2} --world'.format(tile_layer_name, region_str, year_str)
    # Write the rasters to point and push to s3
    point_cmd = 'python /home/ubuntu/raster-vector-to-tsv/processing/utilities/weekly_updates.py'
    point_cmd += ' -l {0} -r {1} -y {2}'.format(tile_layer_name, region_str, year_str)
    # add staging flags if necessary
    if gfw_env == 'DEV':
        tile_cmd += ' --staging'
        point_cmd += ' --staging'
    ptw_cmd = 'python /home/ubuntu/gfw-places-to-watch/update-ptw.py -r all --threads 25'
    # Required, even though these are set for ubuntu in .bashrc
    with fabric.api.shell_env(S3_ACCESS_KEY=aws_access_key, S3_SECRET_KEY=aws_secret_key):
        cmd_list = [tile_cmd, point_cmd]
        # If today's date is >= 4 and <= 10 and south_america is to be processed, run ptw
        if tile_layer_name == 'glad' and run_ptw(
        ) and 'south_america' in region_str and gfw_env != 'staging':
            cmd_list += [ptw_cmd]
        # required because fabric will wait if process is not actively connected to this machine
        # can't do multiple fabric.api.run calls for some reason
        # http://docs.fabfile.org/en/1.6/faq.html#my-cd-workon-export-etc-calls-don-t-seem-to-work
        final_cmd = ' && '.join(cmd_list)
        fabric.api.run(final_cmd)
    # Important to signal the global_forest_change_layer to kill the subprocess
    print '****FAB SUBPROCESS COMPLETE****'
def main():
    """Main function for executing script.

    Drives one ListZones RPC against the Bigtable cluster service using the
    low-level gRPC API: invoke -> write request -> complete -> read response,
    collecting one completion-queue event per tag into EVENT_DICT.
    """
    client_creds = low_level.ClientCredentials(
        root_certificates=get_certs(), private_key=None, certificate_chain=None)
    channel = low_level.Channel(hostport=HOST_PORT, client_credentials=client_creds,
                                server_host_override=None)
    completion_queue = low_level.CompletionQueue()
    request_pb = bigtable_cluster_service_messages_pb2.ListZonesRequest(
        name=PROJECT_NAME)
    request_pb_as_str = request_pb.SerializeToString()
    expire_timestamp = time.time() + TIMEOUT_SECONDS
    call_obj = low_level.Call(channel=channel, completion_queue=completion_queue,
                              method=METHOD, host=HOST, deadline=expire_timestamp)
    # OAuth bearer token attached as call metadata
    call_obj.add_metadata(key='Authorization', value='Bearer ' + get_token())
    # First request -- invoke means "begin the RPC connection"
    invoke_result = call_obj.invoke(completion_queue=completion_queue,
                                    metadata_tag='1:METADATA-TAG',
                                    finish_tag='2:FINISH-TAG')
    print('invoke_result: %s' % (invoke_result,))
    # Read the first event off the queue, expect to be METADATA_ACCEPTED
    EVENT_DICT[1] = completion_queue.get(deadline=None)
    # Second Request -- write request to connection
    write_result = call_obj.write(message=request_pb_as_str, tag='3:WRITE-TAG')
    print('write_result: %s' % (write_result,))
    # Read the second event off the queue
    EVENT_DICT[2] = completion_queue.get(deadline=None)
    # Third Request -- complete means "sending data now"
    call_obj.complete(tag='4:COMPLETE-TAG')
    # Read the third event off the queue
    EVENT_DICT[3] = completion_queue.get(deadline=None)
    # Fourth request -- read the response for our request
    read_result = call_obj.read(tag='5:READ-TAG')
    print('read_result: %s' % (read_result,))
    # Read off remaining events from queue
    EVENT_DICT[4] = completion_queue.get(deadline=None)
    EVENT_DICT[5] = completion_queue.get(deadline=None)
    # We only expect 5 events (1 for each tag); deadline=1 so this sixth
    # get() times out quickly rather than blocking forever
    EVENT_DICT[6] = completion_queue.get(deadline=1)
    for i in xrange(1, 6 + 1):
        print('Event %d' % (i,))
        print(EVENT_DICT[i])
        print('')
def get_finance_data():
    """Fetch finance data from config.data_url using a bearer token.

    :return: the response body text on HTTP success, else {'error': status}
    """
    access_token = json.loads(util.get_token())["access_token"]
    print(access_token)
    host_ip, host_name = util.get_Host_name_IP()
    print('host=', host_name, 'address=', host_ip)
    headers = {
        "Authorization": "Bearer " + access_token,
        'REMOTE_ADDR': host_ip,
        'REMOTE_HOST': host_name
    }
    print(headers)
    res = requests.get(config.data_url, headers=headers)
    # BUG FIX: requests.get never returns None (it raises on connection
    # failure), and Response has no `.error` attribute -- the old else branch
    # could never run without itself crashing. Gate on the HTTP status instead.
    if res.ok:
        #print(res.text)
        return res.text
    else:
        return {'error': res.status_code}
def kickoff(proc_name):
    """Kick off the GLAD processing job on the remote machine via fabric.

    :param proc_name: must be 'GLAD'; anything else raises ValueError
    """
    creds = util.get_token('s3_read_write.config')
    access_key = creds[0][1]
    secret_key = creds[1][1]

    # Required, even though these are set for ubuntu in .bashrc
    # Set for both tilestache and s4cmd . . . annoyingly different
    with fabric.api.shell_env(AWS_ACCESS_KEY_ID=access_key,
                              AWS_SECRET_ACCESS_KEY=secret_key,
                              S3_ACCESS_KEY=access_key,
                              S3_SECRET_KEY=secret_key):

        if proc_name != 'GLAD':
            raise ValueError("Unknown process name in fabfile.py")

        fabric.api.run('python /home/ubuntu/glad/glad-processing-gdal/process_glad.py -r sa_test')
def login(self):
    """Log in through the webdriver unless the main page is already loaded.

    :return: (True, None) on success, or (False, failed_step_name)
    """
    # Skip the login flow entirely if the main page already loads.
    if not self.webdriver.check_main_loaded(msg='load page', limit=2):
        token = get_token(JSON_FILE)

        step = "login"
        if not self.webdriver.input_userinfo(msg=step, token=token, limit=10):
            return False, step

        step = "2nd auth"
        if not self.webdriver.check_2nd_auth(msg=step, limit=10):
            return False, step

        step = "load main"
        if not self.webdriver.check_main_loaded(msg=step, limit=10):
            return False, step

    step = "load task"
    if not self.webdriver.check_task_list(msg=step, limit=2):
        return False, step

    return True, None
def cartodb_sql(sql, gfw_env):
    """
    Execute a SQL statement using the API
    :param sql: a SQL statment
    :param gfw_env: the gfw_env-- used to grab the correct API token
    :return: the parsed JSON response (an OrderedDict)
    :raises SyntaxError: if the API reports an error for the statement
    """
    logging.debug(sql)
    key = util.get_token(settings.get_settings(gfw_env)['cartodb']['token'])
    api_url = settings.get_settings(gfw_env)["cartodb"]["sql_api"]

    # BUG FIX: the key and SQL were interpolated into the URL unescaped, so
    # any statement containing '&', '#', '+', spaces, etc. was truncated or
    # corrupted. urlencode percent-escapes both parameters.
    query_string = urllib.urlencode({'api_key': key, 'q': sql})
    result = urllib.urlopen("{0!s}?{1!s}".format(api_url, query_string))

    # read() instead of readlines()[0]: robust if the JSON response happens
    # to span more than one line
    json_result = json.loads(result.read(), object_pairs_hook=OrderedDict)

    if "error" in json_result.keys():
        raise SyntaxError(json_result['error'])

    return json_result
def __init__(self, slack_name, slackbot, api_token=None, api_token_file=None):
    """
    Initialize the Slack API client.

    :param slack_name: short name of the slack (preceding '.slack.com')
    :param slackbot: an initialized slackbot.Slackbot() object
    :param api_token: a Slack API Token, or None to load it from
        api_token_file (resolution handled by util.get_token)
    :param api_token_file: file name containing a Slack API Token, used
        when api_token is None
    """
    self.slack_name = slack_name
    self.api_token = util.get_token(api_token, api_token_file)
    # NOTE: order matters below -- api_url() needs api_token, and
    # get_channels()/get_content() need self.url.
    self.url = self.api_url()
    self.channels = self.get_channels()
    # Pre-fetched message bodies for channel closure/warning announcements
    self.closure_text = self.get_content(self.closure)
    self.warning_text = self.get_content(self.warning)
    self.slackbot = slackbot
def set_service_status(service, action):
    """Apply `action` (e.g. start/stop) to an ArcGIS Server service using the
    server's manageservice.py admin tool, run under the ArcGIS python."""
    logging.debug("starting to execute {0} on service {1}".format(
        service, action))

    admin_pass = util.get_token('arcgis_server_pass')
    admin_dir = r"C:\Program Files\ArcGIS\Server\tools\admin"
    manage_cmd = [
        r'C:\PYTHON27\ArcGISx6410.5\python', "manageservice.py",
        '-u', 'astrong', '-p', admin_pass,
        '-s', 'http://gis-gfw.wri.org/arcgis/admin',
        '-n', service, '-o', action
    ]

    # check_call so it will crash if the subprocess fails
    subprocess.check_call(manage_cmd, cwd=admin_dir)

    logging.debug("{0} on service {1} complete".format(service, action))
def cartodb_append(sqlite_db_path, out_cartodb_name, gfw_env, where_clause=None):
    """
    Append a local FC to a cartoDB dataset
    :param sqlite_db_path: path to local sqlite db
    :param out_cartodb_name: cartoDB table
    :param gfw_env: gfw_env
    :param where_clause: where_clause to apply to the dataset
    :return:
    """
    api_key = util.get_token(settings.get_settings(gfw_env)['cartodb']['token'])
    account = get_account_name(gfw_env)

    # ogr2ogr reference: http://www.gdal.org/ogr2ogr.html
    # '-dim 2' strips Z/M values so only two-dimensional data is written
    base_cmd = [
        'ogr2ogr',
        '--config', 'CARTODB_API_KEY', api_key,
        '-append',
        '-skipfailures',
        '-t_srs', 'EPSG:4326',
        '-f', 'CartoDB',
        '-nln', out_cartodb_name,
        '-dim', '2',
        'CartoDB:{0}'.format(account),
    ]

    with_fc = add_fc_to_ogr2ogr_cmd(sqlite_db_path, base_cmd)
    final_cmd = add_where_clause_to_ogr2ogr_cmd(where_clause, with_fc)

    run_subprocess(final_cmd)
def get_test(data):
    # Tokenize each test document, build the token->label dictionary, and
    # derive the feature vocabulary; results are also dumped to a feature
    # file for later reuse.
    #
    # :param data: iterable of raw test documents fed to get_token
    # :return: (token lists per doc, vocabulary, token->majority-label dict,
    #           20 most common tokens)
    dic = {}
    temp = []
    # BUG FIX: `temp = []` was assigned twice in a row; one removed.
    for d in data:
        a, dic = get_token(d, dic)
        if a:
            temp += [a]
    # Collapse each token's label list to its most frequent label
    dic = {
        key: max(set(value), key=value.count)
        for key, value in dic.iteritems()
    }
    x_test = [x for sublist in temp for x in sublist]
    c = Counter(x_test)
    most_set = [x[0] for x in c.most_common(20)]
    # Vocabulary: unique tokens occurring at least 200 times
    x_test = list(set(x_test))
    x_test = [x for x in x_test if c[x] >= 200]
    # BUG FIX: the feature file was opened without ever being closed; a
    # `with` block guarantees it is flushed and closed. (Unused local `mul`
    # also removed.)
    with open('../feat/test_feats', 'w') as txt:
        print >> txt, (temp, x_test, dic, most_set)
    return temp, x_test, dic, most_set
def zen(ctx, portal, production, community_id, debug):
    # CLI group entry point: build the shared click context object with the
    # portal configuration, logger, auth token, and target community.
    #
    # :param ctx: click context; ctx.obj is (re)initialized here
    # :param portal: portal name, or None to default to 'invenio'
    # :param production: True for the production API, False for sandbox
    # :param community_id: target community identifier
    # :param debug: enable DEBUG logging and dump the resolved settings
    ctx.obj = {}
    if portal is None:
        ctx.obj['portal'] = 'invenio'
        ctx = set_invenio(ctx, production)
    else:
        ctx.obj['portal'] = portal
        # can only be zenodo currently; could change in future
        ctx = set_zenodo(ctx, production)
    ctx.obj['log'] = config_log()
    # set up a config depending on portal and production values
    ctx.obj['production'] = production
    ctx.obj['community_id'] = community_id
    # get either sandbox or api token to connect
    ctx.obj['token'] = get_token(ctx.obj['portal'], ctx.obj['production'])
    if debug:
        ctx.obj['log'].setLevel(logging.DEBUG)
        ctx.obj['log'].debug(f"Token: {ctx.obj['token']}")
        ctx.obj['log'].debug(f"Portal: {ctx.obj['portal']}")
        ctx.obj['log'].debug(f"Community: {ctx.obj['community_id']}")
        ctx.obj['log'].debug(f"API url: {ctx.obj['url']}")
        ctx.obj['log'].debug(f"Production: {ctx.obj['production']}")
def kickoff(proc_name, *regions):
    # Run the forest-change tile + point pipelines remotely via fabric, then
    # optionally the places-to-watch (ptw) update.
    #
    # :param proc_name: 'umd_landsat_alerts' or 'terrai'
    # :param regions: region names, joined space-delimited for the CLI
    token_info = util.get_token('s3_read_write.config')
    aws_access_key = token_info[0][1]
    aws_secret_key = token_info[1][1]
    lkp_proc_name = {'umd_landsat_alerts': 'glad', 'terrai': 'terrai'}
    tile_layer_name = lkp_proc_name[proc_name]
    region_str = ' '.join(regions)
    tile_cmd = 'python /home/ubuntu/mapnik-forest-change-tiles/generate-tiles.py'
    tile_cmd += ' -l {0} -r {1} --world'.format(tile_layer_name, region_str)
    point_cmd = 'python /home/ubuntu/raster-vector-to-tsv/processing/utilities/weekly_updates.py'
    point_cmd += ' -l {0}'.format(tile_layer_name)
    ptw_cmd = 'python /home/ubuntu/gfw-places-to-watch/update-ptw.py -r all --threads 30'
    # # Required, even though these are set for ubuntu in .bashrc
    # # Set for both tilestache and s4cmd . . . annoyingly different
    # # Previouly used AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY as well for tilestache
    with fabric.api.shell_env(S3_ACCESS_KEY=aws_access_key, S3_SECRET_KEY=aws_secret_key):
        # Generate the mapnik tiles and push to s3
        fabric.api.run(tile_cmd)
        # Write the rasters to point and push to s3
        fabric.api.run(point_cmd)
        # If it's the last week of the month, run ptw
        if proc_name == 'umd_landsat_alerts' and run_ptw():
            fabric.api.run(ptw_cmd)
    # Important to signal the global_forest_change_layer to kill the subprocess
    print '****FAB SUBPROCESS COMPLETE****'
import boto import boto.ec2 import datetime import logging import time import util token_info = util.get_token('boto.config') access_key = token_info[0][1] secret_key = token_info[1][1] ec2_conn = boto.ec2.connect_to_region('us-east-1', aws_access_key_id=access_key, aws_secret_access_key=secret_key) def get_timestamps(bucket): out_dict = {} s3 = boto.connect_s3() bucket = s3.lookup(bucket) for key in bucket: out_dict[key.name] = datetime.datetime.strptime(key.last_modified, '%Y-%m-%dT%H:%M:%S.000Z') return out_dict def get_aws_instance(server_name): reservations = ec2_conn.get_all_reservations() for reservation in reservations:
import sys import logging logging.basicConfig( level=logging.DEBUG, format='%(asctime)s - %(name)s - %(levelname)s - %(message)s') from util import render_rates, get_token, read_channels, get_config, meta, mark_latest_message import telegram bot = telegram.Bot(get_token()) if len(sys.argv) >= 2: changed = sys.argv[1].split(",") else: sys.exit(1) for channel_id in read_channels(): c = get_config(channel_id) found = False for u in changed: notify = meta[u]['default_notify'] if u in c: notify = c[u]['sub'] and c[u]['notify'] if notify: found = True break if not found: continue text = "*Rates updated*:\n\n" + render_rates(channel_id, changed)
import util
import page_api
'''
this is a code sample to query the IRWIN Data services rest endpoint
and download data to a csv
'''

# enter credentials to generate a token
username = '******'
password = '******'

# get token
token = util.get_token(username, password)

# the url to query; {} is the paging offset filled in by page_api
endpoint_url = 'https://services1.arcgis.com/Hp6G80Pky0om7QvQ/arcgis/rest/services' \
               '/[OAT_NEXT]_Resources_VIEW_(Read_Only)/FeatureServer/0/query?resultOffset={}'

# specify a filter on the query ("1=1" returns every record)
where = "1=1"

# return all results in a list of dictionaries (page_api pages through
# the endpoint until exhausted)
feature_collection = page_api.page_api(token, endpoint_url, where)

# write the feature collection to a csv file
csv_file = 'all_resources.csv'
util.response_to_dict(feature_collection, csv_file)
'input': args['input_paths'].split(','), 'output': args['output_path'], 'file_type': file_type, 'ref': ref, 'knownsites': known_sites.split(','), 'conf': parse_conf(), 'is_export_bam': is_export_bam.lower() == 'true', 'is_output_gvcf': is_output_gvcf.lower() == 'true' } if bam_output_path != '': payload['bam_output'] = bam_output_path log.info('########## START TO PROCESS GENE JOB ##########') s_time = time.time() # 1. get token token = get_token(iam_endpoint, user_name, password, project_id) headers = {'Content-Type': 'application/json', 'X-Auth-Token': token} # 2. submit job submit_job(payload) e_time = time.time() duration = e_time - s_time log.info( '########## GATK JOB FINISHED, total cost {} seconds ##########'. format(duration)) elif program == 'ListJob': log.info('Trying to list all gene jobs:') # 1. get token token = get_token(iam_endpoint, user_name, password, project_id) headers = {'Content-Type': 'application/json', 'X-Auth-Token': token} # 2. list jobs print json.dumps(json.loads(list_jobs()), indent=4)
update.message.reply_text( "Welcome. This bot monitors multiple sources for lira rate updates. Send /config to configure which sources you want to see. Send /sub or /unsub to subscribe or unsubscribe from getting notified when rates changed. Send /rates for latest rates on record" ) def print_rates(update, context): if rate_limit(update.effective_chat): return message = update.message.reply_text( "*Latest rates*:\n\n" + render_rates(update.effective_chat.id), disable_web_page_preview=True, parse_mode=telegram.ParseMode.MARKDOWN_V2) mark_latest_message(context.bot, message) TOKEN = get_token() if TOKEN is None or len(TOKEN) == 0: print("Please put a auth token for the bot in the token file") sys.exit(1) updater = Updater(token=TOKEN, use_context=True) dp = updater.dispatcher dp.add_handler(CommandHandler('rates', print_rates)) dp.add_handler(CommandHandler('sub', subscribe)) dp.add_handler(CommandHandler('unsub', unsubscribe)) dp.add_handler(CommandHandler('subscribers', list_subscribers)) dp.add_handler(CommandHandler('config', configure)) dp.add_handler(CallbackQueryHandler(configure_options)) dp.add_handler(CommandHandler('start', start))
def get_uzantoj(token):
    """Fetch the list of users from the API, refreshing the token and
    retrying once on 400/403.

    :param token: credential passed through util.get_token for auth
    :return: the decoded JSON user list, or None on other error statuses
    """
    headers = {'x-access-token': util.get_token(token)}
    request = requests.get(api_url + '/uzantoj', headers=headers)
    if request.status_code == 200:
        return request.json()
    elif (request.status_code == 400) or (request.status_code == 403):
        token = ''
        token = util.get_token(token)
        # BUG FIX: the retry's result was discarded (no `return`), so the
        # caller always received None after a token refresh.
        return get_uzantoj(token)


def get_retlisto_id():
    """Return the id of the first mailing list ("retlisto")."""
    request = requests.get(api_url + '/dissendoj/retlistoj')
    return request.json()[0]['id']


def aldoni_uzantoj_retlisto(uzantoj, id_retlisto):
    """Subscribe every user that has an email address to the mailing list.

    :param uzantoj: list of user dicts; 'retposxto' is the email field
    :param id_retlisto: id of the mailing list to subscribe to
    """
    for uzanto in uzantoj:
        if uzanto['retposxto']:
            data = {'retadreso': uzanto['retposxto']}
            url = api_url + '/dissendoj/retlistoj/' + str(
                id_retlisto) + '/abonantoj'
            requests.post(url, data)


# Script driver: fetch users and subscribe them all to the first mailing list.
token = ''
token = util.get_token(token)
uzantoj = get_uzantoj(token)
id_retlisto = get_retlisto_id()
aldoni_uzantoj_retlisto(uzantoj, id_retlisto)
from __future__ import print_function
import json
import requests
from config import SCOPE
import sys
import util

__author__ = 'Barry Yuan <*****@*****.**>'

# Authenticate first (token is cached inside util -- assumed, TODO confirm),
# then pull the full device inventory from the DNA Center API.
util.get_token()
response = util.get_url("dna/intent/api/v1/network-device")
#print(json.dumps(response, indent=2))
print("Parsing device list...")

# Collect devices in scope whose hostname does not match the management IP
devicelist = {'list': []}
for dev in response['response']:
    if dev['series'] in SCOPE:
        if dev['managementIpAddress'].lower() != dev['hostname'].lower():
            print("Found new device, collecting information...")
            devicelist['list'].append(dev)

# Human-readable summary: one "<mgmt-ip>:\t<series>" line per device
data = "\n".join([
    "{}:\t{}".format(dev['managementIpAddress'], dev['series'])
    for dev in devicelist['list']
])
util.sep(
    "Parsing complete, here is a list of devices that needs updated, please confirm..."
)
print(data)