def _uninstall_sanity_app():
    if not _is_sanity_dep_exist():
        return

    data = {
        'deployment_id': DEPLOYMENT_ID,
        'workflow_id': 'uninstall'
    }
    headers = utils.create_maintenance_headers()
    headers.update({'content-type': 'application/json'})

    utils.http_request(
        '{0}/executions'.format(_get_url_prefix()),
        method='POST',
        data=json.dumps(data),
        headers=headers)

    # Waiting for the uninstall workflow to complete
    utils.repetitive(
        utils.wait_for_workflow,
        timeout=5 * 60,
        interval=30,
        deployment_id=DEPLOYMENT_ID,
        workflow_id='uninstall',
        url_prefix=_get_url_prefix(),
        timeout_msg='Timed out while waiting for '
                    'deployment {0} to uninstall.'.format(DEPLOYMENT_ID))
def _deploy_app():
    if _is_sanity_dep_exist(should_fail=True):
        return

    dep_inputs = {'server_ip': manager_ip,
                  'agent_user': manager_user,
                  'agent_private_key_path': manager_remote_key_path}
    data = {
        'blueprint_id': BLUEPRINT_ID,
        'inputs': dep_inputs
    }
    headers = utils.create_maintenance_headers()
    headers.update({'content-type': 'application/json'})

    utils.http_request(
        '{0}/deployments/{1}'.format(_get_url_prefix(), DEPLOYMENT_ID),
        data=json.dumps(data),
        headers=headers)

    # Waiting for the create-deployment-environment workflow to end
    utils.repetitive(
        utils.wait_for_workflow,
        deployment_id=DEPLOYMENT_ID,
        workflow_id='create_deployment_environment',
        url_prefix=_get_url_prefix(),
        timeout_msg='Timed out while waiting for '
                    'deployment {0} to be created'.format(DEPLOYMENT_ID))
def get_info(username=None, use_fallback=True):
    """
    Get the Twitch info for a particular user or channel.

    Defaults to the stream channel if not otherwise specified.

    For response object structure, see:
    https://github.com/justintv/Twitch-API/blob/master/v3_resources/channels.md#example-response

    May throw exceptions on network/Twitch error.
    """
    if username is None:
        username = config['channel']

    # Attempt to get the channel data from /streams/channelname
    # If this succeeds, it means the channel is currently live
    res = utils.http_request("https://api.twitch.tv/kraken/streams/%s" % username)
    data = json.loads(res)
    channel_data = data.get('stream') and data['stream'].get('channel')
    if channel_data:
        channel_data['live'] = True
        channel_data['viewers'] = data['stream']['viewers']
        return channel_data

    if not use_fallback:
        return None

    # If that failed, it means the channel is offline
    # Get the channel data from here instead
    res = utils.http_request("https://api.twitch.tv/kraken/channels/%s" % username)
    channel_data = json.loads(res)
    channel_data['live'] = False
    return channel_data
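# A minimal usage sketch for get_info above. The printed fields are
# assumptions based on the Twitch v3 channel object (display_name, viewers),
# not values confirmed by this snippet; 'viewers' is only set when live.
info = get_info()  # defaults to config['channel']
if info and info['live']:
    print("%s is live with %d viewers" % (info.get('display_name'), info['viewers']))
else:
    print("Channel is offline")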
def __zadara_request__(self, endpoint, isPost=False):
    if isPost:
        # Don't return anything, because this just hangs if you do...
        utils.http_request("https://manage.zadarastorage.com", endpoint,
                           special_request="ZADARA_POST",
                           content_type="application/json")
    else:
        return json.loads(
            utils.http_request("https://manage.zadarastorage.com", endpoint,
                               content_type="application/json"))
def get_dataset_id(create_dataset_url, headers, dataset_title, schema_id, data):
    """
    Get the datasetId by making a POST request to
    "/data/foundation/catalog/datasets?requestDataSource=true"

    :param create_dataset_url: url
    :param headers: headers
    :param dataset_title: dataset title
    :param schema_id: schema url
    :param data: post request data
    :return: dataset id
    """
    # Set the title and description
    data['name'] = dataset_title
    data['description'] = dataset_title

    # If a dataset with this title already exists, return its id (the query
    # response is keyed by dataset id, so the first key is the id we want)
    query_url = '{}?name={}'.format(create_dataset_url, dataset_title)
    response = http_request('get', query_url, headers)
    results = json.loads(response)
    for dataset_id in results.keys():
        LOGGER.debug('Existing %s ID = %s', dataset_title, dataset_id)
        return dataset_id

    # Set the schema id
    data["schemaRef"]["id"] = schema_id

    headers["Content-type"] = CONTENT_TYPE
    headers["Accept"] = CONTENT_TYPE
    res_text = http_request("post", create_dataset_url, headers, json.dumps(data))
    dataset_response = str(json.loads(res_text))
    LOGGER.debug("dataset_response is %s", dataset_response)
    dataset_id = dataset_response.split("@/dataSets/")[1].split("'")[0]
    LOGGER.debug("dataset_id = %s", dataset_id)
    return dataset_id
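# A hedged sketch of calling get_dataset_id. The endpoint, headers, and
# payload skeleton are illustrative placeholders, not values from the source;
# the function itself only requires that data carries a 'schemaRef' dict.
CREATE_DATASET_URL = ('https://platform.adobe.io/data/foundation/catalog/'
                      'datasets?requestDataSource=true')
headers = {'Authorization': 'Bearer <access_token>', 'x-api-key': '<api_key>'}
payload = {'schemaRef': {'id': None}}
dataset_id = get_dataset_id(CREATE_DATASET_URL, headers,
                            'my_dataset', '<schema_id>', payload)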
def vk_method(self, method, params = {}, post = False):
    # c57458088a0fe802c56a15e004c54f99a9cc56ac56a35e26d70d459c8ada8da
    # http://api.vk.com/api.php?v=3.0&api_id=1901988&method=getProfiles&format=json&rnd=343&uids=100172&fields=photo%2Csex&sid=10180116c4fd93480439bca47d636d6dd75fac30b851d4312e82ec3523&sig=5be698cf7fa09d30f58b941a4aea0e9b
    for key, param in params.iteritems():
        try:
            params[key] = param.encode('utf-8')
        except:
            pass

    if post:
        url = "http://api.vk.com/api.php"
        # params['method'] = method
        params['api_id'] = self.app_id
        params['format'] = 'json'
        params['access_token'] = self.get_token()
        url = "https://api.vkontakte.ru/method/{method}".format(method = method)
        response = http_request(url, post = params,
                                cookies_file = self.cookies_file)
    else:
        url = "https://api.vkontakte.ru/method/{method}?access_token={token}&{param_str}"
        url = url.format(method = method,
                         token = self.get_token(),
                         param_str = url_params(params))
        response = http_request(url)

    try:
        json_data = json.loads(response)
        if json_data.has_key('response'):
            return json_data['response']
        return json_data
    except json.decoder.JSONDecodeError:
        print "Json error: %s" % response
        return response
def _deploy_app():
    if _is_sanity_dep_exist(should_fail=True):
        return

    dep_inputs = {
        'server_ip': manager_ip,
        'agent_user': manager_user,
        'agent_private_key_path': manager_remote_key_path
    }
    data = {'blueprint_id': BLUEPRINT_ID, 'inputs': dep_inputs}
    headers = utils.create_maintenance_headers()
    headers.update({'content-type': 'application/json'})

    utils.http_request(
        '{0}/deployments/{1}'.format(_get_url_prefix(), DEPLOYMENT_ID),
        data=json.dumps(data),
        headers=headers)

    # Waiting for the create-deployment-environment workflow to end
    utils.repetitive(
        utils.wait_for_workflow,
        deployment_id=DEPLOYMENT_ID,
        workflow_id='create_deployment_environment',
        url_prefix=_get_url_prefix(),
        timeout=60,
        timeout_msg='Timed out while waiting for '
                    'deployment {0} to be created'.format(DEPLOYMENT_ID))
def close_batch(create_batch_url, headers, batch_id):
    """
    Close the batch by making a POST request to
    "/data/foundation/import/batches"

    :param create_batch_url: url
    :param headers: headers
    :param batch_id: batch id
    """
    close_batch_url = create_batch_url + "/" + batch_id + "?action=COMPLETE"
    http_request("post", close_batch_url, headers)
def request_list(hostname, out_format):
    entity = utils.ENTITY_EXTERNAL_ENVIROMENT
    instant = 1

    payload = '{"entity":%d, "instant":%d}' % (entity, instant)
    url = 'http://%s/api/v1/hwmon/get/device/maxtemp' % hostname
    utils.response_output(out_format, utils.http_request(url, payload))

    payload = '{"entity":%d, "instant":%d}' % (entity, instant)
    url = 'http://%s/api/v1/hwmon/get/device/abstemp' % hostname
    utils.response_output(out_format, utils.http_request(url, payload))
def request_list(hostname, out_format):
    entity = utils.ENTITY_PROCESSOR
    instant = 1

    payload = '{"entity":%d, "instant":%d, "value":%f}' % (entity, instant, 165.0)
    url = 'http://%s/api/v1/hwmon/set/device/maxpower' % hostname
    utils.response_output(out_format, utils.http_request(url, payload))

    payload = '{"entity":%d, "instant":%d, "value":%f}' % (entity, instant, 65.0)
    url = 'http://%s/api/v1/hwmon/set/device/averagepower' % hostname
    utils.response_output(out_format, utils.http_request(url, payload))
def request_list(hostname, out_format):
    payload = '{"entity":-1, "instant":-1}'

    url = 'http://%s/api/v1/hwmon/get/map/alldevicefan' % hostname
    utils.response_output(out_format, utils.http_request(url, payload))

    url = 'http://%s/api/v1/hwmon/get/map/allfandutyout' % hostname
    utils.response_output(out_format, utils.http_request(url, payload))

    url = 'http://%s/api/v1/hwmon/get/map/allexpectduty' % hostname
    utils.response_output(out_format, utils.http_request(url, payload))
def request_list(hostname, out_format):
    entity = utils.ENTITY_AIC
    instant = 1

    payload = '{"entity":%d, "instant":%d, "value":%f}' % (entity, instant, 100.0)
    url = 'http://%s/api/v1/hwmon/set/device/maxtemp' % hostname
    utils.response_output(out_format, utils.http_request(url, payload))

    payload = '{"entity":%d, "instant":%d, "value":%f}' % (entity, instant, 52.12)
    url = 'http://%s/api/v1/hwmon/set/device/abstemp' % hostname
    utils.response_output(out_format, utils.http_request(url, payload))
def request_list(hostname, out_format):
    entity = utils.ENTITY_CPU
    instant = 1

    payload = '{"entity":%d, "instant":%d, "value":{"maxtdp":%d, "cores":%d, "id":%d}}' % (
        entity, instant, 165, 64, 0x8086)
    url = 'http://%s/api/v1/hwmon/set/device/cpu/info' % hostname
    utils.response_output(out_format, utils.http_request(url, payload))

    instant = 2
    payload = '{"entity":%d, "instant":%d, "value":{"maxtdp":%d, "cores":%d, "id":%d}}' % (
        entity, instant, 165, 64, 0x8086)
    url = 'http://%s/api/v1/hwmon/set/device/cpu/info' % hostname
    utils.response_output(out_format, utils.http_request(url, payload))
def dump_upgrade_data():
    if os.path.exists(DUMP_SUCCESS_FLAG):
        return

    endpoint = _get_es_install_endpoint()
    port = _get_es_install_port()
    storage_endpoint = 'http://{0}:{1}/cloudify_storage'.format(endpoint, port)
    types = ['provider_context', 'snapshot']
    ctx.logger.info('Dumping upgrade data: {0}'.format(types))

    type_values = []
    for _type in types:
        res = utils.http_request('{0}/_search?q=_type:{1}&size=10000'.format(
            storage_endpoint, _type), method='GET')
        if res.code != 200:
            ctx.abort_operation('Failed fetching type {0} from '
                                'cloudify_storage index'.format(_type))
        body = res.read()
        hits = json.loads(body)['hits']['hits']
        for hit in hits:
            type_values.append(hit)

    utils.mkdir(utils.ES_UPGRADE_DUMP_PATH, use_sudo=False)
    with open(DUMP_FILE_PATH, 'w') as f:
        for item in type_values:
            f.write(json.dumps(item) + os.linesep)

    # marker file to indicate dump has succeeded
    with open(DUMP_SUCCESS_FLAG, 'w') as f:
        f.write('success')
def get_display_name(nick):
    try:
        data = utils.http_request("https://api.twitch.tv/kraken/users/%s" % nick)
        data = json.loads(data)
        return data['display_name']
    except Exception:
        # Fall back to the raw nick if the Twitch lookup fails
        return nick
def POST(self):
    _query = web.input(
        lang='', lang_alias='', exec_type='', code='',
        inputs='', args='', stdinput='')
    _headers = {
        "User-Agent": "Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1700.107 Safari/537.36",
        "Accept": "*/*",
        "Accept-Language": "en-US,en;q=0.8",
        "X-Forwarded-For": "61.51.%d.%d" % (randrange(10, 245), randrange(10, 245)),
        "Content-Type": "application/x-www-form-urlencoded; charset=UTF-8",
        "Origin": "http://www.compileonline.com",
        "Referer": "http://www.compileonline.com/execute_%s_online.php" % _query.lang,
        "X-Requested-With": "XMLHttpRequest"
    }
    if _query.lang_alias == "":
        _query.lang_alias = _query.lang
    if _query.exec_type == "":
        _query.exec_type = "compile"
    _data = {
        "lang": _query.lang_alias,
        "code": _query.code,
        "header": "",
        "support": "",
        "util": "",
        "inputs": _query.inputs,
        "args": _query.args,
        "stdinput": _query.stdinput
    }
    try:
        _res = utils.http_request(
            "http://www.compileonline.com/%s_new.php" % _query.exec_type,
            _data, _headers)
        return _res.read()
    except Exception:
        return format_exc()
def get_access_token(ims_host, ims_endpoint_jwt, org_id, tech_acct, api_key,
                     client_secret, priv_key):
    """
    :param ims_host: ims host
    :param ims_endpoint_jwt: endpoint for exchange jwt
    :param org_id: org id
    :param tech_acct: technical account ID (obtained from Adobe IO integration)
    :param api_key: api key (obtained from Adobe IO integration)
    :param client_secret: client secret (obtained from Adobe IO integration)
    :param priv_key: private key
    :return: access token for the apis
    """
    url = "https://" + ims_host + ims_endpoint_jwt
    headers = {
        "Content-Type": "application/x-www-form-urlencoded",
        "Cache-Control": "no-cache"
    }
    body_credentials = {
        "client_id": api_key,
        "client_secret": client_secret,
        "jwt_token": get_jwt_token(ims_host, org_id, tech_acct, api_key, priv_key)
    }
    body = urlencode(body_credentials)

    # send http post request
    res_text = http_request("post", url, headers, body)
    access_token = json.loads(res_text)["access_token"]
    LOGGER.debug("access_token: %s", access_token)
    return access_token
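# A usage sketch for get_access_token. Every argument below is a placeholder;
# the host and endpoint values follow the usual Adobe IMS conventions rather
# than anything this snippet pins down.
with open('private.key') as key_file:
    token = get_access_token(
        ims_host='ims-na1.adobelogin.com',
        ims_endpoint_jwt='/ims/exchange/jwt',
        org_id='<org_id>@AdobeOrg',
        tech_acct='<tech_acct_id>@techacct.adobe.com',
        api_key='<api_key>',
        client_secret='<client_secret>',
        priv_key=key_file.read())
headers = {'Authorization': 'Bearer ' + token, 'x-api-key': '<api_key>'}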
def _assert_deployment_monitoring_data_exists():
    influx_props = utils.ctx_factory.get('influxdb')
    influx_host_ip = influx_props.get('influxdb_endpoint_ip')
    if influx_host_ip == '':
        influx_host_ip = 'localhost'
    influx_user = '******'
    influx_pass = '******'
    query = 'select * from /^{0}\./i ' \
            'where time > now() - 5s'.format(DEPLOYMENT_ID)
    params = urllib.urlencode(dict(u=influx_user, p=influx_pass, q=query))
    endpoint = 'http://{0}:8086/db/cloudify/series'.format(influx_host_ip)
    url = endpoint + '?' + params

    resp = utils.http_request(url, method='GET', timeout=30)
    if not resp:
        ctx.abort_operation("Can't connect to influxdb")
    if resp.code != 200:
        ctx.abort_operation('Received invalid response from the '
                            'monitoring service: {0}'.format(resp.reason))

    resp_content = resp.readlines()
    json_resp = json.loads(resp_content[0])
    if not json_resp:
        ctx.abort_operation('No monitoring data received')
def push(git_url, username=None, password=None):
    if username is None:
        username = input("Username: "******"everything up-to-date.")
        return remote_sha1, None
    print(f"updating remote master from {remote_sha1 or 'no commit'} to "
          f"{local_sha1} ({len(missing)} objects)")
    head = (f"{remote_sha1 or '0' * 40} {local_sha1} "
            f"refs/heads/master\x00 report-status").encode()
    lines = [head]
    data = build_lines(lines) + create_pack(missing)
    url = git_url + "/git-receive-pack"
    response = http_request(url, username, password, data=data)
    lines = extract_lines(response)
    assert len(lines) > 1, f"expect at least 2 lines, got {len(lines)}"
    assert lines[0] == b"unpack ok\n", \
        f"expected line 1 b'unpack ok', got: {lines[0]}"
    assert lines[1] == b"ok refs/heads/master\n", \
        f"expected line 2 b'ok refs/heads/master', got: {lines[1]}"
    return remote_sha1, missing
def _install_sanity_app():
    data = {
        'deployment_id': DEPLOYMENT_ID,
        'workflow_id': 'install'
    }
    headers = utils.create_maintenance_headers()
    headers.update({'content-type': 'application/json'})

    resp = utils.http_request(
        '{0}/executions'.format(_get_url_prefix()),
        method='POST',
        data=json.dumps(data),
        headers=headers)

    # Waiting for installation to complete
    utils.repetitive(
        utils.wait_for_workflow,
        timeout=5 * 60,
        interval=30,
        deployment_id=DEPLOYMENT_ID,
        workflow_id='install',
        url_prefix=_get_url_prefix(),
        timeout_msg='Timed out while waiting for '
                    'deployment {0} to install'.format(DEPLOYMENT_ID))

    resp_content = resp.readlines()
    json_resp = json.loads(resp_content[0])
    return json_resp['id']
def viewers(lrrbot, conn, event, respond_to):
    """
    Command: !viewers

    Post the number of viewers currently watching the stream
    """
    stream_info = twitch.get_info()
    if stream_info:
        viewers = stream_info.get("viewers")
    else:
        viewers = None

    # Since we're using TWITCHCLIENT 3, we don't get join/part messages, so we
    # can't just use len(lrrbot.channels["#loadingreadyrun"].userdict), as
    # that dict won't be populated. Need to call this API instead.
    chatters = utils.http_request(
        "http://tmi.twitch.tv/group/user/%s/chatters" % config["channel"])
    chatters = json.loads(chatters).get("chatter_count")

    if viewers is not None:
        viewers = "%d %s viewing the stream." % (
            viewers, "user" if viewers == 1 else "users")
    else:
        viewers = "Stream is not live."

    if chatters is not None:
        chatters = "%d %s in the chat." % (
            chatters, "user" if chatters == 1 else "users")
    else:
        chatters = "No-one in the chat."

    conn.privmsg(respond_to, "%s %s" % (viewers, chatters))
def get_timeline(user, no_replies = True, prefix = None):
    format = "json"
    url = 'https://api.twitter.com/1/statuses/user_timeline.%s?screen_name=%s&include_rts=1' % (format, user)
    response = http_request(url)
    try:
        full_timeline = json.loads(response)
    except json.decoder.JSONDecodeError:
        print "Json error for: %s" % response
        return

    tweets = {}
    for one in full_timeline:
        tw_id = one['id']
        text = one['text'].strip()

        if no_replies:
            if one['in_reply_to_user_id']:
                continue
            if '@' in text[0:2]:
                continue

        if one.get('retweeted_status', None):
            retweeted = one['retweeted_status']
            text = u"RT @{0}: {1}".format(retweeted['user']['screen_name'],
                                          retweeted['text'])

        if prefix:
            text = "%s %s" % (prefix, text)

        tweets[tw_id] = text
    return tweets
def get_schema_id(create_schema_url, headers, schema_title, class_id,
                  mixin_id, data):
    """
    Get the schemaId by making a POST request to
    "/data/foundation/schemaregistry/tenant/schemas"

    :param create_schema_url: url
    :param headers: headers
    :param schema_title: schema title
    :param class_id: class_url
    :param mixin_id: mixin_url
    :param data: post request data
    :return: schema_url
    """
    # Set the title and description
    data['title'] = schema_title
    data['description'] = schema_title

    schema_id = get_id_if_entity_exists(create_schema_url, headers, schema_title)
    if schema_id is None:
        # Set the mixin id
        data['meta:extends'][0] = mixin_id
        data["allOf"][0]["$ref"] = mixin_id

        # Set the class id
        data['meta:extends'][1] = class_id
        data["allOf"][1]["$ref"] = class_id

        headers["Content-type"] = CONTENT_TYPE
        res_text = http_request("post", create_schema_url, headers,
                                json.dumps(data))
        schema_id = json.loads(res_text)["$id"]
        LOGGER.debug("Created schema_id = %s", schema_id)
    return schema_id
def _upload_app_blueprint(app_tar):
    if _is_sanity_blueprint_exist(should_fail=True):
        return

    with open(app_tar, 'rb') as f:
        app_data = f.read()
    length = os.path.getsize(app_tar)

    headers = utils.create_maintenance_headers()
    headers['Content-Length'] = length
    headers['Content-Type'] = 'application/octet-stream'
    params = urllib.urlencode(
        dict(application_file_name='singlehost-blueprint.yaml'))

    endpoint = '{0}/blueprints/{1}'.format(_get_url_prefix(), BLUEPRINT_ID)
    url = endpoint + '?' + params
    utils.http_request(url, data=app_data, headers=headers)
def upload_file(create_batch_url, headers, file_with_tenant_id, dataset_id,
                batch_id):
    """
    Upload the data file to a batch of the dataset

    :param create_batch_url: url
    :param headers: headers
    :param file_with_tenant_id: file name
    :param dataset_id: dataset id
    :param batch_id: batch id
    """
    headers["Content-type"] = "application/octet-stream"
    headers["Connection"] = "keep-alive"
    with open(FILE_PATH + file_with_tenant_id, "rb") as data_file:
        contents = data_file.read()
    upload_url = (create_batch_url + "/" + batch_id + "/datasets/" +
                  dataset_id + "/files/data/" + file_with_tenant_id)
    LOGGER.info("Upload url is %s", upload_url)
    http_request("put", upload_url, headers, contents)
    LOGGER.info("Upload file success")
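# The batch lifecycle these two helpers imply, as a sketch. The URL and ids
# are placeholders; creating the batch (the prior POST that yields batch_id)
# is assumed to happen elsewhere and is not shown in this snippet collection.
create_batch_url = 'https://platform.adobe.io/data/foundation/import/batches'
upload_file(create_batch_url, headers, 'data_<tenant_id>.json',
            '<dataset_id>', '<batch_id>')
close_batch(create_batch_url, headers, '<batch_id>')  # flags it COMPLETE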
def get_id_if_entity_exists(url, headers, title):
    """Return the '$id' of the first entity whose title matches, or None."""
    headers = copy.deepcopy(headers)
    headers['Accept'] = ACCEPT_HEADER
    response = http_request('get', url, headers)
    results = json.loads(response)['results']
    for entity in results:
        if entity['title'] == title:
            LOGGER.debug('Existing %s ID = %s', title, entity['$id'])
            return entity['$id']
def _assert_webserver_running():
    resp = utils.http_request('http://localhost:8080', method='GET',
                              timeout=10)
    if not resp:
        ctx.abort_operation("Can't connect to webserver")
    if resp.code != 200:
        ctx.abort_operation('Sanity app webserver failed to start')
def post(self, tinkerbell):
    response = utils.http_request(tinkerbell, str(self), "POST")
    if response:
        utils.log(info="Posted component to tinkerbell",
                  body=response.read())
        return True
    return False
def _is_sanity_blueprint_exist(should_fail=False):
    headers = utils.create_maintenance_headers()

    res = utils.http_request(
        '{0}/blueprints/{1}'.format(_get_url_prefix(), BLUEPRINT_ID),
        method='GET',
        headers=headers,
        should_fail=should_fail)

    if not res:
        return False
    return res.code == 200
def post_all(cls, components, tinkerbell):
    components_json = json.dumps(
        {"components": [c.post_dict() for c in components]})
    response = utils.http_request(tinkerbell, components_json, "POST")
    if response:
        utils.log(info="Posted components to tinkerbell",
                  body=response.read())
        return True
    return False
def fetch_new_token_for_app(self, app_id):
    # open(self.cookies_file, 'w').close()
    html = self.get_auth_html(app_id)  # Fetch the authorization form
    # print html
    if "Login success" not in html:
        # The URL the login credentials should be submitted to
        form_action_url = self.parse_form_action(html)
        # The login form fields, filled in with our credentials
        form_data = self.fill_auth_data(self.parse_form_data(html))
        sleep(3)
        confirm_form = http_request(form_action_url, post = form_data,
                                    cookies_file = self.cookies_file)
        form_action_url = self.parse_form_action(confirm_form)
        sleep(2)
        response = http_request(form_action_url, read_headers = True,
                                cookies_file = self.cookies_file)
    else:
        response = html

    self.app_token, self.user_id = self.parse_token_and_user_id(response)
    # open(self.cookies_file, 'w').close()
    if self.debug:
        print "caching token: %s" % self.app_token
    self.mc.set(self.mc_key_for_token(app_id, self.login),
                self.app_token, 60 * 60 * 12)
    return self.app_token
def request_list(hostname, out_format):
    entity = utils.ENTITY_PROCESSOR
    instant = 1

    payload = '{"entity":%d, "instant":%d, "value":%f}' % (entity, instant, 0.0)
    url = 'http://%s/api/v1/hwmon/set/device/maxtemp' % hostname
    utils.response_output(out_format, utils.http_request(url, payload))

    payload = '{"entity":%d, "instant":%d, "value":%f}' % (entity, instant, -20.0)
    url = 'http://%s/api/v1/hwmon/set/device/reltemp' % hostname
    utils.response_output(out_format, utils.http_request(url, payload))

    instant = 2
    payload = '{"entity":%d, "instant":%d, "value":%f}' % (entity, instant, 0.0)
    url = 'http://%s/api/v1/hwmon/set/device/maxtemp' % hostname
    utils.response_output(out_format, utils.http_request(url, payload))

    payload = '{"entity":%d, "instant":%d, "value":%f}' % (entity, instant, -10.0)
    url = 'http://%s/api/v1/hwmon/set/device/reltemp' % hostname
    utils.response_output(out_format, utils.http_request(url, payload))
def get_api_data(url, timeout=DEFAULT_TIMEOUT, max_response=MAX_RESPONSE):
    try:
        response = http_request(url, timeout=timeout,
                                max_response=max_response, json=True)
        return response
    except HttpError as error:
        domain = urllib.parse.urlparse(url).netloc
        log.error('Error making request to {} API ({}): {}'.format(
            domain, error.type, error.message))
        return None
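# A usage sketch, assuming (as get_api_data does) that http_request with
# json=True returns the decoded response body. The endpoint is hypothetical.
data = get_api_data('https://api.example.com/v1/status')
if data is None:
    log.warning('API unavailable, falling back to cached data')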
def get_tenant_id(tenant_id_url, headers):
    """
    Get the tenantId by making a GET call to
    "/data/foundation/schemaregistry/stats"

    :param tenant_id_url: url
    :param headers: headers
    :return: tenant id
    """
    res_text = http_request("get", tenant_id_url, headers)
    tenant_id = "_" + json.loads(res_text)["tenantId"]
    LOGGER.debug("tenant_id = %s", tenant_id)
    return tenant_id
def _delete_sanity_blueprint():
    if not _is_sanity_blueprint_exist():
        return

    headers = utils.create_maintenance_headers()
    resp = utils.http_request(
        '{0}/blueprints/{1}'.format(_get_url_prefix(), BLUEPRINT_ID),
        method='DELETE',
        headers=headers)
    if resp.code != 200:
        ctx.abort_operation('Failed deleting '
                            'blueprint {0}: {1}'.format(BLUEPRINT_ID,
                                                        resp.reason))
def restore_upgrade_data(es_endpoint_ip, es_endpoint_port):
    bulk_endpoint = 'http://{0}:{1}/_bulk'.format(es_endpoint_ip,
                                                  es_endpoint_port)
    all_data = ''
    with open(DUMP_FILE_PATH) as f:
        for line in f:
            element = _update_element_if_required(json.loads(line))
            all_data += _create_element_request(element)

    ctx.logger.info('Restoring elasticsearch data')
    res = utils.http_request(url=bulk_endpoint, data=all_data, method='POST')
    if res.code != 200:
        ctx.abort_operation('Failed restoring elasticsearch data.')
    ctx.logger.info('Elasticsearch data was successfully restored')
def get_auth_html(self, appid):
    params = {
        'client_id': appid,
        'scope': self.scope,
        'redirect_uri': 'http://api.vkontakte.ru/blank.html',
        'display': 'wap',
        'response_type': 'token'
    }
    param_str = url_params(params)
    # url = "http://oauth.vk.com/authorize?%s" % param_str
    url = "http://api.vk.com/oauth/authorize?%s" % param_str
    html = http_request(url, cookies_file = self.cookies_file,
                        read_headers = True)
    return html
def _configure_elasticsearch(host, port):
    ctx.logger.info('Configuring Elasticsearch storage index...')
    storage_endpoint = 'http://{0}:{1}/cloudify_storage/'.format(host, port)
    storage_settings = json.dumps({
        "settings": {
            "analysis": {
                "analyzer": {
                    "default": {
                        "tokenizer": "whitespace"
                    }
                }
            }
        }
    })
    ctx.logger.debug('Deleting `cloudify_storage` index if exists...')
    if utils.http_request(storage_endpoint, method='GET'):
        utils.http_request(storage_endpoint, method='DELETE')
    ctx.logger.debug('Creating `cloudify_storage` index...')
    utils.http_request(storage_endpoint, storage_settings, 'PUT')
def has_successful_batch(dataset_url, headers, input_dataset_id):
    """
    :param dataset_url: dataset url
    :param headers: headers
    :param input_dataset_id: dataset id
    :return: True or False
    """
    check_url = '{}/{}'.format(dataset_url, input_dataset_id)
    response = http_request('get', check_url, headers)
    dataset = json.loads(response)[input_dataset_id]
    if 'lastSuccessfulBatch' in dataset:
        LOGGER.debug('Input dataset has previous successful batch')
        return True
    return False
def get_twitch_emotes():
    data = utils.http_request("https://api.twitch.tv/kraken/chat/emoticons")
    data = json.loads(data)['emoticons']
    emotesets = {}
    for emote in data:
        # The regexes from Twitch have HTML entities escaped; undo that
        emote['regex'] = emote['regex'].replace(r"\&lt\;", "<") \
            .replace(r"\&gt\;", ">") \
            .replace(r"\&quot\;", '"') \
            .replace(r"\&amp\;", "&")
        regex = re.compile("(%s)" % emote['regex'])
        for image in emote['images']:
            html = '<img src="%s" width="%d" height="%d" alt="{0}" title="{0}">' % (
                image['url'], image['width'], image['height'])
            emotesets.setdefault(image.get("emoticon_set"), {})[emote['regex']] = {
                "regex": regex,
                "html": html,
            }
    return emotesets
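# A sketch of applying the returned tables to a chat line. Which key holds
# the global emote set (None vs 0) depends on the API response, so both are
# tried here; this is an assumption, not something the code above pins down.
emotesets = get_twitch_emotes()
global_set = emotesets.get(None) or emotesets.get(0) or {}
message = "Kappa hello"
for emote in global_set.values():
    message = emote['regex'].sub(lambda m: emote['html'].format(m.group(1)),
                                 message)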
def api_request(uri, *args, **kwargs):
    # Send the information to the server
    try:
        res = utils.http_request(config['siteurl'] + uri, *args, **kwargs)
    except Exception:
        log.exception("Error at server in %s" % uri)
    else:
        try:
            res = json.loads(res)
        except Exception:
            log.exception("Error parsing server response from %s: %s" % (uri, res))
        else:
            if 'success' not in res:
                log.error("Error at server in %s" % uri)
def _assert_logs_and_events(execution_id):
    headers = utils.create_maintenance_headers()
    params = urllib.urlencode(
        dict(execution_id=execution_id, type='cloudify_log'))
    endpoint = '{0}/events'.format(_get_url_prefix())
    url = endpoint + '?' + params

    resp = utils.http_request(url, method='GET', headers=headers, timeout=30)
    if not resp:
        ctx.abort_operation("Can't connect to elasticsearch")
    if resp.code != 200:
        ctx.abort_operation('Failed to retrieve logs/events')

    resp_content = resp.readlines()
    json_resp = json.loads(resp_content[0])
    if 'items' not in json_resp or not json_resp['items']:
        ctx.abort_operation('No logs/events received')
def _assert_logs_and_events(execution_id):
    headers = utils.create_maintenance_headers()
    params = urllib.urlencode((
        ('execution_id', execution_id),
        ('type', 'cloudify_event'),
        ('type', 'cloudify_log'),
        ('_sort', '@timestamp'),
        ('_size', 100),
        ('_offset', 0),
    ))
    endpoint = '{0}/events'.format(_get_url_prefix())
    url = endpoint + '?' + params
    ctx.logger.debug('Sending request to url: {0}, with the following '
                     'headers: {1}'.format(url, headers))

    resp = utils.http_request(url, method='GET', headers=headers, timeout=30)
    if not resp:
        ctx.abort_operation("Can't connect to Cloudify's rest service")
    if resp.code != 200:
        ctx.abort_operation('Failed to retrieve logs/events')

    resp_content = resp.readlines()
    json_resp = json.loads(resp_content[0])
    if 'items' not in json_resp or not json_resp['items']:
        ctx.logger.debug('No items received. The response is: '
                         '{0}'.format(json_resp))
        ctx.abort_operation('No logs/events received')

    db_name = 'cloudify_db'
    for table_name in ['logs', 'events']:
        proc = utils.run([
            'sudo', '-u', 'postgres', 'psql', db_name, '-t', '-c',
            'SELECT COUNT(*) FROM {0}'.format(table_name),
        ])
        count = int(proc.aggr_stdout)
        if count <= 0:
            ctx.abort_operation(
                'Failed to retrieve {0} from PostgreSQL'.format(table_name))
def get_upcoming_events(calendar, after=None):
    """
    Get the next several events from the calendar. Will include the
    currently-happening events (if any) and a number of following events.

    Results are cached, so we get more events than we should need, so that if
    the first few events become irrelevant by the time the cache expires, we
    still have the data we need.

    (Technically, the API quota limits allow us to get the events, for both
    calendars, every 1.7 seconds... but still, caching on principle.)

    The "after" parameter allows overriding the reference time, for testing
    purposes.
    """
    if after is None:
        after = datetime.datetime.now(datetime.timezone.utc)
    url = EVENTS_URL % urllib.parse.quote(calendar)
    data = {
        "maxResults": EVENT_COUNT,
        "orderBy": "startTime",
        "singleEvents": "true",
        "timeMin": after.strftime(DATE_FORMAT),
        "timeZone": config['timezone'].zone,
        "key": config['google_key'],
    }
    res = utils.http_request(url, data)
    res = json.loads(res)
    if 'error' in res:
        raise Exception(res['error']['message'])

    formatted_items = []
    for item in res['items']:
        formatted_items.append({
            "id": item['id'],
            "url": item['htmlLink'],
            "title": item['summary'],
            "creator": item['creator']['displayName'],
            "start": dateutil.parser.parse(item['start']['dateTime']),
            "end": dateutil.parser.parse(item['end']['dateTime']),
            "location": item.get('location'),
        })
    return formatted_items
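# A sketch of calling get_upcoming_events, assuming config supplies
# 'timezone' and 'google_key' as the function expects. The calendar id is a
# placeholder.
events = get_upcoming_events('example@group.calendar.google.com')
for event in events:
    print(event['start'], event['title'], event['url'])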
def main():
    es_endpoint_ip = ctx_properties['es_endpoint_ip']
    es_endpoint_port = ctx_properties['es_endpoint_port']

    if utils.is_upgrade or utils.is_rollback:
        # 'provider_context' and 'snapshot' elements will be migrated to the
        # future version
        es_upgrade_utils.dump_upgrade_data()

    if not es_endpoint_ip:
        es_endpoint_ip = ctx.instance.host_ip
        _install_elasticsearch()

        utils.systemd.restart(ES_SERVICE_NAME, append_prefix=False)
        utils.wait_for_port(es_endpoint_port, es_endpoint_ip)
        _configure_elasticsearch(host=es_endpoint_ip, port=es_endpoint_port)
        _wait_for_shards(es_endpoint_port, es_endpoint_ip)
        utils.clean_var_log_dir('elasticsearch')
    else:
        ctx.logger.info('External Elasticsearch Endpoint provided: '
                        '{0}:{1}...'.format(es_endpoint_ip, es_endpoint_port))
        time.sleep(5)
        utils.wait_for_port(es_endpoint_port, es_endpoint_ip)
        ctx.logger.info('Checking if \'cloudify_storage\' '
                        'index already exists...')

        if utils.http_request('http://{0}:{1}/cloudify_storage'.format(
                es_endpoint_ip, es_endpoint_port), method='HEAD').code == 200:
            ctx.abort_operation('\'cloudify_storage\' index already exists on '
                                '{0}, terminating bootstrap...'.format(
                                    es_endpoint_ip))
        _configure_elasticsearch(host=es_endpoint_ip, port=es_endpoint_port)

    if not es_endpoint_port:
        utils.systemd.stop(ES_SERVICE_NAME, append_prefix=False)

    ctx.instance.runtime_properties['es_endpoint_ip'] = es_endpoint_ip
def get_game(name, all=False):
    """
    Get the game information for a particular game.

    For response object structure, see:
    https://github.com/justintv/Twitch-API/blob/master/v3_resources/search.md#example-response-1

    May throw exceptions on network/Twitch error.
    """
    search_opts = {
        'query': name,
        'type': 'suggest',
        'live': 'false',
    }
    res = utils.http_request("https://api.twitch.tv/kraken/search/games",
                             search_opts)
    res = json.loads(res)
    if all:
        return res['games']
    else:
        for game in res['games']:
            if game['name'] == name:
                return game
        return None
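# A usage sketch for get_game: the exact-match loop returns None unless
# Twitch reports a game whose name matches precisely.
game = get_game("Dark Souls")
if game is not None:
    print(game['name'])
else:
    # Fall back to the full suggestion list when there is no exact match
    print([g['name'] for g in get_game("Dark Souls", all=True)])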
def _configure_elasticsearch(host, port):
    storage_endpoint = 'http://{0}:{1}/cloudify_storage/'.format(host, port)
    storage_settings = json.dumps({
        "settings": {
            "analysis": {
                "analyzer": {
                    "default": {"tokenizer": "whitespace"}
                }
            }
        }
    })
    ctx.logger.info('Deleting `cloudify_storage` index if exists...')
    if utils.http_request(storage_endpoint, method='GET'):
        utils.http_request(storage_endpoint, method='DELETE')
    ctx.logger.info('Creating `cloudify_storage` index...')
    utils.http_request(storage_endpoint, storage_settings, 'PUT')

    blueprint_mapping_endpoint = storage_endpoint + 'blueprint/_mapping'
    blueprint_mapping = json.dumps({
        "blueprint": {
            "properties": {
                "plan": {"enabled": False}
            }
        }
    })
    ctx.logger.info('Declaring blueprint mapping...')
    utils.http_request(blueprint_mapping_endpoint, blueprint_mapping, 'PUT')

    deployment_mapping_endpoint = storage_endpoint + 'deployment/_mapping'
    deployment_mapping = json.dumps({
        "deployment": {
            "properties": {
                "workflows": {"enabled": False},
                "inputs": {"enabled": False},
                "policy_type": {"enabled": False},
                "policy_triggers": {"enabled": False},
                "groups": {"enabled": False},
                "outputs": {"enabled": False}
            }
        }
    })
    ctx.logger.info('Declaring deployment mapping...')
    utils.http_request(deployment_mapping_endpoint, deployment_mapping, 'PUT')

    execution_mapping_endpoint = storage_endpoint + 'execution/_mapping'
    execution_mapping = json.dumps({
        "execution": {
            "properties": {
                "parameters": {"enabled": False}
            }
        }
    })
    ctx.logger.info('Declaring execution mapping...')
    utils.http_request(execution_mapping_endpoint, execution_mapping, 'PUT')

    node_mapping_endpoint = storage_endpoint + 'node/_mapping'
    node_mapping = json.dumps({
        "node": {
            "_id": {"path": "id"},
            "properties": {
                "types": {"type": "string", "index_name": "type"},
                "properties": {"enabled": False},
                "operations": {"enabled": False},
                "relationships": {"enabled": False}
            }
        }
    })
    ctx.logger.info('Declaring node mapping...')
    utils.http_request(node_mapping_endpoint, node_mapping, 'PUT')

    node_instance_mapping_endpoint = \
        storage_endpoint + 'node_instance/_mapping'
    node_instance_mapping = json.dumps({
        "node_instance": {
            "_id": {"path": "id"},
            "properties": {
                "runtime_properties": {"enabled": False}
            }
        }
    })
    ctx.logger.info('Declaring node instance mapping...')
    utils.http_request(node_instance_mapping_endpoint,
                       node_instance_mapping, 'PUT')

    deployment_modification_mapping_endpoint = \
        storage_endpoint + 'deployment_modification/_mapping'
    deployment_modification_mapping = json.dumps({
        "deployment_modification": {
            "_id": {"path": "id"},
            "properties": {
                "modified_nodes": {"enabled": False},
                "node_instances": {"enabled": False},
                "context": {"enabled": False}
            }
        }
    })
    ctx.logger.info('Declaring deployment modification mapping...')
    utils.http_request(
        deployment_modification_mapping_endpoint,
        deployment_modification_mapping, 'PUT')

    deployment_update_mapping_endpoint = \
        storage_endpoint + 'deployment_update/_mapping'
    deployment_update_mapping = json.dumps({
        "deployment_update": {
            "_id": {"path": "id"},
            "properties": {
                "deployment_update_nodes": {"enabled": False},
                "deployment_update_node_instances": {"enabled": False},
                "deployment_update_deployment": {"enabled": False},
                "deployment_plan": {"enabled": False}
            }
        }
    })
    ctx.logger.info('Declaring deployment update mapping...')
    utils.http_request(
        deployment_update_mapping_endpoint,
        deployment_update_mapping, 'PUT')
def get_calendar_data():
    ical = utils.http_request(URL)
    return icalendar.Calendar.from_ical(ical)