def firstDeleteRemoteContainers(remoteHost):
    # Mirror-delete: list the containers on the LOCAL Docker daemon (port
    # 4433) and force-delete containers of the same names on *remoteHost*.
    localurl = 'http://127.0.0.1:4433/containers/json'
    response = requests.get(localurl)
    for item in response.json():
        for name in item["Names"]:
            # NOTE(review): Docker API names usually carry a leading '/',
            # which produces 'containers//name' here — confirm the remote
            # endpoint tolerates the double slash.
            # v=1 removes associated volumes, f=1 forces removal of a
            # running container.
            remoteurl = 'http://{0}:4433/containers/{1}?v=1&f=1'.format(remoteHost, name)
            requests.delete(remoteurl)
def loadDatasetToMLDB(cls):
    """Create the sparse 'null_column_test' dataset on the local MLDB
    instance, load two rows (r1 has only col1, r2 has col1 and col2),
    and commit it. Stores the dataset URLs on *cls* for later tests."""
    dataset_name = "null_column_test"
    cls.url = "http://localhost:%d/v1" % cls.port
    cls.dataset_url = cls.url + "/datasets/" + dataset_name
    # Drop any leftover dataset from a previous run.
    requests.delete(cls.dataset_url)

    # Register the dataset
    data = json.dumps({
        "type": "sparse.mutable",
        "id": dataset_name,
    })
    requests.post(cls.url + "/datasets", data=data)

    # Fill the data
    # ____________________
    # |    | col1 | col2 |
    # --------------------
    # | r1 |  1   |      |
    # --------------------
    # | r2 |  1   |  2   |
    # --------------------
    ts = datetime.datetime.now().isoformat(' ')

    cols = [['col1', 1, ts]]
    data = {"rowName": "r1", "columns": cols}
    requests.post(cls.dataset_url + "/rows", json.dumps(data))

    cols = [['col1', 1, ts], ['col2', 2, ts]]
    data = {"rowName": "r2", "columns": cols}
    requests.post(cls.dataset_url + "/rows", json.dumps(data))

    # Commit the dataset
    requests.post(cls.dataset_url + "/commit")
def delete_object(self, type, id):
    """Delete the remote object identified by *type*/*id* and mark the
    matching locally-tracked object as deleted.

    Raises StopIteration if no matching object is tracked in self._objects.
    """
    self._check_type(type)
    url = self.get_url_by_type_and_id(type, id)
    requests.delete(url)
    _object_to_mark_as_deleted = ObjectHolder(type, id)
    # BUG FIX: the original used next(filter(...)), which fails on Python 2
    # where filter() returns a list (lists are not iterators). A generator
    # expression behaves identically on both Python 2 and 3.
    _found_object = (x for x in self._objects if x == _object_to_mark_as_deleted)
    next(_found_object).mark_as_deleted()
def remove_units(self, units, app_name):
    """Ask the tsuru API to remove *units* units from the app *app_name*."""
    endpoint = "{0}/apps/{1}/units".format(settings.TSURU_HOST, app_name)
    requests.delete(
        endpoint,
        headers=self.authorization,
        data=str(units),
    )
def main(argv): try: opts, args = getopt.getopt(argv,"hd:g:",["-d"]) except getopt.GetoptError: print 'No option for disks added, using default of 3 Disks' for opt, arg in opts: if opt == '-d': print arg + ' disks entered' numberOfDisks = int(arg) #run our prebuild pegs = prebuild(numberOfDisks) #print our starting disks prerunpegsize = requests.get(createStackUrl + '/' + str(pegs[0]) + '/size' , auth=(username,password)) preruntopdisk = requests.get(createStackUrl + '/' + str(pegs[0]) + '/peek' , auth=(username,password)) #run hanoi print 'stack ' + str(pegs[0]) + ' has ' + prerunpegsize.text \ + ' disks with the disk ' + preruntopdisk.text + ' on top' hanoi(numberOfDisks, pegs[0], pegs[1], pegs[2]) #print our final disks prerunpegsize = requests.get(createStackUrl + '/' + str(pegs[2]) + '/size' , auth=(username,password)) preruntopdisk = requests.get(createStackUrl + '/' + str(pegs[2]) + '/peek' , auth=(username,password)) #run hanoi print 'stack ' + str(pegs[2]) + ' has ' + prerunpegsize.text \ + ' disks with the disk ' + preruntopdisk.text + ' on top' #do cleanups for i in range(len(pegs)): requests.delete(createStackUrl + '/' + str(pegs[i]) + '/clear' , auth=(username,password))
def delete(self, uri, payload=None, etag=None, content_type="application/json",
           accept="application/json"):
    """Issue a DELETE to *uri*, optionally with a JSON *payload* and an
    If-Match *etag*; logs headers and payload, returns self._response().
    """
    headers = {'content-type': content_type, 'accept': accept}
    if etag is not None:
        headers['if-match'] = etag
    self.logger.debug("DELETE {0}...".format(uri))
    self.payload_logger.debug("Headers:")
    self.payload_logger.debug(json.dumps(headers, indent=2))
    # CLEANUP: the original tested payload against None twice (once with the
    # non-idiomatic `!= None`); the branches are merged into one check.
    if payload is not None:
        self.payload_logger.debug("Payload:")
        if content_type == 'application/json':
            self.payload_logger.debug(json.dumps(payload, indent=2))
        else:
            self.payload_logger.debug(payload)
        self.response = requests.delete(uri, json=payload, auth=self.auth, headers=headers)
    else:
        self.response = requests.delete(uri, auth=self.auth, headers=headers)
    return self._response()
def tearDown(self):
    """
    Delete the server
    """
    # NOTE(review): headers is built but never passed to any request.
    headers = {'Content-Type': 'application/json; charset=utf-8'}
    response = requests.get(__db_url__)
    value = response.json()
    if value:
        # Work on the most recently listed database.
        db_length = len(value['databases'])
        last_db_id = value['databases'][db_length-1]['id']
        url = 'http://%s:8000/api/1.0/databases/%u/servers/' % \
            (__host_or_ip__,last_db_id)
        response = requests.get(url)
        value = response.json()
        if value:
            # Delete the most recently listed server of that database.
            server_length = len(value['members'])
            last_server_id = value['members'][server_length-1]['id']
            print "ServerId to be deleted is " + str(last_server_id)
            url += str(last_server_id)
            response = requests.delete(url)
            self.assertEqual(response.status_code, 200)
            # Delete database
            db_url = __db_url__ + str(last_db_id)
            response = requests.delete(db_url)
            self.assertEqual(response.status_code, 200)
        else:
            print "The Server list is empty"
    else:
        print "The database list is empty"
def TestGroup():
    # End-to-end exercise of the group API: create a group, rename it,
    # add a member, remove the member, then delete the group.
    access_token = login(13635273142)
    url = URL + "/groups"
    group = {"master":13635273142,"members":[13635273142], "name":"test", "super":True}
    headers = {}
    headers["Authorization"] = "Bearer " + access_token
    headers["Content-Type"] = "application/json; charset=UTF-8"
    r = requests.post(url, data=json.dumps(group), headers = headers)
    assert(r.status_code == 200)
    obj = json.loads(r.content)
    group_id = obj["data"]["group_id"]
    # Rename the group.
    url = URL + "/groups/%s"%str(group_id)
    r = requests.patch(url, data=json.dumps({"name":"test_new"}), headers = headers)
    assert(r.status_code == 200)
    # Add a second member.
    url = URL + "/groups/%s/members"%str(group_id)
    r = requests.post(url, data=json.dumps({"uid":13635273143}), headers = headers)
    assert(r.status_code == 200)
    # Remove that member again.
    url = URL + "/groups/%s/members/13635273143"%str(group_id)
    r = requests.delete(url, headers = headers)
    assert(r.status_code == 200)
    # Delete the group. NOTE(review): this final response is not asserted.
    url = URL + "/groups/%s"%str(group_id)
    r = requests.delete(url, headers = headers)
    print "test group completed"
def absent(name, profile='grafana'):
    '''
    Ensure that a data source is absent.

    name
        Name of the data source to remove.
    '''
    # Resolve a named profile from the minion/master config if needed.
    if isinstance(profile, string_types):
        profile = __salt__['config.option'](profile)
    ret = {'result': None, 'comment': None, 'changes': None}
    datasource = _get_datasource(profile, name)
    if not datasource:
        # Nothing to do — already absent.
        ret['result'] = True
        ret['comment'] = 'Data source {0} already absent'.format(name)
        return ret
    # NOTE(review): the delete response is not checked for success.
    requests.delete(
        _get_url(profile, datasource['id']),
        headers=_get_headers(profile),
        timeout=profile.get('grafana_timeout', 3),
    )
    ret['result'] = True
    ret['comment'] = 'Data source {0} was deleted'.format(name)
    return ret
def kill():
    # Delete every droplet whose name matches DROPLET_NAME.
    args = {}
    droplets = get(args)['droplets']
    for d in droplets:
        if d['name'] == DROPLET_NAME:
            print 'Deleting %s (%d)' % (d['name'], d['id'])
            # The Response repr is printed so the status is visible.
            print requests.delete(API+'/'+str(d['id']), headers=HEADERS)
def tearDownModule():
    # remove index created
    INI_CONFIG = IniParser().read_ini(MY_PUB_INI_FILE)
    requests.delete(ElasticSettings.url() + '/' + INI_CONFIG['DISEASE']['index'])
    # Remove the INI file and any staging directory the tests created.
    os.remove(MY_PUB_INI_FILE)
    if os.path.exists(TEST_DATA_DIR + '/STAGE'):
        shutil.rmtree(TEST_DATA_DIR + '/STAGE')
def test_collection_user_authorized_documents(suite, collection_level_user, docs):
    # Exercise the authorized-documents collection endpoints as a
    # collection-level user: wipe, rejected wipe, add, list, wipe again.
    jwt = _login(collection_level_user)
    headers = _auth_header(jwt)
    authorized_documents = 'http://localhost:5000/api/authorized-app/authorized-documents'
    # delete all documents in a collection
    confirmed_dangerous_delete = requests.delete(authorized_documents, headers=headers, params={'delete_all': True})
    assert confirmed_dangerous_delete.ok
    # delete all documents in a collection, set delete_all to false and throw an error
    dangerous_delete = requests.delete(authorized_documents, headers=headers, params={'delete_all': False})
    assert not dangerous_delete.ok
    # add an item to the collection
    post = requests.post(authorized_documents, headers=headers, data=json.dumps(docs))
    assert post.ok
    # get the first page of the docs added (one)
    get_all = requests.get(authorized_documents + ';json', headers=headers)
    assert get_all.ok
    # delete the doc that we added
    confirmed_dangerous_delete = requests.delete(authorized_documents, headers=headers, params={'delete_all': True})
    assert confirmed_dangerous_delete.ok
    _logout(jwt)
def publish(bulk, endpoint, rebuild, mapping):
    """Publish dataType documents to an ElasticSearch *endpoint*.

    Optionally rebuilds the 'dco' index, pushes the datatype mapping from
    the *mapping* file (retrying once after deleting an incompatible
    mapping), then bulk-imports the *bulk* payload.
    Raises requests.HTTPError on an unrecoverable failure.
    """
    # if configured to rebuild_index
    # Delete and then re-create the dataType index (via PUT request)
    index_url = endpoint + "/dco"
    if rebuild:
        requests.delete(index_url)
        r = requests.put(index_url)
        if r.status_code != requests.codes.ok:
            print(r.url, r.status_code)
            r.raise_for_status()

    # push current dataType document mapping
    mapping_url = endpoint + "/dco/datatype/_mapping"
    # BUG FIX: the retry below used to re-send the SAME open file handle,
    # which had already been read to EOF by the first PUT, so the retry
    # pushed an empty body. Read the file content once and reuse it.
    with open(mapping) as mapping_file:
        mapping_data = mapping_file.read()
    r = requests.put(mapping_url, data=mapping_data)
    if r.status_code != requests.codes.ok:
        # new mapping may be incompatible with previous
        # delete current mapping and re-push
        requests.delete(mapping_url)
        r = requests.put(mapping_url, data=mapping_data)
        if r.status_code != requests.codes.ok:
            print(r.url, r.status_code)
            r.raise_for_status()

    # bulk import new dataType documents
    bulk_import_url = endpoint + "/_bulk"
    r = requests.post(bulk_import_url, data=bulk)
    if r.status_code != requests.codes.ok:
        print(r.url, r.status_code)
        r.raise_for_status()
def __init__(self, url, index, mappings = None, clean = False):
    '''Connect to ElasticSearch at *url* and ensure *index* exists.

    clean: remove already existing index
    '''
    self.url = url
    # Valid index for elastic
    self.index = self.safe_index(index)
    self.index_url = self.url+"/"+self.index
    self.max_items_bulk = 100
    self.wait_bulk_seconds = 2  # time to wait to complete a bulk operation
    try:
        r = requests.get(self.index_url)
    except requests.exceptions.ConnectionError:
        raise ElasticConnectException()
    if r.status_code != 200:
        # Index does not exist yet — create it.
        r = requests.post(self.index_url)
        if r.status_code != 200:
            logging.info("Can't create index %s (%s)" % (self.index_url, r.status_code))
            raise ElasticWriteException()
        else:
            logging.info("Created index " + self.index_url)
    else:
        if clean:
            # NOTE(review): neither the delete nor the re-create below is
            # checked for success.
            requests.delete(self.index_url)
            requests.post(self.index_url)
            logging.info("Deleted and created index " + self.index_url)
    if mappings:
        self.create_mappings(mappings)
def test_timed_cycle(self): return # basically a debug test for thread pool bit target = 'http://requestbin.zapier.com/api/v1/bin/test_timed_cycle' hooks = [self.make_hook(event, target) for event in ['comment.added', 'comment.changed', 'comment.removed']] for n in range(4): early = datetime.now() # fires N * 3 http calls for x in range(10): comment = Comment.objects.create( site=self.site, content_object=self.user, user=self.user, comment='Hello world!' ) comment.comment = 'Goodbye world...' comment.save() comment.delete() total = datetime.now() - early print(total) while True: response = requests.get(target + '/view') sent = response.json if sent: print(len(sent), models.async_requests.total_sent) if models.async_requests.total_sent >= (30 * (n+1)): time.sleep(5) break time.sleep(1) requests.delete(target + '/view') # cleanup to be polite
def _delete_on_dc(self, domain):
    """Remove *domain* from the data center via its REST endpoint."""
    endpoint = self._get_url(domain) + '/{}'.format(domain.id)
    req.delete(
        url=endpoint,
        headers=self._get_headers(),
        verify=self._get_verify(domain),
    )
def check_local():
    """Travis webhook handler: on a successful yang-catalog build, open a
    pull request (push builds) or clean up the repo (pull-request builds).
    """
    global yang_models_url
    body = request.json
    if body['repository']['owner_name'] == 'yang-catalog':
        if body['result_message'] == 'Passed':
            if body['type'] == 'push':
                # After build was successful only locally.
                # BUG FIX: the payload used to be wrapped in flask.jsonify(),
                # which returns a Response object that requests cannot
                # serialize; pass the plain dict to the json= parameter.
                json_body = {
                    "title": "Cron job - every day pull of ietf draft yang files.",
                    "body": "ietf extracted yang modules",
                    "head": "yang-catalog:master",
                    "base": "master"
                }
                requests.post(yang_models_url + '/pulls', json=json_body,
                              headers={'Authorization': 'token ' + token})
            if body['type'] == 'pull_request':
                # If build was successful on pull request
                pull_number = body['pull_request_number']
                log.write('pull request was successful %s' % repr(pull_number))
                # NOTE(review): auto-merge of the pull request is disabled;
                # only the cleanup DELETE is issued.
                requests.delete(yang_models_url, headers={'Authorization': 'token ' + token})
def close(self):
    """Terminate the server session, skipping TLS verification when
    self.ignoreCert is the string 'true'."""
    # CLEANUP: the two call sites differed only in the verify flag, so the
    # flag is computed once and a single call is made. Passing verify=True
    # explicitly matches the requests default used by the original.
    # NOTE(review): ignoreCert is compared as the *string* 'true' — confirm
    # it is never stored as a bool.
    verify = self.ignoreCert != 'true'
    requests.delete(self.base_url,
                    params={'sessionid': self._sessionid},
                    auth=SpnegoAuth(self.keytab, self.principal),
                    verify=verify)
def reset(self): """ Removes configured imposters (HTTP stubs). :rtype: None """ # TODO add validation requests.delete(self._imposters_url)
def _un_publish(self, token):
    """Unregister *token* from the service and drop its local record.

    Raises KeyError if the token is not in self.pubdata.
    """
    endpoint = "http://%s:%s/service/%s" % (
        self._server_ip, self._server_port, token)
    requests.delete(endpoint, headers=self._headers)
    # Forget the published object locally as well.
    self.pubdata.pop(token)
def test_update_document():
    # Round-trip check: create 101 docs, update the first one's date via the
    # single-document endpoint, and verify the stored date changed.
    simple_documents = _url('simple-app/simple-documents')
    document_1 = _url('simple-app/simple-documents/added-document-1')
    docs = [
        {
            "name": "Added Document {0}".format(x),
            "date": datetime.utcnow().isoformat()
        }
        for x in range(101)
    ]
    # delete all documents in a collection
    confirmed_dangerous_delete = requests.delete(simple_documents, params={'delete_all': True})
    assert confirmed_dangerous_delete.ok
    # add an item to the collection
    post = requests.post(simple_documents, data=json.dumps(docs))
    assert post.ok
    # Update the first document with a fresh timestamp.
    old_date = docs[0]['date']
    docs[0]['date'] = datetime.utcnow().isoformat()
    post = requests.post(document_1, data=json.dumps(docs[0]))
    assert post.ok
    get = requests.get(document_1)
    assert get.ok
    document_1 = get.json()
    assert document_1['date'] != old_date
    # delete the doc that we added
    confirmed_dangerous_delete = requests.delete(simple_documents, params={'delete_all': True})
    assert confirmed_dangerous_delete.ok
def acl_consul(acl_consul_instance):
    # Fixture wrapper: expose the ACL-enabled consul instance as a named
    # tuple, then wipe its KV store after the test finishes.
    ACLConsul = collections.namedtuple('ACLConsul', ['port', 'token'])
    port, token = acl_consul_instance
    yield ACLConsul(port, token)
    # remove all data from the instance, to have a clean start
    base_uri = 'http://127.0.0.1:%s/v1/' % port
    requests.delete(base_uri + 'kv/?recurse=1')
def rebuild_index(self):
    # Drop both document types, recreate the index if missing, and re-push
    # the configured mapping for each type.
    # check that ElasticSearch is awake
    self.check_index()
    print "Deleting type: %s" % (Config.elasticsearch['type_record'])
    r = requests.delete(Config.elasticsearch['uri_records'])
    print "...response from: %s (%s)" % (r.url, r.status_code)
    print "Deleting type: %s" % (Config.elasticsearch['type_config'])
    r = requests.delete(Config.elasticsearch['uri_configs'])
    print "...response from: %s (%s)" % (r.url, r.status_code)
    # check to see if index exists - in which case it will have a mapping even if it is empty, create if not
    if requests.get(Config.elasticsearch['uri_index'] + '/_mapping').status_code == 404:
        print "Creating index: %s" % (Config.elasticsearch['index'])
        r = requests.post(Config.elasticsearch['uri_index'])
        print "...response from: %s (%s)" % (r.url, r.status_code)
    # check for mapping and create one if provided and does not already exist
    # this will automatically create the necessary index type if it is not already there
    if Config.elasticsearch['mapping']:
        r = requests.get(Config.elasticsearch['uri_records'] + '_mapping')
        if r.status_code == 404:
            print "Creating mapping for type: %s" % (Config.elasticsearch['type_record'])
            r = requests.put(Config.elasticsearch['uri_records'] + '_mapping', data=json.dumps(Config.elasticsearch['mapping']))
            print "...response from: %s (%s)" % (r.url, r.status_code)
            print "Creating mapping for type: %s" % (Config.elasticsearch['type_config'])
            r = requests.put(Config.elasticsearch['uri_configs'] + '_mapping', data=json.dumps(Config.elasticsearch['mapping']))
            print "...response from: %s (%s)" % (r.url, r.status_code)
    else:
        print "Warning: no elasticsearch mapping defined in Config.py."
def delete_hooks(orgOrUser, repoOrRepos, hook_type=None):
    """Delete every matching webhook from the given GitHub repo(s)."""
    template = 'https://api.github.com/repos/{owner}/{repo}/hooks/{hook}'
    credentials = HTTPBasicAuth(*get_auth())
    for entry in list_hooks(orgOrUser, repoOrRepos, hook_type):
        target = template.format(owner=orgOrUser, **entry)
        requests.delete(target, auth=credentials)
def test_delete_server(self):
    """
    server delete test
    """
    response = requests.get(__db_url__)
    value = response.json()
    if value:
        # Work on the most recently listed database.
        db_length = len(value['databases'])
        last_db_id = value['databases'][db_length-1]['id']
        url = 'http://%s:8000/api/1.0/databases/%u/servers/' % \
            (__host_or_ip__,last_db_id)
        response = requests.get(url)
        value = response.json()
        if value:
            # Delete the most recently listed server of that database.
            server_length = len(value['members'])
            last_server_id = value['members'][server_length-1]['id']
            print "ServerId to be deleted is " + str(last_server_id)
            url = 'http://%s:8000/api/1.0/databases/%u/servers/' % \
                (__host_or_ip__,last_db_id)
            url += str(last_server_id)
            response = requests.delete(url)
            if response.status_code == 403:
                # A running server refuses deletion.
                print value['statusstring']
                self.assertEqual(value['statusstring'], 'Cannot delete a running server')
            else:
                self.assertEqual(response.status_code, 204)
                # Delete the database as well.
                db_url = __db_url__ + str(last_db_id)
                response = requests.delete(db_url)
                self.assertEqual(response.status_code, 204)
        else:
            print "The Server list is empty"
def test_job_states(hostname, large_file):
    """
    Test that after requesting an action, the job will update and eventually wind up in status "done"
    :param hostname: The hostname under test (this fixture is automatically injected by pytest)
    :param large_file: A large-ish filename (this fixture is automatically injected by pytest)
    """
    # Upload an image to operate on.
    with open(large_file, 'r') as f:
        resp = requests.post(hostname + '/images',
                             data={'user_id': 'test-user-{}'.format(uuid.uuid4())},
                             files={'file': ('bridge.jpeg', f)})
        img_id = resp.json()['id']
    # Send a resize request and use the job_id to check for current state
    resp = requests.put(hostname + '/image/{}'.format(img_id),
                        data={'action': 'resize', 'size': '50,50'})
    job_id1 = resp.json()['job_id']
    # Send a crop request and use the job_id to check for current state
    resp = requests.put(hostname + '/image/{}'.format(img_id),
                        data={'action': 'crop', 'box': '50,50,100,100'})
    job_id2 = resp.json()['job_id']
    # Send a transcode request and use the job_id to check for current state
    resp = requests.put(hostname + '/image/{}'.format(img_id),
                        data={'action': 'transcode', 'extension': 'bmp'})
    job_id3 = resp.json()['job_id']
    # Block until each job reaches "done".
    wait_for_job_done(hostname + '/job/{}'.format(job_id1))
    wait_for_job_done(hostname + '/job/{}'.format(job_id2))
    wait_for_job_done(hostname + '/job/{}'.format(job_id3))
    # Clean up test data and delete the image
    requests.delete(hostname + '/image/{}'.format(img_id))
def unsubscribe(listname, address):
    """Remove *address* from the mailing list *listname*.

    Returns True on success, False if the HTTP request failed.
    """
    try:
        requests.delete(get_url(listname), timeout=TIMEOUT,
                        data={'address': address})
    except requests.exceptions.RequestException:
        return False
    else:
        return True
def test_upload_and_download_node_GET_gzip():
    # download file in compressed format, works with all the above options
    # curl -X GET http://<host>[:<port>]/node/<node_id>?download&compression=<zip|gzip>
    # upload node
    TESTURL = "{SHOCK_URL}/node".format(SHOCK_URL=SHOCK_URL)
    TESTHEADERS = {"Authorization": "OAuth {}".format(TOKEN)}
    FILES = {'upload': open(DATADIR + 'CCC.txt', 'rb')}
    if DEBUG:
        print("POST", TESTURL, TESTHEADERS, FILES)
    response = requests.post(TESTURL, headers=TESTHEADERS, files=FILES)
    data = json.loads(response.content.decode("utf-8"))
    NODEID = data["data"]["id"]
    # test my node exists
    TESTURL = SHOCK_URL + "/node/{}".format(NODEID)
    TESTHEADERS = {"Authorization": "OAuth {}".format(TOKEN)}
    FILES = {}
    if DEBUG:
        print("GET", TESTURL, TESTHEADERS)
    response = requests.get(TESTURL, headers=TESTHEADERS)
    data = json.loads(response.content.decode("utf-8"))
    assert data["status"] == 200
    # Download node
    DLURL = SHOCK_URL + "/node/{}?download&compression=gzip".format(NODEID)
    response = requests.get(DLURL, headers=TESTHEADERS)
    # A gzip body must not start with the raw file content.
    assert response.content[0:3] != b"CCC"
    # cleanup
    NODEURL = SHOCK_URL + "/node/{}".format(NODEID)
    requests.delete(NODEURL, headers=TESTHEADERS)
def update_tracker(session_token, download_id, tracker):
    # Replace the tracker's host with NEW_TRACKER_HOST on a Freebox
    # download: add the rewritten announce URL, remove the old one, then
    # enable the new one.
    announce_url = tracker['announce']
    parts = list(urlparse(announce_url))
    parts[1] = NEW_TRACKER_HOST  # netloc component
    new_announce = urlunparse(parts)
    print("> UPDATE tracker %s ==> %s" % (announce_url, new_announce))
    # add new tracker
    url = MAFREEBOX_API_URL + ("downloads/%d/trackers" % download_id)
    rep = requests.post(url, json={
        'announce': new_announce,
        'is_enabled': True
    }, headers={
        'X-Fbx-App-Auth': session_token
    })
    get_api_result(rep)
    # remove prev tracker
    url = MAFREEBOX_API_URL + ("downloads/%d/trackers/%s" % (
        download_id, quote(announce_url, safe='')))
    rep = requests.delete(url, headers={
        'X-Fbx-App-Auth': session_token
    })
    get_api_result(rep)
    # active new tracker
    # NOTE(review): a DELETE with a json body {'is_enabled': True} is an
    # unusual way to enable a tracker — confirm against the Freebox API
    # (a PUT would normally be expected here).
    url = MAFREEBOX_API_URL + ("downloads/%d/trackers/%s" % (
        download_id, quote(new_announce, safe='')))
    rep = requests.delete(url, json={
        'is_enabled': True
    }, headers={
        'X-Fbx-App-Auth': session_token
    })
    get_api_result(rep)
def remove_user():
    # Remove a user account: validate the session, delete the login record
    # and the user record on the users service, then log the session out.
    try:
        data_json = request.get_json()
        if not check_arguments(['login', 'code'], data_json):
            raise Exception("Bad arguments")
        login = data_json['login']
        code = data_json['code']
        if check_connect(login, code):
            url = get_users_url("remove_login")
            data = {'login': login}
            headers = {'Content-type': 'application/json'}
            result = requests.delete(url, data=json.dumps(data), headers=headers).json()
            if 'error' in result:
                raise Exception(result['error'])
            url = get_users_url("remove_user")
            result = requests.delete(url, data=json.dumps(data), headers=headers).json()
            if 'error' in result:
                raise Exception(result['error'])
            url = get_session_url("logout?login={0}&code={1}".format(login, code))
            result = requests.get(url)
            return result.text
        raise Exception("Access denied")
    # NOTE(review): this broad except turns every failure (including
    # programming errors) into a 400 response.
    except Exception as e:
        return make_response(str(e), 400, {'olol':'ololol'})
def delete_logstash_pipeline(topic):
    """Ask the pipeline manager to delete the Logstash pipeline for *topic*."""
    logger.info("Deleting Logstash pipeline for topic %s", topic)
    # CLEANUP: dropped the pointless json.loads(json.dumps(...)) round trip;
    # requests serializes the dict itself via the json= parameter.
    request_body = {'topic': topic}
    r = requests.delete(url_logstash_pipeline_manager, json=request_body)
    # BUG FIX: the original logged the Response object's repr; log the
    # actual HTTP status code instead.
    logger.info("Response: Code %s", r.status_code)
from requests.auth import HTTPBasicAuth
import requests

# CTF helper script (Python 2): poke the request_me challenge endpoint with
# each HTTP verb and print the response bodies. Credentials are redacted.
print requests.options('https://request_me.tjctf.org/').text
print requests.put('https://request_me.tjctf.org/', data = {'username':'******', 'password':'******'}).text
print requests.post('https://request_me.tjctf.org/', auth=HTTPBasicAuth('abcde', 'abcde')).text
print requests.delete('https://request_me.tjctf.org/', auth=HTTPBasicAuth('abcde', 'abcde')).text
def delete(self, path, data=None):
    """Send a DELETE to the Eventbrite API at *path* with optional form data."""
    full_path = format_path(path, self.eventbrite_api_url)
    payload = data if data else {}
    return requests.delete(full_path, headers=self.headers, data=payload)
def delete_emp(self, emp_id, headers):
    """Delete the user identified by *emp_id* via the sys/user API.

    Returns the requests Response so the caller can inspect the status.
    """
    target = "http://182.92.81.159/api/sys/user" + "/" + emp_id
    return requests.delete(target, headers=headers)
operation['resource_name'] + "'"), auth=AUTH) # Extract the Moid from the Results json_result = json.loads(response.text) moid = json_result["Results"][0]["Moid"] response = requests.patch( BURL + operation['resource_path'] + "/" + moid, data=json.dumps(operation['request_body']), auth=AUTH) # DELETE if operation['request_method'] == "DELETE": # GET the Moid of the MO to DELETE response = requests.get( (BURL + operation['resource_path'] + "?$filter=Name eq '" + operation['resource_name'] + "'"), auth=AUTH) # Extract the Moid from the Results json_result = json.loads(response.text) moid = json_result["Results"][0]["Moid"] response = requests.delete(BURL + operation['resource_path'] + "/" + moid, auth=AUTH) print(response) print(response.text)
def delete_success_service_facility(self, facility_slug: str, service_slug):
    """DELETE the facility from the service; stash the status code on `world`."""
    response = requests.delete(
        f"{config.base_api_uri}/services/{service_slug}/facilities/{facility_slug}")
    logger.info(response.status_code)
    world.service_facility_deleted = response.status_code
def delete_server(self, server_id):
    """Delete the Linode instance *server_id*.

    Returns the requests Response so callers can check the status code.
    """
    headers = {"Authorization": f"Bearer {self.access_token}"}
    response = requests.delete(self.base_url + f'/linode/instances/{server_id}',
                               headers=headers)
    # FIX: the response was assigned but discarded; return it so callers
    # can verify the deletion (backward compatible — previously None).
    return response
def make_request(self):
    # Issue the DELETE with the positional arguments collected on self.args
    # and keep the Response on self.request for later inspection.
    self.request = delete(*self.args)
def destroy_participant(tournament_name, participant_name):
    """Delete *participant_name* from *tournament_name* on Challonge."""
    pid = get_participant_json_by_name(participant_name)["id"]
    endpoint = BASE_URL + "tournaments/" + tournament_name + "/participants/" + pid + ".json"
    response = requests.delete(url=endpoint)
    print(response.json())
    LOGGER.info("Request to destroy participants sent to Challonge api.")
def delete_index(self, index):
    """Drop the index named *index* on the configured host; returns the Response."""
    target = 'http://{host}:{port}/{index}'.format(
        host=self._host, port=self._port, index=index)
    return requests.delete(target)
import requests # Variable used for authentication through users' github token headers = {'Authorization': 'token xxxxx'} repo = 'test_api' # Variable containing the delete request that will delete a repo login = requests.delete('https://api.github.com/repos/Lamia7/' + repo, headers=headers, timeout=3) print(login.text)
def clear_v1_http():
    """Hit the clear/v1 endpoint to reset server state; returns the Response."""
    response = requests.delete(f"{config.url}clear/v1")
    return response
### STEP 2 START payload = {'pin': pin, 'payload': codecs.decode(step1_response.text, 'unicode_escape'), 'deviceId': device_id} r = requests.post(external_server + '/step2', data=json.dumps(payload), headers=external_headers, verify=False, timeout=30) step2_url = 'http://' + tv_address + ':8080/ws/pairing?step=2&app_id=com.samsung.companion&device_id=12345&type=1&request_id=0' step2_response = requests.post(step2_url, data=r.text, timeout=10) ### STEP 2 END ### STEP 3 START payload = {'pin': pin, 'payload': codecs.decode(step2_response.text, 'unicode_escape'), 'deviceId': device_id} r = requests.post(external_server + '/step3', data=json.dumps(payload), headers=external_headers, verify=False, timeout=30) enc_key = r.json()['session_key'] session = r.json()['session_id'] print('session_key: ' + enc_key) print('session_id: ' + session) step3_url = 'http://' + tv_address + ':8080/ws/apps/CloudPINPage/run' requests.delete(step3_url, timeout=10) ### STEP 3 END print('waiting for a sec...') time.sleep(2) ## STEP 4 START WEBSOCKETS millis = int(round(time.time() * 1000)) step4_url = 'http://' + tv_address + ':8000/socket.io/1/?t=' + str(millis) websocket_response = requests.get(step4_url, timeout=10) websocket_url = 'ws://' + tv_address + ':8000/socket.io/1/websocket/' + websocket_response.text.split(':')[0] time.sleep(1) print('sending KEY_VOLDOWN command!') aesLib = aes_lib.AESCipher(enc_key, session) connection = websocket.create_connection(websocket_url)
def destroy_tournament(tournament_name):
    """Delete *tournament_name* on the Challonge API."""
    endpoint = BASE_URL + "tournaments/" + tournament_name + ".json"
    response = requests.delete(url=endpoint)
    print(response.json())
    LOGGER.info("Delete request sent to Challonge api.")
def test_uploading():
    """End-to-end check of the multipart-upload API: object creation,
    part upload, completion, part/object deletion, and metadata CRUD —
    including the expected error statuses for bad inputs."""
    # create object check
    resp = requests.post(BASE_URL + '/bucket/object?create')
    assert resp.status_code == STATUS_OK
    resp = requests.post(BASE_URL + '/another_bucket/another_object?create')
    assert resp.status_code == STATUS_OK
    resp = requests.post(BASE_URL + '/another-bucket/another-object?create')
    assert resp.status_code == STATUS_OK
    # Re-creating an existing object must fail.
    resp = requests.post(BASE_URL + '/bucket/object?create')
    assert resp.status_code == STATUS_BAD_REQUEST
    resp = requests.post(BASE_URL + '/no_bucket/no_object?create')
    assert resp.status_code == STATUS_BAD_REQUEST
    # upload part check
    data = open('./t1.txt', 'rb').read()
    resp = requests.put(url=BASE_URL+'/bucket/object?partNumber=3', data=data, headers={
        'Content-MD5': hashlib.md5(data).hexdigest()})
    assert resp.status_code == STATUS_OK
    data = open('./t2.txt', 'rb').read()
    resp = requests.put(url=BASE_URL+'/bucket/object?partNumber=5', data=data, headers={
        'Content-MD5': hashlib.md5(data).hexdigest()})
    assert resp.status_code == STATUS_OK
    data = open('./t3.txt', 'rb').read()
    resp = requests.put(url=BASE_URL+'/bucket/object?partNumber=10', data=data, headers={
        'Content-MD5': hashlib.md5(data).hexdigest()})
    assert resp.status_code == STATUS_OK
    data = open('./t3.txt', 'rb').read()
    resp = requests.put(url=BASE_URL+'/bucket/object?partNumber=1', data=data, headers={
        'Content-MD5': hashlib.md5(data).hexdigest()})
    assert resp.status_code == STATUS_OK
    # Re-uploading an existing part number overwrites it.
    data = open('./t2.txt', 'rb').read()
    resp = requests.put(url=BASE_URL+'/bucket/object?partNumber=1', data=data, headers={
        'Content-MD5': hashlib.md5(data).hexdigest()})
    assert resp.status_code == STATUS_OK
    data = open('./t2.txt', 'rb').read()
    resp = requests.put(url=BASE_URL+'/bucket/obJect?partNumber=20', data=data, headers={
        'Content-MD5': hashlib.md5(data).hexdigest()})
    assert resp.status_code == STATUS_OK
    data = open('./t3.txt', 'rb').read()
    resp = requests.put(url=BASE_URL+'/another-bucket/another-object?partNumber=1', data=data, headers={
        'Content-MD5': hashlib.md5(data).hexdigest()})
    assert resp.status_code == STATUS_OK
    # Uploads to missing buckets/objects or with bad part numbers must fail.
    data = open('./t3.txt', 'rb').read()
    resp = requests.put(url=BASE_URL+'/bucket/no_object?partNumber=1', data=data, headers={
        'Content-MD5': hashlib.md5(data).hexdigest()})
    assert resp.status_code == STATUS_BAD_REQUEST
    data = open('./t1.txt', 'rb').read()
    resp = requests.put(url=BASE_URL+'/bucket/object?partNumber=10001', data=data, headers={
        'Content-MD5': hashlib.md5(data).hexdigest()})
    assert resp.status_code == STATUS_BAD_REQUEST
    data = open('./t3.txt', 'rb').read()
    resp = requests.put(url=BASE_URL+'/bucket/object?partNumber=dsfk', data=data, headers={
        'Content-MD5': hashlib.md5(data).hexdigest()})
    assert resp.status_code == STATUS_BAD_REQUEST
    data = open('./t3.txt', 'rb').read()
    resp = requests.put(url=BASE_URL+'/no_bucket/object?partNumber=1', data=data, headers={
        'Content-MD5': hashlib.md5(data).hexdigest()})
    assert resp.status_code == STATUS_BAD_REQUEST
    data = open('./t3.txt', 'rb').read()
    resp = requests.put(url=BASE_URL+'/bucket/no_object?partNumber=1', data=data, headers={
        'Content-MD5': hashlib.md5(data).hexdigest()})
    assert resp.status_code == STATUS_BAD_REQUEST
    # complete check
    resp = requests.post(BASE_URL + '/bucket/object?complete')
    assert resp.status_code == STATUS_OK
    resp = requests.get(BASE_URL + '/another_bucket/another_object?complete')
    assert resp.status_code == STATUS_BAD_REQUEST
    resp = requests.post(BASE_URL + '/no_bucket/object?complete')
    assert resp.status_code == STATUS_BAD_REQUEST
    resp = requests.post(BASE_URL + '/bucket/no_object?complete')
    assert resp.status_code == STATUS_BAD_REQUEST
    # upload after complete
    data = open('./t3.txt', 'rb').read()
    resp = requests.put(url=BASE_URL+'/bucket/object?partNumber=1', data=data, headers={
        'Content-MD5': hashlib.md5(data).hexdigest()})
    assert resp.status_code == STATUS_BAD_REQUEST
    # delete part
    resp = requests.delete(BASE_URL + '/another-bucket/another-object?partNumber=1')
    assert resp.status_code == STATUS_OK
    # Deleting the same part twice must fail.
    resp = requests.delete(BASE_URL + '/another-bucket/another-object?partNumber=1')
    assert resp.status_code == STATUS_BAD_REQUEST
    resp = requests.delete(BASE_URL + '/no_bucket/object?partNumber=1')
    assert resp.status_code == STATUS_BAD_REQUEST
    resp = requests.delete(BASE_URL + '/bucket/no_object?partNumber=1')
    assert resp.status_code == STATUS_BAD_REQUEST
    # delete object
    resp = requests.delete(BASE_URL + '/another-bucket/another-object?delete')
    assert resp.status_code == STATUS_OK
    resp = requests.delete(BASE_URL + '/no_bucket/object?delete')
    assert resp.status_code == STATUS_BAD_REQUEST
    resp = requests.delete(BASE_URL + '/bucket/no_object?delete')
    assert resp.status_code == STATUS_BAD_REQUEST
    # add and update metadata
    resp = requests.put(url=BASE_URL+'/bucket/object?metadata&key=dataSource',
        data="http://www.ietf.org/rfc/rfc2616.txt")
    assert resp.status_code == STATUS_OK
    resp = requests.put(url=BASE_URL+'/bucket/object?metadata&key=license',
        data="Not this")
    assert resp.status_code == STATUS_OK
    resp = requests.put(url=BASE_URL+'/bucket/object?metadata&key=license',
        data="Apache 2.0")
    assert resp.status_code == STATUS_OK
    resp = requests.put(url=BASE_URL+'/bucket/object?metadata&key=forDelete',
        data="delete it")
    assert resp.status_code == STATUS_OK
    resp = requests.put(url=BASE_URL+'/no_bucket/object?metadata&key=license',
        data="Apache 2.0")
    assert resp.status_code == STATUS_NOT_FOUND
    resp = requests.put(url=BASE_URL+'/bucket/no_object?metadata&key=license',
        data="Apache 2.0")
    assert resp.status_code == STATUS_NOT_FOUND
    # delete metadata
    resp = requests.delete(url=BASE_URL+'/bucket/object?metadata&key=forDelete')
    assert resp.status_code == STATUS_OK
    # NOTE(review): deleting the same metadata key twice is expected to
    # succeed here (idempotent delete) — confirm this is intentional.
    resp = requests.delete(url=BASE_URL+'/bucket/object?metadata&key=forDelete')
    assert resp.status_code == STATUS_OK
    resp = requests.delete(url=BASE_URL+'/no_bucket/object?metadata&key=license')
    assert resp.status_code == STATUS_NOT_FOUND
    resp = requests.delete(url=BASE_URL+'/bucket/no_object?metadata&key=license')
    assert resp.status_code == STATUS_NOT_FOUND
    # get metadata
    resp = requests.get(url=BASE_URL+'/bucket/object?metadata&key=license')
    assert resp.status_code == STATUS_OK
    resp = requests.get(url=BASE_URL+'/bucket/object?metadata&key=forDelete')
    assert resp.status_code == STATUS_OK
    resp = requests.get(url=BASE_URL+'/no_bucket/object?metadata&key=license')
    assert resp.status_code == STATUS_NOT_FOUND
    resp = requests.get(url=BASE_URL+'/bucket/no_object?metadata&key=license')
    assert resp.status_code == STATUS_NOT_FOUND
    resp = requests.get(url=BASE_URL+'/bucket/object?metadata')
    assert resp.status_code == STATUS_OK
    resp = requests.get(url=BASE_URL+'/another_bucket/another_object?metadata')
    assert resp.status_code == STATUS_OK
    resp = requests.get(url=BASE_URL+'/no_bucket/object?metadata')
    assert resp.status_code == STATUS_NOT_FOUND
    resp = requests.get(url=BASE_URL+'/bucket/no_object?metadata')
    assert resp.status_code == STATUS_NOT_FOUND
def clear_tracks(self, recording):
    """Delete every track belonging to *recording* on the remote service.

    Raises requests.HTTPError if the server responds with an error status.
    """
    endpoint = self._url + "/{}/tracks".format(recording.id_)
    response = requests.delete(endpoint)
    response.raise_for_status()
def deletewebhook():
    """Prompt for a webhook URL, post a final message through it, then delete it."""
    target_url = input("Webhook?: ")
    hook = Webhook(target_url)
    hook.send("@everyone jays#0023")
    requests.delete(target_url)
def del_auth_token(self):
    """Log out the current API token via the shterm authenticate endpoint.

    Relies on module-level ``token``, ``opc_address`` and ``logger`` names.
    NOTE(review): TLS verification is disabled (verify=False) as in the
    original — confirm that is intended for this environment.
    """
    request_headers = {
        'Content-Type': 'application/json;charset=UTF-8',
        'st-auth-token': token,
    }
    endpoint = "https://" + opc_address + "/shterm/api/authenticate"
    resp = requests.delete(endpoint, headers=request_headers, verify=False)
    # 204 No Content indicates the token was revoked successfully.
    if resp.status_code == 204:
        logger.warning('已成功注销 API token!')
def make_unload_model_request(model_name):
    """Ask the model server to unload *model_name*.

    Returns a ``(status_code, decoded_json_body)`` tuple.
    """
    endpoint = DELETE_MODEL_URL.format(model_name)
    resp = requests.delete(endpoint)
    body = json.loads(resp.content.decode("utf-8"))
    return resp.status_code, body
def test_bucket(self):
    """Exercise bucket listing via presigned URLs: empty bucket, populated
    bucket, then bucket deletion.

    Fix: the StorageClass check previously asserted
    ``o.find('StorageClass').text is not None`` with ``assertIsNotNone`` —
    a boolean is never None, so that assertion could never fail. It now
    asserts the text itself.
    """
    bucket = 'test-bucket'
    req_objects = ('object', 'object2')
    max_bucket_listing = tf.cluster_info['s3api'].get(
        'max_bucket_listing', 1000)

    # GET Bucket (Without Object)
    status, _junk, _junk = self.conn.make_request('PUT', bucket)
    self.assertEqual(status, 200)
    url, headers = self.conn.generate_url_and_headers('GET', bucket)
    resp = requests.get(url, headers=headers)
    self.assertEqual(resp.status_code, 200,
                     'Got %d %s' % (resp.status_code, resp.content))
    self.assertCommonResponseHeaders(resp.headers)
    self.assertIsNotNone(resp.headers['content-type'])
    self.assertEqual(resp.headers['content-length'],
                     str(len(resp.content)))

    elem = fromstring(resp.content, 'ListBucketResult')
    self.assertEqual(elem.find('Name').text, bucket)
    self.assertIsNone(elem.find('Prefix').text)
    self.assertIsNone(elem.find('Marker').text)
    self.assertEqual(elem.find('MaxKeys').text, str(max_bucket_listing))
    self.assertEqual(elem.find('IsTruncated').text, 'false')
    objects = elem.findall('./Contents')
    # A fresh bucket must list no contents.
    self.assertEqual(list(objects), [])

    # GET Bucket (With Object)
    for obj in req_objects:
        status, _junk, _junk = self.conn.make_request('PUT', bucket, obj)
        self.assertEqual(
            status, 200,
            'Got %d response while creating %s' % (status, obj))
    resp = requests.get(url, headers=headers)
    self.assertEqual(resp.status_code, 200,
                     'Got %d %s' % (resp.status_code, resp.content))
    self.assertCommonResponseHeaders(resp.headers)
    self.assertIsNotNone(resp.headers['content-type'])
    self.assertEqual(resp.headers['content-length'],
                     str(len(resp.content)))

    elem = fromstring(resp.content, 'ListBucketResult')
    self.assertEqual(elem.find('Name').text, bucket)
    self.assertIsNone(elem.find('Prefix').text)
    self.assertIsNone(elem.find('Marker').text)
    self.assertEqual(elem.find('MaxKeys').text, str(max_bucket_listing))
    self.assertEqual(elem.find('IsTruncated').text, 'false')
    resp_objects = elem.findall('./Contents')
    self.assertEqual(len(list(resp_objects)), 2)
    for o in resp_objects:
        self.assertIn(o.find('Key').text, req_objects)
        self.assertIsNotNone(o.find('LastModified').text)
        self.assertRegexpMatches(
            o.find('LastModified').text,
            r'^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}Z$')
        self.assertIsNotNone(o.find('ETag').text)
        self.assertEqual(o.find('Size').text, '0')
        # Was: assertIsNotNone(... is not None) — always-true no-op.
        self.assertIsNotNone(o.find('StorageClass').text)
        self.assertEqual(o.find('Owner/ID').text, self.conn.user_id)
        self.assertEqual(
            o.find('Owner/DisplayName').text, self.conn.user_id)

    # DELETE Bucket (objects must be removed first)
    for obj in req_objects:
        self.conn.make_request('DELETE', bucket, obj)
    url, headers = self.conn.generate_url_and_headers('DELETE', bucket)
    resp = requests.delete(url, headers=headers)
    self.assertEqual(resp.status_code, 204,
                     'Got %d %s' % (resp.status_code, resp.content))
def EliminarDatosComerciales(comercialID):
    """Delete the commercial data for *comercialID* via the commercial service.

    Returns True when the service answers 200 OK, False otherwise.
    """
    url = settings.COMERCIAL_SERVICE_URL_DELETE
    args = {'comercialID': comercialID}
    response = requests.delete(url, params=args)
    # `True if ... else False` was redundant: the comparison is already a bool.
    return response.status_code == 200
def delete_GitHub_token(token_id, *, auth, headers):
    """Delete a temporary GitHub token by its authorization id.

    Raises requests.HTTPError if GitHub rejects the request.
    """
    endpoint = 'https://api.github.com/authorizations/{id}'.format(id=token_id)
    response = requests.delete(endpoint, auth=auth, headers=headers)
    response.raise_for_status()
def removePerson(self, person_id):
    """Delete the person identified by *person_id* from the service.

    Returns the decoded JSON response body.
    """
    payload = {"person_id": person_id}
    response = requests.delete(
        url=self._baseURL + self._persons,
        headers=self.headers,
        data=payload,
    )
    return response.json()
def rm_repo(url, repo_name):
    """Issue a DELETE for *repo_name* on the repository server at *url*.

    Returns the raw requests.Response so callers can inspect the status.
    """
    accept_json = {"Accept": "application/json"}
    endpoint = f"{url}/repositories/{repo_name}"
    return requests.delete(endpoint, headers=accept_json)
def main():
    """Ansible module entry point: ensure a RabbitMQ exchange is present or
    absent via the management REST API.

    Exits through module.exit_json / module.fail_json; never returns normally.
    """
    argument_spec = rabbitmq_argument_spec()
    argument_spec.update(
        dict(
            state=dict(default='present', choices=['present', 'absent'], type='str'),
            name=dict(required=True, type='str'),
            durable=dict(default=True, type='bool'),
            auto_delete=dict(default=False, type='bool'),
            internal=dict(default=False, type='bool'),
            exchange_type=dict(default='direct', aliases=['type'], type='str'),
            arguments=dict(default=dict(), type='dict')
        )
    )
    module = AnsibleModule(argument_spec=argument_spec, supports_check_mode=True)

    # Build the management-API URL; vhost and name are percent-encoded with no
    # safe characters so e.g. the default vhost "/" becomes "%2F".
    url = "%s://%s:%s/api/exchanges/%s/%s" % (
        module.params['login_protocol'],
        module.params['login_host'],
        module.params['login_port'],
        urllib_parse.quote(module.params['vhost'], ''),
        urllib_parse.quote(module.params['name'], '')
    )

    if not HAS_REQUESTS:
        module.fail_json(msg=missing_required_lib("requests"), exception=REQUESTS_IMP_ERR)

    result = dict(changed=False, name=module.params['name'])

    # Check if exchange already exists
    r = requests.get(
        url,
        auth=(module.params['login_user'], module.params['login_password']),
        verify=module.params['ca_cert'],
        cert=(module.params['client_cert'], module.params['client_key'])
    )

    if r.status_code == 200:
        exchange_exists = True
        response = r.json()
    elif r.status_code == 404:
        exchange_exists = False
        response = r.text
    else:
        # Any status other than 200/404 is treated as a hard API failure.
        module.fail_json(
            msg="Invalid response from RESTAPI when trying to check if exchange exists",
            details=r.text
        )

    # A change is needed when the desired state disagrees with reality.
    if module.params['state'] == 'present':
        change_required = not exchange_exists
    else:
        change_required = exchange_exists

    # Check if attributes change on existing exchange
    if not change_required and r.status_code == 200 and module.params['state'] == 'present':
        if not (
            response['durable'] == module.params['durable'] and
            response['auto_delete'] == module.params['auto_delete'] and
            response['internal'] == module.params['internal'] and
            response['type'] == module.params['exchange_type']
        ):
            module.fail_json(
                msg="RabbitMQ RESTAPI doesn't support attribute changes for existing exchanges"
            )

    # Exit if check_mode (report what would change without touching the API)
    if module.check_mode:
        result['changed'] = change_required
        result['details'] = response
        result['arguments'] = module.params['arguments']
        module.exit_json(**result)

    # Do changes
    if change_required:
        if module.params['state'] == 'present':
            # PUT creates the exchange with the requested attributes.
            r = requests.put(
                url,
                auth=(module.params['login_user'], module.params['login_password']),
                headers={"content-type": "application/json"},
                data=json.dumps({
                    "durable": module.params['durable'],
                    "auto_delete": module.params['auto_delete'],
                    "internal": module.params['internal'],
                    "type": module.params['exchange_type'],
                    "arguments": module.params['arguments']
                }),
                verify=module.params['ca_cert'],
                cert=(module.params['client_cert'], module.params['client_key'])
            )
        elif module.params['state'] == 'absent':
            r = requests.delete(
                url,
                auth=(module.params['login_user'], module.params['login_password']),
                verify=module.params['ca_cert'],
                cert=(module.params['client_cert'], module.params['client_key'])
            )

        # RabbitMQ 3.6.7 changed this response code from 204 to 201
        if r.status_code == 204 or r.status_code == 201:
            result['changed'] = True
            module.exit_json(**result)
        else:
            # NOTE(review): this message also fires on a failed delete — the
            # wording only mentions creation.
            module.fail_json(
                msg="Error creating exchange",
                status=r.status_code,
                details=r.text
            )
    else:
        module.exit_json(
            changed=False,
            name=module.params['name']
        )
def delete(extension, token):
    """Send an authenticated DELETE to the Tanda API endpoint *extension*."""
    api_root = 'https://my.tanda.co/api/v2/'
    request_headers = {
        'Content-Type': 'application/json',
        'Authorization': 'Bearer ' + token,
    }
    requests.delete(api_root + extension, headers=request_headers)
def remove_fb_user(cls, user):
    """Detach *user* from the app's Graph API test users, then delete the user."""
    detach_url = 'https://graph.facebook.com/v2.2/%s/accounts/test-users?access_token=%s&uid=%s' % (cls.app_id, cls.app_token, user['id'])
    requests.delete(detach_url)
    # Remove the user object itself.
    delete_url = "https://graph.facebook.com/v2.0/%s?access_token=%s" % (user['id'], cls.app_token)
    requests.delete(delete_url)
def removeFace(self, face_id):
    """Delete the face identified by *face_id* from the service.

    Returns the decoded JSON response body.
    """
    payload = {"face_id": face_id}
    response = requests.delete(
        url=self._baseURL + self._faces,
        headers=self.headers,
        data=payload,
    )
    return response.json()
def delete_all(self, index='dark'):
    """Drop an Elasticsearch index.

    Equivalent to ``curl -XDELETE 'http://localhost:9200/<index>/'``.
    NOTE(review): the response is never checked, so the success message is
    printed even when the delete fails — confirm best-effort is intended.
    """
    target = 'http://localhost:9200/%s' % (index)
    requests.delete(target)
    print('Index %s deleted.' % index)
def keywordDelete(self, url):
    """Issue an HTTP DELETE against *url* and return the raw response."""
    response = requests.delete(url=url)
    return response