Code Example #1
def update_status(_id, status, trace=None):
    ci_payload['runner']['state'] = status
    if trace is not None:
        ci_payload['runner']['trace'] = trace
    requests.put('{0}/api/v1/builds/{1}'.format(ci_payload['ci']['url'], _id),
                 verify=False,
                 data=ci_payload['runner'])
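A minimal usage sketch for the snippet above; the shape of ci_payload is an assumption made for illustration, since the module that actually defines it is not shown:

import requests

# Illustrative shape only; the real dict is defined elsewhere in the runner module.
ci_payload = {
    'ci': {'url': 'https://ci.example.com'},   # hypothetical CI base URL
    'runner': {'state': None, 'trace': None},
}

# With update_status defined as above, a runner could report progress like this:
# update_status(42, 'running', trace='checking out sources...')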
Code Example #2
File: test_templates.py  Project: LucianU/kuma
    def test_deki_only_user(self, get_current):
        if not settings.DEKIWIKI_ENDPOINT:
            # Skip, if MindTouch API unavailable
            raise SkipTest()

        get_current.return_value.domain = 'testserver.com'
        self.assertRaises(User.DoesNotExist, User.objects.get,
                          username='******')

        if not getattr(settings, 'DEKIWIKI_MOCK', False):
            # HACK: Ensure that expected user details are in MindTouch when not
            # mocking the API
            mt_email = '*****@*****.**'
            user_xml = MINDTOUCH_USER_XML % dict(username="******",
                    email=mt_email, fullname="None", status="active",
                    language="", timezone="-08:00", role="Contributor")
            DekiUserBackend.put_mindtouch_user(deki_user_id='=testaccount',
                                               user_xml=user_xml)
            passwd_url = '%s/@api/deki/users/%s/password?apikey=%s' % (
                settings.DEKIWIKI_ENDPOINT, '=testaccount',
                settings.DEKIWIKI_APIKEY)
            requests.put(passwd_url, data='theplanet')

        r = self.client.post(reverse('users.pw_reset'),
                             {'email': '*****@*****.**'})
        eq_(302, r.status_code)
        eq_('http://testserver/en-US/users/pwresetsent', r['location'])
        eq_(1, len(mail.outbox))
        assert mail.outbox[0].subject.find('Password reset') == 0

        u = User.objects.get(username='******')
        assert mail.outbox[0].body.find('pwreset/%s' % int_to_base36(u.id)) > 0
Code Example #3
    def upload(self, url, startByte, endByte, contentSize, fileSize, data, decodeResponse=False):
        headers = {}
        headers['Content-Length'] = contentSize
        headers['Content-Range'] = "bytes " + str(startByte) + "-" + str(endByte) + "/" + str(fileSize)
        headers.update(self.authorization)
        #self.session.put(url, headers=headers, data=data, background_callback=background_callback) 
        response = requests.put(url, headers=headers, data=data) 

        r = json.loads(response.text)

        if 'error' in r:
            if r['error']['code'] == 'unauthenticated':
                self.refreshToken()
                headers.update(self.authorization)
                response = requests.put(url, headers=headers, data=data) 
                r = json.loads(response.text)
                if 'error' in r:
                    print 'ERROR1 '
                    print r
            else:
                print 'ERROR2 '
                print r
        if decodeResponse:
            print "Decoding response"
            return r
        else:
            return response.text
Code Example #4
File: sqi.py  Project: circleapp/server
def add_attribute(columns):
    u = 'https://api.parse.com/1/classes/Attribute'
    attrs_request = requests.get(u, data={
        'limit': 500,
    }, headers={
        "X-Parse-Application-Id": keys.PARSE_APP_ID,
        "X-Parse-REST-API-Key": keys.PARSE_REST_KEY
    })

    attribute_records = attrs_request.json()['results']

    print 'Agregando a todos los registros'

    for a in attribute_records:
        columns_dict = {}
        for column in columns:
            if a.get(column) is None:
                print "Agregar columna: ", column
                columns_dict[column] = False

        uu = 'https://api.parse.com/1/classes/Attribute/%s' % a.get('objectId')
        requests.put(uu, data=json.dumps(columns_dict), headers={
            "X-Parse-Application-Id": keys.PARSE_APP_ID,
            "X-Parse-REST-API-Key": keys.PARSE_REST_KEY,
            'Content-type': 'application/json'
        })

    print "Atributos agregados"
Code Example #5
File: demo_requests.py  Project: abattye/barbican
def demo_store_secret_two_step_binary():
    """Store secret (2-step):"""
    secret = 'bXktc2VjcmV0LWhlcmU='  # base64 of 'my-secret-here'
    ep_2step = '/'.join([end_point, version, 'secrets'])

    # POST metadata:
    payload = {}
    pr = requests.post(ep_2step, data=json.dumps(payload), headers=hdrs)
    pr_j = pr.json()
    secret_ref = pr_j.get('secret_ref')
    assert secret_ref

    # PUT data to store:
    hdrs_put = dict(hdrs)
    hdrs_put.update({
        'content-type': 'application/octet-stream',
        'content-encoding': 'base64'}
    )
    requests.put(secret_ref, data=secret, headers=hdrs_put)

    # GET secret:
    hdrs_get = dict(hdrs)
    hdrs_get.update({
        'accept': 'application/octet-stream'})
    gr = requests.get(secret_ref, headers=hdrs_get)
    LOG.info('Get secret 2-step (binary): {0}\n'.format(gr.content))

    return secret_ref
Code Example #6
File: connection.py  Project: Annakan/python_api
    def put(self, uri, payload=None, etag=None,
            content_type="application/json", accept="application/json"):

        headers = {'content-type': content_type,
                   'accept': accept}
        if etag is not None:
            headers['if-match'] = etag

        self.logger.debug("PUT  {0}...".format(uri))
        self.payload_logger.debug("Headers:")
        self.payload_logger.debug(json.dumps(headers, indent=2))
        if payload is not None:
            self.payload_logger.debug("Payload:")
            if content_type == 'application/json':
                self.payload_logger.debug(json.dumps(payload, indent=2))
            else:
                self.payload_logger.debug(payload)

        if payload is None:
            self.response = requests.put(uri, auth=self.auth, headers=headers)
        else:
            self.response = requests.put(uri, json=payload,
                                         auth=self.auth, headers=headers)

        return self._response()
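A short usage sketch for the method above; the constructor arguments, credentials and URI are placeholders for illustration and are not taken from the python_api project:

# Hypothetical usage, assuming a Connection object that exposes this put() method
# and whose constructor sets up .auth, .logger and .payload_logger.
conn = Connection("localhost", "admin", "admin")
doc_uri = "http://localhost:8000/v1/documents?uri=/example/doc.json"

# The if-match header is only sent when an etag is supplied, so this call
# overwrites the document only if it has not changed since it was read.
result = conn.put(doc_uri,
                  payload={"name": "example", "count": 1},
                  etag='"12345"')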
Code Example #7
    def test_start_stop_server_with_different_ports(self):
        """
        ensure Start and stop server is working properly using ports other than default ports
        """
        response = requests.get(__db_url__)
        value = response.json()
        if value:
            db_length = len(value['databases'])
            last_db_id = value['databases'][db_length-1]['id']

            url = 'http://%s:8000/api/1.0/databases/%u/start' % \
                (__host_or_ip__,last_db_id)

            response = requests.put(url)
            print "Starting...."
            value = response.json()
            if not value['statusstring']:
                print "error"
            elif "Start request sent successfully to servers" in value['statusstring']:
                self.assertEqual(response.status_code, 200)
                time.sleep(5)
                CheckServerStatus(self, last_db_id, 'running')
                time.sleep(10)
                print "Stopping...."
                url_stop = 'http://%s:8000/api/1.0/databases/%u/stop' % \
                (__host_or_ip__,last_db_id)
                response = requests.put(url_stop)
                value = response.json()
                if "Connection broken" in value['statusstring']:
                    self.assertEqual(response.status_code, 200)
                    time.sleep(10)
                    CheckServerStatus(self, last_db_id, 'stopped')
            elif response.status_code == 500:
                self.assertEqual(response.status_code, 500)
Code Example #8
    def __enter__(self):
        """Setup the integration tests environment. Specifically:

        -- if there's already a database around with the desired
        name then delete that database
        -- create a temporary database including creating a design doc
        which permits reading persistant instances of Boo
        -- configure the async model I/O classes to use the newly
        created temporary database
        """
        # in case a previous test didn't clean itself up delete database
        # totally ignoring the result
        if self.delete:
            response = requests.delete(self.database_url)

        # create database
        if self.create:
            response = requests.put(self.database_url)
            assert response.status_code == httplib.CREATED

            # install design docs
            for (design_doc_name, design_doc) in self.design_docs.items():
                url = "%s/_design/%s" % (self.database_url, design_doc_name)
                response = requests.put(
                    url,
                    data=design_doc,
                    headers={"Content-Type": "application/json; charset=utf8"})
                assert response.status_code == httplib.CREATED

        # connect async actions to our temp database
        async_model_actions.database = self.database_url

        return self
Code Example #9
File: record_mgmt.py  Project: AvraGitHub/GithubAPI
 def commit_file_code(self, file_name, obj_details):
     file_content = open(file_name).read()
     self.commit_response = {}
     self.email = obj_details.get("EMAIL")
     params = {}
     content_encoded = base64.b64encode(file_content)
     params['message'] = file_name + " created"
     params['content'] = content_encoded
     params['branch'] = "abap"
     params['path'] = file_name
     params['committer'] = {'name': "1", 'email': self.email}
     url = settings.CONFIG_GITHUB_URL + file_name
     self.check_sleep_and_set_api_count()
     request_status = requests.put(url, auth=(settings.GIT_USERNAME, settings.GIT_PASSWORD), data=json.dumps(params))
     if request_status.status_code == 201:
         self.commit_response = request_status.json()
     elif request_status.status_code == 422:
         new_params = {}
         new_params['ref'] = 'abap'
         new_params['path'] = file_name
         self.check_sleep_and_set_api_count()
         get_file = requests.get(url, auth=(settings.GIT_USERNAME, settings.GIT_PASSWORD), params=new_params).json()
         new_checksum = githash(open(file_name).read())
         if new_checksum != get_file['sha']:
             params['sha'] = get_file['sha']
             params['message'] = file_name + " updated"
             self.check_sleep_and_set_api_count()
             request_status = requests.put(url, auth=(settings.GIT_USERNAME, settings.GIT_PASSWORD), data=json.dumps(params))
             self.commit_response = request_status.json()
     self.log_to_db()
Code Example #10
File: cron.py  Project: ChromiumEx/kitsune
def csat_survey_emails():
    querysets = [(Revision.objects.all(), ('creator', 'reviewer',)),
                 (Answer.objects.not_by_asker(), ('creator',)),
                 (Reply.objects.all(), ('user',))]

    end = datetime.today().replace(hour=0, minute=0, second=0, microsecond=0)
    start = end - timedelta(days=30)

    users = _get_cohort(querysets, (start, end))

    for u in users:
        p = u.profile
        if p.csat_email_sent is None or p.csat_email_sent < start:
            survey_id = SURVEYS['general']['community_health']
            campaign_id = SURVEYS['general']['community_health_campaign_id']

            try:
                requests.put(
                    'https://restapi.surveygizmo.com/v4/survey/{survey}/surveycampaign/'
                    '{campaign}/contact?semailaddress={email}&api_token={token}'
                    '&api_token_secret={secret}&allowdupe=true'.format(
                        survey=survey_id, campaign=campaign_id, email=u.email,
                        token=settings.SURVEYGIZMO_API_TOKEN,
                        secret=settings.SURVEYGIZMO_API_TOKEN_SECRET),
                    timeout=30)
            except requests.exceptions.Timeout:
                print 'Timed out adding: %s' % u.email
            else:
                p.csat_email_sent = datetime.now()
                p.save()
Code Example #11
File: figshare.py  Project: pombredanne/linkitup
def update_article(article, checked_urls):

    article_id = article["article_id"]

    oauth_token = g.user.oauth_token
    oauth_token_secret = g.user.oauth_token_secret

    oauth = OAuth1(
        client_key,
        client_secret=client_secret,
        resource_owner_key=oauth_token,
        resource_owner_secret=oauth_token_secret,
    )

    processed_urls = []
    for k, u in checked_urls.items():
        if u["uri"] in processed_urls:
            continue

        processed_urls.append(u["uri"])

        body = {"link": u["web"]}
        headers = {"content-type": "application/json"}

        response = requests.put(
            "http://api.figshare.com/v1/my_data/articles/{}/links".format(article_id),
            data=json.dumps(body),
            headers=headers,
            auth=oauth,
        )
        results = json.loads(response.content)
        app.logger.debug("Added {} with the following results:\n{}".format(u["uri"], results))

    app.logger.debug("Tag with Linkitup")
    body = {"tag_name": "Enriched with Linkitup"}
    headers = {"content-type": "application/json"}

    response = requests.put(
        "http://api.figshare.com/v1/my_data/articles/{}/tags".format(article_id),
        data=json.dumps(body),
        headers=headers,
        auth=oauth,
    )

    app.logger.debug("Add a link to Linkitup")
    body = {"link": "http://linkitup.data2semantics.org"}
    headers = {"content-type": "application/json"}

    response = requests.put(
        "http://api.figshare.com/v1/my_data/articles/{}/links".format(article_id),
        data=json.dumps(body),
        headers=headers,
        auth=oauth,
    )

    app.logger.debug("Added enriched with Linkitup tag")

    publish_nanopublication(article, checked_urls, oauth)

    return
Code Example #12
File: gifboom.py  Project: Pilfer/GifboomAPI
 def changeProfilePicture(self,photo):
     headers = {
         "accept" : "application/json",
         "accept-language" : "en",
         "connection" : "keep-alive",
         "x-user-authentication-token" : self.authToken,
         "x-client-version" : self.clientVersion,
         "x-device-id" : self.deviceId,
         "user-agent" : self.useragent
     }
     files = {
         'user[avatar]': (photo, open(photo, 'rb'))
     }
     if self.proxies == None:
         r = requests.put(self.base + "/users/" + self.info['_id'],headers=headers,files=files)
     else:
         r = requests.put(self.base + "/users/" + self.info['_id'],headers=headers,files=files,proxies=self.proxies)
     
     if r.status_code == requests.codes.ok:
         old_avatar = self.info['avatar']
         upload_response = r.text
         tmpinfo = json.loads(upload_response.decode('utf8'))
         if tmpinfo['avatar'] != old_avatar:
             self.info = tmpinfo
             return True
         else:
             self.info = json.loads(upload_response.decode('utf8'))
             return False
     else:
         return False
Code Example #13
File: gifboom.py  Project: Pilfer/GifboomAPI
 def editProfile(self,gender,bio,website,name,location,username,email,birthday):
     #gender syntax: 0 = male, 1 = female
     #birthday syntax: 1990-02-01 (Y-M-D)
     payload = {
         "user[gender]": gender,
         "user[bio]" : bio,
         "user[website]" : website,
         "user[name]" : name,
         "user[location]" : location,
         "user[username]" : username,
         "user[email]" : email,
         "user[birthday]" : birthday + "T05:00:00Z"
     }
     
     headers = {
         "accept" : "application/json",
         "accept-language" : "en",
         "connection" : "keep-alive",
         "x-user-authentication-token" : self.authToken,
         "x-client-version" : self.clientVersion,
         "x-device-id" : self.deviceId,
         "user-agent" : self.useragent
     }
     if self.proxies == None:
         r = requests.put(self.base + "/users/" + self.info['_id'],headers=headers,data=payload)
     else:
         r = requests.put(self.base + "/users/" + self.info['_id'],headers=headers,data=payload,proxies=self.proxies)
     
     if r.status_code == requests.codes.ok:
         edit_response = r.text
         tmpinfo = json.loads(edit_response.decode('utf8'))
         self.info = tmpinfo
         return True
     else:
         return False
Code Example #14
 def docker_push(self):
     # Test Push
     self.image_id = self.gen_random_string()
     self.parent_id = self.gen_random_string()
     image_id = self.image_id
     parent_id = self.parent_id
     namespace = self.user_credentials[0]
     repos = self.gen_random_string()
     # Docker -> Index
     images_json = json.dumps([{'id': image_id}, {'id': parent_id}])
     resp = requests.put('{0}/v1/repositories/{1}/{2}/'.format(
         self.index_endpoint, namespace, repos),
         auth=tuple(self.user_credentials),
         headers={'X-Docker-Token': 'true'},
         data=images_json)
     self.assertEqual(resp.status_code, 200, resp.text)
     token = resp.headers.get('x-docker-token')
     # Docker -> Registry
     images_json = []
     images_json.append(self.upload_image(parent_id, None, token))
     images_json.append(self.upload_image(image_id, parent_id, token))
     # Updating the tags does not need a token, it will use the Cookie
     self.update_tag(namespace, repos, image_id, 'latest')
     # Docker -> Index
     resp = requests.put('{0}/v1/repositories/{1}/{2}/images'.format(
         self.index_endpoint, namespace, repos),
         auth=tuple(self.user_credentials),
         headers={'X-Endpoints': 'registrystaging-docker.dotcloud.com'},
         data=json.dumps(images_json))
     self.assertEqual(resp.status_code, 204)
     return (namespace, repos)
Code Example #15
File: mug.py  Project: tedder/smugmug-website-builder
  def upload(filepath):
    # http://api.smugmug.com/services/api/?method=upload&version=1.3.0
    fstat = os.stat(filepath)
    filename = os.path.basename(filepath)
    hash_md5 = hashlib.md5()
    with open(filepath, 'rb') as f:
        for chunk in iter(lambda: f.read(4096), b""):
            hash_md5.update(chunk)
    file_md5 = hash_md5.hexdigest()
 
    params = {
      'filenameSize': fstat.st_size,
      'filenameMD5': file_md5,
      'X-Smug-AlbumID': None,
      'X-Smug-Caption': None,
      'X-Smug-Pretty': True,
      'X-Smug-SessionID': None,
      'X-Smug-Version': '1.3.0'
    }
    # NOTE: this PUT is still a stub; it sends neither params nor the file body
    requests.put('http://upload.smugmug.com/test.jpg')

    payload = {}
    payload['APIKey'] = key
    payload['Pretty'] = True
    payload['method'] = method
    #payload['SessionID'] = secret
    #payload = { 'APIKey': 
    req = requests.get(url, params=payload)
    #print "fetched url: %s" % req.url
    return req.json()
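The function above never actually sends the file. A hedged sketch of what a complete upload PUT could look like, reusing only the X-Smug-* headers already present above plus standard Content-Length / Content-MD5 headers; the session id, album id and response-type header are assumptions, not values taken from the project:

import os
import hashlib
import requests

def upload_file(filepath, session_id, album_id):
    # Hypothetical helper: send the file body in a PUT to the SmugMug upload host.
    with open(filepath, 'rb') as f:
        body = f.read()
    headers = {
        'Content-Length': str(len(body)),
        'Content-MD5': hashlib.md5(body).hexdigest(),
        'X-Smug-SessionID': session_id,   # assumed to come from an earlier login call
        'X-Smug-AlbumID': str(album_id),
        'X-Smug-Version': '1.3.0',
        'X-Smug-ResponseType': 'JSON',    # assumption; see the 1.3.0 upload docs linked above
    }
    url = 'http://upload.smugmug.com/' + os.path.basename(filepath)
    resp = requests.put(url, data=body, headers=headers)
    resp.raise_for_status()
    return resp.json()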
Code Example #16
def publish(bulk, endpoint, rebuild, mapping):
    # if configured to rebuild_index
    # Delete and then re-create to dataType index (via PUT request)

    index_url = endpoint + "/dco"

    if rebuild:
        requests.delete(index_url)
        r = requests.put(index_url)
        if r.status_code != requests.codes.ok:
            print(r.url, r.status_code)
            r.raise_for_status()

    # push current dataType document mapping

    mapping_url = endpoint + "/dco/datatype/_mapping"
    with open(mapping) as mapping_file:
        r = requests.put(mapping_url, data=mapping_file)
        if r.status_code != requests.codes.ok:

            # new mapping may be incompatible with previous
            # delete current mapping and re-push

            requests.delete(mapping_url)
            mapping_file.seek(0)  # rewind; the first PUT consumed the open file object
            r = requests.put(mapping_url, data=mapping_file)
            if r.status_code != requests.codes.ok:
                print(r.url, r.status_code)
                r.raise_for_status()

    # bulk import new dataType documents
    bulk_import_url = endpoint + "/_bulk"
    r = requests.post(bulk_import_url, data=bulk)
    if r.status_code != requests.codes.ok:
        print(r.url, r.status_code)
        r.raise_for_status()
Code Example #17
 def upload_image(self, image_id, parent_id, token):
     layer = self.gen_random_string(7 * 1024 * 1024)
     json_obj = {
         'id': image_id
     }
     if parent_id:
         json_obj['parent'] = parent_id
     json_data = json.dumps(json_obj)
     h = hashlib.sha256(json_data + '\n')
     h.update(layer)
     layer_checksum = 'sha256:{0}'.format(h.hexdigest())
     resp = requests.put('{0}/v1/images/{1}/json'.format(
         self.registry_endpoint, image_id),
         data=json_data,
         headers={'Authorization': 'Token ' + token,
                  'X-Docker-Checksum': layer_checksum},
         cookies=self.cookies)
     self.assertEqual(resp.status_code, 200, resp.text)
     self.update_cookies(resp)
     resp = requests.put('{0}/v1/images/{1}/layer'.format(
         self.registry_endpoint, image_id),
         data=self.generate_chunk(layer),
         headers={'Authorization': 'Token ' + token},
         cookies=self.cookies)
     self.assertEqual(resp.status_code, 200, resp.text)
     self.update_cookies(resp)
     return {'id': image_id, 'checksum': layer_checksum}
Code Example #18
File: commands.py  Project: haradama/djehuty-sample
    def take_action(self, parsed_args):
        try:
            if parsed_args.person == None:
                response = urlopen("https://sheetsu.com/apis/v1.0/f053bbc8/name/{}".format(self.app_args.user))

                html = response.read()
                html = loads(html)

                html[0]["attendance"] = "x"

                r = requests.put("https://sheetsu.com/apis/v1.0/f053bbc8/name//{}".format(self.app_args.user), data=html[0])

            else:
                response = urlopen("https://sheetsu.com/apis/v1.0/f053bbc8/name/{}".format(parsed_args.person))

                html = response.read()
                html = loads(html)

                html[0]["attendance"] = "x"

                r = requests.put("https://sheetsu.com/apis/v1.0/f053bbc8/name//{}".format(parsed_args.person), data=html[0])

            r.status_code

            return '@{} I completed.'.format(self.app_args.user)

        except:
            pass
Code Example #19
File: webhdfs.py  Project: mpenkov/smart_open
    def __init__(self, uri_path, min_part_size=WEBHDFS_MIN_PART_SIZE):
        """
        Parameters
        ----------
        min_part_size: int, optional
            For writing only.

        """
        self.uri_path = uri_path
        self._closed = False
        self.min_part_size = min_part_size
        # creating empty file first
        payload = {"op": "CREATE", "overwrite": True}
        init_response = requests.put("http://" + self.uri_path,
                                     params=payload, allow_redirects=False)
        if not init_response.status_code == httplib.TEMPORARY_REDIRECT:
            raise WebHdfsException(str(init_response.status_code) + "\n" + init_response.content)
        uri = init_response.headers['location']
        response = requests.put(uri, data="", headers={'content-type': 'application/octet-stream'})
        if not response.status_code == httplib.CREATED:
            raise WebHdfsException(str(response.status_code) + "\n" + response.content)
        self.lines = []
        self.parts = 0
        self.chunk_bytes = 0
        self.total_size = 0

        #
        # This member is part of the io.BufferedIOBase interface.
        #
        self.raw = None
Code Example #20
File: database_test.py  Project: AdvEnc/voltdb
    def test_request_with_id_member(self):
        """
        ensure id and members are not allowed in payload
        """
        response = requests.get(__url__)
        value = response.json()

        if value:
            db_length = len(value['databases'])
            last_db_id = value['databases'][db_length-1]['id']
            print 'Database id to be updated is ' + str(last_db_id)
            url = __url__ + str(last_db_id)

        response = requests.put(url, json={'name': 'test', 'members': [3]})
        value = response.json()
        self.assertEqual(value['error'], 'You cannot specify \'Members\' while updating database.')
        self.assertEqual(response.status_code, 404)

        response = requests.put(url, json={'name': 'test', 'id': 33333})
        value = response.json()
        self.assertEqual(value['error'], 'Database Id mentioned in the payload and url doesn\'t match.')
        self.assertEqual(response.status_code, 404)

        response = requests.put(url, json={'name': 'test123', 'id': last_db_id})
        value = response.json()
        self.assertEqual(value['status'], 200)
        self.assertEqual(response.status_code, 200)
Code Example #21
def create_schema(storage_index_url):
    # making three tries, in case of communication errors with elasticsearch
    for _ in xrange(3):
        try:
            # delete index if already exist
            response = requests.head(storage_index_url)
            if response.status_code == 200:
                response = requests.delete(storage_index_url)
                response.raise_for_status()

            # create index
            response = requests.post(storage_index_url, data=json.dumps(
                SETTINGS))
            response.raise_for_status()

            # set mappings
            response = requests.put("{0}/blueprint/_mapping".format(
                storage_index_url), json.dumps(BLUEPRINT_SCHEMA))
            response.raise_for_status()
            response = requests.put("{0}/deployment/_mapping".format(
                storage_index_url), json.dumps(DEPLOYMENT_SCHEMA))
            response.raise_for_status()

            response = requests.put("{0}/node/_mapping".format(
                storage_index_url), json.dumps(NODE_SCHEMA))
            response.raise_for_status()

            response = requests.put("{0}/node_instance/_mapping".format(
                storage_index_url), json.dumps(NODE_INSTANCE_SCHEMA))
            response.raise_for_status()

            print 'Done creating elasticsearch storage schema.'
            break
        except HTTPError:
            pass
Code Example #22
File: domains.py  Project: floe-charest/jeto
    def put(self, id=None):
        domain = Domain.query.get(id)
        if current_user.has_permission(
            EditDomainPermission,
            getattr(domain.domain_controller, 'id')
        ):
            if 'domain_controller' in request.json:
                # If the controller is to be changed in the _edit,
                # Delete the domain on the current controller
                if domain.domain_controller is not None and\
                        request.json['domain_controller'] is not None:
                    self._delete_on_dc(domain)

                # If the domain is currently on the default controller and the
                # new controller is expected to be different, delete it on the
                # default controller
                if domain.domain_controller is None and\
                        request.json['domain_controller'] is not None:
                    self._delete_on_dc(domain)

                # If we are changing the controller to be the default one
                if domain.domain_controller is not None and\
                        request.json['domain_controller'] is None:
                    self._delete_on_dc(domain)

            domain = self._editDomain(id)

            req.put(
                '{}/{}'.format(self._get_url(domain), id),
                headers=self._get_headers(),
                data=json.dumps(marshal(domain, domain_fields)),
                verify=self._get_verify(domain)
            )

        return self.get(domain.id)
Code Example #23
File: bulk_restore.py  Project: CottageLabs/sysadmin
def put_mapping(mapping_dict):
    for index in mapping_dict:
        if not index.startswith('_'):
            # create the index first
            i = config['ELASTIC_SEARCH_HOST']
            i += '/' + index
            ri = requests.put(i)
            if ri.status_code != 200:
                print 'Failed to create Index:', index, ', HTTP Response:', ri.status_code
                print ri.text
                sys.exit(3)
            # now create each type inside the index
            for key, mapping in mapping_dict[index]['mappings'].iteritems():
                im = i + '/' + key + '/_mapping'
                exists = requests.get(im)
                # do not overwrite existing mappings
                if exists.status_code != 200:
                    themapping = {}
                    themapping[key] = mapping
                    r = requests.put(im, json.dumps(themapping))
                    if r.status_code != 200:
                        print 'Failed to do PUT mapping for Index:', index, ', Key:', key, ', HTTP Response:', r.status_code
                        sys.exit(4)
                    else:
                        print 'Mapping OK for Index:', index, ', Key:', key, ', HTTP Response:', r.status_code
                else:
                    print 'Mapping already exists for Index:', index, ', Key:', key
        else:
            print 'Ignoring {0}, no index names start with _'.format(index)
Code Example #24
    def _push_metadata_software_deployments(
            self, cnxt, server_id, stack_user_project_id):
        rs = db_api.resource_get_by_physical_resource_id(cnxt, server_id)
        if not rs:
            return
        deployments = self.metadata_software_deployments(cnxt, server_id)
        md = rs.rsrc_metadata or {}
        md['deployments'] = deployments
        rows_updated = db_api.resource_update(
            cnxt, rs.id, {'rsrc_metadata': md}, rs.atomic_key)
        if not rows_updated:
            action = "deployments of server %s" % server_id
            raise exception.ConcurrentTransaction(action=action)

        metadata_put_url = None
        metadata_queue_id = None
        for rd in rs.data:
            if rd.key == 'metadata_put_url':
                metadata_put_url = rd.value
            if rd.key == 'metadata_queue_id':
                metadata_queue_id = rd.value
        if metadata_put_url:
            json_md = jsonutils.dumps(md)
            requests.put(metadata_put_url, json_md)
        if metadata_queue_id:
            project = stack_user_project_id
            token = self._get_user_token(cnxt, rs, project)
            zaqar_plugin = cnxt.clients.client_plugin('zaqar')
            zaqar = zaqar_plugin.create_for_tenant(project, token)
            queue = zaqar.queue(metadata_queue_id)
            queue.post({'body': md, 'ttl': zaqar_plugin.DEFAULT_TTL})
Code Example #25
File: trellotools.py  Project: Natman64/Trellonos
    def move_card(self, card, list):
        """ Moves a card to a new list """
        # TODO this doesn't work
        url = BASE_URL + 'cards/' + card['id'] + '/idList'
        params = self.request_params({'value': list['id']})

        requests.put(url, params=params)
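The TODO above is kept as-is. Purely as an illustration (an assumption, not a confirmed fix from the Trellonos project), the new list id could be sent as form data in the request body instead of in the query string, with request_params() supplying only the key/token pair:

    def move_card_via_body(self, card, list):
        """ Hypothetical variant of move_card that sends the value as form data """
        url = BASE_URL + 'cards/' + card['id'] + '/idList'
        auth = self.request_params({})  # assumed to return just the key/token query params
        resp = requests.put(url, params=auth, data={'value': list['id']})
        resp.raise_for_status()
        return resp.json()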
Code Example #26
File: github.py  Project: pombredanne/GrimoireELK
    def geo_locations_to_es(self):
        max_items = self.elastic.max_items_bulk
        current = 0
        bulk_json = ""

        url = self.elastic.url + "/github/geolocations/_bulk"

        logging.debug("Adding geoloc to %s (in %i packs)" % (url, max_items))


        for loc in self.geolocations:
            if current >= max_items:
                requests.put(url, data=bulk_json)
                bulk_json = ""
                current = 0

            geopoint = self.geolocations[loc]
            location = geopoint.copy()
            location["location"] = loc
            # First upload the raw issue data to ES
            data_json = json.dumps(location)
            # Don't include in URL non ascii codes
            safe_loc = str(loc.encode('ascii', 'ignore'),'ascii')
            geo_id = str("%s-%s-%s" % (location["lat"], location["lon"],
                                       safe_loc))
            bulk_json += '{"index" : {"_id" : "%s" } }\n' % (geo_id)
            bulk_json += data_json +"\n"  # Bulk document
            current += 1

        requests.put(url, data = bulk_json)

        logging.debug("Adding geoloc to ES Done")
Code Example #27
def putRequest(queue, payload=None):
    response = {}
    statusCode = {}
    data = {}
    while not queue.empty():
        resourceURI = queue.get(timeout=1)
        response["Node"] = resourceURI
        try:
            if payload is None:
                r = requests.put(resourceURI, timeout=20)
            else:
                r = requests.put(resourceURI, data=payload, timeout=20)
            if r.headers["Content-Type"] == "application/json":
                data = r.json()
            else:
                data = r.text
            response["StatusCode"] = r.status_code
            response["Data"] = data
        except requests.exceptions.Timeout:
            response["StatusCode"] = 408
            response["Data"] = data
        except requests.exceptions.ConnectionError:
            response["Node"] = resourceURI
            response["StatusCode"] = 404
            response["Data"] = "n/a"

        GreenletRequests.NodeResponsesPost.append(response)
        print "Threaded PUT with ID " + str(GreenletRequests.npo) + " executed for " + resourceURI
        GreenletRequests.npo += 1
        gevent.sleep(0)
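A minimal driver sketch for putRequest above, assuming the surrounding module uses gevent and that results are collected through the GreenletRequests bookkeeping object shown in the original code; the helper below is illustrative only:

import gevent
from gevent.queue import Queue

def put_all(node_uris, payload=None, workers=4):
    # Hypothetical driver: fill a queue with target URIs and drain it from
    # several greenlets running putRequest concurrently.
    queue = Queue()
    for uri in node_uris:
        queue.put(uri)
    jobs = [gevent.spawn(putRequest, queue, payload) for _ in range(workers)]
    gevent.joinall(jobs)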
Code Example #28
def updateLSPs(linkPathDict, linkDict):
    if len(linkPathDict) == 0:
        return
    r = requests.get('https://10.10.2.25:8443/NorthStar/API/v1/tenant/1/topology/1/te-lsps/', headers=authHeader,
                     verify=False)
    lsp_list = json.loads(json.dumps(r.json()))

    new_lsps = []
    for lsp in lsp_list:
        if lsp['name'] not in linkPathDict:
            continue
        # Fill only the required fields
        ero = []
        path = linkPathDict[lsp['name']]
        for i in range(0, len(path) - 1):
            ero.append({'topoObjectType': 'ipv4', 'address': getZNodeIpAddress(path[i], path[i + 1], linkDict)})

        new_lsp = {}
        for key in ('from', 'to', 'name', 'lspIndex', 'pathType'):
            new_lsp[key] = lsp[key]

        new_lsp['plannedProperties'] = {'ero': ero}
        new_lsps.append(new_lsp)

    requests.put('https://10.10.2.25:8443/NorthStar/API/v1/tenant/1/topology/1/te-lsps/bulk',
                 json=new_lsps, headers=authHeader, verify=False)
Code Example #29
File: chronos.py  Project: seomoz/roger-mesos-tools
    def put(self, file_path, environmentObj, container, environment, act_as_user):
        self.fetchUserPass(environment)
        data = open(file_path).read()
        chronos_resource = "scheduler/iso8601"
        if 'parents' in json.loads(data):
            chronos_resource = "scheduler/dependency"

        print(colored("TRIGGERING CHRONOS FRAMEWORK UPDATE FOR JOB: {}".format(container), "cyan"))
        print(colored("curl -X PUT -H 'Content-type: application/json' --data-binary @{} {}/{}".format(
            file_path, environmentObj['chronos_endpoint'], chronos_resource), "cyan"))
        endpoint = environmentObj['chronos_endpoint']
        deploy_url = "{}/{}".format(endpoint, chronos_resource)

        if not act_as_user:
            resp = requests.put(deploy_url, data=data,
                                headers={'Content-type': 'application/json'},
                                auth=(self.user, self.passw),
                                allow_redirects=True)
        else:
            resp = requests.put(deploy_url, data=data,
                                headers={'Content-type': 'application/json', 'act-as-user': act_as_user},
                                auth=(self.user, self.passw),
                                allow_redirects=True)
        chronos_message = "{}".format(resp)
        print(colored(chronos_message, "yellow"))
        task_id = []
        body = json.loads(data)
        if 'name' in body:
            task_id.append(body['name'])

        return resp, task_id
Code Example #30
File: lightring.py  Project: jeurgen/ring-lights
def switch_off_on(lightId):
        payload = '{"on": false}'
        r = requests.put(lightPath+lightId+'/state', data=payload)
        time.sleep(0.5)

        payload = '{"on": true}'
        r = requests.put(lightPath+lightId+'/state', data=payload)
Code Example #31
File: put.py  Project: satee143/REST-API
 def Put_Resource(self):
     result=requests.put(self.uri,self.load)
     print(result.status_code)
     print(result.content)
     self.json_load=result.text
Code Example #32
File: lists.py  Project: hoskliza/Test
 def update_name(self, idList, value):
     resp = requests.put("https://trello.com/1/lists/{}/name".format(idList), params={"key": self._apikey, "token": self._token}, data={"value": value})
     resp.raise_for_status()
     return json.loads(resp.text)
Code Example #33
File: lists.py  Project: hoskliza/Test
 def update_idBoard(self, idList, value, pos=None):
     resp = requests.put("https://trello.com/1/lists/{}/idBoard".format(idList), params={"key": self._apikey, "token": self._token}, data={"value": value, "pos": pos})
     resp.raise_for_status()
     return json.loads(resp.text)
Code Example #34
File: lists.py  Project: hoskliza/Test
 def update(self, idList, name=None, closed=None, idBoard=None, pos=None, subscribed=None):
     resp = requests.put("https://trello.com/1/lists/{}".format(idList), params={"key": self._apikey, "token": self._token}, data={"name": name, "closed": closed, "idBoard": idBoard, "pos": pos, "subscribed": subscribed})
     resp.raise_for_status()
     return json.loads(resp.text)
Code Example #35
	username = user_list[0]
	print "[+] Found user {}".format(username)
else:
	r = requests.post("http://{}/rest/user/".format(ip), data={'username' : username, 'password' : password})
	print "[+] Create user"
	
	if not "User created" in r.text and not "User already exist" in r.text:
		print "[-] Cannot create user"
		os._exit(0)

r = requests.get("http://{}/rest/settings/general/webinterface/".format(ip))
if "true" in r.text:
	print "[+] Web repository already enabled"
else:
	print "[+] Enable web repository"
	r = requests.put("http://{}/rest/settings/general/webinterface/".format(ip), data='{"enabled" : "true"}')
	if not "Web interface successfully enabled" in r.text:
		print "[-] Cannot enable web interface"
		os._exit(0)

print "[+] Get repositories list"
r = requests.get("http://{}/rest/repository/".format(ip))
repository_list = r.json()

if len(repository_list) > 0:
	repository = repository_list[0]['name']
	print "[+] Found repository {}".format(repository)
else:
	print "[+] Create repository"

	r = requests.post("http://{}/rest/repository/".format(ip), cookies={'csrftoken' : csrf_token}, data={'name' : repository, 'csrfmiddlewaretoken' : csrf_token})
Code Example #36
File: backend.py  Project: HackTrinity/CHAD
 def reset_instance(self, user_id, chall_id):
     res = requests.put(f'{self.endpoint}/instances/{user_id}/{chall_id}')
     self.__raise_status(res)
     res.raise_for_status()
Code Example #37
def detect_web(options):
    format_output('w',
                  'Setting up automatic probe server type and webshell type')
    format_output('w', 'Detecting server info of ' + options.url)
    server_list = ['apache', 'nginx', 'iis']
    shell_list = ['php', 'aspx', 'asp', 'jsp']
    header = forge_header(options)
    web_hint = 'Web server may be '
    shell_hint = 'The shell type may be '
    if options.shell_type == 'detect':
        for shell in shell_list:
            if shell in options.url.lower():
                format_output('h', shell_hint + shell)
                options.shell_type = shell
                break

    if options.server_type == 'detect' or options.shell_type == 'detect':
        proxies = get_proxy(options)
        try:
            get_rsp = requests.get(url=options.url,
                                   headers=header,
                                   timeout=options.time_out,
                                   proxies=proxies,
                                   verify=False)
        except Exception as e:
            format_output('e', str(e))
            return 'error'

        if 'server' in get_rsp.headers:
            format_output('h', web_hint + get_rsp.headers['server'])
            options.server_type = get_rsp.headers['server'].lower()

        if 'x-powered-by' in get_rsp.headers:
            power_hint = 'Web server may be x-powered-by '
            format_output('h', power_hint + get_rsp.headers['x-powered-by'])
            if options.shell_type == 'detect':
                for shell in shell_list:
                    if shell in get_rsp.headers['x-powered-by'].lower():
                        format_output('h', shell_hint + shell)
                        options.shell_type = shell
                        break
            if options.server_type == 'detect':
                for server in server_list:
                    if server in get_rsp.headers['x-powered-by'].lower():
                        format_output('h', web_hint + server)
                        options.server_type = server
                        break

    if options.server_type == 'detect':
        random_str = str(random.sample(string.printable, 5)).encode('hex')
        random_url = options.url + random_str
        random_rsp = requests.get(url=random_url, headers=header, verify=False)
        if random_rsp.status_code == 404:
            for server in server_list:
                if server in str(random_rsp.text).lower():
                    format_output('h', web_hint + server)
                    options.server_type = server
                    break

    if options.server_type == 'detect':
        put_rsp = requests.put(url=options.url, headers=header, verify=False)
        if put_rsp.status_code == 405 or put_rsp.status_code == 411:
            options.server_type = 'nginx'
            format_output('h', web_hint + options.server_type)
        if put_rsp.status_code == 200:
            options.server_type = 'apache'
            format_output('h', web_hint + options.server_type)

    if options.server_type == 'detect':
        del_rsp = requests.delete(url=options.url,
                                  headers=header,
                                  verify=False)
        if del_rsp.status_code == 501:
            options.server_type = 'iis'
            format_output('h', web_hint + options.server_type)
        if del_rsp.status_code == 403:
            options.server_type = 'apache'
            format_output('h', web_hint + options.server_type)
Code Example #38
File: test_movies.py  Project: cjbara/paradigms
	def reset_data(self):
		m = {}
		m['apikey'] = 'AAAAAAAB'
		r = requests.put(self.RESET_URL, data = json.dumps(m))
Code Example #39
logging.info('Uploaded new ZIP file! Response:\n%s', prettylog(json.loads(r_upload.text)))

# update the version string to current date
date_string = datetime.now().strftime('%Y-%m-%d')

data_update = {}
data_update['metadata'] = copy.deepcopy(data_new['metadata'])
data_update['metadata']['version'] = date_string
del data_update['metadata']['doi'] # even unchanged it will trigger an error

# send the metadata update
headers = {'Content-Type': 'application/json'}
md_url = '%s?access_token=%s' % (draft_url, token)
logging.info('Updating metadata at %s with:\n %s' % (draft_url, prettylog(data_update)))
r_update = requests.put(md_url,
                        data = json.dumps(data_update),
                        headers = headers)
if r_update.status_code != 200:
    logging.critical('Error updating metadata: %s', r_update.text)
    sys.exit()
logging.info('Updated Metadata!\n%s' % (prettylog(json.loads(r_update.text))))

# publish manually
#input('Go to https://zenodo.org/deposit/%s, set the version to >>> %s <<< and then publish it.' % (draft_url.rsplit('/', 1)[-1], date_string))

# publish the updated deposit
logging.info('Publishing deposition %s' % (draft_url))
r_publish = requests.post('%s/actions/publish' % (draft_url), params = {'access_token': token})
if r_publish.status_code != 202:
    logging.critical('Error publishing new version: %s', r_publish.text)
    sys.exit()
Code Example #40
	def edit(self, dashboard_id, data):
		data = data if data else {}
		return requests.put('{api_endpoint}/dashboard/{dashboard_id}'.format(api_endpoint=API_TAGO, dashboard_id=dashboard_id), headers=self.default_headers, data=json.dumps(data)).json()
Code Example #41
def CORS(Excel_Location, Excel_Sheet_Name, Module_Name):
    result = {}
    try:
        returnvalue = readexcel(Excel_Location, Excel_Sheet_Name, Module_Name)
        print("Data from find_vulnerable_parameters ")
        print(result)
    except Exception as error:
        print(error)
        traceback.print_stack()

    try:
        API = returnvalue['API']
    except Exception as error:
        print(error)
        traceback.print_stack()

    try:
        Method = returnvalue['HTTPMethod']
    except Exception as error:
        print(error)
        traceback.print_stack()

    try:
        Protocol = returnvalue['Protocol']
    except Exception as error:
        print(error)
        traceback.print_stack()

    try:
        BaseURL = returnvalue['BaseURL']
    except Exception as error:
        print(error)
        traceback.print_stack()

    try:
        RelativeURL = returnvalue['RelativeURL']
    except Exception as error:
        print(error)
        traceback.print_stack()

    try:
        URL = returnvalue['URL']
    except Exception as error:
        print(error)
        traceback.print_stack()

    try:
        Body = returnvalue['Body']
    except Exception as error:
        print(error)
        traceback.print_stack()

    try:
        Header = returnvalue['Header']
    except Exception as error:
        print(error)
        traceback.print_stack()


    try:
        Cookie = returnvalue['Cookie']
    except Exception as error:
        print(error)
        traceback.print_stack()

    try:
        Check_BaseURL = re.findall(r'\$(.*?)\$', str(BaseURL))
        if (Check_BaseURL != ""):
            for key in Check_BaseURL:
                BaseURL = BaseURL.replace("$", "")
            print(BaseURL)
        else:
            print("No Change in Base URL")
    except Exception as error:
        print(error)
        traceback.print_stack()

    try:
        Check_RelativeURL = re.findall(r'\$(.*?)\$', str(RelativeURL))
        if (Check_RelativeURL != ""):
            for key in Check_RelativeURL:
                RelativeURL = RelativeURL.replace("$", "")
            print(RelativeURL)
        else:
            print("No Change in Relative URL")
    except Exception as error:
        print("Error in reading Relative URL")
        traceback.print_stack()

    try:
        Check_Method = re.findall(r'\$(.*?)\$', str(Method))
        if (Check_Method != ""):
            for key in Check_Method:
                Method = Method.replace("$", "")
            print(Method)
        else:
            print("No Change in Method")
    except Exception as error:
        print("Error in finding Method")
        traceback.print_stack()

    try:
        Check_Protocol = re.findall(r'\$(.*?)\$', str(Protocol))
        if (Check_Protocol != ""):
            for key in Check_Protocol:
                Protocol = Protocol.replace("$", "")
            print(Protocol)
        else:
            print("No Change in Protocol")
    except Exception as error:
        print("Error in finding Protocol")
        traceback.print_stack()

    try:
        Check_URL = re.findall(r'\$(.*?)\$', str(URL))
        if(Check_URL != ""):
            for key in Check_URL:
                URL = URL.replace("$","")
            print(URL)
        else:
            print("No Change in URL")
    except Exception as error:
            print(error)
            traceback.print_stack()

    try:
        Check_Body = re.findall(r'\$(.*?)\$', str(Body))
        if (Check_Body != ""):
            for key in Check_Body:
                Body = Body.replace("$", "")
            print(Body)
        else:
            print("No Change in Body")
            print(Body)
    except Exception as error:
        print(error)
        traceback.print_stack()


    try:
        Check_Header = re.findall(r'\$(.*?)\$', str(Header))
        if (Check_Header != ""):
            for key in Check_Header:
                Header = Header.replace("$", "")
            print(Header)
        else:
            print("No Change in Header")
            print(Header)
    except Exception as error:
        print(error)
        traceback.print_stack()

    try:
        Check_Cookie = re.findall(r'\$(.*?)\$', str(Cookie))
        if (Check_Cookie != ""):
            for key in Check_Cookie:
                Cookie = Cookie.replace("$", "")
            print(Cookie)
        else:
            print("No Change in Cookie")
            print(Cookie)
    except Exception as error:
        print(error)
        traceback.print_stack()


    try:
        StatusCode = {}
        Origin = {'Origin':'www.geeksforgeeks.org'}
        print(Origin)

        if(Method == 'GET'):
            GET = requests.get(URL, data=Body, headers=Origin)
            result['GET StatusCode'] = str(GET.status_code)

        elif(Method == 'POST'):
            POST = requests.post(URL, data=Body, headers=Origin)
            result['POST StatusCode'] = str(POST.status_code)

        elif (Method == 'PUT'):
            PUT = requests.put(URL, data=Body, headers=Origin)
            result['PUT StatusCode'] = str(PUT.status_code)

        elif (Method == 'DELETE'):
            DELETE = requests.delete(URL, data=Body, headers=Origin)
            result['DELETE StatusCode'] = str(DELETE.status_code)

    except Exception as error:
        print("Error in executing HOST Injection")
        traceback.print_stack()

    return result
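The function above records only status codes. A CORS misconfiguration is normally judged from the Access-Control-Allow-Origin and Access-Control-Allow-Credentials response headers rather than from the status code alone; a small illustrative helper along those lines (the names are not part of the original module):

def check_cors_headers(response, origin='www.geeksforgeeks.org'):
    # Inspect the CORS response headers returned for the probed Origin.
    acao = response.headers.get('Access-Control-Allow-Origin')
    acac = response.headers.get('Access-Control-Allow-Credentials')
    return {
        'Access-Control-Allow-Origin': acao,
        'Access-Control-Allow-Credentials': acac,
        # Reflecting the arbitrary origin (or '*' together with credentials) is the usual red flag.
        'Suspicious': acao in (origin, '*'),
    }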
Code Example #42
File: OVM_PUT.py  Project: SnarferX/CCTO
import sys
import json
import pprint
import array as arr
import requests
import ovmclient  # assumed import path for the ovmclient.Client used below

#variable
pp = pprint.PrettyPrinter(indent=4)
client = ovmclient.Client('base_Uri', 'user', 'password')
user = '******'
password = '******'
baseUri = 'https://ovmdmgr04:7002/ovm/core/wsapi/rest'
client = ovmclient.Client(baseUri, user, password)
repo_name = client.repositories.get_id_by_name('pool07-virt1-repo')
repo_value = (repo_name['value'])

s = requests.Session()
s.auth=( user, password )
s.verify=False #disables SSL certificate verification
s.headers.update({'Accept': 'application/json', 'Content-Type': 'application/json'})

argument = sys.argv[1]
# argument2 = sys.argv[2]
# argument3 = sys.argv[3]

vm_id = client.vms.get_id_by_name(argument)
vm_value = str(vm_id['value'])
print(vm_value)

# 'item' should hold the REST URI of the object being updated (left as in the original);
# 'block' is not a keyword requests.put accepts, so it is not passed here.
r = requests.put(item, timeout=None)


Code Example #43
File: cli.py  Project: BU-EC500-SP15/haas
def do_put(url, data={}):
    return check_status_code(requests.put(url, data=json.dumps(data)))
Code Example #44
import serial
import time
import string
import pynmea2
import requests
import json

url = 'http://<server>/Thingworx'
headers = { 'Content-Type': 'application/json', 'appKey': '<appKey>','Accept': 'text/html'}

while True:
    port = "/dev/ttyAMA0"
    ser = serial.Serial(port, baudrate=9600, timeout=0.5)
    dataout = pynmea2.NMEAStreamReader()
    newdata = ser.readline()

    if newdata[0:6] == "$GPGLL":
        newmsg = pynmea2.parse(newdata)
        lat = newmsg.latitude
        lng = newmsg.longitude
        locstr = "Latitude=" + str(lat) + "and Longitude=" + str(lng)
        print(locstr)
        response = requests.put(url + '/Things/MyAsset_<userid>/Properties/*', json={"GPS": {"longitude": lng, "latitude": lat, "elevation": 0.5, "units": "WGS84"}}, auth=(<username>, <password>), headers=headers, verify=False)
Code Example #45
import requests

BASE = "http://127.0.0.1:5000/"
#
#
data = [{
    "fact": "smelly cat, smelly cat"
}, {
    "fact": "cats are big"
}, {
    "fact": "kitty cat"
}, {
    "fact": "cats have 7 lifes"
}]

for i in range(len(data)):
    response = requests.put(BASE, data[i])
    print(response.json())
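For context, a minimal server that this client could be exercising; this is a hedged sketch built with flask_restful, not code from the original snippet (the resource name and in-memory store are invented for illustration):

from flask import Flask, request
from flask_restful import Api, Resource

app = Flask(__name__)
api = Api(app)

facts = []  # illustrative in-memory store


class CatFacts(Resource):
    def put(self):
        # requests.put(BASE, data={...}) arrives here as form data
        fact = request.form.get('fact', '')
        facts.append(fact)
        return {'fact': fact, 'count': len(facts)}


api.add_resource(CatFacts, '/')

if __name__ == '__main__':
    app.run(debug=True)  # serves http://127.0.0.1:5000/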
Code Example #46
def perform_attack(Area,METhod,Any_Parameter,Payload_RowLength,Payload_SheetName,URL,Body,Header,Cookie):
    result = []
    if(Any_Parameter):
        print("Parameter found in = " + Area)
        for i in range(2, Payload_RowLength + 1):
            Payload_RowContents = Payload_SheetName.cell(row=i, column=1)
            print("Row " + str(Payload_RowContents.row - 1) + " = " + str(Payload_RowContents.value), end="" + "\n")
            for key in Any_Parameter:
                if(Area=='URL'):
                    print("Area is = " + Area)
                    AttackURL = URL.replace(key, str(Payload_RowContents.value))
                    AttackURL = AttackURL.replace("$", "")
                    print("Attack url : " + str(AttackURL))
                    AttackBody = str(Body).replace("$", "")
                    print("Body : " + str(AttackBody))
                    AttackHeader = str(Header).replace("$", "")
                    print("Header : " + str(AttackHeader))
                    AttackCookie = str(Cookie).replace("$", "")
                    print("Cookie : " + AttackCookie)
                elif(Area == 'Body'):
                    print("Area is = " + Area)
                    AttackBody = Body.replace(key, str(Payload_RowContents.value))
                    print("Original BOdy ===============" + AttackBody)
                    print("for2", i+1)
                    AttackURL = URL
                    print("URL : " + str(AttackURL))
                    AttackBody = str(AttackBody).replace("$", "")
                    print("AttackBody : " + str(AttackBody))
                    AttackHeader = str(Header)
                    print("Header : " + str(AttackHeader))
                    AttackCookie = str(Cookie)
                    print("Cookie : " +  AttackCookie)
                elif(Area == 'Header'):
                    print("Area is = " + Area)
                    AttackHeader = Header.replace(key, str(Payload_RowContents.value))
                    AttackURL = str(URL).replace("$", "")
                    print(AttackURL)
                    AttackBody = str(Body).replace("$", "")
                    print(AttackBody)
                    AttackHeader = str(AttackHeader).replace("$", "")
                    print(AttackHeader)
                    AttackCookie = str(Cookie).replace("$", "")
                    print(AttackCookie)
                elif(Area == 'Cookie'):
                    print("Area is = " + Area)
                    AttackCookie = Cookie.replace(key, str(Payload_RowContents.value))
                    AttackURL = str(URL).replace("$", "")
                    print(AttackURL)
                    AttackBody = str(Body).replace("$", "")
                    print(AttackBody)
                    AttackHeader = str(Header).replace("$", "")
                    print(AttackHeader)
                    AttackCookie = str(AttackCookie).replace("$", "")
                    print(AttackCookie)                
                try:
                    if (METhod == 'GET'):
                        print("Method found in attack = " + METhod)
                        response = requests.get(AttackURL, data=AttackBody, headers=AttackHeader)
                    elif(METhod == 'POST'):
                        print("Method found in attack = " + METhod)
                        response = requests.post(AttackURL, data=AttackBody, headers=AttackHeader)
                        print("Got ============ response")
                    elif(METhod == 'PUT'):
                        response = requests.put(AttackURL, data=AttackBody, headers=AttackHeader)
                    elif(METhod == 'DELETE'):
                        response = requests.delete(AttackURL, data=AttackBody, headers=AttackHeader)
                    StatusCode = str(response.status_code)
                    Response_Body = str(response.text)
                    print("Response Status Code : " + str(StatusCode) + "\n")
                    print("Response Body : " + str(Response_Body) + "\n")
                    result.append(StatusCode)
                    print(result)
                    time.sleep(10)
                except Exception:
                    traceback.print_exc()
                    print("Error in executing: " + str(AttackURL))
                    StatusCode = '500'
                    result.append(StatusCode)
                    print(result)
                    time.sleep(10)
                print(result)
    else:
        print("No Parameter choosen in the API")
    return result
Code Example #47
df = pd.Series(freq).to_frame()
df = df.sort_values(by=0, ascending=False)

tags = df.head(tags_to_add).index.tolist()
print('The following tags will be created:')
print(tags)

for keyword in keyword_list:
    for tag in tags:
        if tag in keyword['keyword']:
            if tag not in keyword['tags']:
                keyword['tags'].append(tag)

print('Adding tags to your keywords...')
url = 'https://api.dragonmetrics.com/v1.3/campaigns/' + str(
    target_campaign_id) + '/keywords'


def chunks(l, n):
    for i in range(0, len(l), n):
        yield l[i:i + n]


for chunk in chunks(keyword_list, 100):
    r = requests.put(url, data=json.dumps(chunk), headers=headers)
    print('Updating keywords... ' + str(len(chunk)) +
          ' keywords just got updated')

print('Done')
コード例 #48
0
ファイル: create_lsh_model.py プロジェクト: tamanobi/es-knn
            "_aknn_nb_tables": args["aknn_tables"],
            "_aknn_nb_bits_per_table": args["aknn_bits"]
        },
        "_aknn_vector_sample": [
            # Populated below.
        ]
    }

    # Delete and remake the index.
    print("Deleting index %s" % body["_index"])
    index_url = "%s/%s" % (args["es_host"], body["_index"])
    req = requests.delete(index_url)
    assert req.status_code == 200, "Failed to delete index: %s" % json.dumps(req.json())

    print("Creating index %s" % body["_index"])
    req = requests.put(index_url)
    assert req.status_code == 200, "Failed to create index: %s" % json.dumps(req.json())

    # Put the mapping. This can fail if you already have this index/type setup.
    print("Creating mapping for index %s" % body["_index"])
    mapping_url = "%s/%s/%s/_mapping" % (args["es_host"], body["_index"], body["_type"])
    req = requests.put(mapping_url, json=mapping)
    assert req.status_code == 200, "Failed to create mapping: %s" % json.dumps(req.json())

    # Create an iterable over the feature documents.
    docs = iter_docs('./features')

    # Populate the vector sample by randomly sampling vectors from iterable.
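    # Draw 2 * bits * tables sample vectors, i.e. two samples per hash bit across every table.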
    nb_samples = 2 * args["aknn_bits"] * args["aknn_tables"]
    #print("Sampling %d feature vectors from %s" % (nb_samples, args["features_source"]))
    while len(body["_aknn_vector_sample"]) < nb_samples:
コード例 #49
0
def gns3_create_nodes(gns3_server, project_id, gns3_code_topology_data):

    print("""
    ╔═╗┌┬┐┌─┐┌─┐  ╦   ╔═╗┬─┐┌─┐┌─┐┌┬┐┌─┐  ┌┐┌┌─┐┌┬┐┌─┐┌─┐
    ╚═╗ │ ├┤ ├─┘  ║   ║  ├┬┘├┤ ├─┤ │ ├┤   ││││ │ ││├┤ └─┐
    ╚═╝ ┴ └─┘┴    ╩.  ╚═╝┴└─└─┘┴ ┴ ┴ └─┘  ┘└┘└─┘─┴┘└─┘└─┘.
    """)

    gns3_appliances = gns3_get_appliances_names_and_id(gns3_server)

    list_images = []
    list_node_names = []

    for node in gns3_code_topology_data['gns3_nodes']:
        appliance = node['appliance']
        name = node['name']
        r_get_nodes = requests.get(gns3_server + '/v2/projects/' +
                                   str(project_id) + '/nodes')
        r_get_nodes_dict = r_get_nodes.json()
        # Checking existence of nodes in the project from the topology file.
        for dictionary_node in r_get_nodes_dict:
            if dictionary_node['name'] == name:
                console_port = dictionary_node['console']
                status = dictionary_node['status']
                print(name, 'is already created.', 'Console port:',
                      console_port, 'Status:', status)
                break
        else:  # the code in the else block runs only if the loop completes without encountering a break statement.
            list_images.append(appliance)
            list_node_names.append(name)
    if not list_node_names:
        print()
        print('All nodes were already created in GNS3 project.', 'Project ID:',
              project_id)
        print('#' * 100)
        return
    else:
        # Creating new nodes from the topology file.
        for node_image, node_name in zip(list_images, list_node_names):
            print()
            print('Pair:', '[' + node_image + ']', '[' + node_name + ']')

            payload_coordinates = '{"x": 0, "y": 0}'
            payload_create_node = '{"name": "' + node_name + '"}'

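            # Match the requested appliance name against the templates known to the GNS3 server.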
            for key, value in gns3_appliances.items():
                if node_image == key:
                    # Built in GNS3
                    if node_image == 'Cloud':
                        cloud_payload = '{"name": "' + node_name + \
                                        '", "node_type": "cloud", "compute_id": "local"}'
                        cloud_r = requests.post(gns3_server + '/v2/projects/' +
                                                project_id + '/nodes',
                                                data=cloud_payload)
                        if cloud_r:
                            cloud_r_dict = cloud_r.json()
                            cloud_new_id = cloud_r_dict['node_id']
                            print()
                            print(node_name, 'is created.', cloud_new_id)
                            print()
                            continue
                        else:
                            print(cloud_r)
                            print('that is not working, please try again.')
                            return
                    elif node_image == 'VPCS':
                        vpcs_payload = '{"name": "' + node_name + \
                                       '", "node_type": "vpcs", "compute_id": "local"}'
                        vpcs_r = requests.post(gns3_server + '/v2/projects/' +
                                               project_id + '/nodes',
                                               data=vpcs_payload)
                        if vpcs_r:
                            vpcs_r_dict = vpcs_r.json()
                            vpcs_new_id = vpcs_r_dict['node_id']
                            print()
                            print(node_name, 'is created.', vpcs_new_id)
                            print()
                            continue
                        else:
                            print(vpcs_r)
                            print('that is not working, please try again.')
                            return
                    elif node_image == 'NAT':
                        nat_payload = '{"name": "' + node_name + \
                                      '", "node_type": "nat", "compute_id": "local"}'
                        nat_r = requests.post(gns3_server + '/v2/projects/' +
                                              project_id + '/nodes',
                                              data=nat_payload)
                        if nat_r:
                            nat_r_dict = nat_r.json()
                            nat_new_id = nat_r_dict['node_id']
                            print()
                            print(node_name, 'is created.', nat_new_id)
                            print()
                            continue
                        else:
                            print(nat_r)
                            print('that is not working, please try again.')
                            return
                    elif node_image == 'Frame Relay switch':
                        fr_sw_payload = '{"name": "' + node_name + \
                                        '", "node_type": "frame_relay_switch", "compute_id": "local"}'
                        fr_sw_r = requests.post(gns3_server + '/v2/projects/' +
                                                project_id + '/nodes',
                                                data=fr_sw_payload)
                        if fr_sw_r:
                            fr_sw_r_dict = fr_sw_r.json()
                            fr_sw_new_id = fr_sw_r_dict['node_id']
                            print()
                            print(node_name, 'is created.', fr_sw_new_id)
                            print()
                            continue
                        else:
                            print(fr_sw_r)
                            print('that is not working, please try again.')
                            return
                    elif node_image == 'Ethernet hub':
                        eth_hub_payload = '{"name": "' + node_name + \
                                          '", "node_type": "ethernet_hub", "compute_id": "local"}'
                        eth_hub_r = requests.post(
                            gns3_server + '/v2/projects/' + project_id +
                            '/nodes',
                            data=eth_hub_payload)
                        if eth_hub_r:
                            eth_hub_r_dict = eth_hub_r.json()
                            eth_hub_new_id = eth_hub_r_dict['node_id']
                            print()
                            print(node_name, 'is created.', eth_hub_new_id)
                            print()
                            continue
                        else:
                            print(eth_hub_r)
                            print('that is not working, please try again.')
                            return
                    elif node_image == 'Ethernet switch':
                        eth_sw_payload = '{"name": "' + node_name + \
                                         '", "node_type": "ethernet_switch", "compute_id": "local"}'
                        eth_sw_r = requests.post(gns3_server +
                                                 '/v2/projects/' + project_id +
                                                 '/nodes',
                                                 data=eth_sw_payload)
                        if eth_sw_r:
                            eth_sw_r_dict = eth_sw_r.json()
                            eth_sw_new_id = eth_sw_r_dict['node_id']
                            print()
                            print(node_name, 'is created.', eth_sw_new_id)
                            print()
                            continue
                        else:
                            print(eth_sw_r)
                            print('that is not working, please try again.')
                            return
                    # Added manually
                    else:
                        appliance_id = value
                        r_create_node = requests.post(
                            gns3_server + '/v2/projects/' + project_id +
                            '/appliances/' + appliance_id,
                            data=payload_coordinates)
                        if r_create_node:
                            r_create_node_dict = r_create_node.json()
                            new_node_id = r_create_node_dict['node_id']
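                            # Rename the freshly created node so it matches the name from the topology file.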
                            requests.put(gns3_server + '/v2/projects/' +
                                         project_id + '/nodes/' + new_node_id,
                                         data=payload_create_node)
                            print()
                            print(node_name, 'is created.', new_node_id)
                            print('#' * 100)
                        else:
                            print(r_create_node)
                            print('that is not working, please try again.')
                            exit()
        print('=' * 100)
コード例 #50
0
def firebaseDB(finalRes, databaseN):
    # get the Firebase URL and do the request, dumping the data to a JSON object
    fireURL = 'https://project551-5d799.firebaseio.com/' + databaseN + '/.json'
    results = requests.put(fireURL, data=json.dumps(finalRes))
コード例 #51
0
def acknowledgeAlarm(alarmId):
    alarm = {
        'status': 'ACKNOWLEDGED'
    }
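    # Send a partial alarm object containing only the new status.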
    response = requests.put(C8Y_BASE + '/alarm/alarms/' + str(alarmId), headers=C8Y_HEADERS, data=json.dumps(alarm))
    return response.json()
コード例 #52
0
def group(group_id):
    access_token = request.args.get('access_token')
    if access_token:
        memory = {}
        memory = requests.get(
            'https://groupy.firebaseio.com/groups/{0}.json'.format(
                group_id)).json()
        if not memory:
            group = requests.get(
                'https://api.groupme.com/v3/groups/{0}?access_token={1}'.
                format(group_id, access_token)).json()['response']
            members = defaultdict(list)
            for m in group['members']:
                if m['image_url']:
                    members[m['user_id']].append(m['image_url'] + '.avatar')
                else:
                    members[m['user_id']].append(
                        "https://i.groupme.com/sms_avatar.avatar")
                members[m['user_id']].append(m['nickname'])
            members['system'] = ["", 'GroupMe']
            likes_given = defaultdict(lambda: defaultdict(int))
            likes_received = defaultdict(lambda: defaultdict(int))
            posts = defaultdict(int)
            messages = requests.get(
                'https://api.groupme.com/v3/groups/{0}/messages?limit=100&access_token={1}'
                .format(group_id, access_token))
            latest = messages.json()['response']['messages'][0]['id']
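            # Page backwards through the message history, 100 messages per request, until the API stops returning 200.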
            while messages.status_code == 200:
                for m in messages.json()['response']['messages']:
                    if m['user_id'] not in members:
                        members[m['user_id']] = [
                            (m['avatar_url'] +
                             ".avatar") if m['avatar_url'] else "", m['name']
                        ]
                    for f in m['favorited_by']:
                        likes_given[f][m['user_id']] += 1
                        likes_received[m['user_id']][f] += 1
                    posts[m['user_id']] += 1
                messages = requests.get(
                    'https://api.groupme.com/v3/groups/{0}/messages?limit=100&before_id={1}&access_token={2}'
                    .format(group_id, m['id'], access_token))
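            # Cache the aggregated stats in Firebase so later requests only have to fetch messages newer than 'latest'.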
            memory = requests.put(
                'https://groupy.firebaseio.com/groups/{0}.json'.format(
                    group_id),
                data=json.dumps({
                    "members": members,
                    "likes_given": likes_given if likes_given else {
                        "system": {
                            "system": 0
                        }
                    },
                    "likes_received": likes_received if likes_received else {
                        "system": {
                            "system": 0
                        }
                    },
                    "group": group,
                    "latest": latest,
                    "posts": posts
                })).json()
            memory = {
                "members": members,
                "likes_given": likes_given if likes_given else {
                    "system": {
                        "system": 0
                    }
                },
                "likes_received": likes_received if likes_received else {
                    "system": {
                        "system": 0
                    }
                },
                "group": group,
                "latest": latest,
                "posts": posts
            }
        else:
            messages = requests.get(
                'https://api.groupme.com/v3/groups/{0}/messages?after_id={1}&limit=100&access_token={2}'
                .format(group_id, memory['latest'], access_token))
            memory['group'] = requests.get(
                'https://api.groupme.com/v3/groups/{0}?access_token={1}'.
                format(group_id, access_token)).json()['response']
            while messages.status_code == 200 and messages.json(
            )['response']['messages']:
                for m in messages.json()['response']['messages']:
                    if m['user_id'] not in memory['members']:
                        memory['members'][m['user_id']] = [
                            (m['avatar_url'] +
                             ".avatar") if m['avatar_url'] else "", m['name']
                        ]
                    memory['likes_given'] = defaultdict(
                        lambda: defaultdict(int), {
                            key: defaultdict(int, value.iteritems())
                            for key, value in
                            memory['likes_given'].iteritems()
                        })
                    memory['likes_received'] = defaultdict(
                        lambda: defaultdict(int), {
                            key: defaultdict(int, value.iteritems())
                            for key, value in
                            memory['likes_received'].iteritems()
                        })
                    memory['posts'] = defaultdict(int,
                                                  memory['posts'].iteritems())
                    for f in m['favorited_by']:
                        memory['likes_given'][f][m['user_id']] += 1
                        memory['likes_received'][m['user_id']][f] += 1
                    memory['posts'][m['user_id']] += 1
                memory['latest'] = messages.json(
                )['response']['messages'][-1]['id']
                messages = requests.get(
                    'https://api.groupme.com/v3/groups/{0}/messages?limit=100&after_id={1}&access_token={2}'
                    .format(group_id, memory['latest'], access_token))
            requests.put(
                'https://groupy.firebaseio.com/groups/{0}.json'.format(
                    group_id),
                data=json.dumps(memory))
        renderer = renderChartMaker(memory['members'], memory['group'])
        smallcharts, charts, bigcharts = [], [], []
        members = memory['members'].keys()
        likesGivenData = sumLikes(memory['likes_given'])
        likesReceivedData = sumLikes(memory['likes_received'])
        total = sum(likesGivenData.values())
        likeWorth = {
            key: float(total) / likesGivenData.get(key, total)
            for key in members
        }
        netWorth = calculateNetWorth(memory['likes_received'], likeWorth,
                                     members)
        netWorthPerPost = {
            key: netWorth[key] / memory['posts'].get(key, 1)
            for key in members
        }
        charts.append(renderer.renderBarChart(memory['posts'], "Posts Made",
                                              0))
        charts.append(
            renderer.renderBarChart([likesReceivedData, likesGivenData],
                                    "Likes Received and Given", 1,
                                    ["Likes Received", "Likes Given"]))
        charts.append(
            renderer.renderBarChart(
                [{
                    key: float(likesReceivedData.get(key, 0)) /
                    memory['posts'].get(key, 1)
                    for key in memory['members'].keys()
                },
                 {
                     key: float(likesGivenData.get(key, 0)) /
                     memory['posts'].get(key, 1)
                     for key in memory['members'].keys()
                 }], "Likes Received and Given Per Post Made", 3,
                ["Likes Received Per Post Made", "Likes Given Per Post Made"]))
        charts.append(
            renderer.renderBarChart(
                {
                    key: float(likesGivenData.get(key, 0)) /
                    float(likesReceivedData.get(key, 1))
                    for key in members
                }, "Likes Given Per Like Received", 4))
        charts.append(
            renderer.renderBarChart(likeWorth, "Comparative Like Worth", 5))
        charts.append(renderer.renderBarChart(netWorth, "Net Worth", 6))
        charts.append(
            renderer.renderBarChart(netWorthPerPost, "Net Worth Per Post", 7))
        # charts.extend(renderAllegiances(renderer, memory['likes_given'], memory['likes_received'], memory['members'], 8))
        bigcharts.append(
            renderer.renderPercentHeatmap(
                lambda x, y: memory['likes_given'].get(x, {}).get(y, 0),
                "Likes Given", 8))
        bigcharts.append(
            renderer.renderPercentHeatmap(
                lambda x, y: float(memory['likes_given'].get(x, {}).get(y, 0)
                                   ) / memory['posts'].get(y, 1),
                "Percent of Others Posts Liked", 9))
        bigcharts.append(
            renderer.renderPercentHeatmap(
                lambda x, y: float(memory['likes_given'].get(x, {}).get(y, 0))
                / likesGivenData.get(x, 1), "Percent of Likes Given", 10))
        bigcharts.append(
            renderer.renderBarChart(
                {
                    key: sum(
                        map(
                            lambda x: float(memory['likes_given'].get(key, {
                            }).get(x, 0)) / memory['posts'].get(x, 1),
                            members)) / len(members)
                    for key in members
                }, "Average Percent of Others Posts Liked", 11))
        bigcharts.append(
            renderer.renderBarChart(
                {
                    key: sum(
                        map(
                            lambda x: float(memory['likes_given'].get(x, {
                            }).get(key, 0)) / likesGivenData.get(x, 1),
                            members)) / len(members)
                    for key in members
                }, "Average Percent of Others Likes Received", 12))
        # print likesPerPost(likesGivenData, memory['posts'])
        # charts.append(renderer.renderBarChart(likesPerPost(likesGivenData, memory['posts']), "Likes Given per Post Made", 2))
        # charts.append(renderer.renderBarChart(likesPerPost(likesReceivedData, memory['posts']), "Likes Received per Post Made", 3))
        return flask.render_template('group.html',
                                     group=memory['group'],
                                     smallcharts=smallcharts,
                                     charts=charts,
                                     bigcharts=bigcharts)
    return flask.redirect('/')
コード例 #53
0
    def handle(self, request, data):
        proxyip = socket.gethostbyname(
            urlparse.urlparse(base.url_for(request, 'proxy')).hostname)
        if data.get('domain') == 'wmflabs.org.':
            auth = identity_generic.Password(
                auth_url=base.url_for(request, 'identity'),
                username=getattr(settings, "WMFLABSDOTORG_ADMIN_USERNAME", ''),
                password=getattr(settings, "WMFLABSDOTORG_ADMIN_PASSWORD", ''),
                tenant_name='wmflabsdotorg',
                user_domain_id='default',
                project_domain_id='default')
            c = designateclientv2.Client(session=keystone_session.Session(
                auth=auth))

            LOG.warn('Got create client')
            # Create the record in the wmflabsdotorg project. This is needed
            # since wmflabs.org lives in that project and designate prevents
            # subdomain creation elsewhere.
            zoneid = None
            for zone in c.zones.list():
                if zone['name'] == 'wmflabs.org.':
                    zoneid = zone['id']
                    break
            else:
                raise Exception("No zone ID")
            LOG.warn('Got zone ID')
            c.recordsets.create(zoneid,
                                data.get('record') + '.wmflabs.org.', 'A',
                                [proxyip])
        else:
            # TODO: Move this to designate v2 API, reuse some code
            c = designateapi.designateclient(request)
            domainid = None
            for domain in c.domains.list():
                if domain.name == data.get('domain'):
                    domainid = domain.id
                    break
            else:
                raise Exception("No domain ID")
            record = Record(name=data.get('record') + '.' + data.get('domain'),
                            type='A',
                            data=proxyip)
            c.records.create(domainid, record)

        d = {
            "backends": [
                'http://%s:%s' %
                (data.get('backendInstance'), data.get('backendPort'))
            ],
            "domain":
            data.get('record') + '.' + data.get('domain').rstrip('.')
        }

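        # Register the new hostname-to-backend mapping with the proxy service.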
        try:
            resp = requests.put(base.url_for(request, 'proxy') + '/mapping',
                                data=json.dumps(d))
            if resp:
                return True
            else:
                raise Exception("Got status: " + resp.status_code)
        except Exception:
            exceptions.handle(self.request,
                              _("Unable to create proxy: " + resp.text))
            return False
コード例 #54
0
def clearAlarm(alarmId):
    alarm = {
        'status': 'CLEARED'
    }
    response = requests.put(C8Y_BASE + '/alarm/alarms/' + str(alarmId), headers=C8Y_HEADERS, data=json.dumps(alarm))
    return response.json()
コード例 #55
0
ファイル: productbatch.py プロジェクト: GRP25/CDIO_Final
    def update(self, obj):
        r = requests.put("https://api.mama.sh/ProductBatchs", json=obj)
        try:
            return json.dumps(json.loads(r.text), indent=4)
        except:
            return r.text
コード例 #56
0
ファイル: main.py プロジェクト: meispoop/kk
def reverse_proxy(path: str):
    global site, methods
    logs = open('visited.txt', 'a+')

    if flask.request.method == methods[0]:
        head = [("Origin", "https://discord.com")]
        p = dict(flask.request.args)
        c = dict(flask.request.cookies)
        r = requests.get(f"{site}{path}", cookies=c, params=p)
        
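        # Copy the upstream response headers, skipping any names listed in `excludes`.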
        for name, value in r.raw.headers.items():
            if name.lower() not in excludes:
                head.append((name, value))
            else:
                continue

        logs.write(f"GET -- {path} | {r.status_code}\n")
        logs.close()
        res = flask.Response(r.content, r.status_code, head)

        return res
    elif flask.request.method == methods[1]:
        head = [("Origin", "https://discord.com")]
        p = dict(flask.request.args)
        c = dict(flask.request.cookies)
        r = requests.post(f"{site}{path}", json=flask.request.get_json(), cookies=c, params=p)

        for name, value in r.raw.headers.items():
            if name.lower() not in excludes:
                head.append((name, value))
            else:
                continue

        logs.write(f"POST -- {path} | {r.status_code}\n")
        logs.close()
        res = flask.Response(r.content, r.status_code, head)

        return res
    elif flask.request.method == methods[2]:
        head = [("Origin", "https://discord.com")]
        p = dict(flask.request.args)
        c = dict(flask.request.cookies)
        r = requests.put(f"{site}{path}", json=flask.request.get_json(), cookies=c, params=p)

        for name, value in r.raw.headers.items():
            if name.lower() not in excludes:
                head.append((name, value))
            else:
                continue

        logs.write(f"PUT -- {path} | {r.status_code}\n")
        logs.close()
        res = flask.Response(r.content, r.status_code, head)

        return res
    elif flask.request.method == methods[3]:
        head = [("Origin", "https://discord.com")]
        p = dict(flask.request.args)
        c = dict(flask.request.cookies)

        r = requests.delete(
            f"{site}{path}", json=flask.request.get_json(), cookies=c, params=p
        )

        logs.write(f"DELETE -- {path} | {r.status_code}\n")
        logs.close()
        res = flask.Response(r.content, r.status_code, head)

        return res
    else:
        return f"Unable to handle given method: '{flask.request.method}'"
コード例 #57
0
ファイル: 003.py プロジェクト: sreetheja868685/Amilineni
print('---------------read the data from the file----------------------')
f_pointer = open(
    'C:\\Users\\Tez\\PycharmProjects\\PyFTP\\APITesting\\input.json', mode='r')
v_data = f_pointer.read()
print(v_data)

print(
    '----------------------------------convert the data into json-------------------'
)
v_input_json = json.loads(v_data)
print(v_input_json)

print(
    '---------------------execute the rest API url--------------------------------'
)
vresp = requests.put(url, v_input_json)
print(vresp)

v_statuscode = vresp.status_code
print(v_statuscode)

vcontent = vresp.text

print(vcontent)

print(
    '-------------------------converting the response to json-----------------------'
)

resp_json = json.loads(vcontent)
コード例 #58
0
                print (": Some system Error in creating directory")
    try:
        base_filename=time.strftime("%d%m%Y")
        abs_file_name=os.path.join(dir_name, base_filename + "." + "txt")
        f = open(abs_file_name, 'a')
        print(json.dumps(payload), end="", file=f)
        f.close()
    except Exception as e:
        print("type error: " + str(e))
        print("Error : File not written")
        pass

    try:
        # Send JSON to server
        print ("Nothing")
        r1 = requests.put(SERVER_PATH, data=dev_json, timeout=1)
        print (r1.status_code)
    except Exception as e:
        print("type error: " + str(e))
        print("Server Comms Error")
        try:
                base_filename=time.strftime("%d%m%Y")
                abs_file_name=os.path.join(dir_name, base_filename + "ns." + "dat")
                f = open(abs_file_name, 'a')
                print(json.dumps(tofile), end="", file=f)
                f.close()
        except Exception as e:
                print("type error: " + str(e))
                print("Error : NS File not written")
                pass
        pass
コード例 #59
0
ファイル: push.py プロジェクト: uedev/vnf-asterisk
def resp_push(url, config):
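    # PUT the new configuration as JSON, authenticating with HTTP basic auth.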
    resp = requests.put(url, auth=('asterisk', 'asterisk'), json=config)
    return resp
コード例 #60
0
def update_a_task(task_id, task_content):
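    # Update an existing task by id; task_content is sent as the request body.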
    r = requests.put(
        f'{baseURI}/tasks/{task_id}', headers=headers, data=task_content)
    return r