def asyncresults(self, asyncjob, api_key, secret_key):
    cs_api = CloudstackAPI(api_key=api_key, secret_key=secret_key)
    qryasyncjob = cs_api.request({'command': 'queryAsyncJobResult', 'jobid': asyncjob})
    qryasyncjob = dotdictify(qryasyncjob).queryasyncjobresultresponse

    # A jobstatus of 0 means the async job is still running; poll until it changes.
    if qryasyncjob.get('jobstatus') == 0:
        status = 0
        while status == 0:
            qryasyncjob = cs_api.request({'command': 'queryAsyncJobResult', 'jobid': asyncjob})
            qryasyncjob = dotdictify(qryasyncjob).queryasyncjobresultresponse
            status = qryasyncjob.get('jobstatus')
            CloudstackAPI.jobresults = status
            time.sleep(5)

        # The job has finished; fetch the final result and return it.
        queryasyncjobresponse = cs_api.request({'command': 'queryAsyncJobResult', 'jobid': asyncjob})
        return queryasyncjobresponse
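For orientation, here is a minimal hedged sketch of the dotdictify behaviour these snippets rely on; the import path and the sample payload are assumptions, not taken from any of the examples:

import dotdictify  # assumed module layout: dotdictify.dotdictify is the wrapper class

response = {'queryasyncjobresultresponse': {'jobstatus': 1, 'jobid': 'abc-123'}}  # made-up payload
doc = dotdictify.dotdictify(response)
print(doc.queryasyncjobresultresponse.jobstatus)        # attribute-style access into nested dicts -> 1
print('queryasyncjobresultresponse.jobstatus' in doc)   # dotted-path membership test -> True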
Example #2
    def __get_all_paged_entities(self, path, args):
        logger.info("Fetching data from paged url: %s", path)
        url = os.environ.get("base_url") + path
        access_token = get_token()
        next_page = url
        page_counter = 1
        while next_page is not None:
            if os.environ.get('sleep') is not None:
                logger.info("sleeping for %s milliseconds", os.environ.get('sleep'))
                sleep(float(os.environ.get('sleep')))

            logger.info("Fetching data from url: %s", next_page)
            if "$skiptoken" not in next_page:
                req = requests.get(next_page, params=args, headers={"Authorization": "Bearer " + access_token})

            else:
                req = requests.get(next_page, headers={"Authorization": "Bearer " + access_token})

            if req.status_code != 200:
                logger.error("Unexpected response status code: %d with response text %s" % (req.status_code, req.text))
                raise AssertionError("Unexpected response status code: %d with response text %s" % (req.status_code, req.text))
            res = dotdictify.dotdictify(json.loads(req.text))
            for entity in res.get(os.environ.get("entities_path")):
                yield entity

            if res.get(os.environ.get('next_page')) is not None:
                page_counter += 1
                next_page = res.get(os.environ.get('next_page'))
            else:
                next_page = None
        logger.info('Returning entities from %i pages', page_counter)
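A hypothetical environment configuration for the paged fetcher above; the variable names come from the snippet, the values are placeholders only:

import os

os.environ["base_url"] = "https://api.example.com"   # prefix joined with `path`
os.environ["entities_path"] = "results"              # response key holding the list of entities
os.environ["next_page"] = "nextLink"                 # response key holding the next-page URL
os.environ["sleep"] = "0.5"                          # optional pause between requests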
Example #3
    def __get_all_entities(self, path):
        logger.info("Fetching data from url: %s", path)
        token = get_token(path)
        headers = {
            'Accept': 'application/json',
            'Content-Type': 'application/json'
        }
        url = os.environ.get('get_url') + "?access_token=" + token
        req = requests.get(url, headers=headers)

        if req.status_code != 200:
            logger.error(
                "Unexpected response status code: %d with response text %s" %
                (req.status_code, req.text))
            raise AssertionError(
                "Unexpected response status code: %d with response text %s" %
                (req.status_code, req.text))
        res = dotdictify.dotdictify(json.loads(req.text))
        if path == "user":
            for entity in res.get(os.environ.get("entities_path_user")):
                yield entity
        elif path == "organization":
            for entity in res.get(os.environ.get("entities_path_org")):
                yield entity
        else:
            logger.info("method not recognized")
        logger.info('Returning entities from %s', path)
Example #4
def get_token(path):
    headers = {}
    logger.info("Creating header")

    if path == "user":
        headers = {
            "client_id": os.environ.get('client_id_user'),
            "client_secret": os.environ.get('client_secret_user'),
            "grant_type": os.environ.get('grant_type')
        }
    elif path == "organization":
        headers = {
            "client_id": os.environ.get('client_id_org'),
            "client_secret": os.environ.get('client_secret_org'),
            "grant_type": os.environ.get('grant_type')
        }
    elif path == "post_user":
        headers = {
            "client_id": os.environ.get('client_id_post'),
            "client_secret": os.environ.get('client_secret_post'),
            "grant_type": os.environ.get('grant_type')
        }
    else:
        logger.info("undefined method")
        sys.exit()

    resp = requests.get(url=os.environ.get('token_url'),
                        headers=headers).json()
    token = dotdictify.dotdictify(resp).response.responseMessage.access_token
    logger.info("Received access token from %s", os.environ.get('token_url'))
    return token
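A hedged usage sketch of this helper, mirroring how Example #3 appends the token as a query parameter (get_url is the environment variable used there):

token = get_token("user")
url = os.environ.get('get_url') + "?access_token=" + token
resp = requests.get(url, headers={'Accept': 'application/json'})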
Example #5
    def __get_all_paged_entities(self, path, url_parameters):
        logger.info("Fetching data from paged url: %s", path)
        url = os.environ.get("baseurl") + path
        url = call_url(url, url_parameters,
                       url_parameters.get(os.environ.get('startpage')))
        has_more_results = True
        page_counter = 1

        while has_more_results:
            if os.environ.get('sleep') is not None:
                logger.info("sleeping for %s milliseconds",
                            os.environ.get('sleep'))
                sleep(float(os.environ.get('sleep')))

            logger.info("Fetching data from url: %s", url)
            req = requests.get(url, headers=headers)
            if req.status_code != 200:
                logger.error(
                    "Unexpected response status code: %d with response text %s"
                    % (req.status_code, req.text))
                raise AssertionError(
                    "Unexpected response status code: %d with response text %s"
                    % (req.status_code, req.text))
            res = dotdictify.dotdictify(json.loads(req.text))
            for entity in res.results:
                yield entity
            if str_to_bool(res.get(os.environ.get('next_page_path'))):
                page_counter += 1
                url = os.environ.get("baseurl") + path
                url = call_url(url, url_parameters, str(page_counter))
            else:
                has_more_results = False
        logger.info('Returning entities from %i pages', page_counter)
Example #6
    def __get_all_references(self, path):
        logger.info('Fetching data from paged url: %s', path)
        url = os.environ.get("base_url") + path
        headers = {
            "Accept": "application/json",
            "Content-Type": "application/json",
            "Authorization": os.environ.get('token')
        }
        reference_data = json.loads(
            os.environ.get('reference_post').replace("'", "\""))
        total_amount = json.loads(
            requests.post(url,
                          data=json.dumps(reference_data),
                          headers=headers).text)["total"]
        counter = 0
        size = 10
        while counter < total_amount:
            req = requests.post(url,
                                data=json.dumps(reference_data),
                                headers=headers)
            res = dotdictify.dotdictify(json.loads(req.text))
            counter += size
            reference_data["offset"] = counter
            entities = res.get(os.environ.get("references_path"))
            for entity in entities:
                yield entity.get("reference")

        logger.info("returned from all pages")
Example #7
    def __get_all_paged_entities(self, path):
        logger.info("Fetching data from paged url: %s", path)
        url = os.environ.get("base_url") + path
        next_page = url
        page_counter = 1
        while next_page is not None:
            if os.environ.get('sleep') is not None:
                logger.info("sleeping for %s milliseconds",
                            os.environ.get('sleep'))
                sleep(float(os.environ.get('sleep')))

            logger.info("Fetching data from url: %s", next_page)
            req = requests.get(next_page, headers=headers)
            if req.status_code != 200:
                logger.error(
                    "Unexpected response status code: %d with response text %s"
                    % (req.status_code, req.text))
                raise AssertionError(
                    "Unexpected response status code: %d with response text %s"
                    % (req.status_code, req.text))
            res = dotdictify.dotdictify(json.loads(req.text))
            for entity in res.get(os.environ.get("entities_path")):
                yield transform(entity)

            if res.get(os.environ.get('next_page')) is not None:
                page_counter += 1
                next_page = res.get(os.environ.get('next_page'))
            else:
                next_page = None
        logger.info('Returning entities from %i pages', page_counter)
Example #8
    def __get_all_siteurls(self, posted_entities):
        logger.info('fetching site urls')
        access_token = get_token()
        for entity in posted_entities:
            url = "https://graph.microsoft.com/v1.0/groups/" + set_group_id(entity) + "/sites/root"
            req = requests.get(url=url, headers={"Authorization": "Bearer " + access_token})
            if req.status_code != 200:
                logger.info('no url')
            else:
                res = dotdictify.dotdictify(json.loads(req.text))
                res['_id'] = set_group_id(entity)

                yield res
Example #9
def get_token():
    logger.info("Creating header")
    headers = {}
    payload = {
        "client_id": os.environ.get('client_id'),
        "client_secret": os.environ.get('client_secret'),
        "grant_type": os.environ.get('grant_type'),
        "resource": os.environ.get('resource')
    }
    # logger.info(payload)
    resp = requests.post(url=os.environ.get('token_url'), data=payload, headers=headers).json()
    token = dotdictify.dotdictify(resp).access_token
    logger.info("Received access token from %s", os.environ.get('token_url'))
    return token
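A hedged usage sketch of this variant, passing the token as a Bearer header the way the Bearer-authenticated requests in Examples #2 and #8 do; the path is a placeholder:

access_token = get_token()
req = requests.get(os.environ.get("base_url") + "/some/path",   # placeholder path
                   headers={"Authorization": "Bearer " + access_token})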
Example #10
def transform(obj):
    res = {}
    for k, v in obj.items():
        if k == "image":
            if dotdictify.dotdictify(v).large.url is not None:
                res[k] = encode(v)
                # Keep the encoded image; skip the generic copy below so it is not overwritten.
                continue
        try:
            _ = json.dumps(v)
        except Exception:
            pass
        else:
            res[k] = v
    return res
Example #11
def transform(obj):
    res = {}
    for k, v in obj.items():
        if k == "image":
            if dotdictify.dotdictify(v).large.url is not None:
                logger.info("Encoding images from url to base64...")
                res[k] = encode(v)
                # Keep the encoded image; skip the generic copy below so it is not overwritten.
                continue
        try:
            _ = json.dumps(v)
        except Exception:
            pass
        else:
            res[k] = v
    return res
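A hedged usage sketch with made-up input, assuming encode() returns some serialisable representation of the image payload:

entity = {
    "id": 42,                                                  # plain JSON-serialisable values are copied through
    "image": {"large": {"url": "https://example.com/a.png"}},  # non-empty large.url triggers the encode() branch
}
print(transform(entity))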
Example #12
    def __get_all_paged_entities(self, path):
        logger.info("Fetching data from paged url: %s", path)
        url = os.environ.get("base_url") + path
        next_page = url
        page_counter = 1
        while next_page is not None:
            logger.info("Fetching data from url: %s", next_page)
            req = requests.get(next_page, headers=headers)
            if req.status_code != 200:
                req = self.check_error(req, next_page, headers, 'get')
            res = dotdictify.dotdictify(json.loads(req.text))
            for entity in res.get(os.environ.get("entities_path")):
                yield transform(entity)

            if res.get(os.environ.get('next_page')) is not None:
                page_counter += 1
                next_page = res.get(os.environ.get('next_page'))
            else:
                next_page = None
        logger.info('Returning entities from %i pages', page_counter)
Example #13
    def __get_all_paged_entities(self, path, args):
        logger.info("Fetching data from url: %s", path)
        url = os.environ.get("base_url") + path
        req = requests.get(url,
                           headers={
                               "Accept": "application/json",
                               "Authorization": "Basic " + os.environ.get('basic_token')
                           })

        if req.status_code != 200:
            logger.error(
                "Unexpected response status code: %d with response text %s" %
                (req.status_code, req.text))
            raise AssertionError(
                "Unexpected response status code: %d with response text %s" %
                (req.status_code, req.text))
        res = dotdictify.dotdictify(json.loads(req.text))
        for entity in res['value']:
            yield entity
        logger.info('Returning entities')
Example #14
    return data


def sub_dict(somedict, somekey, default=None):
    return dict([(somekey, somedict.get(somekey, default))])


dict_out = {}
dictofsubs = {}
data_dict = list_subs()

dict_out = sub_dict(data_dict, 'results')
dictofsubs = sub_dict(dict_out, 'id')

print("%%%%%%%%%%%%%%%%%%START DOT DICTIFY %%%%%%%%%%%%%%%%%%%%%%%")
dotdict = dotdictify.dotdictify(data_dict)

print(dotdict.totalSize)

# Dotted-path membership test provided by dotdictify.
print("results.id" in dotdict)

print("#####$$$$$ PRINT SUB DICT OF DICT_OUT & TYPE OF DICT_OUT   ^^^^^^^^^^^^^^^^")
pprint(dict_out)
print(type(dict_out))

print("$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$")

print("New Test: iterate items of sub dict DICT_OUT")
for key, value in dict_out.items():
    if key == "id":
        print("EXTRACT OF DICT INTO DICT")
Example #15
    return data


def sub_dict(somedict, somekey, default=None):
    return dict([(somekey, somedict.get(somekey, default))])


dict_out = {}
dictofsubs = {}
data_dict = list_subs()

dict_out = sub_dict(data_dict, 'results')
dictofsubs = sub_dict(dict_out, 'id')

print("%%%%%%%%%%%%%%%%%%START DOT DICTIFY %%%%%%%%%%%%%%%%%%%%%%%")
dotdict = dotdictify.dotdictify(data_dict)

print(dotdict.totalSize)

# Dotted-path membership test provided by dotdictify.
print("results.id" in dotdict)

print("#####$$$$$ PRINT SUB DICT OF DICT_OUT & TYPE OF DICT_OUT   ^^^^^^^^^^^^^^^^")
pprint(dict_out)
print(type(dict_out))

print("$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$")

print("New Test: iterate items of sub dict DICT_OUT")
for key, value in dict_out.items():
    if key == "id":
        print("EXTRACT OF DICT INTO DICT")