def save(self):
    """Persist the pending attribute changes (``self.to_update``) via PATCH.

    Sends the accumulated changes to the API; when the server spawns an
    asynchronous job, polls it until it terminates, then refreshes this
    instance's attributes from the server's current representation.

    Raises:
        PredicSisError: when the asynchronous job fails, or when the
            response advertises a 'job_ids' list that is empty.
    """
    predicsis.log('Updating: ' + self.__class__.res_name() + '..', 1)
    post_data = self.__class__.parse_post_data(self.to_update)
    json_data = APIClient.request('patch', self.__class__.res_url() + '/' + self.id, post_data)
    j = json_data[self.__class__.res_name()]
    try:
        jid = j['job_ids'][0]
    except KeyError:
        # No asynchronous job was spawned: the update is already effective.
        self._refresh_from_server()
        return
    except IndexError:
        raise PredicSisError("Job launching failed. Report this bug to [email protected]")
    # Poll until the job reaches a terminal state.
    # NOTE(review): this is a busy-wait — the original time.sleep(1) was
    # commented out; consider restoring it to avoid hammering the API.
    job = Job.retrieve(jid)
    status = job.status
    while (status != 'completed') and (status != 'failed'):
        job = Job.retrieve(jid)
        status = job.status
        if status == 'failed':
            raise PredicSisError("Job failed! (job_id: " + job.id + ")")
        #time.sleep(1)
    self._refresh_from_server()

def _refresh_from_server(self):
    """Re-fetch this resource and overwrite local attributes in place."""
    json_data = APIClient.request('get', self.res_url() + '/' + self.id)
    obj = json_data[self.res_name()]
    for k, v in obj.iteritems():
        # Nested documents become APIResource wrappers; scalars are copied.
        if isinstance(v, dict):
            setattr(self, k, APIResource(v))
        else:
            setattr(self, k, v)
def request(cls, method, resource, post_data=None):
    """Send an authenticated HTTP request to the PredicSis API.

    Builds the standard JSON headers (adding Content-Type for the write
    methods), delegates the transport to ``request_full`` and hands the
    raw response to ``_interpret_response``.
    """
    url = predicsis.api_url + resource
    predicsis.log(method.upper() + ' ' + url + ' [' + str(post_data) + ']', 2)
    headers = {'Accept': 'application/json'}
    if method in ('post', 'patch'):
        # Only the write methods carry a JSON body.
        headers['Content-Type'] = 'application/json'
    headers['Authorization'] = 'Bearer ' + predicsis.api_token
    content, code, json = cls.request_full(method, url, headers, post_data)
    return cls._interpret_response(content, code, json)
def create(cls, **data):
    """Score a dataset against an existing model, once per target modality.

    Walks the model -> preparation rules -> target variable -> modalities
    chain, turns ``data['data']`` (either a path to a local file or the raw
    file content itself) into a dataset, then creates one scoring dataset
    per modality of the target variable.

    Returns:
        list: one scoreset instance per modality.

    Raises:
        PredicSisError: on validation failure, or when the intermediate
            dataset could not be created.
    """
    if validate('c', cls.__name__, data) < 0:
        raise PredicSisError("Validation failed!")
    # Resolve the chain of resources required for scoring.
    response = Model.retrieve(data.get('model_id'))
    prs_id = response.preparation_rules_set_id
    response = PreparationRules.retrieve(prs_id)
    var_id = response.variable_id
    Variable.dictionary_id = data.get('dictionary_id')
    response = Variable.retrieve(var_id)
    modalities_set_id = response.modalities_set_ids[0]
    response = ModalitiesSet.retrieve(modalities_set_id)
    modalities = response.modalities
    predicsis.log('Preparing data..', 1)
    dataset_id = -1
    file_name = "./tmp.dat"
    try:
        if not exists(data.get('data')):
            predicsis.log('The file doesn\'t exist: ' + data.get('data') + '. Passing this value as a content.', 0)
        # Probe the path: raises IOError when 'data' is raw content rather
        # than a file name (handled below).  Close the handle — the
        # original version leaked it.
        open(data.get('data'), 'rb').close()
        if cls._has_header_and_separator(data):
            dataset_id = Dataset.create(file=data.get('data'), header=data.get('header'), separator=data.get('separator'), name=data.get('name')).id
        else:
            dataset_id = Dataset.create(file=data.get('data'), name=data.get('name')).id
    except IOError:
        # 'data' holds the content itself: spill it to a temp file first.
        f = open(file_name, 'w')
        try:
            f.write(data.get('data'))
        finally:
            f.close()
        if cls._has_header_and_separator(data):
            dataset_id = Dataset.create(file=file_name, header=data.get('header'), separator=data.get('separator'), name=data.get('name')).id
        else:
            dataset_id = Dataset.create(file=file_name, name=data.get('name')).id
    if dataset_id == -1:
        raise PredicSisError("Error creating your test dataset")
    scoresets = []
    for modality in modalities:
        if cls._has_header_and_separator(data):
            dataset = DatasetAPI.create(name=data.get('name'), header=str(data.get('header')).lower(), separator=data.get('separator').encode('string_escape'), classifier_id=data.get('model_id'), dataset_id=dataset_id, modalities_set_id=modalities_set_id, main_modality=modality, data_file={"filename": data.get('file_name')})
        else:
            dataset = DatasetAPI.create(name=data.get('name'), classifier_id=data.get('model_id'), dataset_id=dataset_id, modalities_set_id=modalities_set_id, main_modality=modality, data_file={"filename": data.get('file_name')})
        scoresets.append(cls(json.loads(str(dataset))))
    return scoresets

@staticmethod
def _has_header_and_separator(data):
    """Return True when both 'header' and 'separator' were supplied.

    Warns (and returns False) when exactly one of the pair is set, since
    the API only accepts them together.
    """
    header = data.get('header')
    separator = data.get('separator')
    if header is None or separator is None:
        if not (header is None and separator is None):
            predicsis.log('Either both separator and header should be set, or both should be left unset. The set parameter is skipped.', 0)
        return False
    return True
def create(cls, **data):
    """Upload a local file to S3 and register it as a dataset.

    Fetches temporary S3 credentials, POSTs the file to the S3 endpoint,
    creates a Source pointing at the uploaded key, then creates the
    dataset itself (optionally with header/separator hints).

    Raises:
        PredicSisError: on validation failure, a missing local file, or
            a non-201 answer from S3.
    """
    if validate('c', cls.__name__, data) < 0:
        raise PredicSisError("Validation failed!")
    credentials = Credentials.retrieve('s3')
    payload = {
        'Content-Type': 'multipart/form-data',
        'success_action_status': '201',
        'acl': 'private',
        'policy': credentials.policy,
        'AWSAccessKeyId': credentials.aws_access_key_id,
        'signature': credentials.signature,
        'key': credentials.key
    }
    predicsis.log('Uploading a file..', 1)
    if not exists(data.get('file')):
        raise PredicSisError('The file doesn\'t exist: ' + data.get('file') + '.')
    upload = open(data.get('file'), 'rb')
    try:
        files = {'file': upload}
        response = APIClient.request_full(method='post', url=credentials.s3_endpoint, headers=[], post_data=payload, files=files)
    finally:
        # The original leaked this handle.
        upload.close()
    if not response[1] == 201:
        raise PredicSisError('Upload failed by Amazon - retry.')
    # S3 answers XML; pull the object key out of it.
    xmlResponse = minidom.parseString(response[0])
    keyList = xmlResponse.getElementsByTagName('Key')
    predicsis.log('Creating: dataset..', 1)
    source = Source.create(name=data.get('file'), key=str(keyList[0].firstChild.data))
    sid = str(source.id)
    if data.get('header') == None or data.get('separator') == None:
        if not (data.get('header') == None and data.get('separator') == None):
            predicsis.log('Either both separator and header should be set, or both should be left unset. The set parameter is skipped.', 0)
        dapi = DatasetAPI.create(name=data.get('name'), source_ids=[sid])
    else:
        dapi = DatasetAPI.create(name=data.get('name'), header=str(data.get('header')).lower(), separator=data.get('separator').encode('string_escape'), source_ids=[sid])
    # Preview content is not exposed through the SDK; blank it out.
    for i in range(0, len(dapi.preview)):
        dapi.preview[i] = '...not available in the SDK...'
    return cls(json.loads(str(dapi)))
def create(cls, **data):
    """Create a new resource of this type, waiting for its job if any.

    POSTs the parsed payload; when the answer references an asynchronous
    job, polls it until completion and returns a freshly retrieved
    instance.  Resources created synchronously are wrapped and returned
    directly.

    Raises:
        PredicSisError: when validation fails, the job fails, or the
            advertised job list comes back empty.
    """
    if validate('c', cls.__name__, data) < 0:
        raise PredicSisError("Validation failed!")
    predicsis.log('Creating: ' + cls.res_name() + '..', 1)
    payload = cls.parse_post_data(data)
    answer = APIClient.request('post', cls.res_url(), payload)
    body = answer[cls.res_name()]
    try:
        job_id = body['job_ids'][0]
        # Poll until the job reaches a terminal state.
        current = Job.retrieve(job_id)
        state = current.status
        while not (state == 'completed' or state == 'failed'):
            current = Job.retrieve(job_id)
            state = current.status
            if state == 'failed':
                raise PredicSisError("Job failed! (job_id: " + current.id + ")")
            #time.sleep(1)
        return cls.retrieve(body['id'])
    except KeyError:
        # No 'job_ids' key: the resource was created synchronously.
        return cls(body)
    except IndexError:
        # 'job_ids' was present but empty.
        raise PredicSisError("Job launching failed. Report this bug to [email protected]")
def request_full(cls, method, url, headers, post_data=None, files=None):
    # Low-level HTTP transport: performs the request with optional SSL
    # verification and hands back the raw response pieces.
    # NOTE(review): this definition is truncated in this chunk — the outer
    # 'try:' below has its 'except' clause in a later fragment of the file.
    kwargs = {}
    if verify_ssl_certs:
        kwargs['verify'] = ssl_certs_path
    else:
        kwargs['verify'] = False
    try:
        # Silence urllib3's InsecureRequestWarning noise when possible.
        requests.packages.urllib3.disable_warnings()
    except AttributeError:
        predicsis.log('Impossible to shut down the SSL-related warnings - check the version of python and/or requests package.', 0)
    try:
        try:
            result = requests.request(method, url, headers=headers, data=post_data, files=files, timeout=80, **kwargs)
        except:
            # Fallback: retry with the payload coerced to str (older
            # 'requests' versions choke on some body types).
            try:
                result = requests.request(method, url, headers=headers, data=str(post_data), files=files, timeout=80, **kwargs)
            except TypeError, e:
                raise TypeError('Your "requests" library may be out of date. Error was: %s' % (e,))
        content = result.content
        status_code = result.status_code
        # JSON-decode everything except file uploads and DELETEs.
        if files == None and not method=='delete':
            jsonn = result.json()
        else:
            jsonn = result
def create(cls, **data):
    """Mark the target variable (and optional unused ones) of a dictionary.

    The target can be addressed by name (str) or by 1-based position
    (int).  It is forced to 'categorical' when detected otherwise, then a
    modalities set is created for it.

    Raises:
        PredicSisError: on validation failure, or when the target
            variable cannot be found in the dataset.
    """
    if validate('c', cls.__name__, data) < 0:
        raise PredicSisError("Validation failed!")
    predicsis.log('Retrieving: variables..', 1)

    def _ensure_categorical(var):
        # The target must be categorical; fix it transparently if not.
        if not var.type == 'categorical':
            predicsis.log("Your variable is not detected as categorical - changing it manually.", 0)
            var.update(type='categorical')
            var.save()

    target_id = -1
    dico = Dictionary.retrieve(data.get('dictionary_id'))
    dataset_id = dico.dataset_id
    # NOTE(review): other code paths in this file set Variable.dictionary_id;
    # confirm 'dic_id' is the attribute Variable.retrieve_all actually reads.
    Variable.dic_id = data.get('dictionary_id')
    target_var = data.get('target_var')
    unused_vars = data.get('unused_vars')
    for i, var in enumerate(Variable.retrieve_all(), 1):
        # Match the target either by name or by 1-based index.
        if isinstance(target_var, str):
            if var.name == target_var:
                target_id = var.id
                _ensure_categorical(var)
        elif isinstance(target_var, int):
            if i == target_var:
                target_id = var.id
                _ensure_categorical(var)
        if unused_vars is not None:
            # 'unused_vars' may mix variable names and 1-based indices.
            if var.name in unused_vars or i in unused_vars:
                var.update(use=False)
                var.save()
    if target_id == -1:
        raise PredicSisError("Your target variable doesn't exist in the dataset.")
    predicsis.log('Creating: target..', 1)
    modal = ModalitiesSet.create(variable_id=target_id, dataset_id=dataset_id)
    return cls(json.loads(str(modal)))
# NOTE(review): this fragment is the tail of APIClient.request_full plus two
# further classmethods — it begins mid-method (the enclosing 'def' and the
# outer 'try:' live in an earlier, overlapping chunk of this file).
        try:
            result = requests.request(method, url, headers=headers, data=post_data, files=files, timeout=80, **kwargs)
        except:
            # Retry with the payload coerced to str for older 'requests'.
            try:
                result = requests.request(method, url, headers=headers, data=str(post_data), files=files, timeout=80, **kwargs)
            except TypeError, e:
                raise TypeError('Your "requests" library may be out of date. Error was: %s' % (e,))
        content = result.content
        status_code = result.status_code
        # JSON-decode everything except file uploads and DELETEs.
        if files == None and not method=='delete':
            jsonn = result.json()
        else:
            jsonn = result
    except Exception, e:
        cls._handle_request_error(e)
    predicsis.log('return status: ' + str(status_code), 2)
    if files==None and not method=='delete':
        predicsis.log(json.dumps(jsonn, indent=4), 3)
    elif not method=='delete':
        # Uploads answer XML (S3), not JSON — pretty-print it for the log.
        xmlResponse = minidom.parseString(result.content)
        predicsis.log(xmlResponse.toprettyxml(indent="\t"), 3)
    return content, status_code, jsonn

@classmethod
def request_direct(cls, url):
    # Plain unauthenticated GET, bypassing the header/SSL handling above.
    return requests.get(url)

@classmethod
def _interpret_response(cls, content, code, json):
    # Route any non-2xx status through the API error handler.
    # NOTE(review): possibly truncated — the success path may continue in
    # a later chunk.
    if not (200 <= code < 300):
        cls._handle_api_error(content, code, json)
def retrieve_all(cls):
    """Fetch every instance of this resource type from the API."""
    predicsis.log('Retrieving all: ' + cls.res_name() + '..', 1)
    payload = APIClient.request('get', cls.res_url())
    # The collection key is the last path segment of the resource URL.
    collection_key = cls.res_url().split('/')[-1]
    instances = []
    for item in payload[collection_key]:
        instances.append(cls(item))
    return instances
def validate(act, obj, data):
    """Validate the keyword arguments of a create ('c') or update ('u') call.

    Args:
        act: 'c' for creation, 'u' for update.
        obj: resource class name (e.g. 'Dataset', 'Report').
        data: the keyword arguments supplied by the caller.

    Returns:
        1 on success; 0 when only warnings were emitted (unknown object or
        unnecessary parameters); -1 when mandatory parameters are missing.
    """
    cmandatory = {
        'dataset': ['file', 'name'],
        'dictionary': ['name'],
        'target': ['target_var', 'dictionary_id'],
        'model': ['variable_id', 'dataset_id'],
        'scoreset': ['name', 'model_id', 'dictionary_id', 'data', 'file_name'],
        'report1': ['type', 'dictionary_id', 'dataset_id'],
        'report2': ['type', 'dictionary_id', 'dataset_id', 'variable_id'],
        'report3': ['type', 'dictionary_id', 'dataset_id', 'model_id', 'main_modality', 'variable_id'],
    }
    coptional = {
        'dataset': ['header', 'separator', 'file_name'],
        'dictionary': ['description', 'dataset_id'],
        'target': ['unused_vars'],
        'model': ['name'],
        'scoreset': ['header', 'separator'],
        'report1': ['title'],
        'report2': ['title'],
        'report3': ['title'],
    }
    uoptional = {
        'dataset': ['name', 'header', 'separator'],
        'dictionary': ['name', 'description'],
        'scoreset': ['name', 'header', 'separator'],
        'report': ['title'],
    }
    # set union instead of the py2-only keys()+keys() list concatenation.
    if obj.lower() not in set(coptional) | set(uoptional):
        predicsis.log('Unvalidated object [' + obj + ']', 1)
        return 0
    if act == 'c':
        predicsis.log('Parameters: mandatory' + str(cmandatory.get(obj.lower())) + ', optional' + str(coptional.get(obj.lower())) + ', passed' + str(data.keys()), 3)
        if obj == 'Report':
            # Reports map to three concrete variants keyed by 'type'.
            if data.get('type') == None:
                # level -1 added for consistency with the other
                # missing-parameter message below (it was omitted before).
                predicsis.log('Missing parameter to create [Report]: type', -1)
                return -1
            if data.get('type') == 'univariate_unsupervised':
                obj = 'Report1'
            elif data.get('type') == 'univariate_supervised':
                obj = 'Report2'
            elif data.get('type') == 'classifier_evaluation':
                obj = 'Report3'
            else:
                # Previously this fell through and crashed on a None lookup.
                predicsis.log('Missing parameter to create [Report]: type', -1)
                return -1
        if not all(x in data.keys() for x in cmandatory.get(obj.lower())):
            predicsis.log('Missing parameters to create [' + obj + ']: ' + str([item for item in cmandatory.get(obj.lower()) if item not in data.keys()]), -1)
            return -1
        # 'extras' renamed from 'list', which shadowed the builtin.
        extras = [item for item in data.keys() if item not in (coptional.get(obj.lower()) + cmandatory.get(obj.lower()))]
        if len(extras) > 0:
            predicsis.log('Unnecessary parameters to create [' + obj + ']: ' + str(extras), 0)
        return 0
    if act == 'u':
        predicsis.log('Parameters: optional' + str(uoptional.get(obj.lower())) + ', passed' + str(data.keys()), 3)
        extras = [item for item in data.keys() if item not in uoptional.get(obj.lower())]
        if len(extras) > 0:
            predicsis.log('Unnecessary parameters to update [' + obj + ']: ' + str(extras), 0)
        return 0
    return 1
def retrieve(cls, id):
    """Fetch a single resource of this type by its identifier."""
    predicsis.log('Retrieving: ' + cls.res_name() + '..', 1)
    response = APIClient.request('get', cls.res_url() + '/' + id)
    return cls(response[cls.res_name()])
def delete(self):
    """Delete this resource, then each of its attached sources."""
    klass = self.__class__
    predicsis.log('Deleting: ' + klass.res_name() + '..', 1)
    APIClient.request('delete', klass.res_url() + '/' + self.id)
    # NOTE(review): presumably sources are not removed server-side with
    # the dataset — confirm against the API documentation.
    for source_id in self.source_ids:
        APIClient.request('delete', 'sources/' + source_id)
def delete(self):
    """Delete this resource from the API."""
    klass = self.__class__
    predicsis.log('Deleting: ' + klass.res_name() + '..', 1)
    resource_path = klass.res_url() + '/' + self.id
    APIClient.request('delete', resource_path)