def save(self):
    """Persist pending attribute changes (``self.to_update``) via PATCH.

    If the API response references an asynchronous job, poll that job
    until it completes, then refresh this instance's attributes from
    the server. If the response has no ``job_ids`` key, the update was
    synchronous and the instance is refreshed immediately.

    Raises:
        PredicSisError: if the job fails, or if ``job_ids`` is present
            but empty (job launching failed).
    """
    predicsis.log('Updating: ' + self.__class__.res_name() + '..', 1)
    post_data = self.__class__.parse_post_data(self.to_update)
    json_data = APIClient.request('patch', self.__class__.res_url() + '/' + self.id, post_data)
    j = json_data[self.__class__.res_name()]

    def _refresh():
        # Re-fetch the resource and mirror its fields onto this instance;
        # nested dicts are wrapped as APIResource objects.
        refreshed = APIClient.request('get', self.res_url() + '/' + self.id)
        for k, v in refreshed[self.res_name()].iteritems():
            if isinstance(v, dict):
                setattr(self, k, APIResource(v))
            else:
                setattr(self, k, v)

    try:
        jid = j['job_ids'][0]
        job = Job.retrieve(jid)
        status = job.status
        # Busy-poll the job. NOTE(review): there is no delay between
        # requests — consider re-enabling time.sleep(1) to avoid
        # hammering the API.
        while status != 'completed' and status != 'failed':
            job = Job.retrieve(jid)
            status = job.status
            if status == 'failed':
                raise PredicSisError("Job failed! (job_id: " + job.id + ")")
            #time.sleep(1)
        _refresh()
    except KeyError:
        # No 'job_ids' key: synchronous update; just refresh.
        _refresh()
    except IndexError:
        raise PredicSisError("Job launching failed. Report this bug to [email protected]")
    def create(cls, **data):
        """Create a dataset by uploading ``data['file']`` to S3 and
        registering it through the API.

        Expected keys in ``data``: ``file`` (path to upload, required),
        ``name``, and optionally both ``header`` and ``separator``
        (either both set or both unset; a lone one is skipped with a
        warning).

        Raises:
            PredicSisError: on validation failure, a missing file, or a
                non-201 response from S3.
        """
        if validate('c', cls.__name__, data) < 0:
            raise PredicSisError("Validation failed!")
        credentials = Credentials.retrieve('s3')
        payload = {
            'Content-Type': 'multipart/form-data',
            'success_action_status': '201',
            'acl': 'private',
            'policy': credentials.policy,
            'AWSAccessKeyId': credentials.aws_access_key_id,
            'signature': credentials.signature,
            'key': credentials.key
        }
        predicsis.log('Uploading a file..', 1)
        file_path = data.get('file')
        if not exists(file_path):
            raise PredicSisError('The file doesn\'t exist: ' + file_path + '.')
        # Close the upload handle once the request has completed (the
        # original leaked it).
        upload = open(file_path, 'rb')
        try:
            response = APIClient.request_full(method='post', url=credentials.s3_endpoint, headers=[], post_data=payload, files={'file': upload})
        finally:
            upload.close()
        if not response[1] == 201:
            raise PredicSisError('Upload failed by Amazon - retry.')
        xml_response = minidom.parseString(response[0])
        key_list = xml_response.getElementsByTagName('Key')
        predicsis.log('Creating: dataset..', 1)
        source = Source.create(name=file_path, key=str(key_list[0].firstChild.data))
        sid = str(source.id)
        header = data.get('header')
        separator = data.get('separator')
        if header is None or separator is None:
            if not (header is None and separator is None):
                predicsis.log('Either both separator and header should be set, or both should be left unset. The set parameter is skipped.', 0)
            dapi = DatasetAPI.create(name=data.get('name'), source_ids=[sid])
        else:
            dapi = DatasetAPI.create(name=data.get('name'), header=str(header).lower(), separator=separator.encode('string_escape'), source_ids=[sid])
        # Preview content is deliberately not exposed through the SDK.
        for i in range(0, len(dapi.preview)):
            dapi.preview[i] = '...not available in the SDK...'
        return cls(json.loads(str(dapi)))
 def result(cls, scoresets):
     """Merge the score files of *scoresets* column-wise.

     The data file of each scoreset is downloaded and split into lines;
     lines from every scoreset after the first are appended to the
     corresponding line of the running result, tab-separated. Returns
     the merged content as a single newline-joined string.
     """
     merged = []
     is_first = True
     for scoreset in scoresets:
         body = APIClient.request_direct(scoreset.data_file.url).text
         for idx, line in enumerate(body.split("\n")):
             if is_first:
                 merged.append(line)
             else:
                 merged[idx] += '\t' + line
         is_first = False
     return "\n".join(merged)
 def create(cls, **data):
     """Create a resource of this type via POST.

     If the API response references an asynchronous job, poll that job
     until completion and then retrieve the finished resource. If the
     response has no ``job_ids`` key, the creation was synchronous and
     the payload is wrapped directly.

     Raises:
         PredicSisError: on validation failure, a failed job, or an
             empty ``job_ids`` list (job launching failed).
     """
     if validate('c', cls.__name__, data) < 0:
         raise PredicSisError("Validation failed!")
     predicsis.log('Creating: ' + cls.res_name() + '..', 1)
     post_data = cls.parse_post_data(data)
     json_data = APIClient.request('post', cls.res_url(), post_data)
     j = json_data[cls.res_name()]
     try:
         jid = j['job_ids'][0]
         job = Job.retrieve(jid)
         status = job.status
         # Busy-poll the job. NOTE(review): there is no delay between
         # requests — consider re-enabling time.sleep(1) to avoid
         # hammering the API.
         while status != 'completed' and status != 'failed':
             job = Job.retrieve(jid)
             status = job.status
             if status == 'failed':
                 raise PredicSisError("Job failed! (job_id: " + job.id + ")")
             #time.sleep(1)
         return cls.retrieve(j['id'])
     except KeyError:
         # No 'job_ids' key: synchronous creation; wrap the payload as-is.
         return cls(j)
     except IndexError:
         raise PredicSisError("Job launching failed. Report this bug to [email protected]")
 def retrieve_all(cls):
     """Fetch every resource of this type and wrap each item in *cls*."""
     predicsis.log('Retrieving all: ' + cls.res_name() + '..', 1)
     payload = APIClient.request('get', cls.res_url())
     # The collection key is the last path segment of the resource URL.
     collection_key = cls.res_url().split('/')[-1]
     return [cls(item) for item in payload[collection_key]]
 def retrieve(cls, id):
     """Fetch the resource identified by *id* and wrap it in *cls*."""
     predicsis.log('Retrieving: ' + cls.res_name() + '..', 1)
     response = APIClient.request('get', cls.res_url() + '/' + id)
     return cls(response[cls.res_name()])
 def delete(self):
     """Delete this resource, then delete each of its attached sources."""
     klass = self.__class__
     predicsis.log('Deleting: ' + klass.res_name() + '..', 1)
     APIClient.request('delete', klass.res_url() + '/' + self.id)
     for source_id in self.source_ids:
         APIClient.request('delete', 'sources/' + source_id)
 def delete(self):
     """Delete this resource on the server."""
     resource_path = self.__class__.res_url() + '/' + self.id
     predicsis.log('Deleting: ' + self.__class__.res_name() + '..', 1)
     APIClient.request('delete', resource_path)