import json

import bs4

# Module-level dependencies used by the methods below; in pypath these are
# the package's own dataio and data_formats modules.
from pypath import dataio
from pypath import data_formats


def tissue_ids(self):
    """
    Fetches the tissue selector from the GIANT web interface and returns
    a dict mapping numeric tissue IDs to tissue names.
    """
    result = {}
    url = data_formats.urls['giant']['init_url']
    html = dataio.curl(url, silent=False)
    soup = bs4.BeautifulSoup(html, 'html.parser')

    for tis in soup.find('select', {'id': 'tissue'}).findAll('option'):
        result[int(tis.attrs['value'])] = tis.text

    return result
def query(self, tissue, entrez):
    """
    Queries the GIANT API for one tissue and a list of Entrez Gene IDs,
    records the request and the parsed response in `self.dump`, and
    returns the parsed JSON (or None if the download failed).
    """
    url = data_formats.urls['giant']['url'] % (
        tissue,
        'entrez=%s' % '&entrez='.join(str(e) for e in entrez),
    )
    data = dataio.curl(url, silent=True, cache=False)
    # parse once and guard against a failed download
    result = None if data is None else json.loads(data)

    self.dump.append({
        'tissue': tissue,
        'entrez': entrez,
        'url': url,
        'data': result,
    })

    return result
def query(self, tissue, entrez):
    """
    Variant of the query above with debug tracing: when `self.debug` is
    set, the URL, the raw response and the parsed result are stored in
    `self.trace`.
    """
    url = data_formats.urls['giant']['url'] % (
        tissue,
        'entrez=%s' % '&entrez='.join(str(e) for e in entrez),
    )
    data = dataio.curl(
        url,
        silent=True,
        cache=False,
        debug=self.debug,
        override_post=True,
    )
    # parse once and guard against a failed download
    result = None if data is None else json.loads(data)

    if self.debug:
        self.trace['url'] = url
        self.trace['raw'] = data
        self.trace['result'] = result

    return result
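
# Usage sketch (not part of the original module): the methods above rely on
# the instance attributes `dump`, `debug` and `trace`, but their containing
# class is not shown here. `GiantClient` below is a hypothetical minimal
# container used only to illustrate how the pieces fit together.

class GiantClient:

    def __init__(self, debug=False):
        self.debug = debug
        self.dump = []     # one record appended per query() call
        self.trace = {}    # filled by the debug-tracing variant of query()

    tissue_ids = tissue_ids
    query = query          # binds the debug-tracing variant defined last


if __name__ == '__main__':
    client = GiantClient(debug=True)
    tissues = client.tissue_ids()          # {tissue_id: tissue_name, ...}
    first_tissue = next(iter(tissues))
    network = client.query(first_tissue, [7157, 1956])   # TP53 and EGFR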