# Example #1
# 0
    def load(self) -> List[BaseWellSchema]:
        """Load previously cached schemas from this scraper's pickle file.

        The filename mirrors the one written by ``cache()``:
        ``{index}-{name}.pkl``.

        Returns:
            The list of schemas that was pickled by a prior ``cache()`` call.

        Raises:
            FileNotFoundError: if no cache file exists for this index/name.
        """
        filename = f'{self.index}-{self.name}.pkl'
        # BUG FIX: the log line previously printed the literal "(unknown)";
        # interpolate the actual filename so the log is useful.
        DEFAULT_LOGGER.info(f"Loading schemas from {filename}")

        # SECURITY NOTE: pickle.load executes arbitrary code if the file is
        # attacker-controlled. Safe only because this file is written locally
        # by cache(); never point this at untrusted input.
        with io.open(filename, 'rb') as handle:
            data = pickle.load(handle)

        return data
# Example #2
# 0
    def run(self) -> List[str]:
        """Drill schemas, cache them, ensure the ES index exists, and upload.

        Returns:
            URLs of every uploaded data point.

        Raises:
            RuntimeError: if index creation fails for any reason other than
                the index already existing.
        """
        DEFAULT_LOGGER.info(f"Running {self.index} ({self.name})")

        gid = hruuid()
        schemas = self.drill(gid)
        self.cache(schemas)

        # Create the Elasticsearch index; tolerate "already exists" responses.
        DEFAULT_LOGGER.info(f"Using index: {self.index}")
        response = requests.put(f'{ELASTIC_SEARCH_URL}/{self.index}')
        if not response.ok:
            reason = response.json()['error']['root_cause'][0]['reason']
            if 'already exists' not in reason:
                raise RuntimeError(response.json())

        # Upload each schema and collect the resulting URLs.
        urls = [self.upload(schema) for schema in schemas]

        DEFAULT_LOGGER.info(f'Uploaded {len(urls)} data points as {gid}')

        self.meta = {'gid': gid}

        return urls
# Example #3
# 0
 def cache(self, data: List[BaseWellSchema]):
     """Pickle *data* to this scraper's cache file.

     The filename (``{index}-{name}.pkl``) matches the one read back by
     ``load()``.

     Args:
         data: The schemas to persist.
     """
     filename = f'{self.index}-{self.name}.pkl'
     # BUG FIX: the log line previously printed the literal "(unknown)";
     # interpolate the actual filename so the log is useful.
     DEFAULT_LOGGER.info(f"Caching {len(data)} schemas to {filename}")

     with io.open(filename, 'wb') as handle:
         pickle.dump(data, handle)