def teardown(self):
    """Undo the mock patches set up for this test."""
    with contextlib.suppress(RuntimeError):  # may not be patched
        get_word_patcher.stop()
    with contextlib.suppress(RuntimeError):
        max_incorrect_guesses_patcher.stop()
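For context, a minimal setup counterpart is sketched below. It assumes the two patchers are module-level `unittest.mock.patch` objects; the patch targets shown are hypothetical and not from the original source. It also illustrates why `teardown` suppresses `RuntimeError`: calling `stop()` on a patcher that was never started raises `RuntimeError`.

# Hypothetical setup counterpart (illustrative only, not part of the original source).
from unittest import mock

# Assumed module-level patchers; the patch targets below are made up for illustration.
get_word_patcher = mock.patch('hangman.get_word', return_value='python')
max_incorrect_guesses_patcher = mock.patch('hangman.MAX_INCORRECT_GUESSES', 3)

def setup(self):
    """Start the mock patches; teardown() stops them (suppressing RuntimeError if unstarted)."""
    get_word_patcher.start()
    max_incorrect_guesses_patcher.start()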
def coerce(value):
    """
    coerce takes a value and attempts to convert it to an int or float.
    If the conversion is unsuccessful, the original value is returned.

    >>> coerce('3')
    3
    >>> coerce('3.0')
    3.0
    >>> coerce('foo')
    'foo'
    >>> coerce({})
    {}
    >>> coerce('{}')
    '{}'
    """
    with contextlib2.suppress(Exception):
        loaded = json.loads(value)
        assert isinstance(loaded, numbers.Number)
        return loaded
    return value
def get_image_field(forms):
    form = None
    for f in forms:
        with suppress(Exception):
            if f.find_control('image'):
                form = f
    return form
def teardown_class(klass):
    """Clean up the mess."""
    with contextlib.suppress(FileNotFoundError):
        if HAS_MPL:
            os.unlink('img.png')
            os.unlink('img_altaz.png')
            os.unlink('img_scrunch.png')
            os.unlink('delta_altaz.png')
            os.unlink('altaz_with_src.png')
            os.unlink('img_sdev.png')
            os.unlink('img_scrunch_sdev.png')
        os.unlink('test.hdf5')
        os.unlink('test_scan_list.txt')
        os.unlink('bubu.hdf5')
        for d in klass.config['list_of_directories']:
            hfiles = glob.glob(
                os.path.join(klass.config['datadir'], d, '*.hdf5'))
            for h in hfiles:
                os.unlink(h)
        out_iter_files = glob.glob('out_iter_*')
        for o in out_iter_files:
            os.unlink(o)
        out_fits_files = glob.glob(
            os.path.join(klass.config['datadir'], 'test_config*.fits'))
        out_hdf5_files = glob.glob(
            os.path.join(klass.config['datadir'], 'sim', '*/', '*.hdf5'))
        for o in out_fits_files + out_hdf5_files:
            os.unlink(o)
def process_queue(self) -> None:
    """Execute the tasks in the task queue.

    Breadth-first traversal: parse the URLs contained in the page behind
    each URL and feed them back into the task queue.

    1. Get a URL from the unique task queue.
    2. Download and parse the URL, extracting the URLs contained in the page.
    3. Add those URLs to the unique queue.
    """
    depth = 0
    while len(self.success_set) < self.max_count and depth < self.max_depth:
        depth, url = self.queue.get()
        content = ''
        with suppress(Exception):
            print('requests depth:%d %d url:%s'
                  % (depth, len(self.success_set), url))
            response = requests.get(url, headers=self.headers, timeout=(3, 7))
            if not response.ok:
                continue
            content = response.text.replace(" ", "")
        res_url = r"href=[\"\'](https?://[^/'\"\?><]+)"
        urls = re.findall(res_url, content, re.I | re.S | re.M)
        if urls:
            self.success_set.add(url)
        for found_url in urls:
            self.queue.put((depth + 1, found_url + self.suffix))
def save_new_settings(self, configuration, image):
    self.rotate_sentinel_images()

    labels = [(self.configuration_label, b64encode(configuration).decode())]

    with contextlib.suppress(fabricio.host_errors):
        digests = self._get_digests(self.images)
        digests_bucket = json.dumps(digests, sort_keys=True)
        digests_bucket = b64encode(digests_bucket.encode()).decode()
        labels.append((self.digests_label, digests_bucket))

    dockerfile = (
        'FROM {image}\n'
        'LABEL {labels}\n'
    ).format(
        image=image or 'scratch',
        labels=' '.join(itertools.starmap('{0}={1}'.format, labels)),
    )
    build_command = 'echo {dockerfile} | docker build --tag {tag} -'.format(
        dockerfile=shlex_quote(dockerfile),
        tag=self.current_settings_tag,
    )
    try:
        fabricio.run(build_command)
    except fabricio.host_errors as error:
        fabricio.log(
            'WARNING: {error}'.format(error=error),
            output=sys.stderr,
            color=colors.red,
        )
def main(output_filename="articles.csv", port="5000", type="all"):
    if sys.platform == 'win32':
        loop = asyncio.ProactorEventLoop()
    else:
        loop = asyncio.get_event_loop()
        # loop.add_signal_handler(signal.SIGINT, loop.stop)
    # asyncio.set_event_loop(loop)

    server_type = ServerType.from_name(type)
    if server_type == ServerType.Scraper:
        loop.run_until_complete(start_scraper(output_filename=output_filename))
    elif server_type == ServerType.Backend:
        loop.run_until_complete(start_back(port=port))
    elif server_type == ServerType.All:
        loop.create_task(start_scraper(output_filename=output_filename))
        # loop.create_task(start_back(debug=True, threaded=False, port=port))

    # TODO: Add signals support here
    try:
        loop.run_forever()
    except KeyboardInterrupt:
        pass
    finally:
        try:
            loop.stop()
            pending = asyncio.Task.all_tasks(loop=loop)
            for task in pending:
                task.cancel()
                with suppress(asyncio.CancelledError):
                    loop.run_until_complete(task)
        except KeyboardInterrupt:
            pass
def save_new_settings(self, settings, image):
    self.rotate_sentinel_images()

    labels = [(self.compose_label, settings)]
    with contextlib.suppress(host_errors):
        images_info = self.get_images_info()
        if images_info:
            labels.append((self.images_info_label, images_info))

    dockerfile = (
        'FROM {image}\n'
        'LABEL {labels}\n'
    ).format(
        image=image or 'scratch',
        labels=' '.join(itertools.starmap('{0}={1}'.format, labels)),
    )
    build_command = 'echo {dockerfile} | docker build --tag {tag} -'.format(
        dockerfile=shlex_quote(dockerfile),
        tag=self.current_settings_tag,
    )
    try:
        fabricio.run(build_command)
    except host_errors as error:
        fabricio.log(
            'WARNING: {error}'.format(error=error),
            output=sys.stderr,
            color=colors.red,
        )
def request(self, method, url, query_params=None, headers=None,
            body=None, post_params=None):
    try:
        # For `POST`, `PUT`, `PATCH`, `OPTIONS`
        if method in ['POST', 'PUT', 'PATCH', 'OPTIONS']:
            if headers['Content-Type'] == 'application/json':
                r = self.session.request(method, url, params=query_params,
                                         headers=headers, json=body)
            if headers['Content-Type'] == 'application/x-www-form-urlencoded':
                r = self.session.request(method, url, params=query_params,
                                         headers=headers, json=post_params)
            if headers['Content-Type'] == 'multipart/form-data':
                r = self.session.request(method, url, params=query_params,
                                         headers=headers)
        # For `GET`, `HEAD`, `DELETE`
        else:
            r = self.session.request(method, url, params=query_params,
                                     headers=headers)
        r.raise_for_status()
    except RequestException as error:
        status = 0
        msg = "{0}\n{1}".format(type(error).__name__, text_type(error))
        with suppress(AttributeError):
            status = error.response.status_code
        raise ApiException(status=status, reason=msg)
    else:
        return r
def _get_settings(self, image):
    with contextlib.suppress(ImageNotFoundError):
        image_labels = image.info.get('Config', {}).get('Labels', {})
        return (
            image_labels.get(self.compose_label),
            image_labels.get(self.images_info_label),
        )
    return None, None
def from_row(cls, row):
    attrs = AttrDict("title", "url", "language", "owner_username",
                     "owner_url", "description", "hearing_impaired")

    with suppress(Exception):
        attrs.title = row.find("td", "a1").a.find_all("span")[1].text.strip()
    with suppress(Exception):
        attrs.url = SITE_DOMAIN + row.find("td", "a1").a.get("href")
    with suppress(Exception):
        attrs.language = row.find("td", "a1").a.find_all("span")[0].text.strip()
    with suppress(Exception):
        attrs.owner_username = row.find("td", "a5").a.text.strip()
    with suppress(Exception):
        attrs.owner_page = SITE_DOMAIN + row.find("td", "a5").a.get("href").strip()
    with suppress(Exception):
        attrs.description = row.find("td", "a6").div.text.strip()
    with suppress(Exception):
        attrs.hearing_impaired = bool(row.find("td", "a41"))

    return cls(**attrs.to_dict())
def remove_sentinel_images(self):
    images = [self.current_settings_tag, self.backup_settings_tag]
    with contextlib.suppress(ImageNotFoundError):
        images.append(Image(self.current_settings_tag).info['Parent'])
        images.append(Image(self.backup_settings_tag).info['Parent'])
    fabricio.run(
        'docker rmi {images}'.format(images=' '.join(images)),
        ignore_errors=True,
    )
def rotate_sentinel_images(self, rollback=False):
    backup_tag = self.backup_settings_tag
    current_tag = self.current_settings_tag
    if rollback:
        backup_tag, current_tag = current_tag, backup_tag
    backup_images = [backup_tag]
    with contextlib.suppress(ImageNotFoundError):
        backup_images.append(Image(backup_tag).info['Parent'])
    with contextlib.suppress(fabricio.host_errors):
        # TODO make separate call for each docker command
        fabricio.run(
            ('docker rmi {backup_images}'
             '; docker tag {current_tag} {backup_tag}'
             '; docker rmi {current_tag}').format(
                backup_images=' '.join(backup_images),
                current_tag=current_tag,
                backup_tag=backup_tag,
            ),
        )
def teardown_class(klass):
    """Cleanup."""
    with contextlib.suppress(FileNotFoundError):
        os.unlink('scan.hdf5')
        for f in glob.glob(os.path.join(klass.datadir, 'spectrum', '*.pdf')):
            os.unlink(f)
        for f in glob.glob(os.path.join(klass.datadir, 'spectrum', '*.hdf5')):
            os.unlink(f)
def get(target, lastfm):
    image_source = get_image_source(target, lastfm)
    image = None
    for i in reversed(xrange(1, 5)):
        with suppress(Exception):
            image = image_source.get_cover_image(size=i)
            break
    return image
def _destroy_new_entities(self):
    def _get_items(group):
        return list(group(self.client))

    for entity_group in self.DISPOSABLE_TYPES:
        for entity in _get_items(entity_group):
            if entity.id not in self._existing_elements_by_type[entity_group]:
                with suppress(ServerError):
                    entity.delete()
def rotate_sentinel_images(self, rollback=False):
    backup_tag = self.backup_settings_tag
    current_tag = self.current_settings_tag
    if rollback:
        backup_tag, current_tag = current_tag, backup_tag
    with contextlib.suppress(host_errors):
        fabricio.run(
            ('docker rmi {backup_tag}'
             '; docker tag {current_tag} {backup_tag}'
             '; docker rmi {current_tag}').format(
                backup_tag=backup_tag,
                current_tag=current_tag,
            ),
        )
def _update_git(self, rev):
    # Default to master
    rev = rev or 'master'
    # Assume origin is called 'origin'.
    remote = 'origin'
    # Get all refs first
    self.run('git fetch --tags')
    # Checkout the rev we want
    self.run('git checkout {}'.format(rev))
    # reset working state to the origin (only relevant to
    # branches, so suppress errors).
    with contextlib2.suppress(utils.CommandException):
        self.run('git reset --hard {remote}/{rev}'.format(**locals()))
def upgrade(cls, upd):
    upd.print_log('Update kubelet config...')
    with suppress(Exception):
        cls._backup_file_remote(cls.KUBELET_CONFIG_FILE)
    cls._replace_str_in_file_remote(
        cls.KUBELET_CONFIG_FILE, '--cadvisor_port=\S+', '')
    cls._replace_str_in_file_remote(
        cls.KUBELET_CONFIG_FILE, 'KUBELET_ADDRESS=".*"',
        'KUBELET_ADDRESS="0.0.0.0"')
    helpers.run('systemctl restart kubelet')
def log_query(query, source):
    '''
    Call this to log a query dict to the query log file (if the config permits it). The query
    is simply json dumped. If query logging is disabled then nothing happens.

    :param query: the query dict
    :param source: the source of the query (e.g. multisearch or basicsearch)
    '''
    if is_enabled:
        # use suppress just to make sure nothing explodes whilst logging
        with suppress(Exception):
            logger.info(u'{}: {}'.format(source, json.dumps(query, sort_keys=True,
                                                            ensure_ascii=False)))
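A minimal sketch of the call shape, assuming the module-level `is_enabled` flag and `logger` are already configured; the query dict and source label below are made-up examples:

# Hypothetical call site (illustrative only).
log_query({'search': 'mollusca', 'filters': {'collectionCode': 'zoo'}}, 'basicsearch')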
def execute(expected_exceptions=None):
    if not self.server_status_is(vm, 'ACTIVE'):
        return False
    expected_exceptions = expected_exceptions or ()
    with suppress(*expected_exceptions):
        with self.ssh_to_instance(env, vm, vm_keypair,
                                  username=vm_login,
                                  password=vm_password,
                                  vm_ip=vm_ip) as remote:
            result = remote.execute(command)
            results.append(result)
            return result.is_ok
def configure(self, ckan_config):
    setup(ckan_config)

    # register all custom query schemas
    for plugin in PluginImplementations(IVersionedDatastoreQuerySchema):
        for version, schema in plugin.get_query_schemas():
            register_schema(version, schema)

    # reserve any requested slugs
    from .lib.query.slugs import reserve_slug
    for plugin in PluginImplementations(IVersionedDatastore):
        for reserved_pretty_slug, query_parameters in plugin.datastore_reserve_slugs().items():
            with suppress(Exception):
                reserve_slug(reserved_pretty_slug, **query_parameters)
def prepare(self, tag=None, **kwargs):
    """
    build Docker image (see 'docker build --help' for available options)
    """
    for key, value in kwargs.items():
        with contextlib.suppress(ValueError):
            kwargs[key] = utils.strtobool(value)
    options = utils.Options(kwargs)
    image = self.image[self.registry:tag:self.account]  # type: docker.Image
    image.build(
        local=True,
        build_path=self.build_path,
        options=options,
        use_cache=True,
    )
def request(self, method, url, query_params=None, headers=None,
            body=None, post_params=None):
    try:
        # For `POST`, `PUT`, `PATCH`, `OPTIONS`
        if method in ['POST', 'PUT', 'PATCH', 'OPTIONS']:
            if headers['Content-Type'] == 'application/json':
                r = self.session.request(method, url, params=query_params,
                                         headers=headers, json=body)
            if headers['Content-Type'] == 'application/x-www-form-urlencoded':
                r = self.session.request(method, url, params=query_params,
                                         headers=headers, json=post_params)
            if headers['Content-Type'] == 'multipart/form-data':
                r = self.session.request(method, url, params=query_params,
                                         headers=headers)
        # For `GET`, `HEAD`, `DELETE`
        else:
            r = self.session.request(method, url, params=query_params,
                                     headers=headers)
        r.raise_for_status()
    except RequestException as error:
        status = 0
        msg = "{0}\n{1}".format(type(error).__name__, text_type(error))
        with suppress(AttributeError):
            status = error.response.status_code
        raise ApiException(status=status, reason=msg)
    else:
        return r
def convert_csv_string_to_list(value, delimiter=',', trim=False):
    """
    Convert comma or other character delimited strings to a list.

    :param value: The value to convert.
    :param delimiter: Optionally change the default delimiter ',' if required.
    :param trim: Optionally trim the individual list items.
    :return: The delimited value as a list.
    """
    if not isinstance(value, (string_types, text_type)):
        return value
    with suppress(AttributeError, ValueError):
        value = value.split(delimiter) if value else []
        if trim:
            value = [_.strip() for _ in value]
    return value
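A small usage sketch; the input strings are examples only and the expected outputs in the comments follow directly from the implementation above:

# Example calls for convert_csv_string_to_list (illustrative).
convert_csv_string_to_list('a, b, c')             # ['a', ' b', ' c']
convert_csv_string_to_list('a, b, c', trim=True)  # ['a', 'b', 'c']
convert_csv_string_to_list('a|b', delimiter='|')  # ['a', 'b']
convert_csv_string_to_list(42)                    # 42 (non-strings are returned unchanged)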
def update(self, tag=None, registry=None, account=None, force=False):
    if not force:
        try:
            if self.image_id == self.image[registry:tag:account].info['Id']:
                self.start()  # force starting container
                return False
        except ContainerNotFoundError:
            pass
    obsolete_container = self.get_backup_version()
    with contextlib.suppress(fabricio.Error):
        obsolete_container.delete(delete_image=True)
    try:
        backup_container = self.fork()
        backup_container.rename(obsolete_container.name)
    except fabricio.Error:
        pass  # current container not found
    else:
        backup_container.stop()
    self.run(tag=tag, registry=registry, account=account)
    return True
def _load(partition_id):
    """
    Load a partition from an external source.

    The default implementation yields items loaded and unpickled from a temporary file. After
    all items have been loaded the temporary file is removed.

    :param partition_id: Unique identifier which can be used to reload the partition. In the
        case of the default implementation this is the path to the temporary file to load.
    :return: iterable which is loaded from the external source using partition_id.
    """
    if os.path.exists(partition_id):
        try:
            with suppress(EOFError), open(partition_id, 'rb') as fileobj:
                while True:
                    yield pickle.load(fileobj)
        finally:
            os.unlink(partition_id)
    else:
        # end the generator; raising StopIteration here would become a
        # RuntimeError under PEP 479 (Python 3.7+)
        return
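A minimal round-trip sketch, assuming `_load` above (and its imports) are available; the dump helper here is hypothetical and only mirrors what `_load` expects to read, namely items pickled one after another into a temporary file:

# Hypothetical companion for illustration: write items the way _load reads them.
import pickle
import tempfile

def _dump_for_example(items):
    """Pickle items one at a time into a temp file and return its path."""
    with tempfile.NamedTemporaryFile(delete=False) as fileobj:
        for item in items:
            pickle.dump(item, fileobj)
        return fileobj.name

partition_id = _dump_for_example([1, 2, 3])
print(list(_load(partition_id)))  # [1, 2, 3]; the temporary file is removed afterwards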
def test_with_exception_comparison(self, name, compare_in_dedicated_process):
    class Operation(object):
        def __init__(self, value=None, raise_error=None):
            self._value = value
            self.raise_error = raise_error

        @property
        @self.tape_recorder.intercept_input('input')
        def input(self):
            return self._value

        @self.tape_recorder.operation()
        def execute(self, value=None):
            if self.raise_error:
                raise Exception("error")
            if value is not None:
                return value
            return self.input

    Operation(3).execute()
    Operation(4).execute()
    with suppress(Exception):
        Operation(raise_error=True).execute()

    playback_counter = [0]

    def playback_function(recording):
        playback_counter[0] += 1
        if playback_counter[0] == 2:
            operation = Operation(raise_error=True)
        elif playback_counter[0] == 3:
            return Operation().execute(5)
        else:
            operation = Operation()
        return operation.execute()

    def player(recording_id):
        return self.tape_recorder.play(recording_id, playback_function)

    with patch.object(InMemoryTapeCassette, 'iter_recording_ids',
                      wraps=self.tape_cassette.iter_recording_ids):
        start_date = datetime.utcnow() - timedelta(hours=1)
        playable_recordings = find_matching_recording_ids(
            self.tape_recorder,
            category=Operation.__name__,
            lookup_properties=RecordingLookupProperties(start_date=start_date),
        )
        runner = Equalizer(playable_recordings, player,
                           result_extractor=return_value_result_extractor,
                           comparator=exact_comparator,
                           compare_execution_config=CompareExecutionConfig(
                               keep_results_in_comparison=True,
                               compare_in_dedicated_process=compare_in_dedicated_process
                           ))

        comparison = list(runner.run_comparison())

    self.assertEqual(EqualityStatus.Equal, comparison[0].comparator_status.equality_status)
    self.assertEqual(EqualityStatus.Different, comparison[1].comparator_status.equality_status)
    self.assertEqual(EqualityStatus.Different, comparison[2].comparator_status.equality_status)
    self.assertEqual(3, comparison[0].expected)
    self.assertEqual(4, comparison[1].expected)
    self.assertIsInstance(comparison[2].expected, Exception)
    self.assertTrue(comparison[1].actual_is_exception)
    self.assertFalse(comparison[1].expected_is_exception)
    self.assertEqual(3, comparison[0].actual)
    self.assertIsInstance(comparison[1].actual, Exception)
    self.assertEqual(5, comparison[2].actual)
    self.assertFalse(comparison[2].actual_is_exception)
    self.assertTrue(comparison[2].expected_is_exception)
def ingest_resource(version, config, resource, data, replace, api_key):
    '''
    Ingest a new version of a resource's data.

    :param version: the new version
    :param config: the eevee config object
    :param resource: the resource dict
    :param data: the data to ingest (can be None if not using the API)
    :param replace: boolean indicating whether to replace the existing data or not
    :param api_key: the API key if the resource's CKAN URL is to be used as the source and the
        resource is private
    :return: True if the ingest was successful, False if not
    '''
    # cache the resource id as we use it a few times
    resource_id = resource[u'id']

    log.info(u'Starting validation for {}'.format(resource_id))
    # create a stats entry so that preparation progress can be tracked
    prep_stats_id = stats.start_operation(resource_id, stats.PREP, version)
    try:
        data_file_name, data_file_metadata = prepare_resource(resource, version, prep_stats_id,
                                                              data, api_key)
    except Exception as e:
        stats.mark_error(prep_stats_id, e)
        log.info(u'Prep failed for resource {} due to {}: {}'.format(
            resource_id, e.__class__.__name__, unicode(e)))
        if isinstance(e, exceptions.IngestionException):
            # these exceptions are expected (validation problems for example)
            return False
        else:
            raise
    else:
        stats.finish_operation(prep_stats_id, data_file_metadata[u'record_count'])

    log.info(u'Starting ingest for {}'.format(resource_id))
    start = datetime.now()
    # create a stats entry so that progress can be tracked
    stats_id = stats.start_operation(resource_id, stats.INGEST, version, start)
    try:
        feeder = DatastoreFeeder(config, resource_id, version, data_file_name)
        converter = RecordToMongoConverter(version, start)
        ingester = Ingester(version, feeder, converter, config)
        # setup monitoring on the ingester so that we can update the database with stats about
        # the ingestion as it progresses
        stats.monitor_ingestion(stats_id, ingester)

        with InclusionTracker(ingester) as tracker:
            ingester.ingest()

            if replace:
                replace_feeder = ReplaceDeletionFeeder(version, resource_id, tracker,
                                                       feeder.source)
                # note that we use the same converter to ensure the ingestion time is the same
                replace_ingester = Ingester(version, replace_feeder, converter, config)
                # TODO: should we merge the replace ingest stats with the main ingest stats?
                replace_ingester.ingest()

        # we really don't care about errors
        with suppress(Exception):
            # create a details row
            create_details(resource_id, version, data_file_metadata[u'fields'],
                           data_file_metadata[u'file_hash'])

        return True
    except Exception as e:
        stats.mark_error(stats_id, e)
        log.exception(u'An error occurred during ingestion of {}'.format(resource_id))
        return False
    finally:
        # make sure we clean up the intermediate data file
        if os.path.exists(data_file_name):
            os.remove(data_file_name)
def parse(self, data, mode):
    """
    Parse search results for items.

    :param data: The raw response from a search
    :param mode: The current mode used to search, e.g. RSS

    :return: A list of items found
    """
    items = []

    def process_column_header(th):
        return th.span.get_text() if th.span else th.get_text()

    with BS4Parser(data, 'html5lib') as html:
        torrent_table = html.find('table', class_='table2')

        if not torrent_table:
            log.debug('Data returned from provider does not contain any {0}torrents',
                      'confirmed ' if self.confirmed else '')
            return items

        torrent_rows = torrent_table.find_all('tr')
        labels = [process_column_header(label) for label in torrent_rows[0]]

        # Skip the first row, since it isn't a valid result
        for row in torrent_rows[1:]:
            cells = row.find_all('td')

            try:
                title_cell = cells[labels.index('Torrent Name')]

                verified = title_cell.find('img', title='Verified torrent')
                if self.confirmed and not verified:
                    continue

                title_anchors = title_cell.find_all('a')
                if not title_anchors or len(title_anchors) < 2:
                    continue

                title_url = title_anchors[0].get('href')
                title = title_anchors[1].get_text(strip=True)
                regex_result = id_regex.search(title_anchors[1].get('href'))

                alt_title = regex_result.group(1)
                if len(title) < len(alt_title):
                    title = alt_title.replace('-', ' ')

                torrent_id = regex_result.group(2)
                info_hash = hash_regex.search(title_url).group(2)
                if not all([title, torrent_id, info_hash]):
                    continue

                with suppress(RequestsConnectionError, Timeout):
                    # Suppress the timeout since we are not interested in actually
                    # getting the results
                    self.session.get(self.urls['update'], timeout=0.1,
                                     params={'torrent_id': torrent_id,
                                             'infohash': info_hash})

                download_url = 'magnet:?xt=urn:btih:{hash}&dn={title}{trackers}'.format(
                    hash=info_hash, title=title, trackers=self._custom_trackers)

                # Remove comma as thousands separator from larger number like 2,000 seeders = 2000
                seeders = try_int(
                    cells[labels.index('Seed')].get_text(strip=True).replace(',', ''), 1)
                leechers = try_int(
                    cells[labels.index('Leech')].get_text(strip=True).replace(',', ''))

                if seeders < min(self.minseed, 1):
                    if mode != 'RSS':
                        log.debug("Discarding torrent because it doesn't meet the"
                                  " minimum seeders: {0}. Seeders: {1}",
                                  title, seeders)
                    continue

                size = convert_size(cells[labels.index('Size')].get_text(strip=True)) or -1

                pubdate_raw = cells[1].get_text().replace('Last', '1').replace('Yesterday',
                                                                               '24 hours')
                pubdate = self.parse_pubdate(pubdate_raw, human_time=True)

                item = {
                    'title': title,
                    'link': download_url,
                    'size': size,
                    'seeders': seeders,
                    'leechers': leechers,
                    'pubdate': pubdate,
                }
                if mode != 'RSS':
                    log.debug('Found result: {0} with {1} seeders and {2} leechers',
                              title, seeders, leechers)

                items.append(item)
            except (AttributeError, TypeError, KeyError, ValueError, IndexError):
                log.error('Failed parsing provider. Traceback: {0!r}',
                          traceback.format_exc())

    return items
def delete_instances(os_conn, instances):
    for instance in instances:
        with suppress(nova_exceptions.NotFound):
            instance.force_delete()
    os_conn.wait_servers_deleted(instances)
def downgrade(cls, upd):
    upd.print_log('Restore kubelet config...')
    with suppress(Exception):
        cls._restore_backup_remote(cls.KUBELET_CONFIG_FILE)
    helpers.run('systemctl restart kubelet')
def release_lock(self, path=None):
    """Release the lock on the currently-loaded session data."""
    self.lock.close()
    with contextlib2.suppress(FileNotFoundError):
        os.remove(self.lock._path)
    self.locked = False
def parse(self, data, mode):
    """
    Parse search results for items.

    :param data: The raw response from a search
    :param mode: The current mode used to search, e.g. RSS

    :return: A list of items found
    """
    items = []

    with BS4Parser(data, 'html5lib') as html:
        torrent_table = html('table', class_='table2')

        if mode != 'RSS' and torrent_table and len(torrent_table) < 2:
            logger.log(u'Data returned from provider does not contain any torrents',
                       logger.DEBUG)
            return

        torrent_table = torrent_table[0 if mode == 'RSS' else 1]
        torrent_rows = torrent_table('tr')

        # Skip the first row, since it isn't a valid result
        for result in torrent_rows[1:]:
            cells = result('td')

            try:
                verified = result('img', title='Verified torrent')
                if self.confirmed and not verified:
                    continue

                url = result.find('a', rel='nofollow')
                title_info = result('a')
                info = title_info[1]['href']
                if not all([url, title_info, info]):
                    continue

                title = title_info[1].get_text(strip=True)
                torrent_id = id_regex.search(info).group(1)
                torrent_hash = hash_regex.search(url['href']).group(2)
                if not all([title, torrent_id, torrent_hash]):
                    continue

                with suppress(requests.exceptions.Timeout):
                    # Suppress the timeout since we are not interested in actually
                    # getting the results
                    self.session.get(self.urls['update'], timeout=0.1,
                                     params={'torrent_id': torrent_id,
                                             'infohash': torrent_hash})

                # Remove comma as thousands separator from larger number like 2,000 seeders = 2000
                seeders = try_int(cells[3].get_text(strip=True).replace(',', ''))
                leechers = try_int(cells[4].get_text(strip=True).replace(',', ''))
                size = convert_size(cells[2].get_text(strip=True)) or -1

                download_url = 'magnet:?xt=urn:btih:{hash}&dn={title}{trackers}'.format(
                    hash=torrent_hash, title=title, trackers=self._custom_trackers)

                if seeders < min(self.minseed, 1):
                    if mode != 'RSS':
                        logger.log("Discarding torrent because it doesn't meet the "
                                   "minimum seeders: {0}. Seeders: {1}".format(title, seeders),
                                   logger.DEBUG)
                    continue

                item = {
                    'title': title,
                    'link': download_url,
                    'size': size,
                    'seeders': seeders,
                    'leechers': leechers,
                    'pubdate': None,
                    'hash': torrent_hash,
                }
                if mode != 'RSS':
                    logger.log('Found result: {0} with {1} seeders and {2} leechers'.format(
                        title, seeders, leechers), logger.DEBUG)

                items.append(item)
            except (AttributeError, TypeError, KeyError, ValueError, IndexError):
                logger.log('Failed parsing provider. Traceback: {0!r}'.format(
                    traceback.format_exc()), logger.ERROR)
                continue

    return items
def testing_db_conn():
    data_path = os.path.normpath(tests.basil.prices.__path__[0] + '/../..')
    db_file = os.path.join(data_path, 'prices.db')
    with contextlib.suppress(OSError):
        os.remove(db_file)
    return "sqlite:///%s" % db_file
def run_until_exhausted(supervisor, session, fetches):
    """Run the given fetches until OutOfRangeError is triggered."""
    with contextlib2.suppress(tf.errors.OutOfRangeError):
        while not supervisor.should_stop():
            yield session.run(fetches)
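A sketch of how this generator might be consumed, assuming a TF 1.x `tf.train.Supervisor`-managed session; `supervisor`, `session`, and `loss_op` are illustrative names, not part of the original source:

# Hypothetical consumption (TF 1.x style, illustrative only).
losses = []
for loss_value in run_until_exhausted(supervisor, session, loss_op):
    losses.append(loss_value)  # iteration stops once the input pipeline raises OutOfRangeError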