def main():
    """Entry point for the profiler service.

    Configures logging, exposes Prometheus metrics on port 8000, starts
    the profiler, and blocks on the asyncio event loop forever.
    """
    setup_logging()
    prometheus_client.start_http_server(8000)
    own_address = socket.gethostbyname(socket.gethostname())
    logger.info(
        "Startup: profiler %s %s",
        os.environ['DATAMART_VERSION'],
        own_address,
    )
    Profiler()
    asyncio.get_event_loop().run_forever()
def env_flag(value):
    """Interpret an environment-variable string as a boolean flag.

    Unset (``None``), empty, and the words ``no``/``off``/``false`` in
    any letter case are falsy; every other value is truthy.  The
    original inline check was case-sensitive, so ``DEBUG=False`` or
    ``DEBUG=OFF`` silently enabled debug mode — this fixes that.
    """
    return (value or '').lower() not in ('', 'no', 'off', 'false')


def main():
    """Entry point for the apiserver service.

    Configures logging, exposes Prometheus metrics on port 8000, builds
    the Tornado application, listens on port 8002, and blocks on the
    Tornado IOLoop forever.
    """
    setup_logging()
    debug = env_flag(os.environ.get('DEBUG'))
    prometheus_client.start_http_server(8000)
    logger.info("Startup: apiserver %s", os.environ['DATAMART_VERSION'])
    if debug:
        # Logged at ERROR level so it is impossible to miss in production logs
        logger.error("Debug mode is ON")
    app = make_app(debug)
    # xheaders: trust X-Real-IP/X-Forwarded-For from the reverse proxy;
    # max_buffer_size: allow request bodies up to 2 GiB (dataset uploads)
    app.listen(8002, xheaders=True, max_buffer_size=2147483648)
    loop = tornado.ioloop.IOLoop.current()
    loop.start()
def env_flag(value):
    """Interpret an environment-variable string as a boolean flag.

    Unset (``None``), empty, and the words ``no``/``off``/``false`` in
    any letter case are falsy; every other value is truthy.  The
    original inline check was case-sensitive, so ``DEBUG=False`` or
    ``DEBUG=OFF`` silently enabled debug mode — this fixes that.
    """
    return (value or '').lower() not in ('', 'no', 'off', 'false')


def main():
    """Entry point for the coordinator service.

    Configures logging, exposes Prometheus metrics on port 8000, builds
    the Tornado application, listens on port 8003, kicks off the
    periodic cache check, and blocks on the Tornado IOLoop forever.
    """
    setup_logging()
    debug = env_flag(os.environ.get('DEBUG'))
    prometheus_client.start_http_server(8000)
    logger.info("Startup: coordinator %s", os.environ['DATAMART_VERSION'])
    if debug:
        # Logged at ERROR level so it is impossible to miss in production logs
        logger.error("Debug mode is ON")
    app = make_app(debug)
    # xheaders: trust X-Real-IP/X-Forwarded-For from the reverse proxy;
    # max_buffer_size: allow request bodies up to 2 GiB (dataset uploads)
    app.listen(8003, xheaders=True, max_buffer_size=2147483648)
    loop = tornado.ioloop.IOLoop.current()
    if debug:
        # Surface slow callbacks and un-awaited coroutines in debug runs
        asyncio.get_event_loop().set_debug(True)
    check_cache()  # Schedules itself to run periodically
    loop.start()
def main():
    """Entry point for the cache_cleaner service.

    Configures logging, exposes Prometheus metrics on port 8000,
    ensures the cache directory tree exists, kicks off the periodic
    cache check, and blocks on the asyncio event loop forever.
    """
    setup_logging()
    prometheus_client.start_http_server(8000)
    version = os.environ['DATAMART_VERSION']
    own_address = socket.gethostbyname(socket.gethostname())
    logger.info("Startup: cache_cleaner %s %s", version, own_address)
    # Create cache directories
    for subdir in ('datasets', 'aug', 'user_data'):
        os.makedirs('/cache/' + subdir, exist_ok=True)
    check_cache()  # Schedules itself to run periodically
    asyncio.get_event_loop().run_forever()
            # NOTE(review): this fragment is the tail of a method whose head is
            # outside the visible chunk — presumably building `file_metadata`
            # for a CKAN resource; confirm against the full file.
            ),
            size=resource['size'],
        )
        # Prefer the resource's own description; append the package-level
        # description (when present) beneath it, separated by a blank line.
        if resource.get('description'):
            file_metadata['description'] = resource['description']
            if 'description' in package_metadata:
                file_metadata['description'] += (
                    '\n\n' + package_metadata['description'])
        # A resource with neither download_url nor url cannot be fetched;
        # raising KeyError matches a missing-field failure for the caller.
        direct_url = resource['download_url'] or resource.get('url')
        if not direct_url:
            raise KeyError('download_url or url')
        # Discover this dataset
        self.record_dataset(
            dict(
                ckan_domain=domain['url'],
                ckan_package_id=package['id'],
                ckan_resource_id=resource['id'],
                ckan_record_updated=modified,
                direct_url=direct_url,
            ),
            file_metadata,
            dataset_id=dataset_id,
        )


if __name__ == '__main__':
    # Script entry point: run the CKAN discoverer to completion on the
    # asyncio event loop.
    setup_logging()
    asyncio.get_event_loop().run_until_complete(
        CkanDiscoverer('datamart.ckan').run())