def setup():
    """Configure root-logger handlers for the current environment.

    Installs a log record factory that stamps the region onto every record,
    then, on GAE, routes logs through the Stackdriver structured-logging
    client (removing all other handlers); locally, falls back to
    ``logging.basicConfig()``. Finally sets a formatter on each remaining
    handler: structured for Stackdriver, a plain string otherwise.
    """
    logger = logging.getLogger()

    # Set the region on log records.
    default_factory = logging.getLogRecordFactory()
    logging.setLogRecordFactory(partial(region_record_factory, default_factory))

    # Send logs directly via the logging client if possible. This ensures trace
    # ids are propagated and allows us to send structured messages.
    if environment.in_gae():
        client = Client()
        handler = StructuredAppEngineHandler(client)
        handlers.setup_logging(handler, log_level=logging.INFO)

        # Iterate over a copy: removeHandler() mutates logger.handlers, and
        # removing entries from the list being iterated would skip handlers.
        for handler in list(logger.handlers):
            if not isinstance(handler, StructuredAppEngineHandler):
                logger.removeHandler(handler)
    else:
        logging.basicConfig()

    for handler in logger.handlers:
        # If writing directly to Stackdriver, send a structured message.
        if isinstance(handler, StructuredAppEngineHandler):
            handler.setFormatter(StructuredLogFormatter())
        # Otherwise, the default stream handler requires a string.
        else:
            handler.setFormatter(
                logging.Formatter(
                    "(%(region)s) %(module)s/%(funcName)s : %(message)s"))
def _file_pointer_for_path(self, path: GcsfsFilePath, encoding: str):
    """Returns a file pointer for the given path."""
    if environment.in_gae():
        # Running inside GAE: gcsfs can query the internal metadata service
        # for credentials.
        token = 'cloud'
    else:
        # From the GCSFileSystem docs
        # (https://gcsfs.readthedocs.io/en/latest/api.html#gcsfs.core.GCSFileSystem),
        # 'google_default' means we should look for local credentials set up via
        # `gcloud login`. The project this is reading from may have to match the
        # project default you have set locally (check via `gcloud info` and set
        # via `gcloud config set project [PROJECT_ID]`.
        token = 'google_default'
    return self.gcs_file_system.open(path.uri(), encoding=encoding, token=token)
def setup():
    """Setup logging.

    Installs a log record factory that stamps the region onto every record,
    then wires handlers for the environment: on GAE, logs go through both the
    Stackdriver structured-logging client and a plain stdout stream handler
    (all other handlers are removed); locally, ``logging.basicConfig()`` is
    used. Each surviving handler gets a structured or string formatter, and
    gunicorn's error logger is pointed at the same handlers.
    """
    # Set the region on log records.
    default_factory = logging.getLogRecordFactory()
    logging.setLogRecordFactory(partial(region_record_factory, default_factory))

    logger = logging.getLogger()

    # Send logs directly via the logging client if possible. This ensures trace
    # ids are propagated and allows us to send structured messages.
    if environment.in_gae():
        client = Client()
        handler = StructuredAppEngineHandler(client)
        handlers.setup_logging(handler, log_level=logging.INFO)

        # Streams unstructured logs to stdout - these logs will still show up
        # under the appengine.googleapis.com/stdout Stackdriver logs bucket,
        # even if other logs are stalled on the global interpreter lock or some
        # other issue.
        stdout_handler = logging.StreamHandler(sys.stdout)
        handlers.setup_logging(stdout_handler, log_level=logging.INFO)

        # Iterate over a copy: removeHandler() mutates logger.handlers, and
        # removing entries from the list being iterated would skip handlers.
        for handler in list(logger.handlers):
            if not isinstance(
                    handler, (StructuredAppEngineHandler, logging.StreamHandler)):
                logger.removeHandler(handler)
    else:
        logging.basicConfig()

    for handler in logger.handlers:
        # If writing directly to Stackdriver, send a structured message.
        if isinstance(handler, StructuredAppEngineHandler):
            handler.setFormatter(StructuredLogFormatter())
        # Otherwise, the default stream handler requires a string.
        else:
            handler.setFormatter(
                logging.Formatter(
                    "(%(region)s) %(module)s/%(funcName)s : %(message)s"))

    # Export gunicorn errors using the same handlers as other logs, so that they
    # go to Stackdriver in production.
    gunicorn_logger = logging.getLogger('gunicorn.error')
    gunicorn_logger.handlers = logger.handlers
def _get_metadata(url: str):
    """Fetches a value from the instance metadata server, with caching.

    Args:
        url: Path relative to BASE_METADATA_URL identifying the metadata key.

    Returns:
        The metadata value as text, or None if the request failed. Successful
        responses are memoized in _metadata_cache for subsequent calls.

    Raises:
        RuntimeError: if called outside GAE (or from a test) without a local
            metadata override enabled.
    """
    if url in _metadata_cache:
        return _metadata_cache[url]

    if not allow_local_metadata_call:
        if environment.in_test() or not environment.in_gae():
            raise RuntimeError(
                "May not be called from test, should this have a local override?"
            )

    try:
        r = requests.get(BASE_METADATA_URL + url, headers=HEADERS, timeout=TIMEOUT)
        r.raise_for_status()

        _metadata_cache[url] = r.text
        return r.text
    # Catch only request-level failures (connection errors, timeouts, HTTP
    # error statuses from raise_for_status) — a blanket `except Exception`
    # would also swallow programming errors as "failed to fetch".
    except requests.exceptions.RequestException as e:
        logging.error('Failed to fetch metadata [%s]: [%s]', url, e)
        return None