# Assumed imports for these snippets (default_creds aliases google.auth.default):
# from google.auth import default as default_creds
# from google.cloud import datastore, bigtable

def get_datastore_client(config):
    project_id = config.get("PROJECT_ID", None)
    if config.get("emulate", False):
        # Emulator: use stub credentials, no real token is fetched.
        credentials = DoNothingCreds()
    elif project_id is not None:
        # Project is configured explicitly; only the credentials are needed.
        credentials, _ = default_creds()
    else:
        # Fall back to Application Default Credentials for both values.
        credentials, project_id = default_creds()
    client = datastore.Client(project=project_id, credentials=credentials)
    return client
def get_bigtable_client(config):
    project_id = config.get("PROJECT_ID", None)
    if config.get("emulate", False):
        credentials = DoNothingCreds()
    elif project_id is not None:
        credentials, _ = default_creds()
    else:
        credentials, project_id = default_creds()
    # admin=True is required for table-management operations.
    client = bigtable.Client(admin=True, project=project_id, credentials=credentials)
    return client
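Neither snippet defines DoNothingCreds; it is a stub credentials object handed to the client libraries when an emulator is targeted, where no real token is needed. A minimal sketch, assuming it subclasses google.auth.credentials.Credentials (the body here is illustrative, not the source's):

from google.auth import credentials


class DoNothingCreds(credentials.Credentials):
    """Stub credentials for emulators: never refreshes, never holds a token."""

    def refresh(self, request):
        # Emulators ignore authentication, so there is nothing to do here.
        pass

Because refresh is a no-op, the client libraries never attempt a token exchange, which is exactly what the emulators expect.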
def readout_log_db(table_id, filters, cols,
                   date_filter=datetime.datetime(year=2019, day=30, month=3)):
    # chunkedgraph and flask_log_db are project-local modules.
    # Make the cutoff timezone-aware so it can be compared to stored dates.
    if date_filter.tzinfo is None:
        date_filter = chunkedgraph.UTC.localize(date_filter)

    credentials, project_id = default_creds()
    client = datastore.Client(project=project_id, credentials=credentials)
    log_db = flask_log_db.FlaskLogDatabase(table_id, client=client)

    query = log_db.client.query(kind=log_db.kind, namespace=log_db.namespace)
    for filter_ in filters:
        # Each filter is a (property, operator, value) triple.
        query.add_filter(*filter_)

    data = []
    query_iter = query.fetch()
    for e in query_iter:
        if e["date"] > date_filter:
            col_data = []
            for col in cols:
                col_data.append(e[col])
            data.append(col_data)

            # if len(data) > 10000:
            #     break

    return data
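Since each filter is unpacked into query.add_filter(...), callers pass (property, operator, value) triples. An illustrative call; the table name, filter triple, and column names below are placeholders, not values from the source:

import datetime

rows = readout_log_db(
    table_id="log_table",
    filters=[("operation", "=", "merge")],  # unpacked into query.add_filter(...)
    cols=["date", "user_id"],               # one output column per entity property
    date_filter=datetime.datetime(2020, 1, 1),
)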
def get_datastore_client(config):
    project_id = config.get('project_id', 'pychunkedgraph')
    if config.get('emulate', False):
        credentials = DoNothingCreds()
    else:
        # Note: the project id from default_creds() overrides the configured one.
        credentials, project_id = default_creds()
    client = datastore.Client(project=project_id, credentials=credentials)
    return client
def get_client(config):
    project_id = config.get('project_id', 'pychunkedgraph')
    if config.get('emulate', False):
        credentials = DoNothingCreds()
    else:
        credentials, project_id = default_creds()
    client = bigtable.Client(admin=True, project=project_id,
                             credentials=credentials)
    return client
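Both helpers are driven by the same small config mapping. An illustrative use, with a placeholder project name:

# Against the emulator: stub credentials, default project id is kept.
ds = get_datastore_client({"emulate": True})

# Against real GCP: default_creds() supplies the credentials and, in these
# two variants, its project id wins over the configured 'project_id'.
bt = get_client({"project_id": "my-project"})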
def __init__(self, default_dataset: str = None, path_to_key: str = None):
    # Assumed imports: from google.auth import load_credentials_from_file,
    # default as default_creds; import pandas_gbq; from google.cloud import bigquery
    self.default_dataset = default_dataset
    credentials, self.project_id = (
        load_credentials_from_file(
            path_to_key,
            scopes=[
                "https://www.googleapis.com/auth/drive",
                "https://www.googleapis.com/auth/bigquery",
            ],
        )
        if path_to_key
        else default_creds(
            scopes=[
                "https://www.googleapis.com/auth/drive",
                "https://www.googleapis.com/auth/bigquery",
            ]
        )
    )
    # Share the credentials with pandas_gbq so read_gbq/to_gbq reuse them.
    pandas_gbq.context.credentials = credentials
    pandas_gbq.context.project = self.project_id
    self.client = bigquery.Client(credentials=credentials, project=self.project_id)
    self.secretmanager_client = None
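This __init__ is shown without its enclosing class; assuming a hypothetical wrapper name BigQueryWrapper (not in the source), usage might look like:

# "BigQueryWrapper" and the file name are placeholders for illustration.
bq = BigQueryWrapper(default_dataset="analytics",
                     path_to_key="service_account.json")
df = bq.client.query("SELECT 1 AS one").to_dataframe()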
]

LANGUAGE_CODE = "en-au"
TIME_ZONE = "Australia/Melbourne"
USE_I18N = True
USE_L10N = True
USE_TZ = True

LOGIN_URL = "/admin/login/"
LOGIN_ERROR_URL = LOGIN_URL
LOGIN_REDIRECT_URL = "/admin/"  # Default redirect after logging in.

AUTHENTICATION_BACKENDS = ("django.contrib.auth.backends.ModelBackend",)

try:
    _, project_id = default_creds()
except Exception:
    project_id = "development"

if not project_id:
    project_id = env("PROJECT_ID", default="development")

# django_cloudtask specific
TASK_DOMAIN = env("TASK_DOMAIN", default=HOSTS[0])
PROJECT_ID = project_id
PROJECT_REGION = "{{cookiecutter.gcp_region}}"
TASK_SERVICE_ACCOUNT = f"{{cookiecutter.application_name}}-app@{PROJECT_ID}.iam.gserviceaccount.com"
TASK_DEFAULT_QUEUE = "default"

if DEBUG: