def api_get_identity():
    session = get_current_session()
    try:
        auth_plugin = get_auth_plugin()
    except PluginNotFoundException:
        google_user = users.get_current_user()
        if google_user:
            url = u'https://console.cloud.google.com/iam-admin/iam?project=%s' % app_identity.get_application_id()
            modules = []
            for plugin in get_plugins():
                plugin_modules = plugin.get_modules()
                if plugin_modules:
                    modules.extend(plugin_modules)
            module_strings = [m.name for m in sorted(modules, key=lambda m: m.sort_order)]
            return IdentityTO(google_user.email(), [], module_strings, [], u'en', u'/logout', url)
        raise HttpUnAuthorizedException(u'no_authenticated_plugin')
    try:
        if not session or not auth_plugin.validate_session(session):
            raise HttpUnAuthorizedException(data={'login_url': get_base_url() + auth_plugin.get_login_url()})
        language = auth_plugin.get_user_language() or get_browser_language()
        modules = auth_plugin.get_visible_modules()
        logout_url = auth_plugin.get_logout_url()
        profile_url = auth_plugin.get_profile_url()
        permissions = []
        for plugin in get_plugins():
            permissions.extend(plugin.get_permissions())
        return IdentityTO(session.user_id, session.scopes, modules, permissions, language, logout_url, profile_url)
    except HttpException:
        raise
    except Exception:
        logging.exception('Exception while trying to get identity')
        raise HttpUnAuthorizedException(data={'login_url': get_base_url() + auth_plugin.get_login_url()})
Example #2
def mapping_query(table_name, hpo_ids, dataset_id=None, project_id=None):
    """
    Get query used to generate new ids for a CDM table

    :param table_name: name of CDM table
    :param hpo_ids: identifies the HPOs
    :param dataset_id: identifies the BQ dataset containing the input table
    :param project_id: identifies the GCP project containing the dataset
    :return: the query
    """
    if dataset_id is None:
        dataset_id = bq_utils.get_dataset_id()
    if project_id is None:
        project_id = app_identity.get_application_id()
    subqueries = _mapping_subqueries(table_name, hpo_ids, dataset_id, project_id)
    union_all_query = UNION_ALL.join(subqueries)
    return '''
    WITH all_{table_name} AS (
      {union_all_query}
    )
    SELECT 
        src_table_id,
        src_{table_name}_id,
        ROW_NUMBER() OVER () AS {table_name}_id
    FROM all_{table_name}
    '''.format(union_all_query=union_all_query, table_name=table_name)
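A minimal usage sketch; the table name, HPO ids, dataset, and project below are hypothetical placeholders, not values from the source:

# Hypothetical call to mapping_query; all argument values are illustrative.
q = mapping_query('visit_occurrence',
                  hpo_ids=['hpo_a', 'hpo_b'],
                  dataset_id='my_dataset',
                  project_id='my-project')
print(q)  # WITH all_visit_occurrence AS (...) SELECT ... ROW_NUMBER() ...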
Example #3
def most_common_heel_errors(app_id=None, dataset_id=None, hpo_ids=None):
    """
    :param app_id: Application Id
    :param dataset_id: Dataset Id
    :param hpo_ids: list of Hpo_ids
    :return: None
    """
    heel_errors = list()
    if app_id is None:
        app_id = app_identity.get_application_id()
    if dataset_id is None:
        dataset_id = bq_utils.get_dataset_id()
    if not os.path.exists(HEEL_ERRORS_JSON) and not os.path.exists(
            HEEL_ERRORS_CSV):
        for hpo_id in hpo_ids:
            if bq_utils.table_exists(
                    table_id='{hpo_id}_achilles_heel_results'.format(
                        hpo_id=hpo_id),
                    dataset_id=dataset_id):
                query = heel_error_query.format(app_id=app_id,
                                                dataset_id=dataset_id,
                                                hpo_id=hpo_id)
                query_job = bq_utils.query(query)
                result = bq_utils.response2rows(query_job)
                heel_errors.extend(result)
    with open(HEEL_ERRORS_JSON, 'w') as fp:
        json.dump(heel_errors, fp, sort_keys=True, indent=4)
    parse_json_csv()
Example #4
    def get(self):
        access_token = self.get_oauth_token('https://www.googleapis.com/auth/datastore')
        app_id = app_identity.get_application_id()
        timestamp = datetime.datetime.now().strftime('%Y-%m-%d')

        backup_entities = self.get_backup_entities()
        backup_bucket = self.get_backup_bucket()
        output_url_prefix = "gs://{}/{}".format(backup_bucket, timestamp)

        entity_filter = {
            'kinds': backup_entities,
        }
        request = {
            'project_id': app_id,
            'output_url_prefix': output_url_prefix,
            'entity_filter': entity_filter
        }
        headers = {
            'Content-Type': 'application/json',
            'Authorization': 'Bearer ' + access_token
        }
        url = 'https://datastore.googleapis.com/v1beta1/projects/%s:export' % app_id
        status, _ = self.fetch_url(
            url=url,
            payload=json.dumps(request),
            method=urlfetch.POST,
            headers=headers)

        status_sitevar = Sitevar.get_by_id('apistatus')
        if status == 200 and status_sitevar and 'backup' in status_sitevar.contents:
            status_sitevar.contents['backup']['db_export'] = timestamp
            status_sitevar.put()
Example #5
class GoogleCloudStorage(object):
    storage = google.cloud.storage.Client(app_identity.get_application_id())

    @webapp2.cached_property
    def bucket(self):
        return self.storage.get_bucket('{}.appspot.com'.format(
            app_identity.get_application_id(), ))
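A short usage sketch, assuming the standard google-cloud-storage Blob API; the object name is a made-up placeholder:

# Sketch: write and read an object through the cached default bucket.
gcs = GoogleCloudStorage()
blob = gcs.bucket.blob('exports/report.json')  # hypothetical object name
blob.upload_from_string('{"ok": true}', content_type='application/json')
data = blob.download_as_string()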
Example #7
    def _stream_metadata(self, insert_all_data, dataset_id, table_id):
        partition = datetime.datetime.now().strftime("%Y%m%d")
        return self.service.tabledata().insertAll(
            projectId=app_identity.get_application_id(),
            datasetId=dataset_id,
            tableId='{}${}'.format(table_id, partition),
            body=insert_all_data).execute(num_retries=3)
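For reference, `insert_all_data` follows BigQuery's streaming-insert body format; a sketch with made-up fields:

# Hypothetical payload for _stream_metadata; field names are illustrative.
insert_all_data = {
    'rows': [{
        'insertId': 'event-0001',  # optional de-duplication id
        'json': {'name': 'sync_started', 'ts': '2019-01-01T00:00:00Z'},
    }]
}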
Example #8
def get_most_recent(app_id=None, drc_bucket=None, report_for=None):
    """
    Query audit logs for paths to the most recent datasources.json files in the DRC bucket.

    Note: Results are cached in a local json file to avoid unnecessary queries.
    :param app_id: identifies the GCP project
    :param drc_bucket: identifies the DRC bucket
    :param report_for: selects which query to use, achilles reports or results
    :return: list of dict with keys `file_path`, `upload_timestamp`
    """
    if app_id is None:
        app_id = app_identity.get_application_id()
    if drc_bucket is None:
        drc_bucket = gcs_utils.get_drc_bucket()
    if report_for == common.REPORT_FOR_ACHILLES:
        if not os.path.exists(common.LATEST_REPORTS_JSON):
            query = LATEST_REPORTS_QUERY.format(app_id=app_id, drc_bucket=drc_bucket, year=common.LOG_YEAR)
            query_job = bq_utils.query(query)
            result = bq_utils.response2rows(query_job)
            with open(common.LATEST_REPORTS_JSON, 'w') as fp:
                json.dump(result, fp, sort_keys=True, indent=4)
        with open(common.LATEST_REPORTS_JSON, 'r') as fp:
            return json.load(fp)
    elif report_for == common.REPORT_FOR_RESULTS:
        if not os.path.exists(common.LATEST_RESULTS_JSON):
            query = LATEST_RESULTS_QUERY.format(app_id=app_id, drc_bucket=drc_bucket, year=common.LOG_YEAR)
            query_job = bq_utils.query(query)
            result = bq_utils.response2rows(query_job)
            with open(common.LATEST_RESULTS_JSON, 'w') as fp:
                json.dump(result, fp, sort_keys=True, indent=4)
        with open(common.LATEST_RESULTS_JSON, 'r') as fp:
            return json.load(fp)
Example #9

    def __init__(self):
        self.app_id = app_identity.get_application_id()
        self.service = googleapiclient.discovery.build(
            'datastore',
            'v1',
            credentials=self.__create_credentials(),
            http=self.__create_http(),
        )
Example #10
    def sync_query(self, query, timeout=30000, use_legacy_sql=False):
        query_data = {
            'query': query,
            'timeoutMs': timeout,
            'useLegacySql': use_legacy_sql
        }
        return self.service.jobs().query(
            projectId=app_identity.get_application_id(),
            body=query_data).execute(num_retries=3)
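The return value follows the BigQuery jobs.query REST shape. A hedged sketch of unpacking it, assuming `client` is an instance of the surrounding class and the query finished within the timeout:

# Sketch: flatten a jobs.query REST response into lists of cell values.
response = client.sync_query('SELECT 1 AS x')  # `client` is hypothetical
fields = [f['name'] for f in response['schema']['fields']]
rows = [[cell['v'] for cell in row['f']] for row in response.get('rows', [])]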
Example #11
def send_emails_to_support(subject, body):
    cfg = get_config(NAMESPACE)
    sender = 'no-reply@%s.appspotmail.com' % app_identity.get_application_id()
    logging.debug('Sending email to support: %s\n %s', subject, body)
    for email in cfg.support_emails:
        logging.debug('Sending email to %s', email)
        mail.send_mail(sender=sender,
                       to=email,
                       subject=subject,
                       body=body)
Example #12
def get_query_result(hpo_id,
                     query_string,
                     table_id,
                     query_wrapper,
                     is_subquery,
                     app_id=None,
                     dataset_id=None):
    """
    :param hpo_id: identifies the HPO site for which validation is being done
    :param query_string: query string template stored in the constants module
    :param table_id: name of the table the analysis runs on
    :param query_wrapper: wrapper around the unioned query, if required
    :param is_subquery: boolean flag indicating whether the query must be assembled from per-table subqueries
    :param app_id: identifies the BigQuery (GCP) project
    :param dataset_id: identifies the BigQuery dataset
    :return: list of row dicts
    """
    if app_id is None:
        app_id = app_identity.get_application_id()
    if dataset_id is None:
        dataset_id = bq_utils.get_dataset_id()
    query = None
    result = None
    if is_subquery:
        sub_queries = []
        for table in cdm.tables_to_map():
            hpo_table = '{hpo_id}_{table_name}'.format(hpo_id=hpo_id,
                                                       table_name=table)
            if bq_utils.table_exists(hpo_table):
                sub_query = query_string.format(hpo_id=hpo_id,
                                                app_id=app_id,
                                                dataset_id=dataset_id,
                                                domain_table=table)
                sub_queries.append(sub_query)
        unioned_query = main_constants.UNION_ALL.join(sub_queries)
        if unioned_query and query_wrapper is not None:
            query = query_wrapper.format(union_of_subqueries=unioned_query)
        else:
            query = unioned_query
    else:
        table_name = '{hpo_name}_{results_table}'.format(
            hpo_name=hpo_id, results_table=table_id)
        if bq_utils.table_exists(table_name):
            query = query_string.format(application=app_id,
                                        dataset=dataset_id,
                                        table_id=table_name)
    if query:
        # Found achilles_heel_results table(s), run the query
        response = bq_utils.query(query)
        result = bq_utils.response2rows(response)
    if result is None:
        result = []
    return result
Example #13
    def outputjson(self, log):
        """Convert log entry into json to be written to loggingfs gofer."""
        _, level, message, _ = log
        message = self._truncate(message, self._MAX_LINE_SIZE)
        trace = logsutil.TraceID()
        severity = logsutil.LogLevelString(level)
        entry = {'severity': severity, 'message': message}
        project = app_identity.get_application_id()

        entry['logging.googleapis.com/trace'] = ('projects/%s/traces/%s' %
                                                 (project, trace))
        return json.dumps(entry)
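For a WARNING-level log line, the JSON emitted by outputjson looks roughly like this (project id and trace value are illustrative):

{"severity": "WARNING", "message": "something happened",
 "logging.googleapis.com/trace": "projects/my-app/traces/0123456789abcdef"}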
Example #14
    def _validate_authentication(self):
        import tba_config
        # Allow all requests in debug mode
        if tba_config.DEBUG:
            return

        incoming_app_id = self.request_state.headers.get('X-Appengine-Inbound-Appid', None)
        if incoming_app_id is None:
            raise remote.ApplicationError('Unauthenticated')

        from google.appengine.api.app_identity import app_identity
        if app_identity.get_application_id() != incoming_app_id:
            raise remote.ApplicationError('Unauthenticated')
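`X-Appengine-Inbound-Appid` is set by App Engine infrastructure and stripped from external requests, so it cannot be spoofed; on the calling side, URL Fetch only attaches it when redirects are disabled. A sketch of the calling app (the peer URL is hypothetical):

from google.appengine.api import urlfetch

# App Engine adds X-Appengine-Inbound-Appid only when follow_redirects=False.
result = urlfetch.fetch(
    url='https://other-app.appspot.com/api/endpoint',  # hypothetical peer
    method=urlfetch.GET,
    follow_redirects=False)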
Example #15
    def post(self):
        j = {}
        for k in self.request.arguments():
            j[k] = self.request.get(k)

        CallData(data=j).put()

        called = self.request.get('To')
        if not called:  # request.get() returns '' when 'To' is missing
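            # TwiML: tell the caller, in Japanese, that an error occurred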
            xml = '''<?xml version="1.0" encoding="UTF-8" ?>
    <Response>
        <Say voice="alice" language="ja-JP">エラーがおきました</Say>
    </Response>
    '''
            self.response.headers['Content-Type'] = 'text/xml'
            self.response.write(xml)
            return

        status = RoombaStatus.get_by_id(called)
        if status is None:
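            # TwiML: tell the caller, in Japanese, that the Roomba is not connected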
            xml = '''<?xml version="1.0" encoding="UTF-8" ?>
    <Response>
        <Say voice="alice" language="ja-JP">ルンバが接続されていません</Say>
    </Response>
    '''
            self.response.headers['Content-Type'] = 'text/xml'
            self.response.write(xml)
            return

        OrderData(called=self.request.get('To'),
                  caller=self.request.get('From'),
                  data=j,
                  talk='',
                  command='START',
                  time=1,
                  oncall=True).put()

        # Japanese phrases: greetings ("Good work today,", "Rest well tonight",
        # "Don't push yourself"); msg below says either "cleaning now, say 'stop'
        # after the beep to stop" or "starting the cleaning".
        words = [u'今日もお仕事お疲れ様、', u'今日はゆっくり休んでね', u'無理しないでね']
        xml = u'''<?xml version="1.0" encoding="UTF-8" ?>
<Response>
    <Say voice="alice" language="ja-JP">{word}。{msg}</Say>
    <Record action="http://{appid}.appspot.com/convert" timeout="3" maxlength="5" trim="trim-silence"></Record>
</Response>
'''.format(appid=get_application_id(),
           word=words[random.randint(0,
                                     len(words) - 1)],
           msg=u'ただ今掃除中。とめたい時はピーの後、停止と言ってね'
           if status.state == 'CLEANING' else u'掃除を始めるよ')

        self.response.headers['Content-Type'] = 'text/xml'
        self.response.write(xml)
Example #16
    def _authenticated(self):
        # Allow all requests in debug mode
        if tba_config.DEBUG:
            return True

        # Ignore auth check during tests
        if self.testing:
            return True

        incoming_app_id = self.request_state.headers.get('X-Appengine-Inbound-Appid', None)
        if incoming_app_id is None:
            return False

        return app_identity.get_application_id() == incoming_app_id
Example #17
    def _send_error_report(self, message,
                           report_location):
        payload = {
            'serviceContext': {
                'service': self.service,
            },
            'message': '{0}'.format(message),
            'context': {
                'reportLocation': report_location
            }
        }

        self.logging_client.projects().events().report(
            projectName='projects/{}'.format(app_identity.get_application_id()),
            body=payload).execute()
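The `logging_client` used above is presumably a discovery client for the Error Reporting API; a sketch of constructing one, assuming application-default credentials are available:

import googleapiclient.discovery

# Sketch: build the client whose projects().events().report() is called above.
logging_client = googleapiclient.discovery.build('clouderrorreporting', 'v1beta1')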
Example #18
    def _validate_authentication(self):
        # Allow all requests in debug mode
        if tba_config.DEBUG:
            return

        # Ignore auth check during tests
        if self.testing:
            return

        incoming_app_id = self.request_state.headers.get('X-Appengine-Inbound-Appid', None)
        if incoming_app_id is None:
            raise remote.ApplicationError('Unauthenticated')

        if app_identity.get_application_id() != incoming_app_id:
            raise remote.ApplicationError('Unauthenticated')
Example #19
    def post(self):
        logging.info("Request headers:\n%s" %
                     "\n".join("- %s: %s" % h
                               for h in self.request.headers.iteritems()))
        body = self.request.body
        msg = mail.InboundEmailMessage(body)
        logging.debug("Received %.1fkB from '%s'" %
                      (len(body) / 1024., msg.sender))

        if msg.attachments:

            def attachment_info((filename, payload)):
                return "- '%s' (%.1fkB; %s)" \
                       % (filename,
                          len(payload.decode()) / 1024.,
                          mail._GetMimeType(filename))

            logging.debug("%d attachments:\n%s" %
                          (len(msg.attachments), "\n".join(
                              map(attachment_info, msg.attachments))))

        ts = datetime.utcfromtimestamp(
            email.utils.mktime_tz(email.utils.parsedate_tz(msg.date)))
        to = sorted({s.strip() for s in msg.to.split(",")})
        ent = Inmail(
            mail_timestamp=ts,
            sender=msg.sender,
            recipients=to,
            # the subject field is undefined if no subject
            subject=getattr(msg, 'subject', None),
            body=body)
        try:
            ent.put()
        except RequestTooLargeError:
            logging.exception("Failed to store Inmail")
            mail.send_mail(
                sender="info@%s.appspotmail.com" % get_application_id(),
                to=msg.sender,
                subject="Auto-reply: Too large email",
                body="Hello dear %s,\n\n"
                "An email you sent couldn't be received properly due to "
                "its excessive size (%.1fMB). Please retry with reduced "
                "attachments.\n\n"
                "Original email recipients: %s\n"
                "Subject: %s" % (msg.sender, len(body) / 1024.**2, msg.to,
                                 getattr(msg, 'subject', "(empty)")))
        else:
            logging.info("Stored email to %r" % ent.key)
Example #20
def run_ppi_vocab_update(project_id, dataset_id):
    """
    runs the query which updates the ppi vocabulary in observation table

    :param project_id: Name of the project
    :param dataset_id: Name of the dataset where the queries should be run
    :return:
    """
    if project_id is None:
        project_id = app_identity.get_application_id()

    q = UPDATE_PPI_QUERY.format(project=project_id,
                                dataset=dataset_id,
                                vocabulary=VOCABULARY_DATASET)
    logging.debug('Query for PPI vocabulary update is {q}'.format(q=q))
    bq_utils.query(q=q)
Example #21

    def post(self):
        self.verify_permissions()
        suggestion_id = int(self.request.get("suggestion_id"))
        verdict = self.request.get("verdict")
        message = self.request.get("user_message")

        email_body = None
        user = None
        event_key = None
        status = ''
        if verdict == "accept":
            status = 'accept'
            user, event_key, email_body = self._process_accepted(suggestion_id, message)

        elif verdict == "reject":
            suggestion = Suggestion.get_by_id(suggestion_id)
            event_key = suggestion.contents['event_key']
            user = suggestion.author.get()
            event = Event.get_by_id(event_key)
            suggestion.review_state = Suggestion.REVIEW_REJECTED
            suggestion.reviewer = self.user_bundle.account.key
            suggestion.reviewed_at = datetime.now()
            suggestion.put()

            status = 'reject'
            email_body = """Hi {},

We have reviewed your request for auth tokens for {} {} and have regretfully declined with the following message:

{}

If you have any questions, please don't hesitate to reach out to us at [email protected]

Thanks,
TBA Admins
""".format(user.display_name, event.year, event.name, message)

        # Notify the user their keys are available
        sender = "{}@appspot.gserviceaccount.com".format(app_identity.get_application_id())
        reply_to = sender if tba_config.DEBUG else "*****@*****.**"
        if email_body:
            mail.send_mail(sender=sender,
                           reply_to=reply_to,
                           to=user.email,
                           subject="The Blue Alliance Auth Tokens for {}".format(event_key),
                           body=email_body)
        self.redirect("/suggest/apiwrite/review?success={}".format(status))
Example #22

def start_template(filename, job_name, params={}, retries=5):
    """
    Starts a dataflow template.

    Args:
        filename (str): The gcs filename for the template.
        job_name (str): The job name to use.
        params (dict): Job parameters.
        retries (int): Remaining retries. Set this to 0 to never retry.

    Returns:
        dict: The job response.
    """

    global gcs_dataflow_path
    if gcs_dataflow_path is None:
        gcs_dataflow_path = 'gs://{}/dataflow'.format(
            app_identity.get_default_gcs_bucket_name())

    body = {
        'jobName': job_name,
        'environment': {
            'tempLocation': '{}/temp_jobs'.format(gcs_dataflow_path)
        },
        'parameters': params
    }

    request = get_client().projects().locations().templates().launch(
        projectId=app_identity.get_application_id(),
        gcsPath='{}/templates/{}'.format(gcs_dataflow_path, filename),
        location=os.environ.get('DATAFLOW_REGION') or 'us-central1',
        body=body)

    logging.info('Starting template...')

    try:
        return request.execute().get('job', {})
    except HttpError as ex:
        if retries > 0:
            logging.warn(
                'Retrying failed request ({} retries remaining)'.format(
                    retries),
                exc_info=True)
            return start_template(filename, job_name, params, retries - 1)
        else:
            raise ex
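A hypothetical launch; the template name and parameters are placeholders. Note the retry design: on HttpError the function simply recurses with a decremented retry budget.

# Hypothetical call to start_template; all values are illustrative.
job = start_template('my_template',
                     job_name='nightly-job-2019-01-01',
                     params={'inputFile': 'gs://my-bucket/input.txt'})
print(job.get('id'))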
Example #23
def run_pmi_fix(project_id, dataset_id):
    """

    runs the query which adds skipped rows in survey before 2019-04-10 as PMI_Skip

    :param project_id: Name of the project
    :param dataset_id: Name of the dataset where the queries should be run

    :return:
    """
    if project_id is None:
        project_id = app_identity.get_application_id()

    q = PMI_SKIP_FIX_QUERY.format(project=project_id, dataset=dataset_id)
    logging.debug('Query for PMI_Skip fix is {q}'.format(q=q))
    bq_utils.query(q=q,
                   destination_table_id=OBSERVATION_TABLE_NAME,
                   destination_dataset_id=dataset_id,
                   write_disposition=bq_consts.WRITE_TRUNCATE)
Example #24
def initialize(service_name):
    is_local_unittest = ('expect_tests' in sys.argv[0])
    if is_local_unittest:
        appengine_name = 'unittest'
        service_name = 'unittest'
        hostname = 'unittest'
    else:  # pragma: no cover
        appengine_name = app_identity.get_application_id()
        hostname = '%s, %s' % (modules.get_current_module_name(),
                               modules.get_current_version_name())

    # Only send events if we are running on the actual AppEngine.
    if os.environ.get('SERVER_SOFTWARE', '').startswith('Google App Engine'):
        run_type = 'prod'  # pragma: no cover
    else:
        run_type = 'dry'

    config.setup_monitoring(run_type, hostname, service_name, appengine_name)
    logging.info(
        'Initialized event_mon with run_type=%s, hostname=%s, service_name=%s, '
        'appengine_name=%s', run_type, hostname, service_name, appengine_name)
Example #25
def check_routes(routes):
    all_statuses = []
    for route in routes:
        if not route.profile.boxcar_send_push:
            continue
        if route.profile.silence_until and route.profile.silence_until > datetime.datetime.now():
            continue
        statuses = call_ns_api(route.station_from, route.station_to, route.departure_time_from)
        if not statuses:
            continue
        boxcar_access_token = route.profile.boxcar_access_token
        message, message_long = create_push_message(statuses)
        hashed_message = base64.b64encode(message)
        if route.latest_push_message != hashed_message:
            route.latest_push_message = hashed_message
            route.put()
            url = 'https://{}.appspot.com{}'.format(app_identity.get_application_id(),
                                                    uri_for('profile_edit', obj_id=route.profile.key.id()))
            send_push_notification(message, message_long, boxcar_access_token, url)
        all_statuses.extend(statuses)
    return all_statuses
Example #26
def main(app_id, dataset_id, file_name, all_hpo=False, file_format=None):
    """
    Retrieve most prevalent errors from achilles heel results table(s)

    Retrieve most prevalent errors from achilles heel results table(s) and save
    results to a file at specified path

    :param app_id: Identifies the google cloud project containing the dataset
    :param dataset_id: Identifies the dataset from which achilles heel results
        should be obtained
    :param file_name: Path of file to save to
    :param all_hpo: If `True` query <hpo_id>_achilles_heel_results, otherwise
        just achilles_heel_results (default)
    :param file_format: csv or json
    """
    if app_id is None:
        app_id = app_identity.get_application_id()
    if dataset_id is None:
        dataset_id = bq_utils.get_dataset_id()
    if os.path.exists(file_name):
        # Do not overwrite existing
        #        raise RuntimeError('File {} already exists'.format(file_name))
        raise IOError('File %s already exists' % file_name)
    if file_format is None:
        # Attempt to determine format
        file_name_comps = file_name.lower().split('.')
        file_format = file_name_comps[-1]
    if file_format not in OUTPUT_FORMATS:
        raise ValueError('File format must be one of (%s)' %
                         ', '.join(OUTPUT_FORMATS))
    heel_errors = top_heel_errors(app_id, dataset_id, all_hpo)
    if file_format == CSV:
        save_csv(heel_errors, file_name)
    elif file_format == JSON:
        save_json(heel_errors, file_name)
    else:
        save_csv(heel_errors, file_name)
Example #27

    def post(self):
        payloadType = 'text'
        msg = self.request.get('msg').strip()
        if not msg:
            logging.debug('loading body')
            msg = self.request.body
            if not msg:
                self.abort(400)
                return
            msg = json.loads(msg)
            payloadType = 'struct'
        cloud_logging = self._get_service()
        from google.appengine.api.app_identity import app_identity
        projectsId = app_identity.get_application_id()
        deviceid = 'test-test'
        logsId = 'barcamp2015'
        severity = 'DEBUG'
        logs = {
            'commonLabels': {
                'compute.googleapis.com/resource_id': deviceid,
                'compute.googleapis.com/resource_type': 'instance',
            },
            'entries': [
                {
                    'log': logsId,
                    'metadata': {
                        'serviceName': 'compute.googleapis.com',
                        'severity': severity,
                    },
                    payloadType + 'Payload': msg,
                },
            ]
        }
        logging.debug(logs)
        resp = cloud_logging.projects().logs().entries().write(
            projectsId=projectsId, logsId=logsId, body=logs).execute()
        logging.info(json.dumps(resp, sort_keys=True, indent=4))
        self.response.write('log saved')
Example #28
def send_email_back(recipient, subject, attachments, text_content=None, html_content=None):
    from sendgrid.helpers import mail
    from sendgrid.helpers.mail import Attachment
    import premailer

    logging.info("sending mail to %s (%s/%s)", recipient, SENDGRID_API_KEY, SENDGRID_SENDER)

    to_email = mail.Email(recipient)
    from_email = mail.Email(SENDGRID_SENDER)

    message = mail.Mail()

    message.set_from(from_email)
    message.set_subject(subject)

    personalization = mail.Personalization()
    personalization.add_to(to_email)
    message.add_personalization(personalization)

    if not text_content and not html_content:
        message.add_content(mail.Content("text/plain", global_body))

    if text_content:
        message.add_content(mail.Content("text/plain", text_content))

    if html_content:
        message.add_content(mail.Content("text/html", html_content))

    for att in attachments:
        data = att["data"]
        file_name = att["name"]

        if file_name.endswith(".htm") or file_name.endswith(".html"):
            stub_css = "https://%s.appspot.com/css/stub.css" % app_identity.get_application_id()
            data = re.sub(
                r'\"D:\\ssis\\SecureMail\\SecureMailTest\\MailItemImages\\BankB1\.gifstyle\.css&#xA;.*\"',
                '"%s"' % stub_css,
                data)

            logging.info("before transform(%s) %s", type(data), data)

            logging.info("using premailer for %s", file_name)

            data = data.decode("utf8")

            p = premailer.Premailer(data)
            data = p.transform().encode("utf8")

            logging.info("after transform(%s) %s", type(data), data)

        attachment = Attachment()
        attachment.set_content(base64.b64encode(data))
        attachment.set_type(att["type"])
        attachment.set_filename(att["name"])
        attachment.set_disposition("attachment")
        attachment.set_content_id(att["name"])
        message.add_attachment(attachment)

    data = json.dumps(message.get())

    logging.debug("sending %s", data)

    headers = {
        "Authorization": 'Bearer {0}'.format(SENDGRID_API_KEY),
        "Content-Type": "application/json",
        "Accept": 'application/json'
    }

    response = urlfetch.fetch(
        url="https://api.sendgrid.com/v3/mail/send",
        payload=data,
        method=urlfetch.POST,
        headers=headers)

    if response.status_code > 299:
        logging.error("response %s(%s)", response.content, response.status_code)
    else:
        logging.info("response %s(%s)", response.content, response.status_code)

    if response.status_code > 299:
        raise Exception("Failed to call sendgrid API")
Example #29
from mcfw.cache import set_cache_key
from mcfw.consts import MISSING
from mcfw.rpc import arguments, returns, check_function_metadata, get_parameter_types, run, get_parameters, \
    get_type_details, serialize_value, parse_parameter

_CALL_ACTION_RESEND = 1
_CALL_ACTION_MUST_PROCESS = 2
_CALL_ACTION_DO_NOT_PROCESS = 3

BACKLOG_CONCURRENCY_PROTECTION_INTERVAL = 120
MESSAGE_LINGER_INTERVAL = 3600 * 24 * 20  # 20 days
MESSAGE_ALLOWED_FUTURE_TIME_INTERVAL = 3600 * 24
BACKLOG_MESSAGE_RETENTION_INTERVAL = 3600 * 24 + MESSAGE_LINGER_INTERVAL  # 21 days
BACKLOG_DUPLICATE_AVOIDANCE_RETENTION_INTERVAL = 3600 * 24  # 1 day

APPENGINE_APP_ID = get_application_id()
DO_NOT_SAVE_RPCCALL_OBJECTS = "DO_NOT_SAVE_RPCCALL_OBJECTS"
PERFORM_CALLBACK_SYNCHRONOUS = "PERFORM_CALLBACK_SYNCHRONOUS"
SKIP_ACCOUNTS = "SKIP_ACCOUNTS"
MOBILE_ACCOUNT = "MOBILE_ACCOUNT"
DEFER_KICK = "DEFER_KICK"
TARGET_MFR = "TARGET_MFR"
API_VERSION = u"av"
API_DIRECT_PATH_KEY = u"ap"
CALL_ID = u"ci"
FUNCTION = u"f"
PARAMETERS = u"a"
STATUS = u"s"
STATUS_SUCCESS = u"success"
STATUS_FAIL = u"fail"
RESULT = u"r"
Example #30
File: main.py, Project: p3u/blog
import os
import jinja2
import webapp2
import logging
from db_reg import UserRegistration, PostRegistration, CommentRegistration
from db_reg import LikeRegistration
from google.appengine.ext import ndb
import blog_db
import encrypt
from google.appengine.api.app_identity import app_identity

template_dir = os.path.join(os.path.dirname(__file__), "templates")
jinja_env = jinja2.Environment(loader=jinja2.FileSystemLoader(template_dir),
                               autoescape=True)

logging.info("ABC " + app_identity.get_application_id())


class Handler(webapp2.RequestHandler):
    def write(self, *a, **kw):
        self.response.write(*a, **kw)

    def render_str(self, template, **params):
        t = jinja_env.get_template(template)
        return t.render(params)

    def render(self, template, **kw):
        self.write(self.render_str(template, **kw))


class SignupPage(Handler):
Example #31
File: config.py, Project: eunchong/infra
def initialize(app=None, is_enabled_fn=None, cron_module='default',
               is_local_unittest=None):
  """Instruments webapp2 `app` with gae_ts_mon metrics.

  Instruments all the endpoints in `app` with basic metrics.

  Args:
    app (webapp2 app): the app to instrument.
    is_enabled_fn (function or None): a function returning bool if ts_mon should
      send the actual metrics. None (default) is equivalent to lambda: True.
      This allows apps to turn monitoring on or off dynamically, per app.
    cron_module (str): the name of the module handling the
      /internal/cron/ts_mon/send endpoint. This allows moving the cron job
      to any module the user wants.
    is_local_unittest (bool or None): whether we are running in a unittest.
  """
  if is_local_unittest is None:  # pragma: no cover
    # Since gae_ts_mon.initialize is called at module-scope by appengine apps,
    # AppengineTestCase.setUp() won't have run yet and none of the appengine
    # stubs will be initialized, so accessing Datastore or even getting the
    # application ID will fail.
    is_local_unittest = ('expect_tests' in sys.argv[0])

  if is_enabled_fn is not None:
    interface.state.flush_enabled_fn = is_enabled_fn

  if app is not None:
    instrument_wsgi_application(app)
    if is_local_unittest or modules.get_current_module_name() == cron_module:
      instrument_wsgi_application(handlers.app)

  # Use the application ID as the service name and the module name as the job
  # name.
  if is_local_unittest:  # pragma: no cover
    service_name = 'unittest'
    job_name = 'unittest'
    hostname = 'unittest'
  else:
    service_name = app_identity.get_application_id()
    job_name = modules.get_current_module_name()
    hostname = modules.get_current_version_name()
    runtime.set_shutdown_hook(_shutdown_hook)

  interface.state.target = targets.TaskTarget(
      service_name, job_name, shared.REGION, hostname, task_num=-1)
  interface.state.flush_mode = 'manual'
  interface.state.last_flushed = datetime.datetime.utcnow()

  # Don't send metrics when running on the dev appserver.
  if (is_local_unittest or
      os.environ.get('SERVER_SOFTWARE', '').startswith('Development')):
    logging.info('Using debug monitor')
    interface.state.global_monitor = monitors.DebugMonitor()
  else:
    logging.info('Using pubsub monitor %s/%s', shared.PUBSUB_PROJECT,
                 shared.PUBSUB_TOPIC)
    interface.state.global_monitor = monitors.PubSubMonitor(
        monitors.APPENGINE_CREDENTIALS, shared.PUBSUB_PROJECT,
        shared.PUBSUB_TOPIC)

  shared.register_global_metrics([shared.appengine_default_version])
  shared.register_global_metrics_callback(
      shared.INTERNAL_CALLBACK_NAME, _internal_callback)

  logging.info('Initialized ts_mon with service_name=%s, job_name=%s, '
               'hostname=%s', service_name, job_name, hostname)
Example #32
    def _fcm_url(self):
        app_id = app_identity.get_application_id()
        return 'https://fcm.googleapis.com/v1/projects/{}/messages:send'.format(app_id)
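A sketch of posting one message to this endpoint from within the same class, assuming `json` and `google.appengine.api.urlfetch` are imported; the device token and `access_token` (an OAuth token with scope https://www.googleapis.com/auth/firebase.messaging) are placeholders:

        # Sketch: send a single notification through the FCM v1 endpoint.
        payload = {'message': {
            'token': 'DEVICE_REGISTRATION_TOKEN',  # placeholder
            'notification': {'title': 'Hello', 'body': 'Sent via FCM v1'},
        }}
        urlfetch.fetch(url=self._fcm_url(),
                       payload=json.dumps(payload),
                       method=urlfetch.POST,
                       headers={'Authorization': 'Bearer ' + access_token,
                                'Content-Type': 'application/json'})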
Example #33
from google.appengine.api.app_identity import app_identity
from google.appengine.api.urlfetch import DeadlineExceededError
import sys
from methods.email.admin import send_error

__author__ = 'dvpermyakov'

_APP_ID = app_identity.get_application_id()


def handle_500(request, response, exception):
    body = """URL: %s
User-Agent: %s
Exception: %s
Logs: https://appengine.google.com/logs?app_id=s~%s&severity_level_override=0&severity_level=3""" \
           % (request.url, request.headers['User-Agent'], exception, _APP_ID)
    if isinstance(exception, DeadlineExceededError) and ":9900/" in exception.message:
        send_error("iiko", "iiko deadline", body)
    else:
        send_error("server", "Error 500", body)

    exc_info = sys.exc_info()
    raise exc_info[0], exc_info[1], exc_info[2]
Example #34
File: settings.py, Project: jsa/pore
import logging
import os

from google.appengine.api.app_identity.app_identity import get_application_id

try:
    appid = get_application_id()
except AttributeError:
    appid = None

IS_PROD = not os.environ.get('SERVER_SOFTWARE', "").startswith("Development/") \
          and appid == 'jsa-pore'

if IS_PROD:
    PRIMARY_HOST = "pore.savukoski.name"
else:
    PRIMARY_HOST = os.environ.get('HTTP_HOST', 'localhost:8080')

DEBUG = TEMPLATE_DEBUG = not IS_PROD

# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
TIME_ZONE = 'GMT'

# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'

SITE_ID = 1

USE_I18N = USE_L10N = False

USE_TZ = True
Example #35

    def _fcm_url(self):
        from google.appengine.api.app_identity import app_identity
        app_id = app_identity.get_application_id()
        return 'https://fcm.googleapis.com/v1/projects/{}/messages:send'.format(app_id)
Example #36
def render_query(query_str, **kwargs):
    project_id = app_identity.get_application_id()
    dataset_id = bq_utils.get_dataset_id()
    return query_str.format(project_id=project_id,
                            dataset_id=dataset_id,
                            **kwargs)
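A small usage sketch; the query template and table name are hypothetical, with {project_id} and {dataset_id} filled in automatically:

# Hypothetical template; extra placeholders come from **kwargs.
q = render_query('SELECT COUNT(*) FROM `{project_id}.{dataset_id}.{table}`',
                 table='person')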
Example #37

    def __create_gcs_output_url(gcs_folder_name):
        app_id = app_identity.get_application_id()
        output_url_prefix = "gs://staging.{}.appspot.com/{}" \
            .format(app_id, gcs_folder_name)
        return output_url_prefix
Example #38
import logging
import webapp2
from google.appengine.api.app_identity import app_identity
from google.appengine.api import urlfetch
from google.appengine.api import mail
from google.appengine.ext.webapp.mail_handlers import InboundMailHandler

appid = app_identity.get_application_id()
mail_sender = 'www@' + appid + '.appspotmail.com'


class RMailHandler(InboundMailHandler):
    def receive(self, msg):
        logging.info(msg.sender + " " + msg.subject)
        if msg.subject:
            UrlHandler(msg.subject, msg.sender)


def UrlHandler(url, receipt):
    try:
        logging.info("fetching " + url)
        result = urlfetch.Fetch(url, deadline=30)
        if result.status_code == 200:
            logging.info("done " + url)
            TMailHandler(receipt, result.content, "Re: " + url)
        else:
            logging.error("%s: %d", url, result.status_code)
    except Exception, e:
        print(e)
        return None
Example #39
def initialize(app=None,
               is_enabled_fn=None,
               cron_module='default',
               is_local_unittest=None):
    """Instruments webapp2 `app` with gae_ts_mon metrics.

  Instruments all the endpoints in `app` with basic metrics.

  Args:
    app (webapp2 app): the app to instrument.
    is_enabled_fn (function or None): a function returning bool if ts_mon should
      send the actual metrics. None (default) is equivalent to lambda: True.
      This allows apps to turn monitoring on or off dynamically, per app.
    cron_module (str): the name of the module handling the
      /internal/cron/ts_mon/send endpoint. This allows moving the cron job
      to any module the user wants.
    is_local_unittest (bool or None): whether we are running in a unittest.
  """
    if is_local_unittest is None:  # pragma: no cover
        # Since gae_ts_mon.initialize is called at module-scope by appengine apps,
        # AppengineTestCase.setUp() won't have run yet and none of the appengine
        # stubs will be initialized, so accessing Datastore or even getting the
        # application ID will fail.
        is_local_unittest = ('expect_tests' in sys.argv[0])

    if is_enabled_fn is not None:
        interface.state.flush_enabled_fn = is_enabled_fn

    if app is not None:
        instrument_wsgi_application(app)
        if is_local_unittest or modules.get_current_module_name(
        ) == cron_module:
            instrument_wsgi_application(handlers.app)

    # Use the application ID as the service name and the module name as the job
    # name.
    if is_local_unittest:  # pragma: no cover
        service_name = 'unittest'
        job_name = 'unittest'
        hostname = 'unittest'
    else:
        service_name = app_identity.get_application_id()
        job_name = modules.get_current_module_name()
        hostname = modules.get_current_version_name()
        runtime.set_shutdown_hook(_shutdown_hook)

    interface.state.target = targets.TaskTarget(service_name,
                                                job_name,
                                                shared.REGION,
                                                hostname,
                                                task_num=-1)
    interface.state.flush_mode = 'manual'
    interface.state.last_flushed = datetime.datetime.utcnow()

    # Don't send metrics when running on the dev appserver.
    if (is_local_unittest or os.environ.get('SERVER_SOFTWARE',
                                            '').startswith('Development')):
        logging.info('Using debug monitor')
        interface.state.global_monitor = monitors.DebugMonitor()
    else:
        logging.info('Using https monitor %s with %s',
                     shared.PRODXMON_ENDPOINT,
                     shared.PRODXMON_SERVICE_ACCOUNT_EMAIL)
        interface.state.global_monitor = monitors.HttpsMonitor(
            shared.PRODXMON_ENDPOINT,
            monitors.DelegateServiceAccountCredentials(
                shared.PRODXMON_SERVICE_ACCOUNT_EMAIL,
                monitors.AppengineCredentials()))
        interface.state.use_new_proto = True

    interface.register_global_metrics([shared.appengine_default_version])
    interface.register_global_metrics_callback(shared.INTERNAL_CALLBACK_NAME,
                                               _internal_callback)

    # We invoke global callbacks once for the whole application in the cron
    # handler.  Leaving this set to True would invoke them once per task.
    interface.state.invoke_global_callbacks_on_flush = False

    standard_metrics.init()

    logging.info(
        'Initialized ts_mon with service_name=%s, job_name=%s, '
        'hostname=%s', service_name, job_name, hostname)