Esempio n. 1
0
def combined_leaderboard(db):
    """Return the weekly top-20 steps leaderboard joined with activity minutes.

    Emits a JSON list of [username, steps, minutes] rows ordered by steps
    descending.  Requires a valid API key (see require_key).
    """
    require_key()
    backend.get_logger().log_text("combined_leaderboard requested",
                                  severity='INFO')
    # bad_activities is a module-level server-side constant (not user input),
    # so interpolating it with .format() is not an injection vector here.
    db.execute("""
        SELECT s.username, s.steps, m.minutes
        FROM (
            SELECT username, SUM( steps ) AS steps
            FROM steps
            WHERE DAY > DATE_SUB( CURDATE( ) , INTERVAL 1 WEEK )
            GROUP BY username
            ) AS s
        INNER JOIN (
            SELECT username, ROUND( SUM( a.length_ms ) /1000 /60 ) AS minutes
            FROM activity a
            INNER JOIN activity_types t ON a.activity_type = t.id
            WHERE a.day > DATE_SUB( CURDATE( ) , INTERVAL 1 WEEK )
                AND a.activity_type NOT IN {}
            GROUP BY a.username
            ) AS m ON s.username = m.username
        ORDER BY s.steps DESC
        LIMIT 20""".format(bad_activities))
    result = []
    for r in db.fetchall():
        # BUG FIX: the original used `result += [...]`, which flattened every
        # row into one interleaved list (u1, s1, m1, u2, s2, m2, ...).  Append
        # each row as its own [username, steps, minutes] triple instead.
        result.append([r['username'], int(r['steps']), int(r['minutes'])])
    response.content_type = 'application/json'
    return json.dumps(result, indent=4)
Esempio n. 2
0
def get_users(db):
    """Return every Google Fit username as a JSON array."""
    require_key()
    backend.get_logger().log_text("user requested", severity='INFO')
    db.execute("SELECT username FROM google_fit")
    usernames = []
    for row in db.fetchall():
        usernames.append(row['username'])
    response.content_type = 'application/json'
    return json.dumps(usernames, sort_keys=True, indent=4)
Esempio n. 3
0
def health_check(db):
    """Report whether the database connection is usable.

    Returns a status string when the connection is alive; otherwise logs a
    CRITICAL message and returns an HTTP 500 error object.
    """
    if not db:
        backend.get_logger().log_text("database connection is in a bad state",
                                      severity='CRITICAL')
        return HTTPError(httplib.INTERNAL_SERVER_ERROR,
                         "database connection is in a bad state")
    return "database connection alive: " + str(db.connection)
Esempio n. 4
0
def user_activities(name, db):
    """Return the activity list for one user as JSON."""
    require_key()
    log_message = "users/<name>/activities requested for: " + name
    backend.get_logger().log_text(log_message, severity='INFO')
    activity_list = query_activities(db, name)
    response.content_type = 'application/json'
    return json.dumps(activity_list, sort_keys=True, indent=4)
Esempio n. 5
0
def steps_for_user(name, db):
    """Return a day -> steps mapping for one user as JSON."""
    require_key()
    backend.get_logger().log_text("steps_for_user requested for: " + name,
                                  severity='INFO')
    # Username is bound as a query parameter, never interpolated.
    db.execute("SELECT day, steps FROM steps WHERE username=%s", (name, ))
    steps_by_day = {row['day']: row['steps'] for row in db.fetchall()}
    response.content_type = 'application/json'
    return json.dumps(steps_by_day, sort_keys=True, indent=4)
Esempio n. 6
0
def set_goal(name, goal, db):
    """Create or overwrite the activity-minutes goal for a user."""
    require_key()
    message = "set_goal requested for: name={name}, goal={goal}".format(
        name=name, goal=goal)
    backend.get_logger().log_text(message, severity='INFO')
    # REPLACE acts as an upsert: inserts a new row or overwrites the
    # user's existing goal.
    db.execute("REPLACE INTO activity_goals SET username=%s, minutes=%s",
               (name, goal))
    return "Goal set"
Esempio n. 7
0
def steps_for_user_last_day(name, db):
    """Return the user's total step count for the last 24 hours as a string.

    BUG FIX: with no step rows in the window, SUM(steps) is NULL and the
    original returned the literal string "None"; return "0" instead.
    """
    require_key()
    backend.get_logger().log_text("steps_for_user/last_day requested for: " +
                                  name,
                                  severity='INFO')
    # Username is bound as a query parameter, never interpolated.
    db.execute(
        """
        SELECT SUM(steps) as sum 
        FROM steps 
        WHERE username=%s AND day >= date_sub(CURDATE(), INTERVAL 1 DAY)
        """, (name, ))
    row = db.fetchone()
    total = row['sum'] if row and row['sum'] is not None else 0
    return str(total)
Esempio n. 8
0
def activity_for_user(name, db):
    """Return a day -> active-minutes mapping for one user as JSON.

    Activity types listed in the module-level bad_activities constant are
    excluded from the totals.
    """
    require_key()
    backend.get_logger().log_text("activity_for_user requested for: " + name,
                                  severity='INFO')
    # bad_activities is a server-side constant, so the .format() is safe;
    # the username is bound as a query parameter.
    db.execute(
        """
        SELECT a.day, ROUND(SUM(a.length_ms) / 1000 / 60) AS minutes 
        FROM activity a 
        INNER JOIN activity_types t ON a.activity_type=t.id 
        WHERE a.username=%s AND a.activity_type NOT IN {} 
        GROUP BY a.day
        """.format(bad_activities), (name, ))
    minutes_by_day = {row['day']: int(row['minutes'])
                      for row in db.fetchall()}
    response.content_type = 'application/json'
    return json.dumps(minutes_by_day, sort_keys=True, indent=4)
Esempio n. 9
0
def steps_leaderboard(db):
    """Return the top-20 steppers of the past week as ordered JSON."""
    require_key()
    backend.get_logger().log_text("step_leaderboard requested",
                                  severity='INFO')
    db.execute("""
        SELECT username, SUM(steps) as steps 
        FROM steps 
        WHERE day > date_sub(CURDATE(), INTERVAL 1 WEEK) 
        GROUP BY username 
        ORDER BY steps DESC 
        LIMIT 20
        """)
    # OrderedDict preserves the SQL ORDER BY ranking in the JSON output.
    leaderboard = OrderedDict()
    for row in db.fetchall():
        leaderboard[row['username']] = int(row['steps'])
    response.content_type = 'application/json'
    return json.dumps(leaderboard, indent=4)
Esempio n. 10
0
def activity_leaderboard(db):
    """Return the top-20 most-active users of the past week as ordered JSON.

    Activity types in the module-level bad_activities constant are excluded.
    """
    require_key()
    backend.get_logger().log_text("activity_leaderboard requested",
                                  severity='INFO')
    # bad_activities is a server-side constant, so the .format() is safe.
    db.execute("""
        SELECT username, ROUND(SUM(a.length_ms) / 1000 / 60) AS minutes 
        FROM activity a 
        INNER JOIN activity_types t ON a.activity_type=t.id 
        WHERE day > date_sub(CURDATE(), INTERVAL 1 WEEK) AND
            a.activity_type NOT IN {} 
        GROUP BY username 
        ORDER BY minutes DESC 
        LIMIT 20
        """.format(bad_activities))
    # OrderedDict preserves the SQL ORDER BY ranking in the JSON output.
    leaderboard = OrderedDict()
    for row in db.fetchall():
        leaderboard[row['username']] = int(row['minutes'])
    response.content_type = 'application/json'
    return json.dumps(leaderboard, indent=4)
Esempio n. 11
0
def insert_daily_fitness_data_impl(usernames,
                                   bucket_name=backend.DEFAULT_BUCKET):
    """
    Call Google Fitness API for users in the Cloud Datastore credentials kind, save the responses in Cloud Storage,
    insert the fitness data to Cloud BigQuery.
    key is retry[username][category]['countdown']
    if value >= 0, retry down to value -1 or set value to -2 for non-recoverable errors
    if value is None, op has succeeded
    :param usernames: a list of usernames to call Google Fitness API with
    :param bucket_name: save responses from Google Fitness API to a Google Cloud Storage bucket
    :return: The results of getting from Google Fitness API and inserting to Cloud BigQuery
    """
    retry = {}
    threads = []
    errors = []

    # One worker thread per user; each thread records its per-category
    # outcome in `retry` and appends any raised exception info to `errors`.
    for username in usernames:
        t = Thread(target=insert_daily_fitness_data_thread,
                   args=(bucket_name, retry, username, errors))
        threads.append(t)
        t.start()

    for t in threads:
        t.join()

    if errors:
        backend.get_logger().log_struct(errors, severity='CRITICAL')
        # BUG FIX: HTTPError's first argument is the HTTP status code; the
        # original passed the message string where the status belongs
        # (compare health_check's HTTPError(status, body) usage).
        return HTTPError(httplib.INTERNAL_SERVER_ERROR,
                         'Thread execution error: ' + str(errors))

    response.content_type = 'application/json'
    # BUG FIX: the original `break` only exited the inner loop, so the outer
    # scan kept iterating after the first error was found.  any() both
    # short-circuits correctly and replaces the manual flag.
    is_error = any('error' in cat_result
                   for categories in retry.itervalues()
                   for cat_result in categories.itervalues())
    if is_error:
        return HTTPResponse(retry, httplib.INTERNAL_SERVER_ERROR)
    else:
        return retry
Esempio n. 12
0
# Flask application exposing the ambulance-dispatch web endpoints.
from flask import Flask, render_template, request
import backend
import database
from datetime import datetime
from sqlite3 import Error

app = Flask(__name__)

# Shared application logger provided by the project's backend module.
logger = backend.get_logger()

# Navigation links rendered on the landing page (label -> URL path).
pages = {'home': '/', 'current state': '/v1/current_state', 'request ambulance': '/v1/user/request'}


@app.route('/', methods=['GET', 'POST'])
def index():
    """Log the visit and render the landing page with all endpoint links."""
    logger.info("User ip: {} accessed {} endpoint".format(
        request.remote_addr, request.url))
    return render_template('index.html', pages=pages)


# api for current state of devices
@app.route('/v1/current_state', methods=['GET', 'POST'])
def get_current_state():
    # NOTE(review): this function appears truncated in this excerpt --
    # request_dict is built but never used or returned in the visible
    # lines; confirm against the full source before editing.
    logger.info("User ip: {} accessed {} endpoint".format(request.remote_addr, request.url))
    request_dict = dict()
    # Optional ?id=<device_id> query parameter selects a single device;
    # an empty string means "all devices" (presumably -- verify in caller).
    device_id = request.args.get('id')
    if device_id is not None:
        request_dict['device_id'] = str(device_id)
    else:
        request_dict['device_id'] = ""
    # Record the request URL and server-local access timestamp.
    request_dict['url'] = str(request.url)
    request_dict['url_access_date'] = str(datetime.now())
Esempio n. 13
0
import socket
import select
import backend
import struct

# Maximum number of bytes read from a socket in one recv() call.
BUFSIZE = 65536

logger = backend.get_logger('socket backend')


class SocketBackend(backend.Backend):
    """TCP-socket transport backend for the ObjectSharer."""

    def __init__(self, helper):
        super(SocketBackend, self).__init__(helper)
        self.addr = None              # local listen address (unset until serving)
        self.port = None              # local listen port
        self._srv_sock = None         # listening server socket, if any
        self._select_socks = list()   # sockets monitored via select()
        self._rcv_bufs = dict()       # per-socket inbound byte buffers
        self._send_queue = dict()     # per-socket queued outbound data

    def do_connect(self, addr):
        '''
        Connect to a remote ObjectSharer at <addr>.
        If <uid> is specified it is associated with the client at <addr>.
        If <async> is False (default), wait for a reply.
        NOTE(review): <uid> and <async> are not parameters of this
        signature -- docstring likely copied from a richer overload, and
        this method appears truncated in this excerpt (the socket is never
        added to _select_socks in the visible lines); confirm against the
        full source.
        '''

        logger.debug('Connecting to %s', addr)
        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        sock.connect(backend.parse_addr(addr))
        logger.debug('Adding socket %s to select_socks', sock)
Esempio n. 14
0
def oauth2callback():
    """Google OAuth2 redirect handler.

    Exchanges the authorization code for offline credentials, fetches the
    user's profile, persists credentials to Cloud Datastore, and (on first
    sign-in) provisions the user into the BigQuery dashboard tables.
    Returns the stored Datastore entity serialized as JSON.
    """
    # Rebuild the redirect URI from the incoming request so it matches the
    # URI originally handed to the OAuth flow.
    urlparts = request.urlparts
    redirect_uri = "{}://{}{}".format(urlparts.scheme, urlparts.netloc,
                                      urlparts.path)
    # The user's timezone was round-tripped through the OAuth `state` param.
    timezone = request.query.get('state', None)

    flow = client.flow_from_clientsecrets(
        backend.get_client_secret_filename(),
        scope=[
            "profile", "email",
            'https://www.googleapis.com/auth/fitness.activity.read',
            'https://www.googleapis.com/auth/fitness.body.read'
        ],
        redirect_uri=redirect_uri)
    # access_type=offline + prompt=consent forces Google to issue a refresh
    # token, which is what gets stored in Datastore below.
    flow.params['access_type'] = 'offline'
    flow.params['prompt'] = 'consent'
    creds = flow.step2_exchange(code=request.query.code)
    http_auth = creds.authorize(httplib2.Http())
    user_info_service = build('oauth2', 'v2', http=http_auth)
    get_user_task = user_info_service.userinfo().get()
    ds = datastore.Client()
    u = get_user_task.execute()

    # insert to Cloud Datastore
    entity = datastore.Entity(key=ds.key(backend.DATASTORE_KIND, u['email']))
    now = datetime.utcnow()
    entity.update({
        'refresh_token': creds.refresh_token,
        'google_id': u['id'],
        'gender': u.get('gender'),
        'picture': u['picture'],
        'timezone': unicode(timezone),
        'last_updated': now
    })
    ds.put(entity)
    response.content_type = 'application/json'

    # insert to BigQuery dashboard dataset for user provisioning
    bigquery_client = bigquery.Client()
    # NOTE(review): this format string has four {} placeholders but five
    # arguments, and the '******' literal looks like a scrubbed value --
    # the original most likely compared dataset_username against the second
    # u['email'].  Confirm against the unredacted source.  The email is also
    # interpolated directly into SQL text; prefer BigQuery query parameters.
    query = """
            SELECT dataset_username, email
            FROM `{}.{}.{}` 
            WHERE email = '{}' OR dataset_username = '******'
            """.format(backend.GCP_project, dataset_dash, table_datasources,
                       u['email'], u['email'])
    query_job = bigquery_client.query(query)
    existing_row = list(query_job.result())
    if not existing_row:
        # provision user to bq_users table
        dataset_ref = bigquery_client.dataset(dataset_dash)
        table_ref = dataset_ref.table(table_users)
        table = bigquery_client.get_table(table_ref)
        rows_to_insert = [(u['email'], u['given_name'], u['family_name'], True)
                          ]
        errors = bigquery_client.insert_rows(table, rows_to_insert)
        if errors:
            backend.get_logger().log_text(str(errors), severity='ERROR')
            raise Exception(str(errors))
        else:
            backend.get_logger().log_text(
                'inserted user {} to table {}.{}.{}'.format(
                    u['email'], backend.GCP_project, dataset_dash,
                    table_users),
                severity='INFO')
        # provision user to bq_users_datasources table
        table_ref = dataset_ref.table(table_datasources)
        table = bigquery_client.get_table(table_ref)
        rows_to_insert = [(u['email'], backend.dataset_google_fit, u['email'],
                           'Google Fit', True)]
        errors = bigquery_client.insert_rows(table, rows_to_insert)
        if errors:
            backend.get_logger().log_text(str(errors), severity='ERROR')
            raise Exception(str(errors))
        else:
            backend.get_logger().log_text(
                'inserted user {} to table {}.{}.{}'.format(
                    u['email'], backend.GCP_project, dataset_dash,
                    table_datasources),
                severity='INFO')

    # required to serialize entity
    entity['last_updated'] = now.strftime('%Y-%m-%d %H:%M:%S %Z')
    return json.dumps(entity.items())
Esempio n. 15
0
import backend
from backend import config
from backend import SQLighter

# Bot configuration constants pulled from backend.config.
MODE = config.mode
TOKEN = config.token
PROXYLIST = config.proxy
DB = config.db
BOTS_COUNT = config.bots_count
COMMANDS = config.commands
PATH = os.getcwd()  # NOTE(review): relies on `os` being imported elsewhere in the file
EPOCH = config.epoch
FEEDBACK = config.feedback_channel

# NOTE(review): `telebot` is not imported in the visible lines -- confirm.
bot = telebot.TeleBot(TOKEN)
logger = backend.get_logger('Main', f'{PATH}/log/deploymebot.log')
logger.info('DeployMeBot started')


class WebhookServer:
    """CherryPy endpoint that forwards Telegram webhook updates to the bot."""

    @cherrypy.expose
    def index(self):
        content_length = int(cherrypy.request.headers['content-length'])
        raw_body = cherrypy.request.body.read(content_length).decode("utf-8")
        update = telebot.types.Update.de_json(raw_body)
        bot.process_new_updates([update])
        return ''


@bot.message_handler(commands=['start', 'help'])
def _(message):
Esempio n. 16
0
import socket
import select
import backend
import struct

# Maximum number of bytes read from a socket in one recv() call.
BUFSIZE = 65536

logger = backend.get_logger('socket backend')

class SocketBackend(backend.Backend):
    """TCP-socket transport backend for the ObjectSharer."""

    def __init__(self, helper):
        super(SocketBackend, self).__init__(helper)
        self.addr = None              # local listen address (unset until serving)
        self.port = None              # local listen port
        self._srv_sock = None         # listening server socket, if any
        self._select_socks = list()   # sockets monitored via select()
        self._rcv_bufs = dict()       # per-socket inbound byte buffers
        self._send_queue = dict()     # per-socket queued outbound data

    def do_connect(self, addr):
        '''
        Connect to a remote ObjectSharer at <addr>.
        If <uid> is specified it is associated with the client at <addr>.
        If <async> is False (default), wait for a reply.
        NOTE(review): <uid> and <async> are not parameters of this
        signature -- docstring likely copied from a richer overload, and
        this method appears truncated in this excerpt (the socket is never
        added to _select_socks in the visible lines); confirm against the
        full source.
        '''

        logger.debug('Connecting to %s', addr)
        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        sock.connect(backend.parse_addr(addr))
        logger.debug('Adding socket %s to select_socks', sock)
Esempio n. 17
0
def require_key():
    """Abort with HTTP 401 unless the request carries the configured API key."""
    supplied_key = request.query.get('key', '')
    if supplied_key == backend.API_key:
        return
    backend.get_logger().log_text('request made with invalid API key',
                                  severity='WARNING')
    abort(httplib.UNAUTHORIZED, "invalid API key")