Example 1
    def __init__(self):

        # Authorize and create service
        credentials = self.get_credentials()
        http = credentials.authorize(httplib2.Http())
        self.service = discovery.build('calendar', 'v3', http=http)
Example 2
def main():
    if a.local_models_dir is None and a.cloud_model_names is None:
        raise Exception("must specify --local_models_dir or --cloud_model_names")

    if a.local_models_dir is not None:
        import tensorflow as tf
        os.environ['CUDA_VISIBLE_DEVICES'] = ''
        for name in os.listdir(a.local_models_dir):
            if name.startswith("."):
                continue

            print("loading model", name)

            with tf.Graph().as_default() as graph:
                sess = tf.Session(graph=graph)
                saver = tf.train.import_meta_graph(os.path.join(a.local_models_dir, name, "export.meta"))

                saver.restore(sess, os.path.join(a.local_models_dir, name, "export"))
                input_vars = json.loads(tf.get_collection("inputs")[0])
                output_vars = json.loads(tf.get_collection("outputs")[0])
                input = graph.get_tensor_by_name(input_vars["input"])
                output = graph.get_tensor_by_name(output_vars["output"])

                if name not in models:
                    models[name] = {}

                models[name]["local"] = dict(
                    sess=sess,
                    input=input,
                    output=output,
                )

    if a.cloud_model_names is not None:
        import oauth2client.service_account
        import googleapiclient.discovery
        import googleapiclient.discovery_cache.base
        import httplib2

        for name in a.cloud_model_names.split(","):
            if name not in models:
                models[name] = {}
            models[name]["cloud"] = None

        scopes = ["https://www.googleapis.com/auth/cloud-platform"]
        global project_id
        if a.credentials is None:
            credentials = oauth2client.client.GoogleCredentials.get_application_default()
            # use this only to detect the project
            import google.cloud.storage
            storage = google.cloud.storage.Client()
            project_id = storage.project
            if a.project is not None:
                project_id = a.project
        else:
            credentials = oauth2client.service_account.ServiceAccountCredentials.from_json_keyfile_name(a.credentials, scopes)
            with open(a.credentials, "r") as f:
                project_id = json.loads(f.read())["project_id"]

        # due to what appears to be a bug, we cannot get the discovery document when specifying an http client
        # so grab it first, then the second build should use the cache
        class Cache(googleapiclient.discovery_cache.base.Cache):
            def __init__(self):
                self.cache = {}

            def get(self, url):
                return self.cache.get(url)

            def set(self, url, content):
                self.cache[url] = content

        cache = Cache()
        googleapiclient.discovery.build("ml", "v1beta1", credentials=credentials, cache=cache)
        global build_cloud_client
        build_cloud_client = lambda: googleapiclient.discovery.build("ml", "v1beta1", http=credentials.authorize(httplib2.Http(timeout=10)), cache=cache)

    print("listening on %s:%s" % (a.addr, a.port))
    ThreadedHTTPServer((a.addr, a.port), Handler).serve_forever()
Example 3
def init(argv, name, version, doc, filename, scope=None, parents=[], discovery_filename=None):
  """A common initialization routine for samples.

  Many of the sample applications do the same initialization, which has now
  been consolidated into this function. This function uses common idioms found
  in almost all the samples, i.e. for an API with name 'apiname', the
  credentials are stored in a file named apiname.dat, and the
  client_secrets.json file is stored in the same directory as the application
  main file.

  Args:
    argv: list of string, the command-line parameters of the application.
    name: string, name of the API.
    version: string, version of the API.
    doc: string, description of the application. Usually set to __doc__.
    filename: string, filename of the application. Usually set to __file__.
    parents: list of argparse.ArgumentParser, additional command-line flags.
    scope: string, The OAuth scope used.
    discovery_filename: string, name of local discovery file (JSON). Use when discovery doc not available via URL.

  Returns:
    A tuple of (service, flags), where service is the service object and flags
    is the parsed command-line flags.
  """
  if scope is None:
    scope = 'https://www.googleapis.com/auth/' + name

  # Parse command-line arguments.
  parent_parsers = [tools.argparser]
  parent_parsers.extend(parents)
  parser = argparse.ArgumentParser(
      description=doc,
      formatter_class=argparse.RawDescriptionHelpFormatter,
      parents=parent_parsers)
  flags = parser.parse_args(argv[1:])

  # Name of a file containing the OAuth 2.0 information for this
  # application, including client_id and client_secret, which are found
  # on the API Access tab on the Google APIs
  # Console <http://code.google.com/apis/console>.
  client_secrets = os.path.join(os.path.dirname(filename),
                                'client_secrets.json')

  # Set up a Flow object to be used if we need to authenticate.
  flow = client.flow_from_clientsecrets(client_secrets,
      scope=scope,
      message=tools.message_if_missing(client_secrets))

  # Prepare credentials, and authorize HTTP object with them.
  # If the credentials don't exist or are invalid run through the native client
  # flow. The Storage object will ensure that if successful the good
  # credentials will get written back to a file.
  storage = file.Storage(name + '.dat')
  credentials = storage.get()
  if credentials is None or credentials.invalid:
    credentials = tools.run_flow(flow, storage, flags)
  http = credentials.authorize(http=httplib2.Http())

  if discovery_filename is None:
    # Construct a service object via the discovery service.
    service = discovery.build(name, version, http=http)
  else:
    # Construct a service object using a local discovery document file.
    with open(discovery_filename) as discovery_file:
      service = discovery.build_from_document(
          discovery_file.read(),
          base='https://www.googleapis.com/',
          http=http)
  return (service, flags)
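
A minimal usage sketch for the init() helper above, under a couple of assumptions not in the original: the function is importable from a hypothetical module named sample_init, and client_secrets.json sits next to the calling script. The calendarList().list() call follows the standard google-api-python-client surface for the Calendar v3 API.

import sys

from sample_init import init  # hypothetical module holding the init() above


def list_calendars(argv):
    # Credentials for the 'calendar' API end up cached in calendar.dat,
    # per the convention described in the docstring above.
    service, flags = init(
        argv, 'calendar', 'v3', __doc__, __file__,
        scope='https://www.googleapis.com/auth/calendar.readonly')
    page = service.calendarList().list().execute()
    for entry in page.get('items', []):
        print(entry['summary'])


if __name__ == '__main__':
    list_calendars(sys.argv)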
Example 4
def google():
    # Validate state token
    if request.args.get('state') != session['state']:
        response = make_response(json.dumps('Invalid state parameter.'), 401)
        response.headers['Content-Type'] = 'application/json'
        return response
    # Obtain authorization code
    code = request.data

    try:
        # Upgrade the authorization code into a credentials object
        oauth_flow = flow_from_clientsecrets('client_secret.json',
                                             scope='profile')
        oauth_flow.redirect_uri = 'postmessage'
        credentials = oauth_flow.step2_exchange(code)
    except FlowExchangeError:
        response = make_response(
            json.dumps('Failed to upgrade the authorization code.'), 401)
        response.headers['Content-Type'] = 'application/json'
        return response

    # Check that the access token is valid.
    access_token = credentials.access_token
    url = ('https://www.googleapis.com/oauth2/v1/tokeninfo?access_token=%s'
           % access_token)
    h = httplib2.Http()
    result = json.loads(h.request(url, 'GET')[1].decode('utf8'))
    # If there was an error in the access token info, abort.
    if result.get('error') is not None:
        response = make_response(json.dumps(result.get('error')), 500)
        response.headers['Content-Type'] = 'application/json'
        return response

    # Verify that the access token is used for the intended user.
    gplus_id = credentials.id_token['sub']
    if result['user_id'] != gplus_id:
        response = make_response(
            json.dumps("Token's user ID doesn't match given user ID."), 401)
        response.headers['Content-Type'] = 'application/json'
        return response

    # Verify that the access token is valid for this app.
    if result['issued_to'] != client_json["web"]["client_id"]:
        response = make_response(
            json.dumps("Token's client ID does not match app's."), 401)
        response.headers['Content-Type'] = 'application/json'
        return response

    stored_access_token = session.get('access_token')
    stored_gplus_id = session.get('gplus_id')
    if stored_access_token is not None and gplus_id == stored_gplus_id:
        res = make_response(json.dumps({'name': session['username'],
                                        'email': session['email'],
                                        'pic': session['pic'],
                                        'alreadyLogged': True}), 200)
        res.headers['Content-Type'] = 'application/json'
        return res

    # Store the access token in the session for later use.
    session['access_token'] = credentials.access_token
    session['gplus_id'] = gplus_id

    # Get user info
    userinfo_url = "https://www.googleapis.com/oauth2/v1/userinfo"
    params = {'access_token': credentials.access_token, 'alt': 'json'}
    answer = requests.get(userinfo_url, params=params)
    data = answer.json()

    if data['name'] == '':
        name = data['email'].split('@')[0]
    else:
        name = data['name']

    session['username'] = name
    session['pic'] = data['picture']
    session['email'] = data['email']
    session['provider'] = 'google'

    user_l = db_session.query(User).filter(User.email == session['email'])
    user_list = user_l.all()
    # create user if not present
    if len(user_list) == 0:
        us = User(name=name, email=data['email'], pic=data['picture'])
        db_session.add(us)
        db_session.commit()
        user_d = db_session.query(User).filter(User.email == session['email'])
        user = user_d.one()
        session['id'] = user.id
    else:
        session['id'] = user_list[0].id

    res = make_response(json.dumps({'name': session['username'],
                                    'email': data['email'],
                                    'pic': data['picture'],
                                    'alreadyLogged': False}), 200)
    res.headers['Content-Type'] = 'application/json'
    return res
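
The token check in the middle of the handler above can also be read on its own; below is a small sketch of just that step, assuming only an access_token string obtained elsewhere (the tokeninfo URL and call pattern are the same ones the handler uses).

import json

import httplib2


def tokeninfo(access_token):
    # Ask Google's tokeninfo endpoint to describe the token; the caller can
    # then compare 'user_id' and 'issued_to' as the handler above does.
    url = ('https://www.googleapis.com/oauth2/v1/tokeninfo?access_token=%s'
           % access_token)
    h = httplib2.Http()
    resp, content = h.request(url, 'GET')
    return json.loads(content.decode('utf8'))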
Example 5
    def __init__(self, parsed_url):
        duplicity.backend.Backend.__init__(self, parsed_url)
        try:
            import httplib2
            from apiclient.discovery import build
        except ImportError as e:
            raise BackendException(u"""\
PyDrive backend requires PyDrive and Google API client installation.
Please read the manpage for setup details.
Exception: %s""" % str(e))

        try:
            from pydrive2.auth import GoogleAuth
            from pydrive2.drive import GoogleDrive
            from pydrive2.files import ApiRequestError, FileNotUploadedError
        except ImportError as e:
            try:
                from pydrive.auth import GoogleAuth
                from pydrive.drive import GoogleDrive
                from pydrive.files import ApiRequestError, FileNotUploadedError
            except ImportError as e:
                raise BackendException(u"""\
PyDrive backend requires PyDrive installation.  Please read the manpage for setup details.
Exception: %s""" % str(e))

        # let user get by with old client while he can
        try:
            from oauth2client.client import SignedJwtAssertionCredentials
            self.oldClient = True
        except:
            from oauth2client.service_account import ServiceAccountCredentials
            from oauth2client import crypt
            self.oldClient = False

        if u'GOOGLE_DRIVE_ACCOUNT_KEY' in os.environ:
            account_key = os.environ[u'GOOGLE_DRIVE_ACCOUNT_KEY']
            if self.oldClient:
                credentials = SignedJwtAssertionCredentials(
                    parsed_url.username + u'@' + parsed_url.hostname,
                    account_key,
                    scopes=u'https://www.googleapis.com/auth/drive')
            else:
                signer = crypt.Signer.from_string(account_key)
                credentials = ServiceAccountCredentials(
                    parsed_url.username + u'@' + parsed_url.hostname,
                    signer,
                    scopes=u'https://www.googleapis.com/auth/drive')
            credentials.authorize(httplib2.Http())
            gauth = GoogleAuth()
            gauth.credentials = credentials
        elif u'GOOGLE_DRIVE_SETTINGS' in os.environ:
            gauth = GoogleAuth(
                settings_file=os.environ[u'GOOGLE_DRIVE_SETTINGS'])
            gauth.CommandLineAuth()
        elif (u'GOOGLE_SECRETS_FILE' in os.environ
              and u'GOOGLE_CREDENTIALS_FILE' in os.environ):
            gauth = GoogleAuth()
            gauth.LoadClientConfigFile(os.environ[u'GOOGLE_SECRETS_FILE'])
            gauth.LoadCredentialsFile(os.environ[u'GOOGLE_CREDENTIALS_FILE'])
            if gauth.credentials is None:
                gauth.CommandLineAuth()
            elif gauth.access_token_expired:
                gauth.Refresh()
            else:
                gauth.Authorize()
            gauth.SaveCredentialsFile(os.environ[u'GOOGLE_CREDENTIALS_FILE'])
        else:
            raise BackendException(
                u'GOOGLE_DRIVE_ACCOUNT_KEY or GOOGLE_DRIVE_SETTINGS environment '
                u'variable not set. Please read the manpage to fix.')
        self.drive = GoogleDrive(gauth)

        # Dirty way to find root folder id
        file_list = self.drive.ListFile({
            u'q':
            u"'Root' in parents and trashed=false"
        }).GetList()
        if file_list:
            parent_folder_id = file_list[0][u'parents'][0][u'id']
        else:
            file_in_root = self.drive.CreateFile({u'title': u'i_am_in_root'})
            file_in_root.Upload()
            parent_folder_id = file_in_root[u'parents'][0][u'id']
            file_in_root.Delete()

        # Fetch destination folder entry and create hierarchy if required.
        folder_names = parsed_url.path.split(u'/')
        for folder_name in folder_names:
            if not folder_name:
                continue
            file_list = self.drive.ListFile({
                u'q':
                u"'" + parent_folder_id + u"' in parents and trashed=false"
            }).GetList()
            folder = next(
                (item
                 for item in file_list if item[u'title'] == folder_name and
                 item[u'mimeType'] == u'application/vnd.google-apps.folder'),
                None)
            if folder is None:
                folder = self.drive.CreateFile({
                    u'title':
                    folder_name,
                    u'mimeType':
                    u"application/vnd.google-apps.folder",
                    u'parents': [{
                        u'id': parent_folder_id
                    }]
                })
                folder.Upload()
            parent_folder_id = folder[u'id']
        self.folder = parent_folder_id
        self.id_cache = {}
Example 6
def test_success():
    http = httplib2.Http()
    resp, content = http.request('http://some_hopefully_nonexistant_domain:80/', 'GET')
    eq_(content, "WSGI intercept successful!\n")
    assert test_wsgi_app.success()
Example 7
import httplib2

http = httplib2.Http('.cache')


def post_service(url, headers, body):
    response, content = http.request(url, 'POST', headers=headers, body=body)
    print(response.status)
    print(content)


def put_service(url, headers, body):
    response, content = http.request(url, 'PUT', headers=headers, body=body)
    print(response.status)
    print(content)


def delete_service(url):
    response, content = http.request(url, 'DELETE')
    print(response.status)
    print(content)


def get_service(url, headers):
    response, content = http.request(url, 'GET', headers=headers)
    print(response.status)
    print(str(content))


if __name__ == '__main__':
    url = 'http://127.0.0.1:5000/url/2'
Example 8
import httplib2
import json
import sys

print("Running Endpoint Tester....\n")
address = input(
    "Please enter the address of the server you want to access, \n If left blank the connection will be set to 'http://localhost:5000':   "
)
if address == '':
    address = 'http://localhost:5000'
#Making a GET Request
print("Making a GET Request for /puppies...")
try:
    url = address + "/puppies"
    h = httplib2.Http()
    resp, result = h.request(url, 'GET')
    if resp['status'] != '200':
        raise Exception('Received an unsuccessful status code of %s' %
                        resp['status'])
except Exception as err:
    print("Test 1 FAILED: Could not make GET Request to web server")
    print(err.args)
    sys.exit()
else:
    print("Test 1 PASS: Succesfully Made GET Request to /puppies")

#Making a POST Request
print("Making a POST request to /puppies...")
try:
    url = address + "/puppies"
    h = httplib2.Http()
Example 9
from bs4 import BeautifulSoup, SoupStrainer
import httplib2

http = httplib2.Http()
status, response = http.request(
    "http://vccs-web-dev-1715195167.eu-west-2.elb.amazonaws.com/vehicle_checkers/enter_details"
)

for link in BeautifulSoup(response,
                          'html.parser',
                          parse_only=SoupStrainer('a')):
    if link.has_attr('href'):
        print(link['href'])
Example 10
def remap_genome(genome,
                 original_assembly,
                 target_assembly,
                 original_species,
                 target_species,
                 write_log=False):
    """
    Given a set of chromosomes from one species and a given assembly version,
    remap the coordinates of TAD borders to a new assembly and/or to a new
    species.

    :param genome: a dict containing all chromosomes computed
    :param original_assembly: i.e.: 'NCBI36'
    :param target_assembly:  i.e.: 'GRCh37', if None, no remapping will be done
    :param original_species: i.e.: 'Homo_sapiens'
    :param target_species: i.e.: 'mus_musculus', if None, no search for syntenic
       regions will be done
    :param False write_log: path where to write a log file

    :returns: new genome dictionary with remapped TAD borders, and a trace
       dictionary that allows the remapping and the search for syntenic regions
       to be reconstructed.
    """
    global HTTP
    HTTP = httplib2.Http(".cache")
    trace = {}

    if write_log:
        log = open(write_log, 'w')
    else:
        log = sys.stdout

    new_genome = {}
    for crm in genome:
        print('\n   Chromosome:', crm)
        # remap and syntenic region
        if original_assembly:
            if target_assembly:
                print('\n     remapping from %s to %s:' %
                      (original_assembly, target_assembly))
            if target_species:
                print('        and searching syntenic regions from %s to %s' %
                      (original_species.capitalize().replace('_', ' '),
                       target_species.capitalize().replace('_', ' ')))
            convert_chromosome(genome[crm],
                               new_genome,
                               original_species,
                               from_map=original_assembly,
                               to_map=target_assembly,
                               to_species=target_species,
                               synteny=True if target_species else False,
                               mapping=True if target_assembly else False,
                               trace=trace)
        for t in sorted(trace[crm]):
            try:
                log.write('%4s : %2s:%9s-%9s -> %2s:%9s-%9s -> %2s:%9s-%9s\n' %
                          (
                              int(t),
                              trace[crm][t]['from']['crm'],
                              trace[crm][t]['from']['start'],
                              trace[crm][t]['from']['end'],
                              trace[crm][t]['mapped to']['chr'],
                              trace[crm][t]['mapped to']['start'],
                              trace[crm][t]['mapped to']['end'],
                              trace[crm][t]['syntenic at']['chr'],
                              trace[crm][t]['syntenic at']['start'],
                              trace[crm][t]['syntenic at']['end'],
                          ))
            except KeyError:
                log.write('%4s : %2s:%9s-%9s -> %22s -> %22s\n' % (
                    int(t),
                    trace[crm][t]['from']['crm'],
                    trace[crm][t]['from']['start'],
                    trace[crm][t]['from']['end'],
                    'None',
                    'None',
                ))

    if write_log:
        log.close()

    return new_genome, trace
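
A hypothetical call to remap_genome(), assuming a genome dict of chromosomes built elsewhere (as the docstring describes) and a reachable remapping service behind convert_chromosome; the assembly and species names are the examples given in the docstring.

# Hypothetical usage; `genome` must already hold the chromosomes to remap.
new_genome, trace = remap_genome(genome,
                                 original_assembly='NCBI36',
                                 target_assembly='GRCh37',
                                 original_species='Homo_sapiens',
                                 target_species=None,  # skip syntenic search
                                 write_log='remap.log')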
Example 11
def youtube():
    if 'credentials' not in session:
        return redirect(url_for('oauth2callback'))
    credentials = client.OAuth2Credentials.from_json(session['credentials'])
    if credentials.access_token_expired:
        return redirect(url_for('oauth2callback'))
    else:
        spotify_id = session.get("spotify_id")
        db = dataset.connect('sqlite:///mydatabase.db')
        table = db["user_info"]
        playlist_name = session.get("playlist_name")

        http_auth = credentials.authorize(httplib2.Http())
        print(http_auth)

        yt_service = discovery.build(YOUTUBE_API_SERVICE_NAME,
                                     YOUTUBE_API_VERSION,
                                     http=credentials.authorize(
                                         httplib2.Http()))  #,http_auth
        playlists_insert_response = yt_service.playlists().insert(
            part="snippet,status",
            body=dict(snippet=dict(
                title=playlist_name,
                description=
                "Playlist created by SpotifYT from my Spotify Playlist" + " " +
                "(" + playlist_name + ")"),
                      status=dict(privacyStatus="public"))).execute()

        channels_list = yt_service.channels().list(part="snippet",
                                                   mine=True).execute()

        channel = channels_list.get("items", [])
        chan_id = str([chan["id"] for chan in channel][0])

        if len(table) == 0:  # insert first user
            table.insert(
                dict(spotify_id=spotify_id, youtube_id=chan_id, counts=1))
        elif table.find(spotify_id=spotify_id, youtube_id=chan_id) != {}:
            result = table.find_one(spotify_id=spotify_id)
            s_id = result["spotify_id"]
            y_id = result["youtube_id"]
            old_counts = result["counts"]
            new_counts = old_counts + 1
            table.update(
                dict(spotify_id=s_id, youtube_id=y_id, counts=new_counts),
                ["spotify_id", "youtube_id"])
        else:
            table.insert(
                dict(spotify_id=spotify_id, youtube_id=chan_id, counts=1))

        ytplaylist_id = playlists_insert_response["id"]
        yt_playlist_url = "https://www.youtube.com/playlist?list=" + ytplaylist_id

        song_options = session.get("song_options")

        youtube = discovery.build(YOUTUBE_API_SERVICE_NAME,
                                  YOUTUBE_API_VERSION,
                                  developerKey=DEVELOPER_KEY)
        # search for the most viewed video of songs in the playlist
        video_ids = []
        for n in range(len(song_options)):

            search_response = youtube.search().list(q=song_options[n],
                                                    part="id,snippet",
                                                    maxResults=5).execute()

            for search_result in search_response.get("items", []):

                if search_result["id"]["kind"] == "youtube#video":
                    v_id = search_result["id"]["videoId"]
                    video_ids += [v_id]
                    break

        for x in video_ids:
            add_video_request = yt_service.playlistItems().insert(
                part="snippet",
                body={
                    'snippet': {
                        'playlistId': ytplaylist_id,
                        'resourceId': {
                            'kind': 'youtube#video',
                            'videoId': x
                        }
                    }
                }).execute()
        return (render_template("youtubeplaylist.html",
                                youtube_url=yt_playlist_url,
                                id=ytplaylist_id))
Example 12
def map_tad(tad,
            crm,
            resolution,
            from_species,
            synteny=True,
            mapping=True,
            trace=None,
            **kwargs):
    """
    Converts the coordinates of one TAD border in a given chromosome. The
    conversion can be in terms of a change in assembly version, in terms of
    syntenic regions between species, or both.
    """
    beg = int(tad['end'] * resolution)
    end = int((tad['end'] + 1) * resolution)
    ori_crm = crm
    ## keep trace
    trace.setdefault(crm, {})
    if not tad['end'] in trace[crm]:
        trace[crm][tad['end']] = {
            'from': {
                'crm': crm,
                'start': beg,
                'end': end
            }
        }

    coords = {}
    global HTTP
    if mapping:
        errors = 0
        while errors < 100:
            try:
                coords = remap_segment(crm, beg, end, from_species, **kwargs)
                if isinstance(coords, int):
                    if coords > beg:
                        beg = int(tad['end'] * resolution)
                        end = coords
                    else:
                        beg = int((tad['end'] - 1) * resolution)
                        end = coords
                else:
                    if not 'mapped to' in trace[crm][tad['end']]:
                        if isinstance(coords, dict):
                            trace[crm][tad['end']]['mapped to'] = coords
                        else:
                            trace[crm][tad['end']]['syntenic at'] = {
                                'chr': None,
                                'start': None,
                                'end': None
                            }
                    break
            except Exception as e:
                errors += 1
                # print e.message
                print('\n... reconnecting (mapping)...')
                # print ' ' * ((i%50) + 9 + (i%50)/10),
                sleep(1)
                HTTP = httplib2.Http(".cache")
        else:
            raise Exception('ERROR: not able to remap %s:%s-%s\n' %
                            (crm, beg, end))
    if synteny and isinstance(coords, dict):
        crm, beg, end = coords['chr'], coords['start'], coords['end']
        errors = 0
        while errors < 100:
            try:
                coords = syntenic_segment(crm, beg, end, from_species,
                                          **kwargs)
                if isinstance(coords, int):
                    if coords > beg:
                        beg = int(tad['end'] * resolution)
                        end = coords
                    else:
                        beg = int((tad['end'] - 1) * resolution)
                        end = coords
                else:
                    if not 'syntenic at' in trace[ori_crm][tad['end']]:
                        if isinstance(coords, dict):
                            trace[ori_crm][tad['end']]['syntenic at'] = coords
                        else:
                            trace[ori_crm][tad['end']]['syntenic at'] = {
                                'chr': None,
                                'start': None,
                                'end': None
                            }
                    break
            except Exception as e:
                errors += 1
                # print str(e)
                print('\n... reconnecting (synteny)...')
                if errors == 2 and beg > resolution:
                    print('extending region left')
                    beg -= resolution
                if errors == 4:
                    print('extending region right')
                    beg += resolution
                    end += resolution
                # print ' ' * ((i%50) + 9 + (i%50)/10),
                sleep(1)
                HTTP = httplib2.Http(".cache")
        else:
            raise Exception('ERROR: not able to find synteny %s:%s-%s\n' %
                            (crm, beg, end))
    return coords
Example 13
def gconnect():

    # Validate state token received from Google against the available token
    if request.args.get('state') != login_session['state']:

        # Create and return a 401 error telling the user that the tokens do\
        # not match
        response = make_response(json.dumps('Invalid state token'), 401)
        response.headers['Content-Type'] = 'application/json'
        return response

    # If this request does not have `X-Requested-With` header, this could be\
    # a CSRF
    if not request.headers.get('X-Requested-With'):
        abort(403)

    # Obtain the authorization code received from Google
    code = request.data

    # Try to obtain a credentials object from the authorization code received\
    # from Google
    try:
        # Upgrade the authorization code into a credentials object
        oauth_flow = flow_from_clientsecrets(
            '/var/www/FLASKAPPS/catalogueapp/client_secrets.json', scope='')
        oauth_flow.redirect_uri = 'postmessage'
        credentials = oauth_flow.step2_exchange(code)

    # notify the user that a credentials object could not be obtained from\
    # the authorization code provided by the server
    except FlowExchangeError:
        response = make_response(
            json.dumps('Failed to upgrade the authorization code.'), 401)
        response.headers['Content-Type'] = 'application/json'
        return response

    # Check that the access token is valid by using it to make a call to Google
    access_token = credentials.access_token

    # Assemble the url, call Google's servers, and save the result
    url = ('https://www.googleapis.com/oauth2/v1/tokeninfo?access_token=%s' %
           access_token)
    h = httplib2.Http()
    result = json.loads(h.request(url, 'GET')[1])

    # Check the result of the call, if there was an error in the access token\
    # info, abort.
    if result.get('error') is not None:

        # Notify the user of the Google Server error, 500
        response = make_response(json.dumps(result.get('error')), 500)
        response.headers['Content-Type'] = 'application/json'
        return response

    # obtain the user id within the credentials object used to make the\
    # connection request
    google_user_id = credentials.id_token['sub']

    # Verify that the access token is used by the intended user
    if result['user_id'] != google_user_id:

        # notify the user that the user id of the request maker and of the\
        # information received don't match
        response = make_response(
            json.dumps("Token's user ID doesn't match given user ID."), 401)
        response.headers['Content-Type'] = 'application/json'
        return response

    # Verify that the access token is valid for this app.
    if result['issued_to'] != CLIENT_ID:

        # notify the user that the id of the client does not match the\
        # application's
        response = make_response(
            json.dumps("Token's client ID does not match app's."), 401)
        print "Token's client ID does not match app's."
        response.headers['Content-Type'] = 'application/json'
        return response

    # Verify if there's a stored access token and whether the user id is\
    # already stored; if so, the user is already logged in
    stored_access_token = login_session.get('access_token')
    stored_google_user_id = login_session.get('google_user_id')
    if stored_access_token is not None and google_user_id ==\
            stored_google_user_id:

        # notify the user that they're already logged in
        response = make_response(
            json.dumps('Current user is already connected.'), 200)
        response.headers['Content-Type'] = 'application/json'
        return response

    # Store the access token in the session for later use.
    login_session['access_token'] = credentials.access_token
    login_session['google_user_id'] = google_user_id

    # Assemble the request, Get the user info data
    userinfo_url = "https://www.googleapis.com/oauth2/v1/userinfo"
    params = {'access_token': credentials.access_token, 'alt': 'json'}
    answer = requests.get(userinfo_url, params=params)
    data = answer.json()

    # Assign the user info data received to the login session
    login_session['username'] = data['name']
    login_session['picture'] = data['picture']
    login_session['email'] = data['email']

    # the return variable containing the final connection/login information\
    # the user will see
    output = ''

    # get the user id associated with email from the database
    user_id = getUserID(login_session['email'])

    # if the user is not in the database
    if not user_id:

        # create a new user and obtain the id
        user_id = createUser(login_session)

        # notify the user that a new account has been created for them
        output += ('<h2>A new user account has been created for you with the '
                   'following information: %s </h2> </br></br></br></br>'
                   % getUserInfo(user_id))

    # update/set the user id of the login session
    login_session['user_id'] = user_id

    # print the login info to the user
    output += '<h1>Welcome, '
    output += login_session['username']
    output += '!</h1>'
    output += '<img src="'
    output += login_session['picture']
    output += ' " style = "width: 300px; height: 300px;border-radius: 150px;\
        -webkit-border-radius: 150px;-moz-border-radius: 150px;"> '

    # flash a message to the user in the landing page to confirm the login
    flash("you are now logged in as %s" % login_session['username'])

    # return the html code to the login.html page for rendering
    return output
Example 14
def build_request(http, *args, **kwargs):
    new_http = httplib2.Http()
    return apiclient.http.HttpRequest(new_http, *args, **kwargs)
Example 15
def authn(args):
    credentials = get_credentials(args)
    http = credentials.authorize(httplib2.Http())
    return credentials, http
Example 16
#!/usr/bin/python
import os, sys
import serial
import time
import httplib2
import private

ser = serial.Serial('/dev/ttyACM0', 115200, timeout=5)
h = httplib2.Http(".cache")
ser.write(str.encode("a"))
lines = ser.readline()
length = len(lines.decode())
while length < 10:
    ser.write(str.encode("a"))
    lines = ser.readline()
    length = len(lines.decode())
serialStringO = str(
    time.time()) + "," + lines[:-2].decode() + "," + private.inputPassword
#serialStringL = serialStringO.split(',')
try:
    #serialData = [float(i) for i in serialStringL]
    #(resp_headers, content) = h.request(urllib.parse.quote_plus("http://karben14.com/pi2/index.php?data=" + serialStringO), "GET")
    (resp_headers, content) = h.request(
        "http://karben14.com/pi2/index.php?data=" + serialStringO, "GET")
    #print(serialData)
    print((resp_headers, content))
except:
    print("Float->String error")
Example 17
def get_geojson(params):
    """
    This function accepts a dictionary of parameters and returns a GeoJSON representation of the requested layer. This
    takes a format similar to the following example:

    {
        "host": "mf2.dit.ie:8080",
        "layer": "cso:ctygeom",
        "srs_code": 29902,
        "properties": ["countyname", ],
        "geom_field": "geom",
        "filter_property": "countyname",
        "filter_values": ["Cork", "Kerry"]
    }

    You can filter the set of features returned by adjusting "filter_values". This is a list of values that must
    be present in "filter_property". In the above example you'd get the counties of Cork and Kerry plus Cork City.
    Similarly, you can filter the properties returned to reduce their number. If you use this feature, you'll need to
    set "geom_field" to the name of the geometry field. Geoserver can give you this.

    All values in the dictionary are optional except "host" and "layer".

    :param params: Dictionary as above
    :return: Parsed GeoJSON or exception as appropriate
    """


    import urllib.parse
    from urllib.parse import urlparse
    import httplib2
    import os, os.path
    import json
    import xml.etree.ElementTree as etree

    #
    # Check that the parameters exist and/or are sensible. Because the filter can contain some 'odd' characters such as '%'
    # and single quotes the filter text needs to be url encoded so that text like "countyname LIKE '%Cork%'" becomes
    # "countyname%20LIKE%20%27%25Cork%25%27" which is safer for URLs
    #
    if "host" not in params:
        raise ValueError("Value for 'host' required")
    if "layer" not in params:
        raise ValueError("Value for 'layer' required")
    if "srs_code" in params and params["srs_code"]:
        srs_text = "&srsName=epsg:{}".format(params["srs_code"])
    else:
        srs_text = ""
    if "properties" in params and params["properties"]:
        item_string = ""
        for item in params["properties"]:
            item_string += str(item) + ","
        if "geom_field" in params and params["geom_field"]:
            item_string += str(params["geom_field"])
        property_text = "&PROPERTYNAME={}".format(item_string)
    else:
        property_text = ""
    if "filter_property" in params and params["filter_property"] and params["filter_values"]:
        filter_text = "{filter_property} LIKE '%{filter_values}%'".format(filter_property=params["filter_property"], filter_values=params["filter_values"][0])
        for item in range(1, len(params["filter_values"])):
            filter_text += "OR {filter_property} LIKE '%{filter_values}%'".format(filter_property=params["filter_property"], filter_values=params["filter_values"][item])
        filter_text = urllib.parse.quote(filter_text)
        filter_text = "&CQL_FILTER=" + filter_text
    else:
        filter_text = ""

    url = "http://{host}/geoserver/ows?" \
          "service=WFS&version=1.0.0&" \
          "request=GetFeature&" \
          "typeName={layer}&" \
          "outputFormat=json".format(host=params["host"], layer=params["layer"])
    url += srs_text
    url += property_text
    url += filter_text

    #
    # Make a directory to hold downloads so that we don't have to repeatedly download them later, i.e. they already
    # exist so we get them from a local directory. This directory is called ".httpcache".
    #
    scriptDir = os.path.dirname(__file__)
    cacheDir = os.path.normpath(os.path.join(scriptDir, ".httpcache"))
    if not os.path.exists(cacheDir):
        os.mkdir(cacheDir)

    #
    # Go to the web and attempt to get the resource
    #
    try:
        h = httplib2.Http()
        print (url)
        response_headers, response = h.request(url)
        response = response.decode()

        print ("Response receieved")

        #
        # Geoserver only sends valid data in the requested format, in our case GeoJSON, so if we get a response back in
        # XML format we know that we have an error. We do minimal parsing on the xml to extract the error text and raise
        # an exception based on it.
        #
        if response[:5] == "<?xml":
            response = etree.fromstring(response)
            xml_error = ""
            for element in response:
                xml_error += element.text
            raise Exception(xml_error)
        else:
            return json.loads(response)

    except httplib2.HttpLib2Error as e:
        print(e)
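
A usage sketch built directly from the dictionary documented in the docstring above; it assumes the GeoServer host in that example is reachable and that a successful GetFeature response is a GeoJSON FeatureCollection (so it exposes a "features" list).

if __name__ == '__main__':
    params = {
        "host": "mf2.dit.ie:8080",
        "layer": "cso:ctygeom",
        "srs_code": 29902,
        "properties": ["countyname", ],
        "geom_field": "geom",
        "filter_property": "countyname",
        "filter_values": ["Cork", "Kerry"]
    }
    geojson = get_geojson(params)
    # A successful GetFeature response is a FeatureCollection.
    print("{} features returned".format(len(geojson["features"])))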
Example 18
def fbconnect():
    """
        Called when User tries to login through Facebook
        Once the login is successful:
            1. State token is validated
            2. user data is fetched from Facebook
               and stored in the login_session object
    """

    if request.args.get('state') != login_session['state']:
        response = make_response(json.dumps('Invalid state parameter.'), 401)
        response.headers['Content-Type'] = 'application/json'
        return response
    access_token = request.data
    print "access token received %s " % access_token
    app_id = json.loads(open('fb_client_secrets.json',
                             'r').read())['web']['app_id']
    app_secret = json.loads(open('fb_client_secrets.json',
                                 'r').read())['web']['app_secret']
    url = ("https://graph.facebook.com/oauth/access_token?" +
           "grant_type=fb_exchange_token&client_id=%s" % (app_id) +
           "&client_secret=%s&fb_exchange_token=%s" %
           (app_secret, access_token))
    h = httplib2.Http()
    result = h.request(url, 'GET')[1]
    print("result = %s" % result)

    # Use token to get user info from API
    userinfo_url = "https://graph.facebook.com/v2.8/me"
    '''
        Due to the formatting for the result from the server token exchange
        we have to split the token first on commas and select the first index
        which gives us the key : value for the server access token then we
        split it on colons to pull out the actual token value and replace
        the remaining quotes with nothing so that it can be used directly
        in the graph api calls
    '''
    token = result.split(',')[0].split(':')[1].replace('"', '')
    print(token)

    url = ("https://graph.facebook.com/v2.8/me?" +
           "access_token=%s&fields=name,id,email" % token)
    h = httplib2.Http()
    result = h.request(url, 'GET')[1]
    print("url sent for API access:%s" % url)
    print("API JSON result: %s" % result)
    data = json.loads(result)
    login_session['provider'] = 'facebook'
    login_session['username'] = data["name"]
    login_session['email'] = data["email"]
    login_session['facebook_id'] = data["id"]

    # The token must be stored in the login_session in order to properly logout
    login_session['access_token'] = token

    # Get user picture
    url = ("https://graph.facebook.com/v2.8/me/picture?" +
           "access_token=%s&redirect=0&height=200&width=200" % token)
    h = httplib2.Http()
    result = h.request(url, 'GET')[1]
    data = json.loads(result)

    login_session['picture'] = data["data"]["url"]

    # see if user exists
    user_id = getUserID(login_session['email'])
    if not user_id:
        user_id = createUser(login_session)
    login_session['user_id'] = user_id

    output = ''
    output += '<h1>Welcome, '
    output += login_session['username']

    output += '!</h1>'
    output += '<img src="'
    output += login_session['picture']
    output += ' " style = "width: 300px; height: 300px;border-radius: 150px;\
                           -webkit-border-radius: 150px;-moz-border-radius: \
                           150px;"> '

    flash("Now logged in as %s" % login_session['username'])
    return output
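
The comment block inside fbconnect() explains that the access token is pulled out of the exchange response by splitting on commas and colons; here is a standalone illustration of that parsing, using a made-up response body in the JSON-like shape the comment assumes.

# Made-up response body, only to show what the split/replace above extracts.
sample_result = '{"access_token":"EAAC123abc","token_type":"bearer","expires_in":5183999}'
token = sample_result.split(',')[0].split(':')[1].replace('"', '')
print(token)  # -> EAAC123abc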
Example 19
def test_https_success():
    http = httplib2.Http()
    resp, content = http.request('https://some_hopefully_nonexistant_domain/', 'GET')
    assert test_wsgi_app.success()
Example 20
def main():
    credentials = get_credentials()
    http = credentials.authorize(httplib2.Http())
Example 21
} WHERE {
  ?person a foaf:Person ;
    foaf:name ?name ;
    ?prop ?value .
}"""
repository = 'test1'
endpoint = "http://localhost:8080/openrdf-sesame/repositories/%s" % repository

print "POSTing SPARQL query to %s" % endpoint
params = {'query': query}
headers = {
    'content-type': 'application/x-www-form-urlencoded',
    'accept': 'text/plain'
}
(response, content) = httplib2.Http().request(endpoint,
                                              'POST',
                                              urllib.urlencode(params),
                                              headers=headers)
print "Response %s" % response.status
#print "N3 %s" % content
graph = rdflib.ConjunctiveGraph()
graph.parse(rdflib.parser.StringInputSource(content), format="nt")

#for triple in graph.triples((None,None,None)):
#    print triple
print "Loaded %d triples" % len(graph)

FOAF = rdflib.Namespace('http://xmlns.com/foaf/0.1/')
RDF = rdflib.Namespace('http://www.w3.org/1999/02/22-rdf-syntax-ns#')


class Person(rdfalchemy.rdfSubject):
Example 22
    def authorize(self, credentials):
        return credentials.authorize(httplib2.Http())
Example 23
def connection(url, user, password, UA, timeout, brutemode):

    username = user
    pwd = password

    http = httplib2.Http(timeout=timeout,
                         disable_ssl_certificate_validation=True)

    # HTTP POST Data
    body = bodyCMS(username, pwd, brutemode)

    # Headers
    headers = headersCMS(UA, body, brutemode)

    try:

        if brutemode == "std":
            response, content = http.request(url,
                                             'POST',
                                             headers=headers,
                                             body=urllib.urlencode(body))

            if str(response.status)[0] == "4" or str(
                    response.status)[0] == "5":
                print('[X] HTTP error, code: ' + str(response.status))
                os._exit(1)

            if responseCMS(response) == "1":
                print('\n')
                print('[!] Password FOUND!!!')
                print('')
                print('[!] Username: '******' Password: '******'POST',
                                             headers=headers,
                                             body=body)

            if str(response.status)[0] == "4" or str(
                    response.status)[0] == "5":
                print('[X] HTTP error, code: ' + str(response.status))
                os._exit(1)

            # Remove all blank and newline chars
            xmlcontent = content.replace(" ", "").replace("\n", "")

            if not "faultCode" in xmlcontent:
                print('\n')
                print('[!] Password FOUND!!!')
                print('')
                print('[!] Username: '******' Password: '******'\n[X] Connection Timeout')
        os._exit(1)
    except socket.error:
        print('\n[X] Connection Refused')
        os._exit(1)
    except httplib.ResponseNotReady:
        print('\n[X] Server Not Responding')
        os._exit(1)
    except httplib2.ServerNotFoundError:
        print('\n[X] Server Not Found')
        os._exit(1)
    except httplib2.HttpLib2Error:
        print('\n[X] Connection Error!!')
        os._exit(1)
Example 24
    def get(self):
        http = httplib2.Http()
        self.response, self.content = http.request("http://ip.jsontest.com/",
                                                    "GET")
        self._async_callback(self.response, self.content)
Example 25
    # Create the AdWordsUser and set the OAuth2 credentials.
    client = AdWordsClient(
        headers={
            'developerToken': '%s++USD' % email,
            'clientCustomerId': client_customer_id,
            'userAgent': 'OAuth2 Example',
            'oauth2credentials': credential
        })

    # OAuth2 credentials objects can be reused
    credentials = client.oauth2credentials
    print 'OAuth2 authorization successful!'

    # OAuth2 credential objects can be refreshed via credentials.refresh() - the
    # access token expires after 1 hour.
    credentials.refresh(httplib2.Http())

    # Note: you could simply set the credentials as below and skip the previous
    # steps once access has been granted.
    client.oauth2credentials = credentials

    campaign_service = client.GetCampaignService(version='v201209')

    # Get all campaigns.
    # Construct selector and get all campaigns.
    selector = {'fields': ['Id', 'Name', 'Status']}
    campaigns = campaign_service.Get(selector)[0]

    # Display results.
    if 'entries' in campaigns:
        for campaign in campaigns['entries']:
Example 26
  def setUp(self):
    super(RetriableHttplib2Test, self).setUp()
    self.http = infra_libs.RetriableHttp(httplib2.Http())
    self.http._http.request = mock.create_autospec(self.http._http.request,
                                                   spec_set=True)
Example 27
#!/usr/bin/python

import sys
import httplib2

if len(sys.argv) < 2:
    print sys.argv[0] + ": <url>"
    sys.exit(1)

webclient = httplib2.Http()
header, content = webclient.request(sys.argv[1], "GET")

for field, value in header.items():
    print field + ": " + value
Example 28
  def test_authorize(self):
    http = infra_libs.RetriableHttp(httplib2.Http())
    creds = infra_libs.get_signed_jwt_assertion_credentials(
      'valid_creds.json',
      service_accounts_creds_root=DATA_DIR)
    creds.authorize(http)
Example 29
def gconnect():
    # Validate state token
    if request.args.get('state') != login_session['state']:
        response = make_response(json.dumps('Invalid state parameter.'), 401)
        response.headers['Content-Type'] = 'application/json'
        return response
    # Obtain authorization code
    code = request.data

    try:
        # Upgrade the authorization code into a credentials object
        oauth_flow = flow_from_clientsecrets('client_secrets.json', scope='')
        oauth_flow.redirect_uri = 'postmessage'
        credentials = oauth_flow.step2_exchange(code)
    except FlowExchangeError:
        response = make_response(
            json.dumps('Failed to upgrade the authorization code.'), 401)
        response.headers['Content-Type'] = 'application/json'
        return response

    # Check that the access token is valid.
    access_token = credentials.access_token
    url = ('https://www.googleapis.com/oauth2/v1/tokeninfo?access_token=%s'
           % access_token)
    h = httplib2.Http()
    result = json.loads(h.request(url, 'GET')[1])
    # If there was an error in the access token info, abort.
    if result.get('error') is not None:
        response = make_response(json.dumps(result.get('error')), 500)
        response.headers['Content-Type'] = 'application/json'
        return response

    # Verify that the access token is used for the intended user.
    gplus_id = credentials.id_token['sub']
    if result['user_id'] != gplus_id:
        response = make_response(
            json.dumps("Token's user ID doesn't match given user ID."), 401)
        response.headers['Content-Type'] = 'application/json'
        return response

    # Verify that the access token is valid for this app.
    if result['issued_to'] != CLIENT_ID:
        response = make_response(
            json.dumps("Token's client ID does not match app's."), 401)
        print "Token's client ID does not match app's."
        response.headers['Content-Type'] = 'application/json'
        return response

    stored_access_token = login_session.get('access_token')
    stored_gplus_id = login_session.get('gplus_id')
    if stored_access_token is not None and gplus_id == stored_gplus_id:
        response = make_response(json.dumps('Current user is already connected.'),
                                 200)
        response.headers['Content-Type'] = 'application/json'
        return response

    # Store the access token in the session for later use.
    login_session['access_token'] = credentials.access_token
    login_session['gplus_id'] = gplus_id

    # Get user info
    userinfo_url = "https://www.googleapis.com/oauth2/v1/userinfo"
    params = {'access_token': credentials.access_token, 'alt': 'json'}
    answer = requests.get(userinfo_url, params=params)

    data = answer.json()

    login_session['username'] = data['name']
    login_session['picture'] = data['picture']
    login_session['email'] = data['email']
    
    # see if user exists, if it doesn't make a new one
    user_id = getUserID(data["email"])
    if not user_id:
        user_id = createUser(login_session)
    login_session['user_id'] = user_id
    
    output = ''
    output += '<h1>Welcome, '
    output += login_session['username']
    output += '!</h1>'
    output += '<img src="'
    output += login_session['picture']
    output += ' " style = "width: 300px; height: 300px;border-radius: 150px;-webkit-border-radius: 150px;-moz-border-radius: 150px;"> '
    flash("you are now logged in as %s" % login_session['username'])
    print "done!"
    return output
Example 30
    config = ConfigParser.ConfigParser()
    config.read('./thingspeak.config')
    apiKey = config.get('THINGSPEAK', 'write_api_key')
    reportTime = config.getint('THINGSPEAK', 'report_time')
   
    ts_headers = {"Content-type": "application/x-www-form-urlencoded","Accept": "text/plain"}

    target = urlparse('http://localhost:5000/getAllRawSensorData')
    method = 'GET'
    body = ''
    headers = {
    'Accept': 'application/json',
    'Content-Type': 'application/json; charset=UTF-8'
    }

    h = http.Http()
  except:
    e = sys.exc_info()[0]
    logger.critical(e)
    #print e
    

while (1):
  logger.info('Writing data to thingspeak ')
  try:
    ts_conn    = httplib.HTTPConnection("api.thingspeak.com:80")
    response, content = h.request(target.geturl(), method, body, headers)
    data = json.loads(content)
    
    logger.info(data)
    now = datetime.datetime.utcnow()