Example 1
 def execute_query(self,
                   query=None,
                   ddl=None,
                   bucket='default',
                   password=None,
                   iteration=100):
     try:
         if password is not None:
             result, client = self._sdk_connection(bucket,
                                                   host_ip=self.host_ip,
                                                   password=password)
         else:
             result, client = self._sdk_connection(bucket,
                                                   host_ip=self.host_ip)
         print client
         temp = []
         if ddl is not None:
             create_index_1 = 'create index simple_name_' + bucket + " on " + bucket + "(name)"
             create_index_2 = 'create index simple_type_' + bucket + " on " + bucket + "(type)"
             client.n1ql_query(create_index_1).execute()
             client.n1ql_query(create_index_2).execute()
             return
         if query is None:
             for i in range(0, iteration):
                 if i % 2 == 0:
                     test_query = 'select name from ' + bucket + ' where name is not NULL'
                 else:
                     test_query = 'select type from ' + bucket + ' where type is not NULL'
                 rs = client.n1ql_query(N1QLQuery(test_query))
                 for r in rs:
                     temp.append(r)
                     temp = []
         else:
             for i in range(0, iteration):
                 rs = client.n1ql_query(N1QLQuery(query))
                 for r in rs:
                     temp.append(r)
                     temp = []
             print "Finished Querying"
     except Exception, ex:
         print "Exception from execute query"
         print ex
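Example 1 builds every statement by concatenating the bucket name and filter into the query string. When the varying part is a data value rather than an identifier, N1QLQuery also accepts positional ($1, $2, ...) and named ($name) placeholders, as several of the later examples show. A minimal sketch, assuming a connected client object like the one obtained above:

# Positional placeholder: the value is passed after the statement.
rs = client.n1ql_query(N1QLQuery('select name from `default` where type = $1', 'airport'))
for r in rs:
    pass  # iterate to force execution; rows are discarded

# Named placeholder: the value is passed as a keyword argument.
rs = client.n1ql_query(N1QLQuery('select name from `default` where type = $type', type='airport'))
for r in rs:
    pass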
def join_couchbase():
    t1 = time.time()
    for i in range(nb_operations):
        query = N1QLQuery(
            "SELECT a.City FROM Flights_25 f USE KEYS '100' JOIN Airports a ON KEYS f.Dest"
        )
    t2 = time.time()
    # for row in bucket_Flights_25.n1ql_query(query):
    #     print(row)
    print("-Flights_25- join latency time : ", (t2 - t1) / nb_operations)
    print("-Flights_25- join Throughput time : ", nb_operations / (t2 - t1))

    t1 = time.time()
    for i in range(nb_operations):
        query = N1QLQuery(
            "SELECT a.City FROM Flights_50 f USE KEYS '100' JOIN Airports a ON KEYS f.Dest"
        )
    t2 = time.time()
    print("-Flights_50- join latency time : ", (t2 - t1) / nb_operations)
    print("-Flights_50- join Throughput time : ", nb_operations / (t2 - t1))

    t1 = time.time()
    for i in range(nb_operations):
        query = N1QLQuery(
            "SELECT a.City FROM Flights_75 f USE KEYS '100' JOIN Airports a ON KEYS f.Dest"
        )
    t2 = time.time()
    print("-Flights_75- join latency time : ", (t2 - t1) / nb_operations)
    print("-Flights_75- join Throughput time : ", nb_operations / (t2 - t1))

    t1 = time.time()
    for i in range(nb_operations):
        query = N1QLQuery(
            "SELECT a.City FROM Flights f USE KEYS '100' JOIN Airports a ON KEYS f.Dest"
        )
    t2 = time.time()
    print("-Flights- join latency time : ", (t2 - t1) / nb_operations)
    print("-Flights- join Throughput time : ", nb_operations / (t2 - t1))
    async def get_annotation(self, txn, oid, id):
        async for row in self._cb.n1ql_query(
                N1QLQuery(
                    '''
SELECT zoid, tid, state_size, resource, type, state, id, parent_id
FROM `{}`
WHERE
    of = $1 AND id = $2
'''.format(self._bucket), oid, id)):
            row['state'] = base64.b64decode(row['state'])
            return row
Example 4
def get_index_day_kline(index, startdate, enddate):
    cb = Bucket(DBURL)

    cql = 'SELECT * FROM `' + BUCKET_NAME + \
          '` WHERE type="index_kday" and code=$code and date>$startdate and date<$enddate ORDER BY date'
    r = cb.n1ql_query(
        N1QLQuery(cql,
                  code='index_' + index,
                  startdate=startdate,
                  enddate=enddate))
    return map(_['stock'], r)
Example 5
    def next(self, key, doc):
        query = copy.deepcopy(next(self.queries))
        args = query['args'].format(key=key, **doc)

        n1ql_query = N1QLQuery(query['statement'], *eval(args))
        n1ql_query.cross_bucket = True
        n1ql_query.adhoc = False
        n1ql_query.consistency = query.get('scan_consistency',
                                           'not_bounded')

        return n1ql_query
    def getPharmacy(self, attribute_value):

        basequery = 'Select * from `'+self.instance_type + \
            '` where type="cp_pharmacy" and pharmacynpi=$v1'

        query = N1QLQuery(basequery, v1=attribute_value)
        result = None
        for rowresult in self.cb.n1ql_query(query):
            result = rowresult[self.instance_type]

        return result
Example 7
def db_get_last_successful_build(version):
    builds = []
    q = N1QLQuery("SELECT max(build_num) FROM `build-history` WHERE version = '{0}' AND jobType = 'parent_build' AND result = 'passed'".format(version))
    rows = db.n1ql_query(q)
    for row in rows:
        builds.append(row['$1'])
        break
    if builds:
        return builds[0]
    else:
        return 0
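Because the aggregate in Example 7 is not aliased, the result row exposes it under the implicit key '$1'. Aliasing the expression (and passing the version as a query parameter) makes the row self-describing; a sketch under the same assumptions, with an illustrative function name:

def db_get_last_successful_build_aliased(version):
    q = N1QLQuery("SELECT MAX(build_num) AS last_build FROM `build-history` "
                  "WHERE version = $1 AND jobType = 'parent_build' AND result = 'passed'", version)
    for row in db.n1ql_query(q):
        # MAX over an empty set yields null, so fall back to 0 as in the original
        return row['last_build'] or 0
    return 0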
Example 8
    def test_n1ql(self):

        default_bucket = self.cb
        yield from (default_bucket.connect() or asyncio.sleep(0.01))

        q = N1QLQuery("SELECT mockrow")
        it = default_bucket.n1ql_query(q)
        yield from it.future

        data = list(it)
        self.assertEqual('value', data[0]['row'])
Example 9
def get_user_by_email(bucket: Bucket, email: str):
    query_str = f"SELECT *, META().id as doc_id FROM {COUCHBASE_BUCKET_NAME} WHERE type = $type AND email = $email;"
    q = N1QLQuery(
        query_str, bucket=COUCHBASE_BUCKET_NAME, type=USERPROFILE_DOC_TYPE, email=email
    )
    q.consistency = CONSISTENCY_REQUEST
    doc_results = bucket.n1ql_query(q)
    users = results_to_model(doc_results, doc_model=UserInDB)
    if not users:
        return None
    return users[0]
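This example and several others below set consistency to CONSISTENCY_REQUEST, which asks the query service to wait until the index has caught up with mutations made before the request, instead of the default unbounded scan. A minimal sketch of the relevant import and setup, assuming the 2.x Python SDK (bucket and type names here are illustrative):

from couchbase.n1ql import N1QLQuery, CONSISTENCY_REQUEST

q = N1QLQuery("SELECT META().id AS doc_id FROM `users` WHERE type = $type", type="userprofile")
q.consistency = CONSISTENCY_REQUEST  # wait for the index to include earlier mutations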
Example 10
    def n1ql_query(self, n1ql, consistent=False):
        retry_i = 0
        if not n1ql.strip().endswith(";"):
            n1ql = n1ql + ";"

        n1qlObj = N1QLQuery(n1ql)
        n1qlObj.timeout = 3600

        if self._creds is not None:
            n1qlObj.set_option("creds", self._creds)
            # n1qlObj.set_option("creds", [{"user": "******", "pass": "******"}])
            # n1qlObj.adhoc = False
            # n1qlObj.consistency = 'not_bounded'
            # n1qlObj.cross_bucket = True

        if consistent:
            n1qlObj.consistency = CONSISTENCY_REQUEST

        n1qlObj.adhoc = True

        logging.debug("Executing: " + n1ql)

        while True:
            try:
                result = self._cb.n1ql_query(n1qlObj)

                if not n1ql.upper().lstrip(' \t\n\r').startswith(
                        "SELECT") and 'RETURNING' not in n1ql.upper():
                    result.execute()

                logging.info("--- Query executed successfully ---")
                return result

            except KeyboardInterrupt:
                raise
            # except N1QLError as e:
            #     raise
            except:
                logging.error("Failed query %s" % n1ql)
                logging.exception("--- Failed to execute query. ---")

                if retry_i < len(self.retry_timeouts):
                    logging.debug(
                        "--- Waiting for {0} seconds before trying again ---".
                        format(self.retry_timeouts[retry_i]))
                    time.sleep(self.retry_timeouts[retry_i])
                    logging.debug(
                        "--- Trying one more time (attempt #{0}) ---".format(
                            retry_i))
                else:
                    logging.info("--- Giving up on query ---")
                    logging.info(n1ql)
                    return False
                retry_i += 1
    def getPrescription(self, attribute_value):

        basequery = 'Select * from `'+self.instance_type + \
            '` where type="prescription" and prescription_id=$v1'

        query = N1QLQuery(basequery, v1=attribute_value)
        result = None
        for rowresult in self.cb.n1ql_query(query):
            result = rowresult[self.instance_type]

        return result
    def getNDCDrug(self, attribute_value):

        basequery = 'Select * from `'+self.instance_type + \
            '` where type="ndc_drugs" and ndc=$v1'

        query = N1QLQuery(basequery, v1=attribute_value)
        result = None
        for rowresult in self.cb.n1ql_query(query):
            result = rowresult[self.instance_type]

        return result
Example 13
def N1QLFetchAirports(search, field='airportname'):
    query = "SELECT airportname, city FROM `travel-sample` WHERE LOWER(airportname) LIKE $1"
    simple_query = "SELECT meta().id FROM `travel-sample` WHERE LOWER(airportname) LIKE $1"
    param = "%" + search.lower() + "%"
    try:
        q = N1QLQuery(query, param)
        res = bucket.n1ql_query(q).execute()
    except N1QLError as e:
        q = N1QLQuery(simple_query, param)
        docMetas = bucket.n1ql_query(q)
        ids = [meta['id'] for meta in docMetas]
        res = []
        try:
            res = bucket.get_multi(ids)
        except CBErr.CouchbaseNetworkError as e:
            res = e.all_results
            failed = [k for k, v in res.items() if v.value == None]
            failedReplicas = bucket.get_multi(failed, replica=True)
            # TODO: Check for failed gets here too
            res.update(failedReplicas)
    return res
Example 14
    def _execute_n1ql(self, bucket):
        s = ' '.join(self.statement).replace(BUCKET_PLACEHOLDER, bucket.bucket)

        print 'QUERY:', s
        print 'PARAMS:', self.params.values

        nq = N1QLQuery(s, *self.params.values)
        nq.consistency = CONSISTENCY_REQUEST
        # nq.consistent_with_all(bucket)

        # Bug here, PYCBC-290, if we return the iterator
        return bucket.n1ql_query(nq)
def get_doc_results_by_type(bucket: Bucket, *, doc_type: str, skip=0, limit=100):
    query_str = f"SELECT *, META().id as doc_id FROM {config.COUCHBASE_BUCKET_NAME} WHERE type = $type LIMIT $limit OFFSET $skip;"
    q = N1QLQuery(
        query_str,
        bucket=config.COUCHBASE_BUCKET_NAME,
        type=doc_type,
        limit=limit,
        skip=skip,
    )
    q.consistency = CONSISTENCY_REQUEST
    result = bucket.n1ql_query(q)
    return result
Example 16
def get_liste(liste, user):
    print(user)
    answer = 'Contenu de la liste : ' + liste
    print(answer)
    query = N1QLQuery(
        'SELECT * FROM `Listes` WHERE liste=$filtre1 AND user_id=$filtre2',
        filtre1=liste,
        filtre2=user)
    for row in cb.n1ql_query(query):
        answer += '\n'
        answer += row['Listes']['name']
    bot.sendMessage(user, answer)
    async def get_children(self, txn, parent, keys):
        items = []
        async for row in self._cb.n1ql_query(
                N1QLQuery(
                    '''
SELECT zoid, tid, state_size, resource, type, state, id
FROM `{}`
WHERE parent_id = $1 AND id IN $2
'''.format(self._bucket), parent, keys)):
            row['state'] = base64.b64decode(row['state'])
            items.append(row)
        return items
Example 18
def get_events(request):
    query_string = ("SELECT meta(`events`).id, *  FROM `events`")
    query = N1QLQuery(query_string)
    cb = request.couch.open_bucket('events')
    events = []
    for row in cb.n1ql_query(query):
        id = row['id']
        event = row['events']
        event['id'] = id
        events.append(event)

    return events
def show_couchbase_summary():
    try:
        # get last inserted document by timestamp
        last_inserted_query = N1QLQuery('SELECT * FROM %s ORDER BY timestamp ASC default LIMIT 1;' % COUCHBASE_BUCKET)
        last_document = cb_client.n1ql_query(last_inserted_query).get_single_result()
    except HTTPError:
        last_document = None
  
    # get bucket document number
    stats = get_bucket_stats()
    documents_num = stats['itemCount'] 

    # get strongest relation between terms
    strongest_rel_query = N1QLQuery('SELECT * FROM %s WHERE related_terms' % COUCHBASE_BUCKET)
    strongest_rel_terms = []
    couchbase_summary = {
        "documents_num": documents_num,
        "last_document": last_document,
        "strongest_rel_terms": strongest_rel_terms
    }
    return Response(json.dumps(couchbase_summary))
def write_Flights_couchbase():
    t1 = time.time()
    for i in range(nb_operations):
        query = N1QLQuery(
            "INSERT INTO Flights_25 ( KEY, VALUE )VALUES( 'New',{'TaxiIn': 31,'SecurityDelay': 0,'DepTime': 901,'DepDelay': 1,'WeatherDelay': 0,'CRSArrTime': 1750,'id': '1','DayofMonth': 15,'DayOfWeek': 6,'TaxiOut': 22,'Dest': 'JFK','CRSElapsedTime': 350,'ArrDelay': 0,'AirTime': 299,'CarrierDelay': 0,'CRSDepTime': 900,'Diverted': 0,'Distance': 2586,'UniqueCarrier': 'DL','NASDelay': 0,'Cancelled': 0,'TailNum': 'N645DL','Origin': 'SFO','LateAircraftDelay': 0,'Month': 12,'ActualElapsedTime': 349,'Year': 2007,'ArrTime': 1750,'CancellationCode': '} ) RETURNING * ;"
        )
    t2 = time.time()
    print("-Flights_25- Writing latency time : ", (t2 - t1) / nb_operations)
    print("-Flights_25- Writing Throughput time : ", nb_operations / (t2 - t1))

    t1 = time.time()
    for i in range(nb_operations):
        query = N1QLQuery(
            "INSERT INTO Flights_50 ( KEY, VALUE )VALUES( 'New',{'TaxiIn': 31,'SecurityDelay': 0,'DepTime': 901,'DepDelay': 1,'WeatherDelay': 0,'CRSArrTime': 1750,'id': '1','DayofMonth': 15,'DayOfWeek': 6,'TaxiOut': 22,'Dest': 'JFK','CRSElapsedTime': 350,'ArrDelay': 0,'AirTime': 299,'CarrierDelay': 0,'CRSDepTime': 900,'Diverted': 0,'Distance': 2586,'UniqueCarrier': 'DL','NASDelay': 0,'Cancelled': 0,'TailNum': 'N645DL','Origin': 'SFO','LateAircraftDelay': 0,'Month': 12,'ActualElapsedTime': 349,'Year': 2007,'ArrTime': 1750,'CancellationCode': '} ) RETURNING * ;"
        )
    t2 = time.time()
    print("-Flights_50- Writing latency time : ", (t2 - t1) / nb_operations)
    print("-Flights_50- Writing Throughput time : ", nb_operations / (t2 - t1))

    t1 = time.time()
    for i in range(nb_operations):
        query = N1QLQuery(
            "INSERT INTO Flights_75 ( KEY, VALUE )VALUES( 'New',{'TaxiIn': 31,'SecurityDelay': 0,'DepTime': 901,'DepDelay': 1,'WeatherDelay': 0,'CRSArrTime': 1750,'id': '1','DayofMonth': 15,'DayOfWeek': 6,'TaxiOut': 22,'Dest': 'JFK','CRSElapsedTime': 350,'ArrDelay': 0,'AirTime': 299,'CarrierDelay': 0,'CRSDepTime': 900,'Diverted': 0,'Distance': 2586,'UniqueCarrier': 'DL','NASDelay': 0,'Cancelled': 0,'TailNum': 'N645DL','Origin': 'SFO','LateAircraftDelay': 0,'Month': 12,'ActualElapsedTime': 349,'Year': 2007,'ArrTime': 1750,'CancellationCode': '} ) RETURNING * ;"
        )
    t2 = time.time()
    print("-Flights_75- Writing latency time : ", (t2 - t1) / nb_operations)
    print("-Flights_75- Writing Throughput time : ", nb_operations / (t2 - t1))

    t1 = time.time()
    for i in range(nb_operations):
        query = N1QLQuery(
            "INSERT INTO Flights ( KEY, VALUE )VALUES( 'New',{'TaxiIn': 31,'SecurityDelay': 0,'DepTime': 901,'DepDelay': 1,'WeatherDelay': 0,'CRSArrTime': 1750,'id': '1','DayofMonth': 15,'DayOfWeek': 6,'TaxiOut': 22,'Dest': 'JFK','CRSElapsedTime': 350,'ArrDelay': 0,'AirTime': 299,'CarrierDelay': 0,'CRSDepTime': 900,'Diverted': 0,'Distance': 2586,'UniqueCarrier': 'DL','NASDelay': 0,'Cancelled': 0,'TailNum': 'N645DL','Origin': 'SFO','LateAircraftDelay': 0,'Month': 12,'ActualElapsedTime': 349,'Year': 2007,'ArrTime': 1750,'CancellationCode': '} ) RETURNING * ;"
        )
    t2 = time.time()
    print("-Flights- Writing latency time : ", (t2 - t1) / nb_operations)
    print("-Flights- Writing Throughput time : ", nb_operations / (t2 - t1))
def read_Flights_couchbase():
    t1 = time.time()
    for i in range(nb_operations):
        query = N1QLQuery("SELECT * FROM Flights_25 WHERE Dest = 'JFK'")
    # for row in bucket_Flights_25.n1ql_query(query):
    #     print(row)
    t2 = time.time()
    print("-Flights_25- Reading latency time : ", (t2 - t1) / nb_operations)
    print("-Flights_25- Reading Throughput time : ", nb_operations / (t2 - t1))

    t1 = time.time()
    for i in range(nb_operations):
        query = N1QLQuery("SELECT * FROM Flights_50 WHERE Dest = 'JFK'")
    # for row in bucket.n1ql_query(query):
    #     print(row)
    t2 = time.time()
    print("-Flights_50- Reading latency time : ", (t2 - t1) / nb_operations)
    print("-Flights_50- Reading Throughput time : ", nb_operations / (t2 - t1))

    t1 = time.time()
    for i in range(nb_operations):
        query = N1QLQuery("SELECT * FROM Flights_75 WHERE Dest = 'JFK'")
    # for row in bucket.n1ql_query(query):
    #     print(row)
    t2 = time.time()
    print("-Flights_75- Reading latency time : ", (t2 - t1) / nb_operations)
    print("-Flights_75- Reading Throughput time : ", nb_operations / (t2 - t1))

    t1 = time.time()
    for i in range(nb_operations):
        query = N1QLQuery("SELECT * FROM Flights WHERE Dest = 'JFK'")
    # for row in bucket.n1ql_query(query):
    #     print(row)
    t2 = time.time()
    print("-Flights- Reading latency time : ", (t2 - t1) / nb_operations)
    print("-Flights- Reading Throughput time : ", nb_operations / (t2 - t1))
Example 22
def clean_failedinstsall_vms(poolId):
    print("*** Cleaning failedInstall VMs ***")
    query = 'select * from `QE-server-pool` where ("%s" in poolId or poolId="%s") and state like ' \
            '"failedInstall%%";' % (str(poolId), str(poolId))
    print('Running: %s' % query)
    row_iter = cb.n1ql_query(N1QLQuery(query))
    fixed_ips = []
    index = 1
    for row in row_iter:
        try:
            print('********************************')
            ipcount = len(row['QE-server-pool'])
            server = row['QE-server-pool']['ipaddr']
            print('Server#%d: %s' % (index, server))
            index = index + 1
            ssh = paramiko.SSHClient()
            ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
            ssh.connect(server, username=username, password=password, timeout=10)
            uninstall_clean_cb(ssh, server)
            cmds = 'rpm -qa | grep couchbase-server'
            out_2 = ssh_exec_cmd(ssh, server, cmds)
            cmds = 'yum -y install yum-utils'
            ssh_exec_cmd(ssh, server, cmds)
            cmds = 'yum-complete-transaction --cleanup-only; yum clean all'
            ssh_exec_cmd(ssh, server, cmds)
            cmds = 'iptables -F'
            ssh_exec_cmd(ssh, server, cmds)
            # cmds = 'mkdir /var/lib/rpm/backup | cp -a /var/lib/rpm/__db* /var/lib/rpm/backup/ | ' \
            #        'rm -f /var/lib/rpm/__db.[0-9][0-9]* | rpm --quiet -qa | rpm --rebuilddb | yum clean all'
            # ssh_exec_cmd(ssh, server, cmds)
            if out_2 == '':
                fixed_ips.append(server)
                setpoolstate(poolId, server, "failedInstall", "available")
                # ssh_stdin, ssh_stdout, ssh_stderr = ssh.exec_command('reboot')

        except Exception as e:
            print('Connection Failed: %s' % server)
            print(e)
            pass
        ssh.close()
        print('********************************\n\n\n')

    if (len(fixed_ips) > 0):
        print("*** Fixed IPs list: ***")
        for ip in fixed_ips:
            # setpoolstate(poolId,ip,"failedInstall","available")
            print(ip)
    else:
        print("*** Fixed IPs: None ***")
    async def initialize(self, loop=None):
        from acouchbase.bucket import Bucket

        self._cb = Bucket(os.path.join(self._dsn, self._bucket),
                          username=self._username,
                          password=self._password)
        await self._cb.connect()

        installed_indexes = []
        primary_installed = False
        async for row in self._cb.n1ql_query(
                N1QLQuery('select * from system:indexes')):
            if row['indexes']['namespace_id'] != self._bucket:
                continue
            if row['indexes'].get('is_primary'):
                primary_installed = True
            else:
                installed_indexes.append(
                    row['indexes']['index_key'][0].strip('`'))

        if len(installed_indexes) == 0:
            logger.info('Initializing bucket, can take some time')

        if not primary_installed:
            logger.warning('Creating primary index')
            async for row in self._cb.n1ql_query(  # noqa
                    'CREATE PRIMARY INDEX ON {bucket}'.format(
                        bucket=self._bucket)):
                pass

        for field in self._indexes_fields:
            if field in installed_indexes:
                continue
            statement = self._create_statement.format(bucket=self._bucket,
                                                      index_name=field,
                                                      field_name=field)
            logger.warning('Creating index {}'.format(statement))
            async for row in self._cb.n1ql_query(  # noqa
                    statement.format(bucket=self._bucket)):
                pass

        for field in get_index_fields():
            if 'json.{}'.format(field) in installed_indexes:
                continue
            statement = self._create_statement.format(bucket=self._bucket,
                                                      field='json.' + field,
                                                      index_name='json_' +
                                                      field)
            logger.warning('Creating index {}'.format(statement))
            async for row in self._cb.n1ql_query(  # noqa
                    statement.format(bucket=self._bucket)):
                pass
    def getClaimTransaction(self, attribute_value):

        basequery = 'Select * from `'+self.instance_type + \
            '` where type="claim" and auth_id=$v1 and sequenceNumber=$v2'

        query = N1QLQuery(basequery,
                          v1=attribute_value[0],
                          v2=attribute_value[1])
        result = None
        for rowresult in self.cb.n1ql_query(query):
            result = rowresult[self.instance_type]

        return result
Example 25
def query_city(bkt, city):
    query = N1QLQuery(
        'SELECT airportname FROM `travel-sample` '
        'WHERE city=$1 AND type="airport"', city)

    # Uncomment the following line to make the query optimized for
    # repeated invocations.
    # The query string is compiled, and the compiled form is
    # stored in the client (keyed by the query string itself).
    #
    # query.adhoc = False
    return bkt.n1ql_query(query)
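A possible call pattern for this helper, assuming a Bucket connected to travel-sample is available as bkt (the names are illustrative):

for row in query_city(bkt, 'Reno'):
    print(row['airportname'])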
Example 26
def get_jsons_from_type(type):
	query = N1QLQuery('SELECT * FROM `dashboard` WHERE type=$filtre1', filtre1=type)
	json_liste = cb.n1ql_query(query)
	
	for row in json_liste: 
		cb_status = row["dashboard"]["etat"]
		jeedom_status = get_status(row["dashboard"]["id_etat"])

		if cb_status != jeedom_status:
			id = row["dashboard"]["id_etat"]
			key = 'key::' + type + '::' + str(id)
			upsert_doc_from_key(key,jeedom_status)
			logger.info('id : %s , cb_status : %s => %s' % (id, cb_status, jeedom_status))
Example 27
    def findall(self, fromloc, toloc):
        """
        Return flights information, cost and more for a given flight time
        and date
        """

        queryleave = convdate(request.args['leave'])
        queryprep = "SELECT faa as fromAirport,geo FROM `travel-sample` \
                    WHERE airportname = $1 \
                    UNION SELECT faa as toAirport,geo FROM `travel-sample` \
                    WHERE airportname = $2"

        res = db.n1ql_query(N1QLQuery(queryprep, fromloc, toloc))
        flightpathlist = [x for x in res]

        # Extract the 'toAirport' and 'fromAirport' values.
        queryto = next(x['toAirport'] for x in flightpathlist
                       if 'toAirport' in x)
        queryfrom = next(x['fromAirport'] for x in flightpathlist
                         if 'fromAirport' in x)

        queryroutes = "SELECT a.name, s.flight, s.utc, r.sourceairport, r.destinationairport, r.equipment \
                        FROM `travel-sample` AS r \
                        UNNEST r.schedule AS s \
                        JOIN `travel-sample` AS a ON KEYS r.airlineid \
                        WHERE r.sourceairport = $1 AND r.destinationairport = $2 AND s.day = $3 \
                        ORDER BY a.name ASC;"

        # http://localhost:5000/api/flightpaths/Nome/Teller%20Airport?leave=01/01/2016
        # should produce query with OME, TLA faa codes
        resroutes = db.n1ql_query(
            N1QLQuery(queryroutes, queryto, queryfrom, queryleave))
        routelist = []
        for x in resroutes:
            x['flighttime'] = math.ceil(random() * 8000)
            x['price'] = math.ceil(x['flighttime'] / 8 * 100) / 100
            routelist.append(x)
        response = make_response(jsonify({"data": routelist}))
        return response
Example 28
def db_get_builds_by_number(version, buildNum):
    #
    # Get all jobs in a particular build
    #
    rows = []
    buildList = []

    query = "SELECT * FROM `build-history` WHERE version='{0}' AND build_num={1} ORDER BY distro".format(version, buildNum)
    q = N1QLQuery(query)
    rows = db.n1ql_query(q)
    for row in rows:
        buildList.append(row['build-history'])
    return buildList
Example 29
    def test_timeout(self):
        q = N1QLQuery('SELECT foo')
        q.timeout = 3.75
        self.assertEqual('3.75s', q._body['timeout'])
        self.assertEqual(3.75, q.timeout)

        def setfn():
            q.timeout = "blah"

        self.assertRaises(ValueError, setfn)

        # Unset the timeout
        q.timeout = 0
        self.assertFalse('timeout' in q._body)
Example 30
    def next(self, key, doc):
        statement, args, scan_consistency = next(self.queries)
        if 'key' in args:
            args = [key]
        else:
            args = args.format(**doc)
            args = eval(args)

        n1ql_query = N1QLQuery(statement, *args)
        n1ql_query.cross_bucket = True
        n1ql_query.adhoc = False
        n1ql_query.consistency = scan_consistency or 'not_bounded'

        return n1ql_query
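Examples 5 and 30 both mark the generated queries with adhoc = False and cross_bucket = True: with adhoc = False the SDK prepares the statement once and reuses the prepared form on later executions, which suits workload generators replaying the same templates, while cross_bucket = True signals that the statement may touch buckets other than the one the connection was opened on so that credentials for those buckets are sent along. A minimal sketch, assuming a connected bucket object and illustrative bucket and field names:

q = N1QLQuery("SELECT c.name FROM `bucket-a` a JOIN `bucket-b` c ON KEYS a.category_id "
              "WHERE a.id = $1", key)
q.adhoc = False         # prepare once, reuse the plan on subsequent executions
q.cross_bucket = True   # statement spans more than one bucket
q.consistency = 'not_bounded'
rows = bucket.n1ql_query(q)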