def main():
    # Entry point: read SQL from the CLI arg or stdin, translate it into a
    # shell command string, and either print it or execute it under bash.
    args = docopt(__doc__)
    sql_str = args['SQL'] or sys.stdin.read()
    debug = args['--debug']
    execute = args['--execute']
    random_seed = args['--random-seed']
    if debug:
        logging.basicConfig(level=logging.DEBUG)
    parsed = parse(sql_str)
    relations, conditions = get_relations_and_conditions(parsed)
    # An empty sample_size in the parse result means "no sampling".
    sample_size = parsed.sample_size if parsed.sample_size != '' else None
    query = Query(
        relations,
        conditions=conditions,
        columns=parsed.column_definitions,
        sample_size=sample_size,
        random_seed=random_seed,
        is_top_level=True)
    result = query.execute()
    result_str = result.get_cmd_str(output_column_names=True)
    if execute:
        # explicitly use bash instead of the default for subprocess(..., shell=True) which is sh
        result_str = "({})".format(result_str)
        # NOTE(review): the Popen handle is never waited on -- confirm that
        # fire-and-forget is intended here.
        proc = subprocess.Popen(['/bin/bash', '-c', result_str])
    else:
        result_str = result_str + "\n"
        print(result_str, end="")
def load(self):
    # Build and run a SELECT over this map's table, returning a
    # PolygonDictionary of GeneralizedPolygon objects keyed by primary key.
    query = Query(self._connection)
    query.SELECT("gid", self.name)
    # the_geom is fetched as WKT via AsText().
    query.SELECT("the_geom", self.name, "AsText")
    query.SELECT(self._primary, self.name)
    for key, gen in self._fields:
        query.SELECT(key, self.name)
    whereList = []
    try:
        # Restrict to the configured subset, if any; a non-iterable
        # _subset (e.g. None) raises TypeError and means "no filter".
        for entry in self._subset:
            item = self.name + "." + self._primary + "='" + entry + "'"
            whereList.append(item)
        query.where = " OR ".join(whereList)
    except TypeError:
        pass
    polyDict = PolygonDictionary()
    pairs = []
    for entry in query:
        # Collect the per-field (generalization, value) pairs for this row.
        d = []
        for key, gen in self._fields:
            d.append((gen, entry[key]))
        data = Dictionary()
        data.update(d)
        p = GeneralizedPolygon(enum.ABSTRACT, entry["gid"],
                               entry["the_geom"], data, self)
        polyKey = keygen(entry[self._primary])
        pairs.append((polyKey, p))
    polyDict.update(pairs)
    return polyDict
def run(port, name, config):
    # Minimal UDP DNS server (Python 2): answers queries for our CDN name
    # from config, proxies everything else, and stubs unresolvable hosts.
    STUB_RESPONSE = '0.0.0.0'
    try:
        s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        s.bind(('', port))
    except Exception as e:
        print 'failed to create socket %s' % e
        sys.exit(1)
    print 'Running dns server'
    try:
        while 1:
            data, addr = s.recvfrom(1024)
            p = Query(data)
            # if host is our cdn target
            if name in p.domain:
                response = handle_response(addr[0], config)
            else:
                response = p.question(p.domain)
            if not response:
                # couldn't find host
                response = STUB_RESPONSE
            s.sendto(p.answer(response), addr)
            print '%s -> %s' % (p.domain, response)
    except KeyboardInterrupt:
        print 'Keyboard Interrupt'
        s.close()
def _delete(self):
    """Remove this record's row from its table by primary key id."""
    sql = 'DELETE FROM %s WHERE id=%%s' % self.__table__
    Query.execute(sql, self.id, self.__db__)
    return True
def test_english_highlighted_noop(self):
    """An atom that matches no phrase leaves the English rendering untouched."""
    words = ['foo', 'bar', 'baz']
    phrases = [Phrase('phrase type', {'english': word}, []) for word in words]
    query = Query('the_table', phrases)
    query.set_base_atoms([Atom('atom type', 'ba')])
    self.assertEqual(query.get_english_highlighted(), 'Foo bar baz?')
def growth(playerId):
    # Monthly batting-average trend for one player: one averaged point per
    # (year, month) group, plus the matching (year, month) labels.
    sql = Query()
    # NOTE(review): playerId is concatenated straight into the SQL text --
    # an injection risk unless it is guaranteed numeric upstream; confirm.
    queries = sql.quering_select("select Round(AVG(AVG1),3) from HitterGames where playerId="+str(playerId)+" Group by year,month")
    points = getPointArray(tupleToList(queries))
    queries = tupleToList(sql.quering_select("select year,month from HitterGames where playerId="+str(playerId)+" Group by year,month"))
    return {"date": queries, "points": points}
def __init__(self, dbconn, table):
    """Query bound to a single target table, with an (initially empty) WHERE."""
    Query.__init__(self, dbconn)
    # Clause slots start empty; target() fills in the table.
    self._target = None
    self._where = None
    self.target(table)
def random_tweet():
    """Pick a random tweet, find a related Wikipedia article, render the pair.

    The tweet is persisted to ``tweets/<id>.json`` for later rating lookups.
    Retries with a fresh tweet until an article is found -- iteratively,
    replacing the original recursive retry which could hit the recursion
    limit on a long unlucky streak.
    """
    while True:
        tweet = TwitterAPI.get_random_tweet()
        tweet_id = tweet['id']
        # Store the random tweet
        with open('tweets/%d.json' % tweet_id, 'w') as f:
            f.write(json.dumps(tweet))
        # Get our additional information
        query = Query(tweet['text'] + " " + tweet['user']['name'])
        query.run()
        article = query.get_result_article()
        article_file_name = query.get_result_filename()
        if article is None:
            # We need to find another tweet
            continue
        # Replace all the relative wiki links with absolute ones
        article = article.replace('/wiki/', 'https://en.wikipedia.org/wiki/')
        markup = TwitterAPI.get_tweet_markup(tweet_id)
        parameters = {
            'article_html': article,
            'article_filename': article_file_name,
            'tweet_html': markup,
            'ratings': [1, 2, 3, 4, 5],
            'tweet_id': tweet_id,
        }
        return render_template('tweet.html', **parameters)
def textBasedUi():
    """Interactive query loop: prompt the user until they type 'quit'.

    Each query is weighted with NNN or LTC (chosen by the module-level
    ``queryWeight`` flag), scored against the page index, and the ranked
    results are displayed.

    Fixes over the original: removed the unused ``userChoice`` variable,
    hoisted the duplicated ``scoring`` call out of the if/else, and removed
    a stray unterminated triple-quote at the end of the function.
    """
    query = Query(pIndex)
    userInput = ""
    cosines = 0  # holds the list of cosine values for page ranking
    while userInput != 'quit':
        print("Enter a query, type (quit) to exit")
        userInput = getWord()
        # convert list of normalized words from user input into single string
        userInput = ' '.join(userInput)
        if queryWeight == 'nnn':
            print("Weighting Query: NNN")
            qWeights = queryNNN(userInput)
        else:
            print("Weighting Query: LTC")
            qWeights = queryLTC(userInput)
        cosines = scoring(qWeights)
        if userInput != 'quit':
            displayResults(query.stringQuery(userInput), cosines)
    print("QUITTING")
def flash(sqlsession, token, device):
    """Flash ``device`` with ``token`` via the backend API socket.

    ``token`` and ``device`` may be passed as integer ids (resolved through
    ``sqlsession``) or as already-loaded model instances.

    Returns ``(error, errors)`` where ``errors`` is a list of
    ``(field, message)`` pairs.

    BUG FIX: the original reset both parameters to ``None`` before the int
    checks, which discarded the caller's values entirely and made the
    function always report a missing device.
    """
    error = False
    errors = []
    # Resolve integer ids to model instances; instances pass through as-is.
    if type(token) == int:
        token = sqlsession.query(models.Token).filter_by(id=token).first()
    if type(device) == int:
        device = sqlsession.query(models.Device).filter_by(id=device).first()
    if not device:
        error = True
        errors.append(('device', 'Device does not exist.'))
    elif not token:
        error = True
        errors.append(('token', 'Token does not exist.'))
    else:
        try:
            # Register a temporary webui token, send the flash command,
            # then unregister -- all over one TCP connection.
            s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            s.connect((config.api_host, config.api_port))
            temporary_token = helpers.generate_token()
            q = Query()
            q.create_register_webui(config.webui_token, temporary_token)
            s.send(q.to_command())
            q.create_flash(temporary_token, token.value, device.pubkey)
            s.send(q.to_command())
            q.create_unregister(temporary_token)
            s.send(q.to_command())
            s.close()
        except Exception:
            error = True
            errors.append(('flash', 'Connection to device failed.'))
    return error, errors
def find(cls, limit=1000, where=None, order="", operator_where=Query.AND):
    """Run a SELECT for ``cls`` and return the matching records (or []).

    ``where`` maps column names either to plain values or to
    ``(operator, value)`` tuples, e.g.::

        .find(Person, where={'code_language': (Query.IN, ['ruby', 'python'])})

    BUG FIX: the original deleted keys from the caller's ``where`` dict;
    we now work on a copy so the argument is never mutated.
    """
    where = dict(where) if where else {}
    q = Query(cls).select('*')
    deleted_keys = []
    for key, values in where.items():
        if isinstance(values, tuple):
            # Tuple form: (operator, value); lists become tuples so the
            # query layer can render them as SQL tuples.
            val = values[1]
            if isinstance(values[1], list):
                val = tuple(values[1])
            q = q.where({key: val}, values[0])
            deleted_keys.append(key)
    for key in deleted_keys:
        del where[key]
    # Remaining plain key/value pairs go through the default operator.
    q.where(where, operator_where=operator_where).limit(limit).order(order)
    result = query(q)
    if cls.__name__ in result:
        return result[cls.__name__]
    return []
def requestAvatarId(self, cred):  # pylint: disable=R0201
    """get user id from database"""
    # The username may carry a command: "adduser<MARK>name<MARK>password".
    args = cred.username.split(SERVERMARK)
    if len(args) > 1:
        if args[0] == 'adduser':
            # Create the player first, then fall through to authentication.
            cred.username = args[1]
            password = args[2]
            with Transaction():
                query = Query(
                    'insert into player(name,password) values(?,?)',
                    list([cred.username.decode('utf-8'),
                          password.decode('utf-8')]))
                if not query.success:
                    if query.msg.startswith('ERROR: constraint failed') \
                            or 'not unique' in query.msg:
                        # Duplicate name: report a translated, client-safe message.
                        template = m18nE('User %1 already exists')
                        logInfo(m18n(template, cred.username))
                        query.msg = srvMessage(template, cred.username)
                    else:
                        logInfo(query.msg)
                    return fail(credError.UnauthorizedLogin(query.msg))
        elif args[1] == 'deluser':
            # NOTE(review): the adduser branch tests args[0] but this one
            # tests args[1] -- looks inconsistent; confirm. Deletion is
            # not implemented in any case.
            pass
    query = Query('select id, password from player where name=?',
                  list([cred.username.decode('utf-8')]))
    if not len(query.records):
        template = 'Wrong username: %1'
        logInfo(m18n(template, cred.username))
        return fail(credError.UnauthorizedLogin(
            srvMessage(template, cred.username)))
    userid, password = query.records[0]
    # checkPassword uses md5 which cannot handle unicode strings (python 2.7)
    defer1 = maybeDeferred(cred.checkPassword, password.encode('utf-8'))
    defer1.addCallback(DBPasswordChecker._checkedPassword, userid)
    return defer1
def select(*columns):
    """Start a SELECT query on the model's shared connection.

    With no arguments the query selects nothing yet; otherwise the given
    columns are pre-selected.
    """
    query = Query(Model.conn.cursor())
    if columns:
        query.select(*columns)
    return query
def simmilar(playerId):
    # Returns the ids of the three players whose batting average is closest
    # at-or-above this player's.  (Name keeps its original, misspelled,
    # public spelling -- callers depend on it.)
    sql = Query()
    # NOTE(review): p is the full result list from tupleToList, not a scalar,
    # yet str(p) is interpolated into the next SQL statement -- confirm the
    # rendered text is what the database expects.  The string concatenation
    # is also an injection risk if playerId is not guaranteed numeric.
    p = tupleToList(sql.quering_select("Select AVG from HitterProfiles where playerId="+str(playerId)))
    players_avg = tupleToList(sql.quering_select("Select playerId from HitterProfiles where (AVG-"+str(p)+")>=0 and playerId!="+str(playerId)+" ORDER BY (AVG-"+str(p)+")"))
    result = []
    # Assumes at least three qualifying players exist; IndexError otherwise.
    result.append(players_avg[0])
    result.append(players_avg[1])
    result.append(players_avg[2])
    return result
def test_where_does_not_mutate_query(self):
    """A second where() call must not change the original query object."""
    with mock.patch("query.Repo") as Repo:
        base = Query(TunaCasserole).where(my_attr=5)
        base.where(my_attr=3)  # result discarded: where() returns a new query
        list(base)
        repo = Repo.return_value
        repo.where.assert_called_with([], my_attr=5)
        repo.where.return_value.select.assert_called_with(
            "id", "created_at", "updated_at", "my_attr")
def reserveGameId(self, gameid):
    """Try to claim the game id proposed by the game server.

    We insert it into our local database so the same gameid is used
    everywhere; if the row cannot be inserted the id is already taken.
    """
    with Internal.db:
        insert = Query(
            'insert into game(id,seed) values(?,?)',
            (gameid, self.connection.url),
            mayFail=True, failSilent=True)
        if insert.rowcount() != 1:
            return Message.NO
    return Message.OK
def exist(cls, where=None, uuid=None):
    """Return True when at least one row of ``cls`` matches the criteria."""
    where = where or {}
    if uuid:
        where['uuid'] = uuid
    q = Query(cls).select(["count(*)"]).where(where).limit(1)
    sql, params = q.query()
    # my_connection() hands back the shared cursor, so execute/fetch pair up.
    conn = connection.my_connection()
    conn.execute(sql, params)
    rows = conn.fetchall()
    return rows[0][0] > 0
def test_validation(self):
    """A query validates only when its phrases use distinct atoms."""
    atom_a = Atom('a', 'a')
    atom_b = Atom('b', 'b')
    known_atoms = [atom_a, atom_b]
    duplicated = Phrase('phrase aa', {}, [atom_a, atom_a])
    distinct = Phrase('phrase bb', {}, [atom_a, atom_b])
    self.assertTrue(Query('query good', [distinct]).validate(known_atoms))
    self.assertFalse(Query('query bad', [duplicated]).validate(known_atoms))
def test_ordering_does_not_mutate(self):
    """order_by() must return a new query instead of mutating the receiver."""
    with mock.patch("query.Repo") as Repo:
        base = Query(TunaCasserole)
        base.order_by(id="asc")  # discarded: base stays unordered
        list(base.order_by(id="desc"))
        repo = Repo.return_value
        repo.order_by.assert_called_with(id="desc")
        ordered = repo.order_by.return_value
        ordered.select.assert_called_with(
            "id", "created_at", "updated_at", "my_attr")
def get(self, filename):
    """
    Return an object for the given filename.
    """
    # Normalize to an absolute, symlink-free path before querying.
    filename = os.path.realpath(filename)
    if not os.path.exists(filename):
        raise OSError('no such file or directory %s' % filename)
    q = Query(self, filename=filename)
    # kaa coroutine style: yield the InProgress to wait for the query to
    # finish, then yield the result as this coroutine's return value.
    yield kaa.inprogress(q)
    yield q.get()
def process_query(session, toklist, result):
    """ Check whether the parse tree describes a query, and if so,
        execute the query, store the query answer in the result dictionary
        and return True """
    q = Query(session)
    if not q.parse(toklist, result):
        # Not parseable as a query at all.
        # if Settings.DEBUG:
        #     print("Unable to parse query, error {0}".format(q.error()))
        result["error"] = q.error()
        return False
    if not q.execute():
        # This is a query, but its execution failed for some reason: return the error
        # if Settings.DEBUG:
        #     print("Unable to execute query, error {0}".format(q.error()))
        result["error"] = q.error()
        return True
    # Successful query: return the answer in response
    result["response"] = q.answer()
    # ...and the query type, as a string ('Person', 'Entity', 'Title' etc.)
    result["qtype"] = qt = q.qtype()
    result["key"] = q.key()
    if qt == "Person":
        # For a person query, add an image (if available)
        img = get_image_url(q.key(), enclosing_session=session)
        if img is not None:
            result["image"] = dict(
                src=img.src,
                width=img.width,
                height=img.height,
                link=img.link,
                origin=img.origin,
                name=img.name,
            )
    return True
def __init__(self, dbconn, table):
    """Insert-style query bound to one target table."""
    Query.__init__(self, dbconn)
    # Clause slots start empty; they are filled in by the builder methods.
    self._target = None
    self._cols = None
    self._values = None
    self._values_cache = None
    self._options = {}
    # Only the 'ignore' option is valid for this statement type.
    self._opt_allow = ['ignore']
    self.target(table)
def test_sql(self):
    """Phrases of every role combine into one well-formed SELECT statement."""
    phrases = [
        Phrase('actor', {'dims': 'athlete'}, []),
        Phrase('actor', {'dims': 'country'}, []),
        Phrase('medals', {'stats': 'sum(gold_medals)'}, []),
        Phrase('medals', {'stats': 'sum(silver_medals)', 'limit': '500'}, []),
        Phrase('most', {'orderbys': 'sum(gold_medals) desc', 'limit': '300'}, []),
        Phrase('when', {'conditions': 'year = 2008'}, []),
        Phrase('what_sport', {'conditions': "sport = 'Swimming'"}, []),
    ]
    expected = (
        "SELECT athlete, country, sum(gold_medals), sum(silver_medals) "
        "FROM medal_table "
        "WHERE year = 2008 AND sport = 'Swimming' "
        "GROUP BY athlete, country "
        "ORDER BY sum(gold_medals) desc LIMIT 300"
    )
    self.assertEqual(Query('medal_table', phrases).get_sql(), expected)
def search(self, search_terms, page_size=25):
    """Run ``search_terms`` against the service and return a Results object."""
    template_url = self.description.get_best_template()
    query = Query(template_url)
    # Seed the query with the caller's terms and requested page size.
    query.searchTerms = search_terms
    query.count = page_size
    return Results(query, agent=self.agent)
def test_to_and_from_dict(self):
    """Round-tripping a Query through as_dict/from_dict preserves all fields."""
    x = Query("a", 10, True, "blahblah", ["bar"])
    x.debug = True
    z = Query.from_dict(x.as_dict())
    # assertEquals is a deprecated alias (removed in Python 3.12);
    # use assertEqual.
    self.assertEqual(x.text, z.text)
    self.assertEqual(x.max_hits, z.max_hits)
    self.assertEqual(x.exact_match, z.exact_match)
    self.assertEqual(x.current_filename, z.current_filename)
    self.assertEqual(x.open_filenames, z.open_filenames)
    self.assertEqual(x.debug, z.debug)
class DatabasePolygonMap(PolygonMap):
    # PolygonMap backed by a SQL/PostGIS database connection.

    def __init__(self, mapSource, connection, subset):
        PolygonMap.__init__(self, mapSource, subset)
        self._connection = connection
        # Lazily-built, reusable query for loadMatchingPolygonData.
        self._matchQuery = None

    def load(self):
        # Build and run a SELECT over this map's table, returning a
        # PolygonDictionary of GeneralizedPolygon objects keyed by primary key.
        query = Query(self._connection)
        query.SELECT("gid", self.name)
        # the_geom is fetched as WKT via AsText().
        query.SELECT("the_geom", self.name, "AsText")
        query.SELECT(self._primary, self.name)
        for key, gen in self._fields:
            query.SELECT(key, self.name)
        whereList = []
        try:
            # Restrict to the configured subset; a non-iterable _subset
            # (e.g. None) raises TypeError and means "no filter".
            for entry in self._subset:
                item = self.name + "." + self._primary + "='" + entry + "'"
                whereList.append(item)
            query.where = " OR ".join(whereList)
        except TypeError:
            pass
        polyDict = PolygonDictionary()
        pairs = []
        for entry in query:
            d = []
            for key, gen in self._fields:
                d.append((gen, entry[key]))
            data = Dictionary()
            data.update(d)
            p = GeneralizedPolygon(enum.ABSTRACT, entry["gid"],
                                   entry["the_geom"], data, self)
            polyKey = keygen(entry[self._primary])
            pairs.append((polyKey, p))
        polyDict.update(pairs)
        return polyDict

    def loadMatchingPolygonData(self, mapName, fieldID):
        # Fetch field data for the polygon in mapName whose geometry exactly
        # equals ours (ST_Equals).  The query template is built once and
        # re-parameterized per call.
        if not self._matchQuery:
            self._matchQuery = Query(self._connection)
            self._matchQuery.FROM("%(mapName)s")
            for key, gen in self._fields:
                self._matchQuery.SELECT(key, self.name)
            self._matchQuery.where = "%(mapName)s.gid=%(fieldID)s AND ST_Equals"
            self._matchQuery.where += "(%(mapName)s.the_geom," + self.name
            self._matchQuery.where += ".the_geom);"
            # NOTE(review): the next line references SELECT without calling
            # it -- it has no effect; confirm whether a call was intended.
            self._matchQuery.SELECT
        self._matchQuery.setVariable("mapName", mapName)
        self._matchQuery.setVariable("fieldID", fieldID)
        data = []
        for entry in self._matchQuery:
            for key, gen in self._fields:
                data.append((gen, entry[key]))
        return data
def run(self):
    # Connection service loop: enforce ping/pong keepalive, read incoming
    # bytes, and dispatch complete CRLF-terminated messages until shutdown.
    while self.running:
        if time.time() - self.last_ping > config.ping_interval + config.pong_interval:
            # Peer missed the whole ping+pong window: drop the connection.
            self.logger.info('Got no PING from %s in time. Closing connection' % self.other)
            return self.shutdown()
        elif time.time() - self.last_ping > config.ping_interval and not self.pinged:
            # Our turn to ping; only once per interval (self.pinged latch).
            query = Query()
            query.create_ping(config.server_token)
            self.connection.sendall(query.to_command())
            self.pinged = True
            self.logger.info('Sent PING to %s.' % self.other)
        try:
            self.update()
            data = self.connection.recv(1024)
            self.data += data
            if len(data) == 0:
                # Peer closed the socket.
                return self.shutdown()
            data_stack = self.data.split(b'\r\n')
            # The final element is an incomplete message; keep it buffered.
            self.data = data_stack.pop()
            for data in [x.decode('utf-8', errors='replace') for x in data_stack]:
                self.update_queues(data)
        except socket.timeout as e:
            if e.args[0] == 'timed out':
                # Normal idle timeout: loop again to run the keepalive checks.
                continue
            else:
                self.logger.exception('Shutting down connection thread (%s, %d) due to caught exception:' % (self.address[0], self.address[1]))
                self.logger.exception(e)
                return self.shutdown()
        except OSError as e:
            if e.args[0] == 9:
                # errno 9 (EBADF): socket was closed elsewhere.
                self.logger.info('Connection lost.')
                return self.shutdown()
            else:
                raise e
        except socket.error as e:
            self.logger.exception('Shutting down connection thread (%s, %d) due to caught exception:' % (self.address[0], self.address[1]))
            self.logger.exception(e)
            return self.shutdown()
        except Exception as e:
            # NOTE(review): len(e.args[0]) can itself raise for unsized args,
            # and the '3' prefix in the log message looks like leftover debug
            # text -- confirm both.
            if len(e.args[0]) == 0:
                self.logger.exception('3Shutting down connection thread (%s, %d) due to caught exception:' % (self.address[0], self.address[1]))
                self.logger.exception(e)
                return self.shutdown()
    return
def test_instantiate_query(self):
    """The constructor stores every field it is given, verbatim."""
    q = Query(
        get_data=lambda: 'hej',
        query_name='q1',
        key_column='col1',
        mapping={'col1': 'not analyzed'},
        non_data_fields=[])
    # assertEquals is a deprecated alias (removed in Python 3.12);
    # use assertEqual.
    self.assertEqual(q.key_column, 'col1')
    self.assertEqual(q.query_name, 'q1')
    self.assertEqual(q.mapping, {'col1': 'not analyzed'})
    self.assertEqual(q.non_data_fields, [])
    self.assertEqual(q.get_data(), 'hej')
def __init__(self, dbconn, *tables):
    """Select-style query over one or more target tables."""
    Query.__init__(self, dbconn)
    # All clause slots start empty; the builder methods fill them in.
    self._options = None
    self._cols = None
    self._target = None
    self._where = None
    self._join = None
    self._groupby = None
    self._orderby = None
    self._limit = None
    self.target(*tables)
def init_prog(self):
    """Main program flow: log start, fetch the dec3 documents, log end."""
    logger = Logs()
    # Instantiated for parity with the original flow; not used directly here.
    notifier = Notificaciones()
    db = Query()
    logger.escribe_log('INICIO programa')
    logger.escribe_log('Obteniendo documentos dec3')
    db.get_doctos_dec()
    logger.escribe_log('FIN programa')
def test_displays_as_query_with_records(self, Repo):
    """repr() of a Query lists the records produced by from_dict."""
    fetch = Repo.return_value.select.return_value.fetchall
    fetch.return_value = [(1, 7, datetime.datetime(2016, 1, 1))]
    # Recall that TunaCasserole overrides #from_dict to return
    # 'mytestvalue' so that is what it will repr as
    expected = ("<lazy_record.Query [{'created_at': 7, 'id': 1, "
                "'updated_at': datetime.datetime(2016, 1, 1, 0, 0)}]>")
    self.assertEqual(repr(Query(TunaCasserole)), expected)
def test_query_with_total(self):
    """include_total populates results.total both before and after inserts."""
    num_participants = 5
    total_query = Query([], None, 10, None, include_total=True)
    self.assertEqual(self.dao.query(total_query).total, 0)
    for i in range(num_participants):
        self._insert(Participant(participantId=i, biobankId=i))
    self.assertEqual(self.dao.query(total_query).total, num_participants)
def _init_class(cls):
    # One-time class wiring: register this model kind, build its companion
    # hybrid model type, and connect property/query attributes and schema.
    from Properties import Property
    cls.kind = cls.__name__
    cls.kinds[cls.kind] = cls
    # Dynamically create a HybridModel subclass carrying the same name.
    cls.hybrid_model = type(cls.kind, (HybridModel, ), {})
    cls._owners = cls._link_owners()
    # 'all' is a fresh, unfiltered query over this kind.
    cls.all = Query()
    cls._properties = ModelAttribute.connect(cls, kind=Property)
    cls._queries = ModelAttribute.connect(cls, kind=Query)
    cls._schema = ModelSchema(cls, cls._properties, cls._queries)
def getLastQuoteDate(self, ticker, symbol_table_name, price_table_name, order):
    """Fetch the most recent quote date for ``ticker``, reporting wall time."""
    started = time.time()
    sql = Query(self.connection).getLastQuote(
        ticker, symbol_table_name, price_table_name, order)
    frame = psql.read_sql(sql, con=self.connection)
    elapsed = time.time() - started
    print("Fetching from DB completed in", elapsed, "seconds")
    return frame['price_date']
def loadBatchQueries(self, config):
    """Build Query objects from the configured flat [start, end, ...] list.

    ``querypoints`` holds consecutive (startTS, endTS) pairs; each pair
    becomes one Query with the shared window size and support threshold.
    """
    query_conf = config["query"]
    ref_window = query_conf["refWindowSize"]
    min_sup = query_conf["minSup"]
    points = query_conf["querypoints"]
    return [
        Query(points[i], points[i + 1], ref_window, min_sup)
        for i in range(0, len(points), 2)
    ]
def __init__(self, token, owner, name, team):
    """Wrap a remote GitHub repository and cache its basic attributes."""
    self._token = token
    self._gh = RemoteRepo(token, owner=owner, name=name, team=team,
                          max_page_size=60, min_page_size=7)
    # Cache frequently used repo metadata up front.
    self.default_branch_name = self._gh.default_branch
    self.ssh_url = self._gh.ssh_url
def get_cve_generator(config):
    """Build a CVE feed generator with one Query per non-blank pattern line.

    Reads the file named by ``config['pattern_file']`` and adds a desired
    query (with the configured ``strip_spaces`` padding behaviour) for each
    non-empty, non-whitespace line.

    Cleanup: the original had two identical if/else branches adding the same
    query; they are collapsed into a single call.
    """
    cve_feed_gen = CVEParser(config)
    strip_spaces = config.get('strip_spaces')
    pattern_file = config.get('pattern_file')
    with open(pattern_file) as f:
        requirements_contents = re.split(r'\r?\n', f.read())
    for requirement in requirements_contents:
        # Skip blank and whitespace-only lines.
        if not requirement or not requirement.strip():
            continue
        cve_feed_gen.add_desired_query(
            Query(requirement, strip_padding=strip_spaces))
    return cve_feed_gen
def _update(self):
    """Persist all changed fields of this row with a single UPDATE."""
    set_clause = ','.join('`%s`=%%s' % field for field in self._updated)
    parts = [
        'UPDATE %s SET ' % self.__table__,
        set_clause,
        ' WHERE id=%s ',
    ]
    params = [getattr(self, field) for field in self._updated]
    params.append(self.id)
    Query.execute(' '.join(parts), params, self.__db__)
def update(self, **kwargs):
    """Write pending and keyword-supplied changes back to the row.

    Returns ``self`` for chaining.  Raises ``PyPgException`` when neither
    tracked changes nor keyword arguments provide anything to update.
    """
    self._check_deleted()
    if not (self._changed or kwargs):
        raise PyPgException('No data to update for this row.')
    # BUG FIX: the original used map(), which is lazy in Python 3 and so the
    # column-validity checks never actually ran.  Iterate explicitly.
    for column in kwargs:
        self._check_column_in_table(column)
    self._sql.add_update_kwargs(kwargs)
    Query().execute(**self._sql.build_update())
    self._changed = False
    self._set_sql_builder()
    return self
def test_empty(self):
    """An empty query expands to the default tag-existence match."""
    expected_ast = (
        'match_and',
        ('match_tag_exists', 'target_type'),
        ('match_tag_exists', 'unit'),
    )
    self.assertQueryMatches(
        Query(""),
        self.dummyQuery(
            ast=expected_ast,
            target_modifiers=[Query.derive_counters],
            patterns=['target_type=', 'unit=']))
def test_first_gets_first_few_records(self, Repo):
    """first(n) issues the where → limit(n) → select → fetchall chain."""
    Query(TunaCasserole).where(my_attr=5).where(id=7).first(5)
    repo = Repo.return_value
    repo.where.assert_called_with([], my_attr=5, id=7)
    limit = repo.where.return_value.limit
    limit.assert_called_with(5)
    select = limit.return_value.select
    select.assert_called_with("id", "created_at", "updated_at", "my_attr")
    select.return_value.fetchall.assert_called_once_with()
def availableRulesets():
    """returns all rulesets defined in the database plus all predefined rulesets"""
    # Template rulesets are stored with negative ids.
    templateIds = (
        x[0] for x in Query("SELECT id FROM ruleset WHERE id<0").records)
    result = [Ruleset(x) for x in templateIds]
    for predefined in PredefinedRuleset.rulesets():
        # NOTE(review): with "or", the name check makes the membership test
        # redundant whenever names differ, and the name list is recomputed
        # each iteration because result grows -- confirm this is intended.
        if predefined not in result or predefined.name not in [
                x.name for x in result]:
            result.append(predefined)
    return result
def query():
    """Look up express-delivery tracking info and return it as JSON."""
    try:
        payload = request.get_json()
        print(payload['ex_code'])
        ex_code = payload['ex_code']
        ex_type = payload['ex_type']
        tracker = Query('https://www.kuaidi100.com', ex_type, ex_code)
        parsed = json.loads(tracker.query_express())
        body = {
            'errmsg': parsed['message'],
            'status': parsed['status'],
            'ex_code': ex_code,
            'ex_type': ex_type,
            'desc': parsed['data'],
        }
        return make_response(json.dumps(body, ensure_ascii=False))
    except Exception as e:
        return json.dumps({'errmsg': str(e), 'status': 500, 'desc': ''})
def newId(minus=False):
    """returns an unused ruleset id. This is not multi user safe.

    With ``minus`` the next free negative id (template range) is returned,
    otherwise the next free positive one.  Falls back to -1 / 1 when the
    table is empty or the value cannot be parsed.
    """
    func = 'min(id)-1' if minus else 'max(id)+1'
    result = -1 if minus else 1
    records = Query("select %s from ruleset" % func).records
    # BUG FIX: check for None explicitly -- a computed id of 0 is falsy but
    # perfectly valid, and the old truthiness test silently discarded it.
    if records and records[0] and records[0][0] is not None:
        try:
            result = int(records[0][0])
        except ValueError:
            pass
    return result
def saveStartTime(self):
    """save starttime for this game"""
    starttime = datetime.datetime.now().replace(microsecond=0).isoformat()
    # Parameter order must match the placeholders in the UPDATE below.
    params = [starttime, self.seed, int(self.autoPlay),
              self.ruleset.rulesetId]
    params += [p.nameid for p in self.players]
    params.append(self.gameid)
    Query(
        "update game set starttime=?,seed=?,autoplay=?,"
        "ruleset=?,p0=?,p1=?,p2=?,p3=? where id=?",
        tuple(params))
def __init__(self): self.q_template_dict = { 0: self.get_movie_rating, 1: self.get_movie_releasedate, 2: self.get_movie_type, 3: self.get_movie_introduction, 4: self.get_movie_actor_list, 5: self.get_actor_info, 6: self.get_actor_act_type_movie, 7: self.get_actor_act_movie_list, 8: self.get_movie_rating_bigger, 9: self.get_movie_rating_smaller, 10: self.get_actor_movie_type, 11: self.get_cooperation_movie_list, 12: self.get_actor_movie_num, 13: self.get_actor_birthday } # 连接数据库 self.graph = Query()
def __init__(self): self.q_template_dict = { 0: self.get_bp, 1: self.get_numberofchild, 2: self.get_sibling, 3: self.get_birthname, 4: self.get_father, 5: self.get_birthdate, 6: self.get_haircolor, 7: self.get_author, 8: self.get_spouse, 9: self.get_movie_rating_smaller, 10: self.get_actor_movie_type, 11: self.get_cooperation_movie_list, 12: self.get_actor_movie_num, 13: self.get_actor_birthday } # 连接数据库connect to database self.graph = Query()
def tests_inclusion(self, Repo):
    """all() results support `in` checks against plain record dicts."""
    fetchall = mock.Mock(return_value=[(15, 2, 33)])
    Repo.return_value.select.return_value = mock.Mock(fetchall=fetchall)
    records = Query(TunaCasserole).all()
    self.assertIn({'created_at': 2, 'id': 15, 'updated_at': 33}, records)
def test_constructs_object_with_information(self, Repo):
    """Rows come back as dicts with id/created_at/updated_at columns mapped."""
    fetchall = mock.Mock(return_value=[(2, 33, 15)])
    Repo.return_value.select.return_value = mock.Mock(fetchall=fetchall)
    first_record = list(Query(TunaCasserole).all())[0]
    self.assertEqual(first_record,
                     {"id": 2, "updated_at": 15, "created_at": 33})
def cache_stop_words(self):
    """Map each English stop word found in the final index to its postings."""
    english_stops = set(stopwords.words('english'))
    # Each index line evaluates to (term, postings, ...); keep stop words only.
    return {
        entry[0]: entry[1]
        for entry in map(Query.fast_eval, self.final_index)
        if entry[0] in english_stops
    }
def __updateServerInfoInDatabase(self):
    """we are online. Update table server."""
    lasttime = datetime.datetime.now().replace(microsecond=0).isoformat()
    with Internal.db:
        # Upsert: try UPDATE first; rowcount 1 means the url was known.
        serverKnown = Query(
            'update server set lastname=?,lasttime=? where url=?',
            (self.username, lasttime, self.url)).rowcount() == 1
        if not serverKnown:
            Query(
                'insert into server(url,lastname,lasttime) values(?,?,?)',
                (self.url, self.username, lasttime))
    # needed if the server knows our name but our local data base does not:
    Players.createIfUnknown(self.username)
    playerId = Players.allIds[self.username]
    with Internal.db:
        # Same upsert pattern for the stored password.
        if Query(
                'update passwords set password=? where url=? and player=?',
                (self.password, self.url, playerId)).rowcount() == 0:
            Query(
                'insert into passwords(url,player,password) values(?,?,?)',
                (self.url, playerId, self.password))
def clause(data):
    """Turn a dict spec into a clause object; non-dicts pass through as-is.

    The 'type' entry selects the handler: 'query' delegates to Query.load,
    anything else is looked up among this module's globals.
    """
    if not isinstance(data, dict):
        return data
    ttype = str(data.get('type')).lower()
    if ttype == 'query':
        from query import Query
        return Query.load(data)
    handler = globals().get(ttype)
    if not handler:
        raise Exception('query unsupported type: %s, query: %s' % (ttype, data))
    return handler(data)
def search():
    """Rank documents against the free-text query and return them as JSON."""
    text = request.args.get('text', default='*', type=str)
    scores = index_score.score(Query(text))
    docs = get_docs(scores)
    return app.response_class(
        response=json.dumps(docs), status=200, mimetype='application/json')
def extract_query():
    """Build a Query object from the current Bokeh session's request args."""
    doc = curdoc()
    if doc and doc.session_context:
        args = doc.session_context.request.arguments
    else:
        args = {}
    qd = bokeh_arguments_to_query_dict(args)
    logger.info('args: %s', args)
    logger.info('qd: %s', qd)
    search, filters = normalize_search_querydict(qd)
    # content_type arrives as a list of bytes values; default to sponsors.
    raw_content_type = args.get('content_type')
    content_type = raw_content_type[0].decode() if raw_content_type else 'sponsors'
    return Query(content_type=content_type, search=search, filters=filters)
def advSearchBtnCallBack(self, qsobj, window):
    """Called from <search> Button on home page.

    Arguments:
    qsobj -- object of the query text box
    window -- current window <not needed remove this from logic>
    """
    new_query = Query(qsobj.get())
    new_query.topSearch = False
    new_query.advSearch = True
    self.currentQuery = new_query
    self.topSearch = False
    self.advSearchFrame()
def testInsert_getFailsForWithdrawnParticipant(self):
    """Both get() and query() must raise Forbidden once the participant withdraws."""
    self._make_summary()
    self.dao.insert(self._make_physical_measurements())
    self.participant.version += 1
    self.participant.withdrawalStatus = WithdrawalStatus.NO_USE
    ParticipantDao().update(self.participant)
    with self.assertRaises(Forbidden):
        self.dao.get(1)
    participant_filter = FieldFilter(
        'participantId', Operator.EQUALS, self.participant.participantId)
    with self.assertRaises(Forbidden):
        self.dao.query(Query([participant_filter], None, 10, None))
def load(): """load all defined players into self.allIds and self.allNames""" query = Query("select id,name from player") if not query.success: sys.exit(1) Players.allIds = {} Players.allNames = {} for nameid, name in query.records: Players.allIds[name] = nameid Players.allNames[nameid] = name if not name.startswith('Robot'): Players.humanNames[nameid] = name
def get_query_list(workbook_path):
    """Read query strings from the first column of the '关键词组' sheet.

    Blank cells are skipped and internal spaces are stripped from each
    query string before it is wrapped in a Query.

    FIX: read-only openpyxl workbooks keep the underlying file handle open;
    close the workbook (in a finally) so the handle is always released.
    """
    query_list = []
    workbook = load_workbook(filename=workbook_path, read_only=True,
                             data_only=True)
    try:
        sheet = workbook["关键词组"]
        for row in sheet.iter_rows(values_only=True):
            text = row[0]
            if text:
                query_list.append(Query(text.replace(" ", "")))
    finally:
        workbook.close()
    return query_list
def get_sync_results_for_request(dao, max_results):
    """Run a token-based sync query and wrap its results for the response."""
    token = request.args.get('_token')
    count_str = request.args.get('_count')
    # NOTE(review): max_results is only a default, not a cap -- a client
    # _count larger than max_results is used verbatim; confirm intended.
    count = int(count_str) if count_str else max_results
    sync_query = Query([], OrderBy('logPositionId', True), count, token,
                       always_return_token=True)
    return make_sync_results_for_request(dao, dao.query(sync_query))
def test_adjustment_puts_different_files_in_different_base_directories_together(
        self):
    """Global rank adjustment groups hits by base directory, cwd first."""
    in_res = QueryResult(hits=[("/a/render_widget.cpp", 10),
                               ("/b/render_widget.cpp", 10),
                               ("/a/render_view.cpp", 9),
                               ("/b/render_view.cpp", 9)])
    # assertEquals is a deprecated alias (removed in Python 3.12);
    # use assertEqual.
    # Cwd is not an active dir: /a sorts ahead of /b.
    res = query._apply_global_rank_adjustment(
        in_res, ["/a", "/b"], Query("render", current_filename="/x"))
    self.assertEqual([
        "/a/render_widget.cpp", "/a/render_view.cpp", "/b/render_widget.cpp",
        "/b/render_view.cpp"
    ], res.filenames)
    # b is cwd, so /b files rank first.
    res = query._apply_global_rank_adjustment(
        in_res, ["/a", "/b"], Query("render", current_filename="/b"))
    self.assertEqual([
        "/b/render_widget.cpp", "/b/render_view.cpp", "/a/render_widget.cpp",
        "/a/render_view.cpp"
    ], res.filenames)