def __init__(self):
    """ Initialize the settings class """
    self.model = {
        "name": "settings",
        "id": "settings_id",
        "fields": ("settings_id", "usermail", "userpass", "usercountry",
                   "pd", "pf", "sf", "http", "smtp", "port", "mailto",
                   "mailserver", "mailport", "mailuser", "mailpass",
                   "fc", "fp", "fe", "lsc", "lsp", "sac", "sap", "sc",
                   "cust_idx", "page_idx"),
        "types": ("INTEGER PRIMARY KEY NOT NULL",
                  "TEXT", "TEXT", "TEXT", "TEXT", "TEXT", "TEXT", "TEXT",
                  "TEXT", "TEXT", "TEXT", "TEXT", "TEXT", "TEXT", "TEXT",
                  "TEXT", "TEXT", "TEXT", "TEXT", "TEXT", "TEXT", "TEXT",
                  "INTEGER", "INTEGER", "INTEGER")
    }
    self._settings = {}
    self.q = Query()
    if not self.q.exist_table(self.model["name"]):
        sql = self.q.build("create", self.model)
        self.q.execute(sql)

def __init__(self):
    """ Initialize product class """
    self.model = {
        "name": "products",
        "id": "product_id",
        "fields": ("product_id", "sku", "name1", "name2", "name3", "item",
                   "price", "d2", "d4", "d6", "d8", "d12", "d24", "d48",
                   "d96", "min", "net", "groupid"),
        "types": ("INTEGER PRIMARY KEY NOT NULL",
                  "TEXT", "TEXT", "TEXT", "TEXT", "TEXT",
                  "REAL DEFAULT 0", "REAL DEFAULT 0", "REAL DEFAULT 0",
                  "REAL DEFAULT 0", "REAL DEFAULT 0", "REAL DEFAULT 0",
                  "REAL DEFAULT 0", "REAL DEFAULT 0", "REAL DEFAULT 0",
                  "REAL DEFAULT 0", "REAL DEFAULT 0",
                  "TEXT")
    }
    self._product = {}
    self._products = []
    self.q = Query()
    if not self.q.exist_table(self.model["name"]):
        sql = self.q.build("create", self.model)
        self.q.execute(sql)

def __init__(self):
    """ Initialize visit class """
    self.model = {
        "name": "visits",
        "id": "visit_id",
        "fields": ("visit_id", "report_id", "employee_id", "customer_id",
                   "visit_date", "po_sent", "po_buyer", "po_number",
                   "po_company", "po_address1", "po_address2", "po_postcode",
                   "po_postoffice", "po_country", "po_note", "prod_demo",
                   "prod_sale", "visit_type", "po_sas", "po_sale", "po_total",
                   "po_approved", "visit_note"),
        "types": ("INTEGER PRIMARY KEY NOT NULL", "INTEGER NOT NULL",
                  "INTEGER NOT NULL", "INTEGER NOT NULL", "TEXT NOT NULL",
                  "INTEGER DEFAULT 0", "TEXT", "TEXT", "TEXT", "TEXT",
                  "TEXT", "TEXT", "TEXT", "TEXT", "TEXT", "TEXT", "TEXT",
                  "TEXT NOT NULL", "REAL DEFAULT 0", "REAL DEFAULT 0",
                  "REAL DEFAULT 0", "INTEGER DEFAULT 0", "TEXT")
    }
    self._visit = {}
    self._visits = []
    self._csv_record_length = 22
    self.q = Query()
    if not self.q.exist_table(self.model["name"]):
        sql = self.q.build("create", self.model)
        self.q.execute(sql)

def __get_doc_scores(self, posting_list, query: Query):
    click.secho("[Search Engine] Computing search scores ...", fg="bright_blue")
    query_tf_idf = {}
    norm_query_vector = 0
    query_vocabulary = query.get_vocabulary()
    # tf-idf weight of every query term, accumulating the query vector norm
    for token in query_vocabulary:
        tf_idf = query.get_tf(token) * self.collection.get_idf(token)
        query_tf_idf[token] = tf_idf
        norm_query_vector += tf_idf**2
    norm_query_vector = sqrt(norm_query_vector)
    doc_scores = {}
    for doc_id in posting_list:
        score = 0
        for token in query_vocabulary:
            if self.weighting_model == "tw-idf":
                weight = self.collection.get_tw_idf(target_term=token,
                                                    target_doc_id=doc_id,
                                                    b=0.003)
            elif self.weighting_model == "tf-idf":
                weight = self.collection.get_piv_plus(target_term=token,
                                                      target_doc_id=doc_id,
                                                      b=0.2)
            else:
                weight = self.collection.get_bm25_plus(
                    target_term=token, target_doc_id=doc_id, b=0.75, k1=1.2)
            score += query_tf_idf[token] * weight
        # cosine-style normalisation by the document and query vector norms
        score /= self.collection.documents_norms[doc_id] * norm_query_vector
        doc_scores[doc_id] = score
    return doc_scores

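# Illustrative sketch, not taken from the project above: collection.get_bm25_plus
# is assumed to compute a BM25+-style term weight. For reference, the textbook
# BM25+ weight (Lv & Zhai) from raw statistics could look like this; all
# statistics in the example call are made up.
from math import log

def bm25_plus_weight(tf, doc_len, avg_doc_len, n_docs, doc_freq,
                     k1=1.2, b=0.75, delta=1.0):
    """BM25+ weight of a term in one document (one common IDF variant)."""
    idf = log((n_docs + 1) / doc_freq)
    tf_component = (k1 + 1) * tf / (k1 * (1 - b + b * doc_len / avg_doc_len) + tf)
    return idf * (tf_component + delta)

# Example: a term occurring 3 times in an average-length document,
# appearing in 100 of 10000 documents.
print(round(bm25_plus_weight(3, 120, 120, 10000, 100), 3))
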
def __init__(self):
    """ Initialize Report class """
    self.model = {
        "name": "reports",
        "id": "report_id",
        "fields": ("report_id", "employee_id", "rep_no", "rep_date",
                   "timestamp", "newvisitday", "newdemoday", "newsaleday",
                   "newturnoverday", "recallvisitday", "recalldemoday",
                   "recallsaleday", "recallturnoverday", "sasday",
                   "sasturnoverday", "demoday", "saleday", "kmmorning",
                   "kmevening", "supervisor", "territory", "workday",
                   "infotext", "sent", "offday", "offtext", "kmprivate"),
        "types": ("INTEGER PRIMARY KEY NOT NULL", "INTEGER NOT NULL",
                  "INTEGER NOT NULL", "TEXT NOT NULL", "TEXT NOT NULL",
                  "INTEGER DEFAULT 0", "INTEGER DEFAULT 0",
                  "INTEGER DEFAULT 0", "REAL DEFAULT 0",
                  "INTEGER DEFAULT 0", "INTEGER DEFAULT 0",
                  "INTEGER DEFAULT 0", "REAL DEFAULT 0",
                  "INTEGER DEFAULT 0", "REAL DEFAULT 0",
                  "INTEGER DEFAULT 0", "INTEGER DEFAULT 0",
                  "INTEGER DEFAULT 0", "INTEGER DEFAULT 0",
                  "TEXT", "TEXT", "INTEGER DEFAULT 0", "TEXT",
                  "INTEGER DEFAULT 0", "INTEGER DEFAULT 0", "TEXT",
                  "INTEGER DEFAULT 0")
    }
    self._reports = []
    self._report = {}
    self._csv_record_length = 25
    self.q = Query()
    self.c = ReportCalculator()
    if not self.q.exist_table(self.model["name"]):
        sql = self.q.build("create", self.model)
        self.q.execute(sql)

def __init__(self):
    """ Initialize Calculator """
    self.model = {
        "name": "reportcalculations",
        "id": "calc_id",
        "fields": ("calc_id", "calc_date", "report_id", "employee_id",
                   "reports_calculated", "new_visit", "new_demo", "new_sale",
                   "new_turnover", "recall_visit", "recall_demo",
                   "recall_sale", "recall_turnover", "sas", "sas_turnover",
                   "current", "demo", "sale", "turnover", "kmwork",
                   "kmprivate", "workdays", "offdays"),
        "types": ("INTEGER PRIMARY KEY NOT NULL", "TEXT NOT NULL",
                  "INTEGER NOT NULL", "INTEGER NOT NULL",
                  "INTEGER DEFAULT 0", "INTEGER DEFAULT 0",
                  "INTEGER DEFAULT 0", "INTEGER DEFAULT 0",
                  "REAL DEFAULT 0", "INTEGER DEFAULT 0",
                  "INTEGER DEFAULT 0", "INTEGER DEFAULT 0",
                  "REAL DEFAULT 0", "INTEGER DEFAULT 0", "REAL DEFAULT 0",
                  "INTEGER DEFAULT 0", "INTEGER DEFAULT 0",
                  "INTEGER DEFAULT 0", "REAL DEFAULT 0",
                  "INTEGER DEFAULT 0", "INTEGER DEFAULT 0",
                  "INTEGER DEFAULT 0", "INTEGER DEFAULT 0")
    }
    self._totals = {}
    self.q = Query()
    if not self.q.exist_table(self.model["name"]):
        sql = self.q.build("create", self.model)
        self.q.execute(sql)

def new_query():
    if g.user is None:
        return redirect("/login?next=/query/new")
    query = Query()
    query.user = g.user
    query.title = "%s's untitled query #%s" % (g.user.username, int(time.time()))
    query.save_new()
    return redirect(url_for('query_show', query_id=query.id))

def new_query():
    if get_user() is None:
        return redirect("/login?next=/query/new")
    query = Query()
    query.user = get_user()
    g.conn.session.add(query)
    g.conn.session.commit()
    return redirect(url_for('query_show', query_id=query.id))

def api_run_query():
    if g.user is None:
        return "Authentication required", 401
    text = request.form['text']
    query = Query.get_by_id(request.form['query_id'])
    # Supersede a still-running execution of the previous revision, if any
    last_query_rev = query.latest_rev
    if last_query_rev:
        last_query_run = QueryRun.get_latest_run(last_query_rev.id)
        if last_query_run:
            result = worker.run_query.AsyncResult(last_query_run.task_id)
            if not result.ready():
                result.revoke(terminate=True)
                last_query_run.status = QueryRun.STATUS_SUPERSEDED
                last_query_run.save()
    query_rev = QueryRevision(query_id=query.id, text=text)
    query_rev.save_new()
    query.latest_rev = query_rev
    query.save()
    query_run = QueryRun()
    query_run.query_rev = query_rev
    query_run.status = QueryRun.STATUS_QUEUED
    query_run.save_new()
    query_run.task_id = worker.run_query.delay(query_run.id).task_id
    query_run.save()
    return json.dumps({
        'output_url': url_for('api_query_output',
                              user_id=g.user.id,
                              run_id=query_run.id)
    })

def query():
    grafana_query = json.loads(request.data.decode("utf-8"))
    g_query = Query(grafana_query)
    responder = Responder(Responder.get_data_store())
    resp = responder.query(g_query)
    return (json.dumps(resp), 200) if resp else flask.abort(500)

def search_company(location, region, country):
    query = Query()
    query.by_location(location)
    query.by_region(region)
    query.by_country(country)
    search_result = pd.DataFrame(
        Company.find(collection="company", query=query._query))
    return search_result

def api_set_meta():
    if g.user is None:
        return "Authentication required", 401
    query = Query.get_by_id(request.form['query_id'])
    if 'title' in request.form:
        query.title = request.form['title']
    query.save()
    return json.dumps({'id': query.id})

def __init__(self):
    """ Initialize CustomerProduct class """
    self.model = {
        "name": "customerproducts",
        "id": "cp_id",
        "fields": ("cp_id", "customer_id", "item", "sku", "pcs"),
        "types": ("INTEGER PRIMARY KEY NOT NULL", "INTEGER NOT NULL",
                  "TEXT NOT NULL", "TEXT NOT NULL", "INTEGER DEFAULT 0")
    }
    self._products = []
    self._product = {}
    self.q = Query()
    if not self.q.exist_table(self.model["name"]):
        sql = self.q.build("create", self.model)
        self.q.execute(sql)

def __init__(self):
    """ Initialize contact class """
    self.model = {
        "name": "contacts",
        "id": "contact_id",
        "fields": ("contact_id", "customer_id", "name", "department",
                   "email", "phone", "infotext"),
        "types": ("INTEGER PRIMARY KEY NOT NULL", "INTEGER NOT NULL",
                  "TEXT", "TEXT", "TEXT", "TEXT", "TEXT")
    }
    self._contact = {}
    self._contacts = []
    self._csv_record_length = 8
    self.q = Query()
    if not self.q.exist_table(self.model["name"]):
        sql = self.q.build("create", self.model)
        self.q.execute(sql)

def search(keywords):
    session = Session()
    corpuses = session.query(Corpus).all()
    for corpus in corpuses:
        corpus.computeToken()
    for corpus in corpuses:
        print(corpus.title + ": \n")
        print(corpus.tokens)
        print("\n")
    keywords = tp.stopword.remove(keywords)
    keywords = tp.stemming(keywords)
    keywords = keywords.split()
    querys = [Query(tp.normalize_text(keyword)) for keyword in keywords]
    total_corpus = len(corpuses)
    total_weight = {}
    for query in querys:
        counter_df = 0
        print(query.keyword)
        for corpus in corpuses:
            counter_df = counter_df + corpus.isQueryExist(query.keyword)
            print("document : {} , TF : {} ".format(
                corpus.title, corpus.computeTF(query.keyword)))
        query.setDF(counter_df)
        print("df : {}".format(query.df))
        query.computeIDF(total_corpus)
        print("idf : {}".format(query.idf))
        print("idf+1 : {}".format(query.idf + 1))
        for corpus in corpuses:
            print("compute TF : {}".format(corpus.computeTF(query.keyword)))
            weight = corpus.computeTF(query.keyword) * (query.idf + 1)
            print("weight {} : {}".format(corpus.title, weight))
            # print(weight)
            corpus.set_weight(query.keyword, weight)
            print("document : {} , weight : {}".format(
                corpus.title, corpus.get_weight(query.keyword)))
        print()
    print("total weight")
    for corpus in corpuses:
        counter_weight = 0
        for query in querys:
            counter_weight = counter_weight + corpus.get_weight(query.keyword)
        if counter_weight != 0:
            total_weight[corpus] = counter_weight
            # print("document : {} , total weight : {}".format(corpus.title, total_weight[str(corpus.id)]))
    ranked = sorted(total_weight.items(), key=lambda x: x[1], reverse=True)
    ranked = dict(ranked)
    print("top documents")
    print(ranked)
    return corpuses, querys, ranked

def search(self, string_query: str, article_window: int = 10):
    query = Query(string_query, self.stopwords, self.__text_transformer)
    posting_list = self.__get_posting_list(query)
    doc_scores = self.__get_doc_scores(posting_list, query)
    if self.__text_transformer.context_retrieval:
        sorted_docs = [(k, v) for k, v in sorted(
            doc_scores.items(), key=lambda item: item[1], reverse=True)]
        context_scores = self.__get_context_scores(
            sorted_docs[:article_window], query)
        return context_scores
    return doc_scores

def __get_posting_list(self, query: Query):
    final_posting_list = []
    vocabulary = query.get_vocabulary()
    for token in vocabulary:
        if not final_posting_list:
            final_posting_list = self.collection.get_posting_list(token)
        else:
            posting_list = self.collection.get_posting_list(token)
            final_posting_list = merge_or_postings_list(
                final_posting_list, posting_list)
    return final_posting_list

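# Illustrative sketch: merge_or_postings_list is imported elsewhere in the
# project and is assumed to OR-merge (union) two sorted posting lists of
# document ids. A minimal stand-alone version under that assumption:
def merge_or_postings_list_sketch(left, right):
    """Union of two sorted doc-id lists, kept sorted and de-duplicated."""
    merged, i, j = [], 0, 0
    while i < len(left) and j < len(right):
        if left[i] == right[j]:
            merged.append(left[i])
            i += 1
            j += 1
        elif left[i] < right[j]:
            merged.append(left[i])
            i += 1
        else:
            merged.append(right[j])
            j += 1
    merged.extend(left[i:])
    merged.extend(right[j:])
    return merged

# Example: merge_or_postings_list_sketch([1, 3, 5], [2, 3, 6]) -> [1, 2, 3, 5, 6]
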
def __init__(self):
    """ Initialize OrderLine class """
    self.model = {
        "name": "orderlines",
        "id": "line_id",
        "fields": ("line_id", "visit_id", "pcs", "sku", "text", "price",
                   "sas", "discount", "linetype", "linenote", "item"),
        "types": ("INTEGER PRIMARY KEY NOT NULL", "INTEGER NOT NULL",
                  "INTEGER", "TEXT", "TEXT", "REAL", "INTEGER DEFAULT 0",
                  "REAL DEFAULT 0", "TEXT", "TEXT", "TEXT")
    }
    self._line = {}
    self._lines = []
    self._csv_record_length = 8
    self.q = Query()
    if not self.q.exist_table(self.model["name"]):
        sql = self.q.build("create", self.model)
        self.q.execute(sql)

def query_edit(request, query_id=0, log_type=0):
    '''Edit a query definition.'''
    if request.REQUEST.get('edit_type', ''):
        return QueryTemplateEdit(request)()
    is_copy = request.REQUEST.get('is_copy', '')
    query_id = int(request.REQUEST.get('query_id', '') or 0)
    log_key = request.REQUEST.get('log_key', '')
    if query_id:
        model = Query.objects.using('read').get(id=query_id)
        if model.cache_validate is None:
            model.cache_validate = 0
        if is_copy:
            model.id = 0
            model.name = '%s-copy' % model.name
    else:
        model = Query()
        model.id = query_id
        model.name = ''
        model.cache_validate = 0
        model.log_key = log_key
    log_defs = LogDefine.objects.using('read').all()
    return render_to_response('query/query_edit.html', locals())

def post(self):
    cookies = h.get_default_cookies(self)
    current_user = h.get_current_user(cookies)
    if current_user != None and '_pvk' in cookies and cookies['_pvk'] != None and cookies['_pvk'] != '':
        unlinked_result_views = ResultView.gql(
            "WHERE session_id = :1 AND user = NULL", cookies['_pvk']).fetch(5)
        for result_view in unlinked_result_views:
            result_view.user = current_user
            if current_user.fb_user_id != None:
                result_view.fb_user_id = current_user.fb_user_id
            # Create a "backdated" version for this query, because the user probably
            # made the query while logged out, and it's worthwhile knowing that that happened
            if result_view.query_string != None and result_view.query_string != '':
                query = Query(query_string=result_view.query_string,
                              referrer=self.request.referer,
                              url="__ARTIFICIALLY_CREATED_FOR_USER__")
                query.user = current_user
                if current_user.fb_user_id != None:
                    query.fb_user_id = current_user.fb_user_id
                if '_pvk' in cookies and cookies['_pvk'] != None and cookies['_pvk'] != '':
                    query.session_id = cookies['_pvk']
                query.put()
                result_view.query = query
                result_view.put()
                fb_post_id = QueryFacebookWallpostHelper.postToWallIfNecessary(
                    current_user, result_view, query, result_view.image_url)
            else:
                result_view.put()
    self.response.out.write('{status: \'ok\'}')

def post(self):
    cookies = h.get_default_cookies(self)
    current_user = h.get_current_user(cookies)
    query = Query(query_string=self.request.get('q'))
    query.referrer = self.request.get('referrer')
    query.url = self.request.get('u')
    query.ip_address = self.request.remote_addr
    if current_user != None:
        query.user = current_user
        if current_user.fb_user_id != None:
            query.fb_user_id = current_user.fb_user_id
    if '_pvk' in cookies and cookies['_pvk'] != None and cookies['_pvk'] != '':
        query.session_id = cookies['_pvk']
    query.put()
    self.response.out.write('{status: \'ok\'}')

def __init__(self):
    """ Initialize Employee class """
    self.model = {
        "name": "employees",
        "id": "employee_id",
        "fields": ("employee_id", "salesrep", "fullname", "email",
                   "country", "sas"),
        "types": ("INTEGER PRIMARY KEY NOT NULL", "TEXT", "TEXT", "TEXT",
                  "TEXT", "INTEGER DEFAULT 0")
    }
    self._employee = {}
    self.q = Query()
    if not self.q.exist_table(self.model["name"]):
        sql = self.q.build("create", self.model)
        self.q.execute(sql)
    self.s = Settings()
    if rules.check_settings(self.s.settings):
        self.load(self.s.settings["usermail"])

def search_job(from_date, to_date, title):
    query = Query()
    query.on_calendar(from_date, to_date)
    query.by_title(title)
    search_result = pd.DataFrame(
        Job.find(collection="job", query=query._query))
    return search_result

def post(self):
    logging.info("***rvlh")
    cookies = h.get_default_cookies(self)
    current_user = h.get_current_user(cookies)
    result_view = ResultView()
    result_view.source = self.request.get('src')
    result_view.referrer = self.request.get('referrer')
    result_view.url = self.request.get('u')
    result_view.ip_address = self.request.remote_addr
    result_view.image_url = self.request.get('image_url')
    result_view.query_string = self.request.get('q')
    # logging.info(" ".join((result_view.source, result_view.referrer, result_view.url,
    #                        result_view.ip_address, result_view.image_url,
    #                        result_view.query_string)))
    if '_pvk' in cookies and cookies['_pvk'] != None and cookies['_pvk'] != '':
        result_view.session_id = cookies['_pvk']
    if current_user != None:
        result_view.user = current_user
        if current_user.fb_user_id != None:
            result_view.fb_user_id = current_user.fb_user_id
        query = Query.gql(
            "WHERE user = :1 AND query_string = :2 ORDER BY created_at DESC",
            current_user.key(), self.request.get('q')).get()
        if query != None:
            result_view.query = query
    result_view.put()
    # Post to the Facebook wall for the query if the query hasn't already been posted
    if current_user != None and query != None and query.fb_wall_post_id == None:
        logging.info("***Posting")
        image_url = self.request.get('image_url')
        if not image_url:
            from google.appengine.api import urlfetch
            from django.utils import simplejson
            q = 'site:' + result_view.url + " " + result_view.query_string
            ue_query = urllib.quote_plus(q)
            url = 'http://ajax.googleapis.com/ajax/services/search/images?v=1.0&q=' \
                  + ue_query + '&key=' \
                  + h.cfg['gs_api_key']
            result = urlfetch.fetch(url)
            o = simplejson.loads(result.content)
            image_url = o['responseData']['results'][0]['tbUrl']
        fb_post_id = QueryFacebookWallpostHelper.postToWallIfNecessary(
            current_user, result_view, query, image_url)
        self.response.out.write(
            '{status: \'ok\', fb_wall_post_id: \'%s\', result_view_key: \'%s\'}'
            % (fb_post_id, str(result_view.key())))
    else:
        # logging.info("***NoPost : " + current_user + " " + query + " " + query.fb_wall_post_id)
        self.response.out.write(
            '{status: \'ok\', result_view_key: \'%s\', result_view_has_user: \'%s\'}'
            % (str(result_view.key()), str(current_user != None)))

def compute_scores(self, list_of_docs, query: Query):
    """ Scores each document, depending on the tokens it contains. """
    print("Search Engine is computing search scores ...")
    query_tf_idf = {}
    vocab_query = query.get_vocabulary()
    # get the tf-idf for words in the query
    for word in vocab_query:
        tf_idf = query.get_term_frequency(word) * self.collection.compute_idf(word)
        query_tf_idf[word] = tf_idf
    # score the documents which contain the words
    doc_scores = {}
    for doc_id in list_of_docs:
        score = 0
        for word in vocab_query:
            normalized_tf = self.collection.log_normalization(
                term=word, id_document=doc_id
            )
            if normalized_tf == 0:
                # the word is not in the document
                doc_tf_idf = 0
            else:
                # tf-idf for the word in the document
                doc_tf_idf = normalized_tf * self.collection.compute_idf(word)
            score += query_tf_idf[word] * doc_tf_idf
        doc_scores[doc_id] = score
    return doc_scores

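# Illustrative toy computation, not taken from the project: compute_scores above
# sums tf(t, q) * idf(t) * log_tf(t, d) * idf(t) over the query terms, i.e. a
# dot product of query and document tf-idf vectors. The statistics below are
# hard-coded assumptions purely for the arithmetic.
from math import log10

idf = {"press": log10(10 / 2), "release": log10(10 / 5)}   # 10 docs in total
query_tf = {"press": 1, "release": 1}                      # raw counts in the query
doc_log_tf = {"press": 1 + log10(3), "release": 0}         # log-normalised tf in one doc

score = sum(query_tf[t] * idf[t] * doc_log_tf[t] * idf[t] for t in query_tf)
print(round(score, 4))
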
def query(self, query="", mode="SYNC"):
    """ Run a query on the imported resources

    Parameters
    ----------
    query : str, required
        The query string

    Returns
    -------
    query : `Query`
        The created Query
    """
    adql_query = self.__adql_resource.create_query(query)
    return Query(adql_query=adql_query, mode=mode)

def query_show(query_id):
    query = Query.get_by_id(query_id)
    can_edit = g.user is not None and g.user.id == query.user_id
    jsvars = {
        'query_id': query.id,
        'can_edit': can_edit
    }
    # Check if there's a run?
    query_run = QueryRun.get_latest_run(query.latest_rev_id)
    if query_run is not None:
        jsvars['output_url'] = url_for('api_query_output',
                                       user_id=query.user_id,
                                       run_id=query_run.id)
    return render_template(
        "query/view.html",
        user=g.user,
        query=query,
        jsvars=jsvars
    )

def post(self):
    query = User.all()
    query.order('-created_at')
    cursor = self.request.get('cursor')
    if cursor != None:
        query.with_cursor(cursor)
    users = query.fetch(50)
    for user in users:
        user_queries = Query.gql(
            "WHERE user = :1 ORDER BY created_at ASC", user).fetch(1000)
        user_search_strings = []
        for user_query in user_queries:
            if user_query.url != None:
                parsed_url = urlparse(user_query.url)
            else:
                parsed_url = urlparse('')
            params = cgi.parse_qs(parsed_url.query)
            # Right now, we'll use the x (and y) param to signify an organic search.
            if 'x' in params and params['x'] != None:
                user_search_strings.append(user_query.query_string)
        organic_search_metric = OrganicSearchMetric.gql(
            "WHERE user = :1", user).get()
        if organic_search_metric == None:
            organic_search_metric = OrganicSearchMetric(
                fb_user_id=user.fb_user_id,
                user=user,
                search_count=len(user_search_strings),
                searches="||".join(user_search_strings)
            )
        else:
            organic_search_metric.fb_user_id = user.fb_user_id
            organic_search_metric.user = user
            organic_search_metric.search_count = len(user_search_strings)
            organic_search_metric.searches = "||".join(user_search_strings)
        organic_search_metric.put()
    self.response.out.write(simplejson.dumps({
        'status': 'ok',
        'cursor': str(query.cursor()),
        'count': len(users)
    }))

def test_query_for_table_gets_all_json_data(self) -> None:
    hour_in_ms = 3600000
    q1 = Query({
        'intervalMs': hour_in_ms,
        'targets': [{
            'target': 'fubar',
            'type': 'table'
        }],
        'maxDataPoints': 5,
        'range': {
            'from': "2020-01-01T12:00:00.000Z",
            'to': "2020-01-04T12:00:00.000Z"
        }
    })
    expected = [{
        'type': 'table',
        'columns': [{
            'text': 'dms_id',
            'type': 'string',
        }, {
            'text': 'dms_time',
            'type': 'time',
        }, {
            'text': 'inner.fu',
            'type': 'string',
        }, {
            'text': 'inner.bar',
            'type': 'number',
        }, {
            'text': 'outer',
            'type': 'number',
        }],
        'rows': [['the app', 1577847600000, 'bar', 2, 123.4],
                 ['the app', 1577847600000, 'bar', 2, 123.4],
                 ['the app', 1577847600000, 'bar', 2, 123.4],
                 ['the app', 1577847600000, 'bar', 2, 123.4],
                 ['the app', 1577847600000, 'bar', 2, 123.4]],
    }]
    self.maxDiff = None
    self.assertEqual(expected, self.responder.query(q1))

def fork_query(id):
    if get_user() is None:
        return redirect("/login?next=fork/{id}".format(id=id))
    query = Query()
    query.user = get_user()
    parent_query = g.conn.session.query(Query).filter(Query.id == id).one()
    query.title = parent_query.title
    query.parent_id = parent_query.id
    query.description = parent_query.description
    g.conn.session.add(query)
    g.conn.session.commit()
    query_rev = QueryRevision(query_id=query.id,
                              text=parent_query.latest_rev.text)
    query.latest_rev = query_rev
    g.conn.session.add(query)
    g.conn.session.add(query_rev)
    g.conn.session.commit()
    return redirect(url_for('query_show', query_id=query.id))

def get_list_of_documents(self, query: Query):
    """ Return documents where the words of the query appear """
    target_documents_list = []
    # Get words of query
    vocabulary = query.get_vocabulary()
    for word in vocabulary:
        if not target_documents_list:
            target_documents_list = self.collection.get_documents_containing_term(word)
            print(
                f"[Search Engine] the word {word} is present in {len(target_documents_list)} items"
            )
        else:
            documents_list = self.collection.get_documents_containing_term(word)
            print(
                f"[Search Engine] the word {word} is present in {len(documents_list)} items"
            )
            print("Merge ...")
            # merge the two lists and order the final list
            target_documents_list = sorted(
                list(set(target_documents_list) | set(documents_list)))
    return target_documents_list

def query(self, query: Query) -> list:
    _TIME_SERIE = "timeserie"
    _TABLE = "table"
    resp = []
    for target in query.get_targets():
        if target.get_type() == _TIME_SERIE:
            resp.append(
                ResponseEntry(
                    self._data_store,
                    target.get_name(),
                    *query.get_unix_range(),
                    query.get_ms_interval(),
                    query.get_max_datapoint_count()
                ).as_time_series()
            )
        elif target.get_type() == _TABLE:
            resp.append(
                ResponseEntry(
                    self._data_store,
                    target.get_name(),
                    *query.get_unix_range(),
                    query.get_ms_interval(),
                    query.get_max_datapoint_count()
                ).as_table()
            )
        else:
            err_str = "Unable to respond to query for {} of type {}, with timeframe {} to {}".format(
                target.get_name(), target.get_type(), *query.get_unix_range()
            )
            print(err_str)
            resp = None
    return resp

def test_query_for_timeserie_on_json_payload(self) -> None:
    hour_in_ms = 3600000
    q1 = Query({
        'intervalMs': hour_in_ms,
        'targets': [{
            'target': 'fubar',
            'type': 'timeserie'
        }],
        'maxDataPoints': 5,
        'range': {
            'from': "2020-01-01T12:00:00.000Z",
            'to': "2020-01-04T12:00:00.000Z"
        }
    })
    expected = [{
        'datapoints': [['val', 1577847600000], ['val', 1577847600000],
                       ['val', 1577847600000], ['val', 1577847600000],
                       ['val', 1577847600000]],
        'target': 'fubar'
    }]
    self.assertEqual(expected, self.responder.query(q1))

class Contact:
    """ Contact class """

    def __init__(self):
        """ Initialize contact class """
        self.model = {
            "name": "contacts",
            "id": "contact_id",
            "fields": ("contact_id", "customer_id", "name", "department",
                       "email", "phone", "infotext"),
            "types": ("INTEGER PRIMARY KEY NOT NULL", "INTEGER NOT NULL",
                      "TEXT", "TEXT", "TEXT", "TEXT", "TEXT")
        }
        self._contact = {}
        self._contacts = []
        self._csv_record_length = 8
        self.q = Query()
        if not self.q.exist_table(self.model["name"]):
            sql = self.q.build("create", self.model)
            self.q.execute(sql)

    @property
    def contact(self):
        """ Active contact """
        return self._contact

    @contact.setter
    def contact(self, contact_id):
        """ Set the active contact by looking up contact_id """
        self.find(contact_id)

    @property
    def list_(self):
        return self._contacts

    @list_.setter
    def list_(self, customer_id):
        self.load_for_customer(customer_id=customer_id)

    @property
    def csv_record_length(self):
        """ The number of fields expected on csv import """
        return self._csv_record_length

    def clear(self):
        """ Clear internal variables """
        self._contact = {}
        self._contacts = []

    def add(self, name, department="", phone="", email="", info=""):
        """ Create a contact

        Args:
            name:
            department:
            phone:
            email:
            info:
        """
        values = (None, name, department, email, phone, info)
        new_id = self.insert(values)
        return self.find(new_id)

    def delete(self, contact_id):
        """ Delete contact

        Args:
            contact_id:

        Returns:
            bool
        """
        filters = [("contact_id", "=")]
        values = (contact_id, )
        sql = self.q.build("delete", self.model, filters=filters)
        success, data = self.q.execute(sql, values=values)
        if success and data:
            return True
        return False

    def find(self, contact_id):
        """ Load specific contact by id

        Args:
            contact_id:

        Returns:
            bool
        """
        values = (contact_id, )
        sql = self.q.build("select", self.model)
        success, data = self.q.execute(sql, values=values)
        if success:
            try:
                self._contact = dict(zip(self.model["fields"], data[0]))
            except IndexError:
                pass
        if success and data:
            return True
        return False

    def translate_row_insert(self, row):
        """ Translate a csv row

        Args:
            row:
        """
        new_row = (row[0], row[1], row[2].strip(), row[3].strip(),
                   row[4].strip(), row[5].strip(), row[7].strip())
        self.insert(new_row)

    def insert(self, values):
        """ Insert items

        Args:
            values: contact data to insert in contact table

        Returns:
            the new rowid
        """
        sql = self.q.build("insert", self.model)
        success, data = self.q.execute(sql, values=values)
        if success and data:
            return data
        return False

    def load_for_customer(self, customer_id):
        """ Load contacts for customer

        Args:
            customer_id:

        Returns:
            bool
        """
        filters = [("customer_id", "=")]
        values = (customer_id, )
        sql = self.q.build("select", self.model, filters=filters)
        success, data = self.q.execute(sql, values=values)
        if success:
            try:
                self._contacts = [
                    dict(zip(self.model["fields"], row)) for row in data
                ]
                self._contact = self._contacts[0]
                return True
            except IndexError:
                self._contact = {}
                self._contacts = []
        return False

    def recreate_table(self):
        """ Drop and create table """
        sql = self.q.build("drop", self.model)
        self.q.execute(sql)
        sql = self.q.build("create", self.model)
        self.q.execute(sql)
        self.clear()

    def update(self):
        """ Update item

        Returns:
            bool
        """
        fields = list(self.model["fields"])[1:]
        filters = [(self.model["id"], "=")]
        values = self.q.values_to_update(self._contact.values())
        sql = self.q.build("update", self.model, update=fields, filters=filters)
        success, data = self.q.execute(sql, values=values)
        if success and data:
            return True
        return False

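# Illustrative usage sketch (assumptions: the Query SQLite helper used above is
# configured and the contacts table is populated; customer id 42 is made up).
contact = Contact()
contact.list_ = 42          # load the contacts stored for customer_id 42
print(contact.contact)      # the active contact as a dict of model fields
for c in contact.list_:
    print(c["name"], c["email"])
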
def search(self, string_query: str):
    """ Return a score for each document with regard to the query """
    query = Query(string_query.lower(), self.stopwords, self.lemmatizer)
    documents_list = self.get_list_of_documents(query)
    doc_scores = self.compute_scores(documents_list, query)
    return doc_scores

class Employee:
    """ Employee class """

    def __init__(self):
        """ Initialize Employee class """
        self.model = {
            "name": "employees",
            "id": "employee_id",
            "fields": ("employee_id", "salesrep", "fullname", "email",
                       "country", "sas"),
            "types": ("INTEGER PRIMARY KEY NOT NULL", "TEXT", "TEXT", "TEXT",
                      "TEXT", "INTEGER DEFAULT 0")
        }
        self._employee = {}
        self.q = Query()
        if not self.q.exist_table(self.model["name"]):
            sql = self.q.build("create", self.model)
            self.q.execute(sql)
        self.s = Settings()
        if rules.check_settings(self.s.settings):
            self.load(self.s.settings["usermail"])

    @property
    def employee(self):
        """ Return the current and only employee """
        return self._employee

    def insert(self, values):
        """ Insert employee in database

        Args:
            values:
        """
        sql = self.q.build("insert", self.model)
        self.q.execute(sql, values=values)

    def load(self, email):
        """ Load the employee """
        filters = [("email", "=")]
        values = (email, )
        sql = self.q.build("select", self.model, filters=filters)
        success, data = self.q.execute(sql, values)
        # first check if employee is loaded
        # second check is in exception handling
        try:
            _ = data[0]
            self._employee = dict(zip(self.model["fields"], data[0]))
        except IndexError:
            if httpFn.inet_conn_check():
                # load from http
                self.load_from_http()
                success, data = self.q.execute(sql, values)
                try:
                    # second check after load_from_http
                    _ = data[0]
                    self._employee = dict(zip(self.model["fields"], data[0]))
                except IndexError:
                    self._employee = {}

    def load_from_http(self):
        """ Load employee from http """
        self.s.get()
        data = httpFn.get_employee_data(self.s)
        if data:
            data = list(data)
            data[0:0] = [None]
            self.insert(tuple(data))

    def update(self):
        """ Update employee in database """
        fields = list(self.model["fields"])[1:]
        filters = [(self.model["id"], "=")]
        values = self.q.values_to_update(self._employee.values())
        sql = self.q.build("update", self.model, update=fields, filters=filters)
        self.q.execute(sql, values=values)

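# Illustrative usage sketch (assumptions: local settings hold a valid "usermail"
# and the Query/Settings helpers above are available); Employee loads itself
# from the settings email in __init__ when the settings validate.
employee = Employee()
if employee.employee:
    print(employee.employee["fullname"], employee.employee["email"])
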
def query_save(request, query_id=0):
    _g = request.REQUEST.get
    query_id = int(_g('query_id', '') or 0)
    if query_id:
        model = Query.objects.get(id=query_id)
    else:
        model = Query()
    err_msg = ''
    model.log_type = 0
    model.log_key = request.REQUEST.get('log_key', '0')
    model.select = request.REQUEST.get('select', '')
    model.remark = request.REQUEST.get('remark', '')
    model.name = request.REQUEST.get('name', '')
    model.where = request.REQUEST.get('where', '')
    model.group = request.REQUEST.get('group', '')
    model.order = request.REQUEST.get('order', '')
    model.cache_validate = int(request.REQUEST.get('cache_valid', 0))
    model.order_type = int(request.REQUEST.get('order_type', '0'))
    model.sql = request.REQUEST.get('sql', '')
    model.other_sql = request.REQUEST.get('other_sql', '')
    model.field_config = request.REQUEST.get("field_config", "")
    model.template_name = request.REQUEST.get('template_name', '')
    try:
        save_id = int(_g('save_id', '') or 0)
        if save_id != query_id and Query.objects.filter(id=save_id):
            err_msg = 'ID already exists'
        else:
            if save_id:
                model.id = save_id
                query_id = save_id
            model.save(using='write')
    except Exception as e:
        err_msg = trace_msg()
        print('query save error:', e)

def get(self):
    c = h.context()
    c['model'] = 'query'
    c['model_properties'] = sorted(Query.properties())
    h.render_out(self, 'admin.tplt', c)

class Report:
    """ Report """

    def __init__(self):
        """ Initialize Report class """
        self.model = {
            "name": "reports",
            "id": "report_id",
            "fields": ("report_id", "employee_id", "rep_no", "rep_date",
                       "timestamp", "newvisitday", "newdemoday", "newsaleday",
                       "newturnoverday", "recallvisitday", "recalldemoday",
                       "recallsaleday", "recallturnoverday", "sasday",
                       "sasturnoverday", "demoday", "saleday", "kmmorning",
                       "kmevening", "supervisor", "territory", "workday",
                       "infotext", "sent", "offday", "offtext", "kmprivate"),
            "types": ("INTEGER PRIMARY KEY NOT NULL", "INTEGER NOT NULL",
                      "INTEGER NOT NULL", "TEXT NOT NULL", "TEXT NOT NULL",
                      "INTEGER DEFAULT 0", "INTEGER DEFAULT 0",
                      "INTEGER DEFAULT 0", "REAL DEFAULT 0",
                      "INTEGER DEFAULT 0", "INTEGER DEFAULT 0",
                      "INTEGER DEFAULT 0", "REAL DEFAULT 0",
                      "INTEGER DEFAULT 0", "REAL DEFAULT 0",
                      "INTEGER DEFAULT 0", "INTEGER DEFAULT 0",
                      "INTEGER DEFAULT 0", "INTEGER DEFAULT 0",
                      "TEXT", "TEXT", "INTEGER DEFAULT 0", "TEXT",
                      "INTEGER DEFAULT 0", "INTEGER DEFAULT 0", "TEXT",
                      "INTEGER DEFAULT 0")
        }
        self._reports = []
        self._report = {}
        self._csv_record_length = 25
        self.q = Query()
        self.c = ReportCalculator()
        if not self.q.exist_table(self.model["name"]):
            sql = self.q.build("create", self.model)
            self.q.execute(sql)

    @property
    def csv_record_length(self):
        """ The number of fields expected on csv import """
        return self._csv_record_length

    @property
    def report(self):
        """ Report

        Returns:
            Active report
        """
        return self._report

    @property
    def reports(self):
        """ Report List

        Returns:
            Current list of reports
        """
        try:
            _ = self._reports[0]
        except (IndexError, KeyError):
            self.__get_by_period()
        return self._reports

    def clear(self):
        """ Clear internal variables """
        self.c.clear()
        self._report = {}
        self._reports = []

    def create(self, employee, workdate):
        """ Create a report for the employee and date supplied

        Args:
            :type employee: object
            :type workdate: str iso formatted representing the work date
        """
        # we need to find the number of reports for the month of the supplied date
        # then add 1 to that number
        # we need to calculate the sums for the previous report for the month
        # those sums will be stored in a separate table
        # creating a new table with
        # sum demos & sum sales
        # | *   |              DAY               |             MONTH              |
        # | --- | ------------------------------ | ------------------------------ |
        # | *   | Visit | Demo | Sale | Turnover | Visit | Demo | Sale | Turnover |
        # | --- | ------------------------------ | ------------------------------ |
        # | N   |  sum    sum    sum     sum     |  sum    sum    sum     sum     |
        # | R   |  sum    sum    sum     sum     |  sum    sum    sum     sum     |
        # | SAS |                sum     sum     |                sum     sum     |
        # | SUM |  sum    sum    sum     sum     |  sum    sum    sum     sum     |
        #
        # parameters for initial feed of ReportCalc
        # aggregates
        aggregates = [
            "count(report_id) AS 'report_count'",
            "sum(newvisitday) AS 'new_visit'",
            "sum(newdemoday) AS 'new_demo'",
            "sum(newsaleday) AS 'new_sale'",
            "sum(newturnoverday) AS 'new_turnover'",
            "sum(recallvisitday) AS 'recall_visit'",
            "sum(recalldemoday) AS 'recall_demo'",
            "sum(recallsaleday) AS 'recall_sale'",
            "sum(recallturnoverday) AS 'recall_turnover'",
            "sum(sasday) AS 'sas'",
            "sum(sasturnoverday) AS 'sas_turnover'",
            "(sum(newvisitday) + sum(recallvisitday)) AS 'current'",
            "(sum(newdemoday) + sum(recalldemoday)) AS 'demo'",
            "(sum(newsaleday) + sum(recallsaleday) + sum(sasday)) AS 'sale'",
            "(sum(newturnoverday) + sum(recallturnoverday) + sum(sasturnoverday)) AS 'turnover'",
            "(sum(kmevening - kmmorning)) AS 'kmwork'",
            "(sum(kmprivate)) AS 'kmprivate'",
            "(sum(workday = 1)) AS 'workdays'",
            "(sum(offday = 1)) AS 'offdays'"
        ]
        filters = [("rep_date", "LIKE", "and"), ("employee_id", "=", "and"),
                   ("sent", "=")]
        ym_filter = "{}%".format(workdate[:8])
        employee_id = employee["employee_id"]
        territory = employee["salesrep"]
        values = (ym_filter, employee_id, 1)
        sql = self.q.build("select", self.model,
                           aggregates=aggregates, filters=filters)
        success, data = self.q.execute(sql, values)
        if success and data:
            # assign expected result from list item
            try:
                _ = data[0]
            except IndexError:
                return False
            # temporarily convert tuple to list
            current_month_totals = list(data[0])
            # extract report count from first column
            report_count = int(current_month_totals[0])
            # increment report count
            next_report = report_count + 1
            # build a combined list with the identifiers and the totals
            current_month_totals = [workdate, "None", employee_id] + current_month_totals
            timestamp = datetime.today()
            # build the tuple of values to initialize the new report
            new_report_values = (None, employee_id, next_report, workdate,
                                 timestamp, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                                 0, 0, 0, "", territory, 1, "", 0, 0, "", 0)
            # assign return value as new report_id
            report_id = self.insert(new_report_values)
            # insert report_id to identify for which report the totals were calculated
            current_month_totals[1] = report_id
            # revert to tuple
            current_month_totals = tuple(current_month_totals)
            # insert the values in the calculation table
            self.c.insert(current_month_totals)
            return True
        else:
            return False

    def insert(self, values):
        """ Insert new report in table

        Args:
            :type values: iterable
        """
        sql = self.q.build("insert", self.model)
        success, data = self.q.execute(sql, values=values)
        if success and data:
            return data
        return False

    def load(self, workdate=None, year=None, month=None):
        """ Load reports for a given period
        If none given load all

        Args:
            :type workdate: str
            :type year: str
            :type month: str
        """
        self.__get_by_period(workdate, year, month)

    def recreate_table(self):
        """ Drop and initialize reports table """
        self.c.recreate_table()
        sql = self.q.build("drop", self.model)
        self.q.execute(sql)
        sql = self.q.build("create", self.model)
        self.q.execute(sql)
        self.clear()

    def translate_row_insert(self, row, employee_id):
        """ Translate a csv row

        Args:
            :type row: iterable
            :type employee_id: int
        """
        # translate bool text to integer for col 19, 21
        field_19 = utils.bool2int(utils.arg2bool(row[19]))
        field_21 = utils.bool2int(utils.arg2bool(row[21]))
        # create timestamp
        local_timestamp = datetime.today()
        values = (row[0], employee_id, row[1], row[2].strip(), local_timestamp,
                  row[3], row[4], row[5], row[6], row[7], row[8], row[9],
                  row[10], row[11], row[12], row[13], row[14], row[15],
                  row[16], row[17].strip(), row[18].strip(), field_19,
                  row[20].strip(), field_21, row[22], row[23].strip(), row[24])
        self.insert(values)

    def update(self):
        """ Update report in database """
        # update_list = list(self.model["fields"])[1:]
        # update_where = [(self.model["id"], "=")]
        # self.q.values_to_update(self._report.values())
        # if DBG:
        #     printit("{}\n ->update\n ->sql: {}\n ->values: {}".format(sql, values))
        # if DBG:
        #     printit("{}\n ->update\n ->success: {}\n ->data: {}".format(success, data))
        pass

    def __get_by_period(self, workdate=None, year=None, month=None):
        """ Load reports matching args or all if no args

        Args:
            :type workdate: str
            :type year: str
            :type month: str
        """
        if workdate:
            try:
                _ = self._reports[0]
                for report in self._reports:
                    if report["workdate"] == workdate:
                        self._report = report
                        return
            except (IndexError, KeyError):
                pass
        filters = [("rep_date", "like")]
        value = "{}-{}-{}".format("%", "%", "%")
        if year:
            value = "{}-{}-{}".format(year, "%", "%")
        if year and month:
            value = "{}-{}-{}".format(year, month, "%")
        values = (value, )
        sql = self.q.build("select", self.model, filters=filters)
        success, data = self.q.execute(sql, values=values)
        if success and data:
            try:
                _ = data[0]
                self._reports = [
                    dict(zip(self.model["fields"], row)) for row in data
                ]
                self._reports = sorted(self._reports,
                                       key=itemgetter("rep_date"),
                                       reverse=True)
                if workdate:
                    for report in self._reports:
                        if report["rep_date"] == workdate:
                            self._report = report
                            break
                if not self._report:
                    self._report = self._reports[0]
            except IndexError:
                self._report = {}
                self._reports = []
        else:
            self._report = {}
            self._reports = []

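# Illustrative usage sketch (assumed data: the employee dict only needs the
# "employee_id" and "salesrep" keys that Report.create reads; the date and
# ids are made up).
report = Report()
employee = {"employee_id": 1, "salesrep": "REP-01"}
if report.create(employee, "2017-06-01"):
    report.load(year="2017", month="06")
    print(report.report)    # the active report for the loaded period
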