Example #1
def save_complex_query(request):
    """
    Save or update a complex query and associate it with the current user.
    """

    if request.method == 'POST':

        query = request.POST['groups_query']
        name = request.POST['name']
        id = request.POST['id']
        user = request.user

        try:
            if id != '':

                query_obj = Query.objects.get(id=id)
                query_obj.name = name
                query_obj.query = query
                query_obj.saved = True

            else:
                query_obj = Query(name=name, query=query, saved=True)

            query_obj.save()
            query_obj.user.add(user)

            response = HttpResponse(status=200, content_type='application/json')
            response._is_string = False

            return response

        except Exception:
            return HttpResponse(status=500)  # signal failure to the caller
Example #2
    def load_store(self):
        missing = self.missing
        scale = self.scale

        if self.query_code == 0:
            sq = Query.get_all_files('archives', self.query_page,
                                     self.query_sort, self.query_order,
                                     self.query_fn_filter)
        elif self.query_code == 1:
            sq = Query.get_1tag_files('archives', self.query_page,
                                      self.query_sort, self.query_order,
                                      self.query_fn_filter)
        elif self.query_code == -1:
            sq = Query.get_notag_files('archives', self.query_page,
                                       self.query_sort, self.query_order,
                                       self.query_fn_filter)
        self.clear()
        for q in sq:
            try:
                pb = Pixbuf.new_from_file_at_size(IMGPATH.format(q[0]), scale,
                                                  scale)
            except GLib.Error:
                pb = missing
            self.append((
                *q[:-1],
                pb,
            ))
Example #3
def single_review(request, review_name_slug):
    context = {}
    try:
        review = Review.objects.get(user=request.user, slug=review_name_slug)
        context['review_title'] = review.title
        queries = Query.objects.filter(review=review)
        paper = Paper.objects.filter(review=review)
        context['queries'] = queries
        context['review'] = review
        context['paper'] = paper
        if request.method == "POST":
            if request.POST.get('delete_query', "") != "":
                query_to_delete = Query.objects.get(name=request.POST.get('delete_query'))
                if query_to_delete is not None:
                    query_to_delete.delete()
                    context['alert_message'] = "Query deleted: " + query_to_delete.name
            else:
                # Make sure an empty query will not be saved; strip spaces on both sides of the name.
                new_query = request.POST.get('queryField', default="")
                new_query = new_query.strip()
                if new_query != "" and not queries.filter(name=new_query).exists():
                    query = Query(review=review, name=new_query)
                    query.save()
                    context['alert_message'] = "Query saved: " + request.POST.get('queryField')
                else:
                    context['alert_message'] = "You are either saving an empty query or a query with this name already exists."
                queries = Query.objects.filter(review=review)
                context['queries'] = queries
                context['review'] = review

    except Review.DoesNotExist:
        pass
    return render(request, 'ultimatereview/querybuilder.html', context)
Example #4
def handle_request2(request, flag):
    parsed_req = ast.literal_eval(request)
    query = register_request(parsed_req, flag)  #Register the query happened
    rumor = search_if_exists(query)  #Find existing rumor

    #if the rumor exists link it, otherwise register if it's a report
    if rumor:
        query.linked_rumor = rumor
        query.save()
    elif query.query_type == '3':
        rumor = Rumor(body=query.body,
                      image_url=query.image_url,
                      image_local_path=query.image_local_path,
                      image_hash=query.image_hash,
                      video_url=query.video_url)  # e.g. image/jpeg
        rumor.save()
        query.rumor = rumor
        query.save()

    if query.query_type == '1':
        if not rumor:
            return '2'  # not a rumor
        else:
            if query.num_media == '0':  # text based (body)
                rumor_count = Query.objects(
                    body__exact=query.body,
                    query_type=2).count()  # 2 means rumor submission
            #it has media, each pic/video will be sent in an individual request.
            #We are not currently handling video
            elif query.num_media == '1':
                rumor_count = Query.objects(
                    image_hash__exact=query.image_hash).count()
            print(rumor_count)
            print('Found {} posts with tag "mongodb"'.format(rumor_count))
Example #5
def get_search_results(query_id, query_postings):
    # Check previously calculated queries for changes in the corpus
    query = Query.objects(id=query_id).only("results",
                                            "total_frequency").first()
    total_frequency = Entry.objects(id__in=query_postings.iterkeys()).only(
        "total_frequency").sum("total_frequency")
    if not query or total_frequency != query.total_frequency:
        results = []
        avg_length = Meme.objects.only("length").aggregate_average("length")
        idf, relevant_docs = get_idf_relevant_docs(query_postings)
        for meme in relevant_docs:  # Iterate through relevant documents and score each one
            bm25 = calculate_bm25(avg_length, idf, meme, query_postings)
            result = SearchResult(id=MemeId(source=meme.id.source,
                                            meme_id=meme.id.meme_id),
                                  name=meme.name,
                                  title=meme.title,
                                  caption=meme.caption,
                                  score=meme.score,
                                  url=meme.url,
                                  image=meme.image,
                                  bm25=bm25)
            results.append(result)
        results = sorted(results, key=lambda result: result.bm25,
                         reverse=True)[:200]
        query = Query(id=query_id,
                      results=results,
                      total_frequency=total_frequency)
        query.save()
    return query.results
Example #6
def create_query(count, approximate, selected, excluded_pks, image_data_url, user=None):
    query = Query()
    query.count = count
    if excluded_pks:
        query.excluded_index_entries_pk = [int(k) for k in excluded_pks]
    query.selected_indexers = selected
    query.approximate = approximate
    if not (user is None):
        query.user = user
    image_data = base64.decodestring(image_data_url[22:])
    if settings.HEROKU_DEPLOY:
        query.image_data = image_data
    query.save()
    dv = Video()
    dv.name = 'query_{}'.format(query.pk)
    dv.dataset = True
    dv.query = True
    dv.parent_query = query
    dv.save()
    if settings.HEROKU_DEPLOY:
        query_key = "queries/{}.png".format(query.pk)
        query_frame_key = "{}/frames/0.png".format(dv.pk)
        s3 = boto3.resource('s3')
        s3.Bucket(settings.MEDIA_BUCKET).put_object(Key=query_key, Body=image_data)
        s3.Bucket(settings.MEDIA_BUCKET).put_object(Key=query_frame_key, Body=image_data)
    else:
        query_path = "{}/queries/{}.png".format(settings.MEDIA_ROOT, query.pk)
        query_frame_path = "{}/{}/frames/0.png".format(settings.MEDIA_ROOT, dv.pk)
        create_video_folders(dv)
        with open(query_path, 'wb') as fh:
            fh.write(image_data)
        with open(query_frame_path, 'wb') as fh:
            fh.write(image_data)
    return query, dv
Example #7
 def get(self):
     if "id" in self.request.params:
         self.c['query'] = Query.get(self.request.params["id"])
         if not self.c['query'].can_edit():
             return self.redirect("/query/view/?id=%s" % self.c['query'].key())
     elif list(self.request.params.items()) != []:
         self.c['query'] = Query.fromParams(self.request.params)
     self.render("edit_query.html")
Example #8
 def execute(self):
     # color = self.color.color.name(QtGui.QColor.HexRgb)
     query = Query(self.input.toPlainText())
     query.execute()
     if query.error == Query.NoError:
         selection = Selection()
         selection.query = query
         DataView.Model.headerDataChanged.emit(Qt.Vertical, 0, Row.count())
Example #9
def annotation_search_view_results(request):
    """
        annotation Search view
    """
    dataset = ''
    datasetLabel = ''
    conceptClass = ''
    conceptLabel = ''
    explore = ''
    user = request.user
    name = 'query-' + datetime.utcnow().strftime("%Y-%m-%d-%H:%M")
    results = ''
    if request.GET.get('groups_query', None) is not None:

        groups_query = unquote(request.GET['groups_query'])
        load_groups = simplejson.loads(groups_query)
        dataset = unquote(request.GET['dataset'])
        datasetLabel = unquote(request.GET['datasetLabel'])
        conceptClass = class_search_connector(None, datasetLabel, num_max_hits='200', page_num='1').get('1',[])

        import re
        r = re.compile('sparqlEndpoint=(.*?)&')
        endpoint_url = r.search(dataset)
        if 'read/sparql' in endpoint_url.group(1):
            explore = endpoint_url.group(1).replace('read/sparql', 'explore/sql.html')
            explore = explore.replace('https://','https://*****:*****@'%request.ticket)
        ####### Save History #######
        if request.user.is_authenticated():
            query_obj = Query(name=name, query=groups_query)
            query_obj.save()
            query_obj.user.add(user)

        if not datasetLabel.count('https'):
            base_dataset = datasetLabel.replace('http', 'https')
        if dataset.count('/read') > 0:
            base_dataset = datasetLabel.replace('/read','')

        response = requests.get('%s/dpsschema.xml' % base_dataset, auth=('admin', request.ticket), verify=False)
        response.encoding = 'utf-8'
        import xmltodict
        annotationSearch = xmltodict.parse(response.text)

        objectProperties = annotationSearch['DataInstance']['Tables']['Table']['Fields']['Field']
        table_root = '%s/unannotated#%s' % (base_dataset, annotationSearch['DataInstance']['Tables']['Table']['D2rName'])
        column = {}
        for annotation in objectProperties:
            #range = '%s/unannotated#%s'%( base_dataset, annotation['D2rName'])
            #rangeLabel = annotation['Name']
            termRange = '%s/mapping#has_roottable_%s' % (base_dataset, annotation['D2rName'])
            column[termRange] = annotation['D2rName']
        #query_sparql = json2sparql(load_groups)
        query_sparql = json2csvquery(load_groups, column, table_root)
        results = dataset_query_connector(query_sparql, endpoint_url, request.user.username, request.ticket)

    return render_to_response('scs_search/scs_search.html',
                              {'search': 'complex', 'queryresults': results, 'dataset': dataset, 'datasetLabel': datasetLabel
                                  , 'class': conceptClass, 'breadcrum': [1, 1, 1], 'load_groups': json.dumps(groups_query) , 'conceptClass':conceptClass, 'explore':explore},
                              RequestContext(request))
Example #10
def _enqueue(auth_info, **kwargs):
    result = Query(**kwargs)
    result.put()
    taskqueue.add(url='/queue/pop', params={
        'qid': result.key.id(),
        'oauth_token': auth_info.get('oauth_token', ''),
        'oauth_token_secret': auth_info.get('oauth_token_secret', '')
    })
    return result
Example #11
def save_query(sequence, job_id, description):
    """
    Create query object in the main database.
    """
    query = Query(id=job_id,
                  query=sequence.upper(),
                  description=description,
                  submitted=datetime.datetime.now())
    query.save()
Example #12
def import_raw(query: Query) -> None:
    """Queries twitter based on the query, stores the raw results
    """
    parameters = query.load()

    results = load_tweets(parameters.get('size'), parameters.get('input'))
    ids = save_raw(results)

    query.add_results('tweet', ids)
Example #14
def handle_check(query):
    if query.num_media == '0':
        count = Query.objects(body__exact=query.body, query_type=2).count()
    elif query.num_media == '1':
        count = Query.objects(image_hash__exact=query.image_hash,
                              query_type=2).count()

    if count > 15:  # threshold number of spam reports
        return 1  # decider decided it is rumor
    return 2  # fewer reports than what qualifies as a rumor
Example #15
 def set_tag_id(self, id):
     file = Query.get_tag(id)
     self.id = file.id
     self.name = file.name if file.name is not None else ""
     self.note = file.note if file.note is not None else ""
     self.flag = file.flag if file.flag is not None else ""
     self.rating = file.rating
     self.thumb = file.thumb
     self.thumbpath = f'/media/soni/1001/persistent/1001/thumbs/{file.thumb}.jpg'
     #
     for q in Query.get_tag_aliases(self.id):
         self.aliases.add_tagchild(q[0], q[1])
Example #16
    def feedback(self):
        """Provide a feedback for a status.

        Overrides the older feedback if there was any.
        """
        req = json.loads(self.request.body)
        try:
            qid = int(req.get('qid', ''))
            sid = int(req.get('sid', ''))
            score = int(req.get('score', ''))
            if score < -1 or score > 1:
                self.abort(400, detail='Invalid score.')
            uid = self.user_session['user_id']
            q = Query.get_by_id(qid)
            if q is None:
                raise ValueError()
            f = Feedback.gql('WHERE uid = :1 AND qid = :2 AND sid = :3',
                             uid, qid, sid).get()
            if f is None:
                f = Feedback(uid=uid, qid=qid, sid=sid)
            f.score = score
            f.put()
            self.write({'message': 'success'})
        except ValueError:
            self.abort(400,
                       detail='Please specify valid query ID and status ID.')
Example #17
def get_long_url(short_url):
    long_url = Query.long_from_short(short_url)
    if not long_url:
        return abort(404)

    Submmit.add_successfull_redirect(db)
    return redirect(long_url, code=302)
Example #18
def delete(name):
    query = Query.find(name)
    if query:
        query.delete()

    json_data = json.dumps({'status': 'ok'})
    return Response(json_data, mimetype='application/json')
Example #19
    def process_response(self, request, response):
        path = request.get_full_path()
        user = None
        user_id = request.session.get('user_id', None)

        start_time = request.session.get('time')
        if start_time:
            run_time = time.time() - start_time
        else:
            run_time = -1

        if user_id:
            del request.session['user_id']
            user = Users.objects.get(pk=user_id)

        if not self.is_filter_path(path):
            data = {
                'remote_address': request.META['REMOTE_ADDR'],
                'request_method': request.method,
                'request_path': path,
                'request_body': request.body,
                'response_status': response.status_code,
                'user': user,
                'run_time': run_time
            }
            Query(**data).save()

        return response
Example #20
    def process_query(self, entity, attribute, verbose=False):
        '''
        Process the user query and return the aggregated sentiment and related entries.

        :param entity:  str
        :param attribute: str
        :param verbose: bool
        :rtype: (float, List[AttributeEntry])
        '''

        query = Query(entity, attribute)
        if verbose:
            print("Query parsed.")

        relevant_entries = self.data_source.lookup(query)
        count = len(relevant_entries)
        if verbose:
            print("{} relevant entr{} found.".format(
                count, "y" if count == 1 else "ies"))

        if count == 0:
            return None, []

        aggregateData = [{
            'sentiment': expr.sentiment,
            'is_header': expr.is_header
        } for entry in relevant_entries for expr in entry.expressions]
        score = self.aggregator_service.aggregate_sentiment(aggregateData)
        if verbose:
            print("Sentiment scores aggregated.")

        return score, relevant_entries
Example #21
    def do_button_press_event(self, event):
        if event.button == Gdk.BUTTON_SECONDARY:
            selection = self.get_selected_items()
            path = self.get_path_at_pos(event.x, event.y)
            # selection = self.get_selection()
            # pos = self.get_path_at_pos(event.x, event.y)# path, column, cell_x, cell_y
            if path:
                #clicked any content
                if path in selection:
                    #clicked in selection
                    self.menu.popup(None, None, None, None, event.button, event.time)
                else:
                    #clicked outside of selection
                    # Gtk.IconView.do_button_press_event(self, event)
                    self.unselect_all()
                    self.select_path(path)

                    self.menu.popup(None, None, None, None, event.button, event.time)
            else:
                #clicked empty area
                self.unselect_all()
                return False
        elif event.button == Gdk.BUTTON_MIDDLE:
            path = self.get_path_at_pos(event.x, event.y)
            self.select_path(path)
            model = self.get_model()
            iter = model.get_iter(path)
            filepath = Query.get_file_path(model[iter][0])
            open_file(filepath, 'mcomix')
        else:
            Gtk.IconView.do_button_press_event(self, event)
Example #22
def save(name, sql, meta_vars, connection, editors):
    return Query.create_or_update(
        name=name,
        sql=sql,
        meta_vars=meta_vars,
        connection=connection,
        editors=editors)
Example #23
def scrape_job_data(user):
    try:
        if request.method == 'POST':
            print(request)
            site = request.json["site"]
            job_type = request.json["type"]
            city = request.json["city"]
            country = request.json["country"]
            province = request.json["province"]
            technologies = request.json["technologies"]
            print(job_type, technologies)
           
            output = scrape(job_type, technologies, site)  # pass the parameters to the scrape function
            if len(output["jobs"]) == 0:
                return jsonify({"type": "Error", "message": "No results found..."}), 400


            query = {}
            if user:
                query = Query(site=site, job_type=job_type,city=city, country=country, province=province, user_id=user.id)
                db.session.add(query)
                db.session.commit()
                save_to_db(query.id, output["jobs"])
            output["query"] = query
            print("NUM JOBS ", len(output["jobs"]))
            return jsonify(output)
    except Exception as e:
        print("Error", e)
        return jsonify({"type": "Error", "message": "Error fetching job results, please try again"}), 400
Example #24
    def set_query_from_folder(self, fol_int, finish_cb):
        self.clear()
        for q in Query.get_tags(fol_int):
            self.append((
                *q[:4],
                None,
                q[4],
            ))

        def add_pb(model, path, iter, data):
            try:
                pb = Pixbuf.new_from_file_at_size(
                    IMGPATH.format(model[iter][3]), 192, 192)
            except GLib.Error:
                pb = avatar
            GLib.idle_add(self.set_value, iter, 4, pb)

        def another(finish_cb):
            self.foreach(add_pb, None)
            finish_cb()

        # thread = multiprocessing.Process(target=another, args=(finish_cb,), daemon=True)
        # thread.start()
        thread = threading.Thread(target=another,
                                  name='tag',
                                  args=(finish_cb, ),
                                  daemon=True)
        thread.start()
Example #25
    def data(self, index, role=None):
        # logger.debug("index={}, role={}".format(QModelIndex, role))
        row, column = index.row(), index.column()

        if role == Qt.DisplayRole:
            query = Query.get(row)
            if query:
                if column == QueryListModel.Id:
                    return str(query.id)
                if column == QueryListModel.Input:
                    return str(query.input)
                if column == QueryListModel.Status:
                    return query.status_label
                if column == QueryListModel.Returned:
                    return str(len(query.result))

        # if role == Qt.EditRole:
        #     cell = Cell.get(row, column)
        #     return str(cell.data) if cell else None

        if role == Qt.SizeHintRole:
            return QueryListModel.CellSize

        if role == Qt.BackgroundRole:
            return QueryListModel.SelectedRowColor
Example #26
def search_references(request, taxon_id, items_per_page):
    '''Search references related to the taxon using Mendeley.'''
    dajax = Dajax()

    items = int(items_per_page) + int(items_per_page)

    # Get taxon.
    taxon = Taxon.objects.get(id=taxon_id)

    # Search Mendeley.
    results = search(taxon.name, items=items)

    # Create database query.
    if not results['total_results']:
        results['total_results'] = 0
    query = Query(
            total_results=results['total_results'],
            taxon=taxon,
            items_per_page=items,
            )
    query.save()

    # Define values.
    articles_count = taxon.articles.count()
    total_results = results['total_results']

    # Get ratio.
    fetch_ratio = get_ratio(articles_count, total_results)

    # Extract only uuid from documents.
    uuids = [str(document['uuid']) for document in results['documents']]

    # Get datetime object.
    timestamp = query.timestamp

    # Assign values to be updated.
    dajax.assign('#last-updated', 'innerHTML', date(timestamp, DATETIME_FORMAT))
    dajax.assign('#fetch-ratio', 'innerHTML', fetch_ratio)
    dajax.assign('#total-results', 'innerHTML', total_results)
    dajax.script('$("#updating").fadeOut();')
    dajax.script('$("#fetching-notice").fadeIn();')
    dajax.script('$("#fetching-notice #yellow-loading").fadeIn();')
    dajax.assign('#being-fetched', 'innerHTML', items)
    dajax.script('Dajaxice.livingbib.alive.get_details(Dajax.process, {"uuids": "%s", "articles_count": "%d", "total_results": "%d", "rank": "0", "new": "0", "taxon_id": "%s"})' % (uuids, articles_count, total_results, taxon_id))
    return dajax.json()
Example #27
def query(request):
    ConditionSet = formset_factory(ConditionForm, extra=5)
    if request.method == 'POST':
        selectionset = ConditionSet(request.POST, request.FILES)
        if selectionset.is_valid():
            query = Query(qid=makeQID(), query=constructQuery(selectionset.cleaned_data))
            query.save()
            return HttpResponseRedirect('/portal/results/%s/' % query.qid)
    else:
        selectionset = ConditionSet(initial=[
                {'lower': u'5000',
                 'upper': u'5050',
                 'parameter':'RadTransWavelengthExperimentalValue',
                 'connection':True,
                 },
                ])
        
    return render_to_response('portal/query.html', {'selectionset': selectionset})
Example #28
 def get_all_queries(self) -> list:
     data = self.__query_sheets('Queries!A:ZZ')
     result = list()
     for entry in data:
         try:
             result.append(Query(keyword=entry[0], maximum_price=int(entry[1]), ignore=entry[2:]))
         except Exception:
             logging.error("Failed to process row {}".format(entry))
     return result
Example #29
 def post(self):
     if "preview" in self.request.params:
         self.redirect("/query/edit/?%s" % urlencode(self.request.params))
     else:
         query = Query.fromParams(self.request.params)
         if not query.can_edit():
             return self.redirect("/query/edit/?%s" % urlencode(self.request.params))
         query.save()
         self.redirect("/query/view/?id=%s" % query.key())
Example #30
 def latest(self):
     """Retrieves the latest queries."""
     queries = Query.gql('WHERE status = :1 ORDER BY updated DESC',
                         Status.Done).fetch(limit=10)
     qs = []
     for q in queries:
         d = q.to_dict(include=['query'])
         d['qid'] = q.key.id()
         qs.append(d)
     self.write({'items': qs})
Example #31
 def on_update_button_clicked(self, widget):
     r = Query.update_file(
         media='archives',
         index=self.id,
         thumb=self.thumbpath,
         set=self.set,
         note=self.note,
         rating=self.rating,
         )
     self.emit('updated','Done', r)
Example #32
def read_config():
    global db

    if settings.CONFIG_FILE:
        logger.info("reading config file: " + settings.CONFIG_FILE)
        config = json.loads(get_file_or_s3(settings.CONFIG_FILE))

        for query_config in config["queries"]:
            query = Query(json=query_config)
            db.save_query(query)
            logger.debug("saved query " + query.id + ": " + query.search)
Example #33
def search(input: str, size: int = 100) -> Query:
    """Adds a search query onto a queue for importing and processing
    """
    # TODO - Take the rate limit into account; the importer will fail and need re-queueing if it is hit
    # TODO - Multiple depends_on values may be coming soon; that would allow an end-of-processing notification to exist

    query = Query()
    query.save(input=input, size=size, stream='twitter')

    source_queue = Queue(name='source')
    process_queue = Queue(name='process')

    # Import tweets
    import_job = source_queue.enqueue(import_raw, query=query)

    # Process tweets
    process_queue.enqueue(aggregate_pixels, query=query, depends_on=import_job)
    process_queue.enqueue(aggregate_adjectives, query=query, depends_on=import_job)

    return query
Example #34
def sendMessage():
    form = FeedbackForm()
    if form.validate_on_submit():
        new = Query(content=form.content.data,
                    name=current_user.username,
                    patient_id=current_user.id,
                    doctor_id=current_user.mydoctor)
        db.session.add(new)
        db.session.commit()
        flash('Message sent', 'message')
        return redirect(url_for('home'))
    return render_template('sendMessage.html', form=form)
Example #35
 def set_file_id(self, id):
     file = Query.get_file(id)
     try:
         # self.imgfile = f'/media/soni/1001/persistent/1001/thumbs/{file.id}.jpg'
         self.imgfile = IMGPATH.format(file.id)
     except Exception as e:
         pass
     self.id = file.id
     self.filename = file.filename
     self.filepath = file.filepath
     self.size = format_size(file.size)
     self.mtime = str(file.mtime)
     self.thumbpath = file.thumb if file.thumb is not None else ""
     self.set = file.set if file.set is not None else ""
     self.note = file.note if file.note is not None else ""
     self.rating = file.rating
     #
     for q in Query.file_tags(id):
         self.tags_container.add_tagchild(q[0], q[1])
     for q in Query.tag_findall(file.filename):
         self.rcmmnds_container.add_sggstchild(q)
Example #36
def postQuery(request):
    '''Provide a query form to the user and record the time, date and userID of the poster.
    Before doing so, the method checks whether the user is authenticated;
    if authenticated, the user can post a query,
    otherwise a message pops up telling the user to log in first.
    Parameter: request --> all the information of the form is stored in request.
    Returns the post-query HTML page.'''

    f = QueryForm()
    dc = {'form': f}
    context = RequestContext(request, dc)
    Timestamp = datetime.now()
    if request.method == 'POST':
        f = QueryForm(request.POST)
        if not f.is_valid():
            dc = {'form': f}
            context = RequestContext(request, dc)
            return render(request, 'postquery.html', context)
        else:
            q = Query()
            q.queryText = f['text'].data
            q.userID = request.user
            q.queryTime = Timestamp
            q.save()
            qID = q.queryID
            dc = {'queryID': qID}
            context = RequestContext(request, dc)
            notification.send([request.user], "query", {"query": q})
            return render(request, 'postquery.html', context)
            #notification.send([request.user], "query", {"query": query})
    return render(request, 'postquery.html', context)
Example #37
def search(input: str, size: int = 100) -> Query:
    """Adds search query into a queue for importing and processing
    """
    # TODO - Take into account the rate limit, the importer will fail and need adding back to the queue if hit
    # TODO - multi depends_on maybe coming soon. It would allow a end of processing notification to exist

    query = Query()
    query.save(input=input, size=size, stream='twitter')

    source_queue = Queue(name='source')
    process_queue = Queue(name='process')

    # Import tweets
    import_job = source_queue.enqueue(import_raw, query=query)

    # Process tweets
    process_queue.enqueue(aggregate_pixels, query=query, depends_on=import_job)
    process_queue.enqueue(aggregate_adjectives,
                          query=query,
                          depends_on=import_job)

    return query
Example #38
 def set_query_from_folder2(self, fol_int):
     self.clear()
     for q in Query.get_tags(folder_id=fol_int):
         try:
             pb = Pixbuf.new_from_file_at_size(IMGPATH.format(q[3]), 192,
                                               192)
         except GLib.Error:
             pb = avatar
         self.append((
             *q[:4],
             pb,
             q[4],
         ))
Example #39
def single_review(request, review_name_slug):
    context = {}
    try:
        review = Review.objects.get(user=request.user, slug=review_name_slug)
        context['review_title'] = review.title
        queries = Query.objects.filter(review=review)
        paper = Paper.objects.filter(review=review)
        context['queries'] = queries
        context['review'] = review
        context['paper'] = paper
        if request.method == "POST":
            if request.POST.get('delete_query', "") != "":
                query_to_delete = Query.objects.get(
                    name=request.POST.get('delete_query'))
                if query_to_delete is not None:
                    query_to_delete.delete()
                    context[
                        'alert_message'] = "Query deleted: " + query_to_delete.name
            else:
                #making sure an empty query will not be saved, removing spaces on both sides of the name
                new_query = request.POST.get('queryField', default="")
                new_query = new_query.strip()
                if new_query != "" and not queries.filter(
                        name=new_query).exists():
                    query = Query(review=review, name=new_query)
                    query.save()
                    context[
                        'alert_message'] = "Query saved: " + request.POST.get(
                            'queryField')
                else:
                    context[
                        'alert_message'] = "You are either saving an empty query or a query with this name alreday exists."
                queries = Query.objects.filter(review=review)
                context['queries'] = queries
                context['review'] = review

    except Review.DoesNotExist:
        pass
    return render(request, 'ultimatereview/querybuilder.html', context)
Example #40
def send_stats_from_db():
    statistics = {
        "all_count": Query.count_all_redirects(db),
        "all_good_today": Query.get_all_good_redirects_from_today(db),
        "all_bad_today": Query.count_all_bad_redirects_from_today(db),
        "all_good_hour": Query.get_all_good_redirects_from_hour(db),
        "all_bad_hour": Query.get_all_bad_redirects_from_hour(db),
        "all_good_minute": Query.get_all_good_redirects_from_minute(db),
        "all_bad_minute": Query.get_all_bad_redirects_from_minute(db)
    }

    return jsonify(statistics)
Example #41
def main(packages, fetch_index, num_queries, show_progress, *args, **kwargs):

    # Read names of target packages from file
    with open(packages) as packages_file:
        package_list = [line.strip() for line in packages_file.readlines()]

    # Set up progress bar.
    if show_progress:
        progress_bar = ProgressBar(maxval=len(package_list), widgets=[
            'Progress: ', Percentage(),
            ' ', Bar(marker=RotatingMarker()),
            ' ', ETA(),
            ' Fetched posts for ', Counter(), ' / ' + str(len(package_list)) + ' packages.'
        ])
        progress_bar.start()

    # Fetch statistics for posts related to each tag
    for package_count, package in enumerate(package_list, start=1):

        # We identify the queries related to a package as those that have seeds
        # that begin with the package name followed by a space.
        # We then group the queries, annotating them with a score that's computed
        # as the sum of reciprocal depths where it appears.
        top_queries = (
            Query.select(
                Query.query,
                fn.Sum(1.0 / (Query.depth + 1)).alias('score')
            )
            .join(Seed, on=(Seed.id == Query.seed))
            .where(Seed.seed % (package + ' %'))
            .where(Query.fetch_index == fetch_index)
            .group_by(Query.query).order_by(SQL('score').desc())
            .limit(num_queries)
        )

        records = []
        for query in top_queries:
            records.append({
                'package': package,
                'query': query.query,
            })
        yield records

        if show_progress:
            progress_bar.update(package_count)

    if show_progress:
        progress_bar.finish()

    return
Example #42
def _result(qid):
    result = Query.get_by_id(qid)
    if result is not None:
        d = result.to_dict(exclude={'email', 'uid'})
        d['qid'] = qid
        if result.status == Status.Done and result.methods:
            methods = ndb.get_multi(result.methods)
            tweets = set()
            for ind, m in enumerate(methods):
                # Note: should this be the first 5 or random 5?
                tweets.update(m.status_ids[:5])
            d['status_ids'] = list(tweets)
            random.shuffle(d['status_ids'])
        return d
Example #43
    def execute(self):
        query = Query(select=self.input.toPlainText())
        query.execute()
        if query.error == Query.NoError:
            var_name = self.target.text().strip()
            column = None
            new_column = False
            if var_name:
                logger.debug("var_name={}".format(var_name))
                column = Column.get_by_name(var_name)

            if not column:
                column = Column(Column.count(), name=var_name)
                new_column = True

            logger.debug("new_column={}".format(new_column))

            for row, data in query.result.items():
                if new_column:
                    Cell(row, column.id, data=data)
                else:
                    cell = Cell.get(row, column.id)
                    cell.data = data
Example #44
    def post(self, request, *args, **kwargs):
        try:
            alert_d = request.data
            # alert_d['employer_profile_id'] = alert_d.pop('employer_profile')['profile_id']
            # alert_d['location_id'] = alert_d.pop('location')['location_id']
            if 'alert_id' not in alert_d:
                okStatus = status.HTTP_201_CREATED
            else:
                okStatus = status.HTTP_200_OK

            if alert_d['query']['terms']:
                alert_d['query']['terms'] = ','.join(alert_d['query']['terms'])
            else:
                alert_d['query']['terms'] = None

            if alert_d['query']['employer_names']:
                alert_d['query']['employer_names'] = ','.join(alert_d['query']['employer_names'])
            else:
                alert_d['query']['employer_names'] = None

            if alert_d['query']['locations']:
                alert_d['query']['locations'] = ','.join(alert_d['query']['locations'])
            else:
                alert_d['query']['locations'] = None
            query = Query(**alert_d.pop('query'))
            query.save()

            alert_d['query'] = query

            alert = Alert(**alert_d)
            alert.save()

            return Response(AlertSerializer(alert).data, status=okStatus)

        except Exception as e:
            print('%s (%s)' % (e, type(e)))
            return Response(str(e))
Example #45
def query(request):
    result = dict()
    result['code'] = 0
    result['message'] = 'success'

    if request.method == 'POST':
        check = ['phone', 'message', 'lat', 'lng']
        inputs = dict()
        for field in check:
            if request.POST.get(field):
                value = request.POST[field]
                inputs[field] = value
            else:
                print('Parameter missing')
                result['code'] = 1
                result['message'] = 'Error: Parameter ' + field + ' is missing'
                return HttpResponse(json.dumps(result))
        try:
            user = User.objects.filter(phone=inputs['phone']).get()
            if user.gender == 'F':
                new_query = Query(user=user,
                                  message=inputs['message'],
                                  lat=inputs['lat'],
                                  lng=inputs['lng'])
                new_query.save()
            else:
                print('Error: Querying is only valid for female user')
                result['code'] = 1
                result['message'] = 'Error: Querying is only valid for female user'
                return HttpResponse(json.dumps(result))
        except Exception as e:
            print(str(e))
            result['code'] = 1
            result['message'] = 'Error: Unregistered User'

    return HttpResponse(json.dumps(result))
Example #46
 def set_query_like_text(self, text):
     self.clear()
     if text == "":
         return
     for q in Query.get_tags(filter_text=text):
         try:
             pb = Pixbuf.new_from_file_at_size(IMGPATH.format(q[3]), 192,
                                               192)
         except GLib.Error:
             pb = avatar
         self.append((
             *q[:4],
             pb,
             q[4],
         ))
Example #47
def advance_search_view(request):
    dataset = ''
    datasetLabel = ''
    conceptClass = ''
    user = request.user
    name = 'query-' + datetime.utcnow().strftime("%Y-%m-%d-%H:%M")
    results = ''
    breadcrum = [0, 0, 0]
    if request.GET.get('groups_query', None) is not None:

        groups_query = unquote(request.GET['groups_query'])
        load_groups = simplejson.loads(groups_query)

        ####### Save History #######
        if user.is_authenticated():
            query_obj = Query(name=name, query=groups_query)
            query_obj.save()
            query_obj.user.add(user)

        results = complex_query_connector(load_groups, request.user)
        breadcrum[0] = 1
    elif request.GET.get('id', None) is not None:

        query_obj = Query.objects.get(id=request.GET['id'])
        groups_query = unquote(query_obj.query)
        load_groups = simplejson.loads(groups_query)

        ####### Save History #######
        query_obj = Query.objects.get(id=request.GET['id'])
        if user.is_authenticated():
            if not query_obj.saved:
                query_obj.name = name
            query_obj.query = groups_query
            query_obj.date = datetime.utcnow()
            query_obj.save()
            query_obj.user.add(user)

        results = complex_query_connector(load_groups, request.user)
        breadcrum[0] = 1

    return render_to_response('scs_search/scs_search.html',
                              {'search': 'complex', 'results': results, 'dataset': dataset, 'datasetLabel': datasetLabel
                                  , 'class': conceptClass, 'breadcrum': breadcrum, 'classLabel': ''},
                              RequestContext(request))
Example #48
 def set_query_tag_id(self, tag_id):
     scale = self.scale
     missing = self.missing
     self.tag_id = tag_id
     sq = Query.get_files('archives', 'filepath', 'desc', tag_id)
     # sq = Query.get_files(obj.query_media, obj.query_sort, obj.query_order, int(tag), filter=obj.query_fn_filter)
     self.clear()
     for q in sq:
         try:
             pb = Pixbuf.new_from_file_at_size(IMGPATH.format(q[0]), scale,
                                               scale)
         except GLib.Error:
             pb = missing
         self.append((
             *q[:-1],
             pb,
         ))
Example #49
def load_model(city_code):
    # check database, if not found make api call

    print("loding model ***********************************")
    model = Query.query.filter_by(city_code=city_code).first()

    if not model:
        print("api call ***********************************")
        dataset = Dataset(city_code)
        model = PricePredictionModel(dataset)

        model = Query(city_code=model.city_code, model=model)

        db.session.add(model)
        db.session.commit()

    print("model loaded ***********************************")
    return model
Example #50
 def set_query_filter_text(self, value):
     scale = self.scale
     missing = self.missing
     sq = Query.get_files('archives',
                          'filepath',
                          'desc',
                          self.tag_id,
                          filter=f'%{value}%')
     self.clear()
     for q in sq:
         try:
             pb = Pixbuf.new_from_file_at_size(IMGPATH.format(q[0]), scale,
                                               scale)
         except GLib.Error:
             pb = missing
         self.append((
             *q[:-1],
             pb,
         ))
Example #51
def notify(qid):
    result = Query.get_by_id(qid)
    if not result.email:
        return
    message = """
Expanded search results for "{query}" are available at {url}

Don't forget to provide feedback at the above URL.
"""
    try:
        url = 'http://tweetement.com/#/result/' + str(qid)
        body = message.format(query=result.query, url=url)
        mail.send_mail(sender='Tweetement <*****@*****.**>',
                       to=result.email,
                       subject='Results for %s are ready' % result.query,
                       body=body)
    except Exception as e:
        # Oh well.
        logging.exception(e)
Example #52
    def on_add_tag_button_clicked(self, widget, entry):
        text = entry.get_text()
        tag_id, res = Query.add_file_tag_from_text('archives', self.id, text)
        if res:
            #ASK
            popover = Gtk.Popover()
            popover.set_relative_to(widget)
            popover.set_position(Gtk.PositionType.TOP)
            flow = TagFlowBox()
            flow.connect("child-clicked", self.on_rcmmnds_child_clicked)
            for q in res:
                flow.add_sggstchild(q)

            popover.add(flow)
            popover.show_all()
            return

        self.tags_container.add_tagchild(tag_id, text)
        entry.set_text("")
Example #53
def test_twitter_search_gets_added_onto_queue_to_be_processed():
    """Ensures that multiple queries get added onto an empty queue

    Run four search queries
    Check that four queries were added
    Check that the queue size is four
    """

    with Connection(connection=redis_db):
        queue = Queue(name='source')

        query = 'Test Query'
        twitter.search(input=query)
        twitter.search(input=query)
        twitter.search(input=query)
        twitter.search(input=query)

        assert len(Query.keys()) == 4
        assert queue.count == 4
Example #54
def create_query(count, approximate, selected, excluded_pks, image_data_url):
    query = Query()
    query.count = count
    if excluded_pks:
        query.excluded_index_entries_pk = [int(k) for k in excluded_pks]
    query.selected_indexers = selected
    query.approximate = approximate
    query.save()
    dv = Video()
    dv.name = 'query_{}'.format(query.pk)
    dv.dataset = True
    dv.query = True
    dv.parent_query = query
    dv.save()
    create_video_folders(dv)
    image_data = base64.decodestring(image_data_url[22:])
    query_path = "{}/queries/{}.png".format(settings.MEDIA_ROOT, query.pk)
    query_frame_path = "{}/{}/frames/0.png".format(settings.MEDIA_ROOT, dv.pk)
    with open(query_path, 'wb') as fh:
        fh.write(image_data)
    with open(query_frame_path, 'wb') as fh:
        fh.write(image_data)
    return query, dv
Example #55
 def pop(self):
     """Pops a query from the queue and performs query expansion."""
     qid = int(self.request.get('qid'))
     auth_info = (self.request.get('oauth_token'),
                  self.request.get('oauth_token_secret'))
     result = Query.get_by_id(qid)
     if not result or result.status != Status.Pending:
         logging.warning('Query not pending. qid={}'.format(qid))
         return
     logging.info('Queue pop: {}'.format(qid))
     result.status = Status.Working
     result.put()
     try:
         expand_query(qid, auth_info)
         result.status = Status.Done
     except Exception as e:
         logging.exception(e.message)
         result.status = Status.Cancelled
         result.status_msg = e.message
     result.put()
     if result.status == Status.Done:
         notify(qid)
Example #56
def complex_query_service(request):
    """
        Complex Query Service
    """

    if request.method == 'POST':

        groups_query = request.POST['groups_query']
        id = request.POST.get('id', "")
        load_groups = simplejson.loads(groups_query)

        ####### Save History #######
        user = request.user
        name = 'query-' + datetime.utcnow().strftime("%Y-%m-%d-%H:%M")

        try:
            if id == "":
                query_obj = Query(name=name, query=groups_query)
            else:
                query_obj = Query.objects.get(id=id)
                if not query_obj.saved:
                    query_obj.name = name
                query_obj.query = groups_query
                query_obj.date = datetime.utcnow()
            if user.is_authenticated():
                query_obj.save()
                query_obj.user.add(user)
        except Exception:
            pass
            ############################

        connector = json.dumps(complex_query_connector(load_groups, request.user), sort_keys=False)

        response = HttpResponse(content=connector,
                                content_type='application/json')
        response._is_string = False

        return response
Example #57
 def execute(self):
     # color = self.color.color.name(QtGui.QColor.HexRgb)
     query = Query(self.input.toPlainText())
     query.execute()
Example #58
def run_query(request):
    user = request.user
    project = Project.objects.get(created_by=user.id)
    query_name = request.POST.get("query_name", "")
    from_date = request.POST.get("datepicker_from", "")
    to_date = request.POST.get("datepicker_to", "")
    language = request.POST.get("lan", "")
    query = Query(name=query_name, venn=request.POST.get("query_logic", ""), from_date=parser.parse(from_date), to_date=parser.parse(to_date),
                  created=timezone.now(), created_by=user, owned_by=project)
    query.save()
    keywords = request.POST.get("keywords", "")
    category = Category.objects.get(name="Keywords")
    query_property = Query_properties(query=query, category=category, properties=keywords)
    query_property.save()
    twitter = request.POST.get("twitter", "")
    category = Category.objects.get(name="Twitter")
    query_property = Query_properties(query=query, category=category, properties=twitter)
    query_property.save()
    facebook = request.POST.get("facebook", "")
    category = Category.objects.get(name="Facebook")
    query_property = Query_properties(query=query, category=category, properties=facebook)
    query_property.save()
    brands = request.POST.get("brands", "")
    try:
        category = Category.objects.filter(name="brands")
    except ValueError as e:
        print(e)
    if category:  # the category already exists
        category = category[0]
    ## otherwise create the category
    else:
        #print "is empty"
        category = Category(name="brands")
        category.save()
    query_property = Query_properties(query=query, category=category, properties=brands)
    query_property.save()
    query_lan = Query_languages(query=query, language=language)
    query_lan.save()

    ##handle dynamic properties
    i = 0
    prop_value = "prop-value-%s" % i
    prop_name = "prop-name-%s" % i
    while request.POST.get(prop_value, ""):
        property_name = request.POST.get(prop_name, "")
        property_value = request.POST.get(prop_value, "")
        try:
            ## try to find if the category already exists - in lowercase
            category = Category.objects.filter(name=(str(property_name).lower()))
        except ValueError:
            #print ValueError.message
            continue

        if category:  # the category already exists
            category = category[0]
        ## otherwise create the category
        else:
            category = Category(name=str(property_name).lower())
            category.save()
            ## end store the properties in the category
        query_property = Query_properties(query=query, category=category, properties=property_value)
        query_property.save()

        i += 1
        prop_value = "prop-value-%s" % i
        prop_name = "prop-name-%s" % i

    return query.id