Example #1
0
def _getPercentiler(query, field, samples, name):
    """Build and persist a Percentiler for `field` over the entities matched by `query`.

    Splits the observed [min, max] value range into `samples` equal-width bands,
    counts entities per band concurrently, and stores the resulting histogram in
    a Percentiler entity keyed by `name`.

    :param query: ndb query selecting the entities to sample
    :param field: ndb model property to build percentiles over
    :param samples: number of histogram bands (also the minimum entity count required)
    :param name: id for the stored Percentiler entity
    :return: the saved Percentiler, or None if there is too little data
    """
    count = query.count(limit=samples)
    if count >= samples:
        # Projection queries fetch only the field value, not whole entities.
        minEntity = query.order(field).get(projection=[field])
        maxEntity = query.order(-field).get(projection=[field])
        if minEntity and maxEntity:
            minValue = getattr(minEntity, field._name)
            maxValue = getattr(maxEntity, field._name)

            futures = []
            width = (maxValue - minValue) / float(samples)
            for i in range(0, samples):
                # BUGFIX: bands must be offset by minValue; previously
                # `start = i * width` anchored the histogram at 0, producing
                # wrong counts whenever the minimum value is non-zero.
                start = minValue + i * width
                end = start + width
                rangeQuery = query.filter(field >= start)
                if i < (samples - 1):
                    rangeQuery = rangeQuery.filter(field < end)
                else:
                    rangeQuery = rangeQuery.filter(field <= end)  # include the max value!
                # Issue all band counts asynchronously so they run concurrently.
                futures.append(rangeQuery.count_async())

            # would be smart to vary the size of bands based on results, in case data is not nicely distributed...

            Future.wait_all(futures)
            histogram = [future.get_result() for future in futures]
            total = sum(histogram)
            if total > 0:
                percentiler = Percentiler(id=name)
                percentiler.compute(total, minValue, maxValue, histogram)
                percentiler.put()
                return percentiler
Example #2
0
File: upload.py  Project: grevian/FaceSite
    def post(self):
        """Handle a blobstore image upload.

        Generates thumbnail/display serving URLs concurrently, persists the
        image metadata, and transactionally defers the analysis task. Webcam
        AJAX uploads receive the gallery path in the response body; browser
        form uploads get an HTTP redirect. Any failure is logged and answered
        with a 500.
        """
        try:
            blob = self.get_uploads()[0]
            file_info = self.get_file_infos()[0]
            key = blob.key()

            # Kick off both serving-URL lookups at once, then wait for both.
            thumb_future = get_serving_url_async(key, size=120, secure_url=True)
            full_future = get_serving_url_async(key, size=480, secure_url=True)
            Future.wait_all([full_future, thumb_future])

            # Image metadata entity (stored inside the transaction below).
            image = Image(
                bucket_key=str(key),
                gs_object_name=file_info.gs_object_name.split("/")[-1:][0],
                display_url=full_future.get_result(),
                thumbnail_url=thumb_future.get_result()
            )

            @transactional
            def _store_and_defer():
                # Store the image, its (empty) analysis child, and enqueue the
                # deferred analysis task atomically.
                image_key = image.put()
                analysis = ImageAnalysis(
                        parent=image_key,
                        id=image_key.id()
                )
                analysis.put()
                deferred.defer(analyze_image, image_key.id())

            _store_and_defer()

            gallery_path = '/gallery/%s' % image.key.id()
            if self.request.get("webcam", None):
                # Webcam AJAX uploads perform the redirect client-side.
                self.response.write(gallery_path)
            else:
                self.redirect(gallery_path)

        except Exception as e:
            logging.error(e)
            self.error(500)
Example #3
0
def _update_entities(*entities):
    """ Updates entities in a transaction

        All puts are issued asynchronously and awaited together, so the
        datastore writes overlap instead of running one at a time.

        Parameters:
        :param entities: entities to be updated

        Returns:
        :return: an array with the result for each entity, in input order

        Raises:
        :raises: TransactionFailedError if the transaction fails
    """
    futures = [entity.put_async() for entity in entities]
    Future.wait_all(futures)
    return [future.get_result() for future in futures]
Example #4
0
    def post(self):
        """
        Add audio fingerprint hash records to the database in response to POST
        requests containing JSON encoded data in the body. Body data should
        be a dict containing the database key id for the song being added and
        a list of tuples containing a hash id and list of absolute offsets
        in the song: {"song_id": <int>, "hashes": [(<int>, [<int>, ...]), ...]}
        """
        # Reject callers that do not present the shared API key.
        api_entity = Key(urlsafe=API_ENTITY_KEY).get()
        if self.request.headers['API_KEY'] != api_entity.api_key:
            self.error(401)
            return

        payload = json.loads(self.request.body)
        hashes = payload["hashes"]
        song_key_id = Key(Songs, payload["song_id"]).id()

        logging.info("POST /hashes - length: {}".format(len(hashes)))

        # Fetch every fingerprint entity concurrently, then merge in any
        # (song, offset) pairs it does not already contain.
        fetches = ndb.get_multi_async([Key(Hashes, hash_id) for hash_id, _ in hashes])
        dirty = []
        for fetch, (hash_id, offsets) in zip(fetches, hashes):
            record = fetch.get_result() or Hashes(id=hash_id, song_list=[])
            additions = [(song_key_id, offset) for offset in offsets
                         if (song_key_id, offset) not in record.song_list]
            if additions:
                record.song_list.extend(additions)
                dirty.append(record)

        # Only write back the entities that actually changed.
        if dirty:
            Future.wait_all(ndb.put_multi_async(dirty))
            logging.info("Handled {} records.".format(len(dirty)))

        self.response.headers.add_header('Content-Type', 'application/json')
        self.response.out.write(json.dumps(len(hashes)))
Example #5
0
File: main.py  Project: rookie/Resound-App
    def post(self):
        """
        Add audio fingerprint hash records to the database in response to POST
        requests containing JSON encoded data in the body. Body data should
        be a dict containing the database key id for the song being added and
        a list of tuples containing a hash id and list of absolute offsets
        in the song: {"song_id": <int>, "hashes": [(<int>, [<int>, ...]), ...]}
        """
        # Authenticate against the stored API key entity; bail out early on
        # mismatch.
        key_entity = Key(urlsafe=API_ENTITY_KEY).get()
        if self.request.headers['API_KEY'] != key_entity.api_key:
            self.error(401)
            return

        body = json.loads(self.request.body)
        incoming = body["hashes"]
        song_ref = Key(Songs, body["song_id"]).id()

        logging.info("POST /hashes - length: {}".format(len(incoming)))

        # Start all fingerprint lookups asynchronously so they overlap.
        lookups = ndb.get_multi_async([Key(Hashes, h) for h, _ in incoming])

        to_store = []
        for lookup, (fp_id, offset_list) in zip(lookups, incoming):
            # Fall back to a fresh, empty fingerprint entity when none exists.
            fingerprint = lookup.get_result() or Hashes(id=fp_id, song_list=[])
            fresh = [(song_ref, off) for off in offset_list
                     if (song_ref, off) not in fingerprint.song_list]
            if fresh:
                fingerprint.song_list.extend(fresh)
                to_store.append(fingerprint)

        if to_store:
            Future.wait_all(ndb.put_multi_async(to_store))
            logging.info("Handled {} records.".format(len(to_store)))

        self.response.headers.add_header('Content-Type', 'application/json')
        self.response.out.write(json.dumps(len(incoming)))