    def get_bucket(self, bucket, validate=True):
        """ Retrieve a bucket by name.

        Keyword arguments:
        bucket - The name of the bucket
        validate - If ``True``, the function will try to verify the bucket exists
            on the service-side. (Default: ``True``)
        """

        if not validate:
            return Bucket(self, bucket)

        response = self.make_request("HEAD", bucket)
        if response.status == 200:
            return Bucket(self, bucket)
        elif response.status == 401:
            err = get_response_error(response)
            err.code = "invalid_access_key_id"
            err.message = "Request not authenticated, Access Key ID is either " \
                "missing or invalid."
            raise err
        elif response.status == 403:
            err = get_response_error(response)
            err.code = "permission_denied"
            err.message = "You don't have enough permission to accomplish " \
                "this request."
            raise err
        elif response.status == 404:
            err = get_response_error(response)
            err.code = "bucket_not_exists"
            err.message = "The bucket you are accessing doesn't exist."
            raise err
        else:
            err = get_response_error(response)
            raise err
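
A minimal usage sketch (not from the original source): assume conn is an instance of the connection class that defines get_bucket above, and that the raised error exposes the code attribute set in that method.

try:
    bucket = conn.get_bucket("my-bucket")            # HEAD request validates existence
except Exception as err:
    if getattr(err, "code", None) == "bucket_not_exists":
        bucket = conn.create_bucket("my-bucket")     # create_bucket is shown further below
    else:
        raise

# Skip the HEAD round trip when the bucket is known to exist.
bucket = conn.get_bucket("my-bucket", validate=False)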
Example #2
    def modify(self, tens, delBuckets=None, repBuckets=None):
        '''
        Creates a copy of this Node with tens as its Tensor. Omits Buckets at the indices
        listed in delBuckets and replaces Buckets at the indices listed in repBuckets with
        new Bucket objects. delBuckets and repBuckets must not contain overlapping elements,
        and len(delBuckets) + len(tens.shape()) must be at least len(self.tensor().shape()).
        '''
        if delBuckets is None:
            delBuckets = []

        if repBuckets is None:
            repBuckets = []

        assert self in self._network.topLevelNodes()
        assert len(set(delBuckets).intersection(set(repBuckets))) == 0
        assert len(delBuckets) + len(tens.shape()) - \
            len(self.tensor().shape()) >= 0

        Buckets = []

        for i, b in enumerate(self.buckets()):
            if i not in delBuckets:
                if i not in repBuckets:
                    Buckets.append(b)
                else:
                    Buckets.append(Bucket(self.network()))

        n = Node(
            tens,
            self._network,
            children=[self],
            Buckets=Buckets,
            logScalar=self._logScalar)

        return n
Example #3
def main():
    # bit stream: raw data
    bit_stream = []
    bit_bucket = []
    for i in range(10):
        bit_stream.append([])
        bit_bucket.append(Bucket())

    count = 0
    number_stream = []

    with open('cmsc5741_stream_data.txt', 'r') as file_read:
        integer_stream = read_stream_data(file_read)
        for data in integer_stream:
            update_bit_stream_and_bucket(
                data, bit_stream, bit_bucket)  # converts each value to int and updates the streams/buckets
            # estimation
            estimation_sum = estimate_sum_of_last_hundred(
                bit_stream, bit_bucket)
            # real
            number_stream.insert(0, data)
            if len(number_stream) > 100:
                number_stream.pop()
            true_sum = sum(int(num) for num in number_stream)
            # output
            count = count + 1
            print('%d true_sum = %d, estimate_sum = %d, error = %.3f%%' %
                  (count, true_sum, estimation_sum,
                   (estimation_sum - true_sum) / true_sum * 100))
        # whenever output is needed
        output_bit_stream_and_bucket(bit_stream, bit_bucket)
Example #4
    def __init__(self, query_date=None):
        """
        基本的工具,
        :param query_date:
        """
        self.bucket = Bucket()

        self.query_date = date.today().strftime(
            '%Y-%m-%d') if query_date is None else query_date
        self.aws_path = SETTINGS['AWS_PATH'].format(
            dt=self.query_date.replace('-', ''))

        if not exists(dirname(self.aws_path)):
            os.makedirs(dirname(self.aws_path))

        # Base Mongo Config
        self.client = MongoClient(SETTINGS['MONGO_HOST'],
                                  SETTINGS['MONGO_PORT'])
        self.collection = self.client[SETTINGS['MONGO_DB']][
            SETTINGS['MONGO_COLLECTION']]

        # Filtering Config
        filter_path = join(dirname(abspath(__file__)), 'filter')

        if not exists(filter_path):
            os.makedirs(filter_path)
        self.filter_filename = join(filter_path, 'filter.txt')
        self._get_unique_from_file()
Example #5
 def __generate_buckets(self, dna, number_of_buckets):
     score_cards = dna.structure
     bucket_size = len(score_cards) // number_of_buckets  # integer division so islice gets an int
     iterable = iter(score_cards)
     buckets = list(iter(lambda: tuple(islice(iterable, bucket_size)), ()))
     buckets = [Bucket(chunk) for chunk in buckets]
     return buckets
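
The iter()/islice() idiom above chunks an iterable into fixed-size tuples. A small self-contained illustration of that idiom, independent of the Bucket class:

from itertools import islice

def chunk(iterable, size):
    # Call islice repeatedly until it yields an empty tuple, which stops iter().
    it = iter(iterable)
    return list(iter(lambda: tuple(islice(it, size)), ()))

print(chunk(range(10), 3))   # [(0, 1, 2), (3, 4, 5), (6, 7, 8), (9,)]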
Example #6
def run_livecheck(repo_name):
    from bucket import Bucket
    from dupestats import DupeStats
    from authorset import AuthorSet
    from rewriter import rewritechart

    bucket = Bucket(path=repo_name, desc=f"{repo_name}-descriptors")
    bucket.extract()

    stat = DupeStats(path=repo_name, dump_file=f"dupestats_{repo_name}.json")
    stat.dupe_stats()

    authorSet = AuthorSet(
        chartdir=repo_name,
        authorset_charts=f"authorsets_{repo_name}_charts.json",
        authorset_maint=f"authorsets_{repo_name}_maint.json",
        authorset_email=f"authorsets_{repo_name}_emails.json",
        authorset_heatmap=f"authorsets_{repo_name}-heatmap.png",
        authorset_dot=f"authorsets_{repo_name}.dot",
        authorset_png=f"autorsets_{repo_name}.png",
        authorset_pdf=f"authorsets_{repo_name}.pdf",
        dupestats_charts=f"dupestats_{repo_name}.json")

    authorSet.preprocess()
    authorSet.process()
    authorSet.processproposals()
Example #7
 def rebuild(self):
     old_buckets = self.buckets
     self.bucket_count *= 2
     self.buckets = [Bucket() for _ in range(self.bucket_count)]
     self.size = 0
     for bucket in old_buckets:
         for item in bucket:
             self.insert(item.key, item.value)
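
For context, a minimal self-contained hash table sketch (the class layout and the 0.75 load-factor threshold are assumptions, not taken from the original source) showing how a rebuild like the one above is typically triggered:

class Bucket(list):
    """A bucket is simply a list of items here."""

class Item:
    def __init__(self, key, value):
        self.key, self.value = key, value

class HashTable:
    def __init__(self, bucket_count=8):
        self.bucket_count = bucket_count
        self.buckets = [Bucket() for _ in range(bucket_count)]
        self.size = 0

    def insert(self, key, value):
        if self.size / self.bucket_count > 0.75:      # assumed load-factor threshold
            self.rebuild()
        self.buckets[hash(key) % self.bucket_count].append(Item(key, value))
        self.size += 1

    def rebuild(self):
        # Same idea as the snippet above: double the bucket array and re-insert everything.
        old_buckets = self.buckets
        self.bucket_count *= 2
        self.buckets = [Bucket() for _ in range(self.bucket_count)]
        self.size = 0
        for bucket in old_buckets:
            for item in bucket:
                self.insert(item.key, item.value)

table = HashTable()
for i in range(20):
    table.insert("key-%d" % i, i)
print(table.bucket_count)   # grew from 8 to 32 as the load factor was exceeded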
Example #8
 def bucket(self, bucket_name):
     """
      Construct a bucket object.
      Parameters:
          bucket_name:  name of the bucket
     """
     b = Bucket(self, bucket_name)
     return b
Example #9
 def setUp(self):
     self.bin = Bucket('test')
     c1 = Coin(1, 1)
     c2 = Coin(2, 2)
     c3 = Coin(3, 3)
     self.bin.add_coin(c1, 7)
     self.bin.add_coin(c2, 8)
     self.bin.add_coin(c3, 9)
Example #10
    def test_bucket_sync_too_fast(self):
        myBucket = Bucket(20000, drain_period=86400)

        user = _randomBytes()
        self.assertTrue(myBucket.userCanSyncAmount(user, 10000, 1234))
        self.assertEqual(myBucket.currentSizeForUser(user, 1234), 10000)
        self.assertTrue(myBucket.userCanSyncAmount(user, 10000, 1235))
        self.assertFalse(myBucket.userCanSyncAmount(user, 10000, 1236))
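
The Bucket implementation under test is not shown in this listing; a rough sketch that would satisfy assertions like the ones above (field names and the linear-drain rule are assumptions) might look like this:

class Bucket:
    def __init__(self, capacity, drain_period):
        self.capacity = capacity
        self.drain_period = drain_period      # seconds for a full bucket to drain
        self._users = {}                      # user -> (level, last successful sync time)

    def currentSizeForUser(self, user, now):
        level, last = self._users.get(user, (0, now))
        drained = self.capacity * (now - last) / self.drain_period
        return max(0, level - drained)

    def userCanSyncAmount(self, user, amount, now):
        level = self.currentSizeForUser(user, now)
        if level + amount > self.capacity:
            return False                      # request would overflow the bucket
        self._users[user] = (level + amount, now)
        return True

    def cleanUsers(self, now):
        # Forget users whose bucket has fully drained by `now`.
        self._users = {u: state for u, state in self._users.items()
                       if self.currentSizeForUser(u, now) > 0}

    def userCount(self):
        return len(self._users)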
Example #11
 def addNodeFromArray(self, arr):
     '''
     Takes as input an array and constructs a Tensor and Node around it,
     then adds the Node to this Network.
     '''
     t = Tensor(arr.shape, arr)
     return Node(t,
                 self,
                 Buckets=[Bucket(self) for _ in range(len(arr.shape))])
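
For example (a hedged sketch, assuming net is a Network instance exposing the method above and that numpy is available):

import numpy as np

arr = np.random.rand(2, 3, 4)
node = net.addNodeFromArray(arr)   # Node wrapping a Tensor, with one Bucket per index (3 here)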
Example #12
def load_sync(path):
    """
    Loads image from S3
    :param string path: Path to load
    """
    bucket = os.environ.get('AWS_LOADER_BUCKET')
    region = os.environ.get('AWS_REGION', 'eu-west-1')
    bucket_loader = Bucket(bucket, region)

    return bucket_loader.get(path)
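
A hedged usage sketch, assuming the two environment variables named above are set and that Bucket.get returns the object's raw bytes:

import os

os.environ.setdefault('AWS_LOADER_BUCKET', 'my-image-bucket')   # placeholder bucket name
os.environ.setdefault('AWS_REGION', 'eu-west-1')

image_bytes = load_sync('images/example.jpg')                   # hypothetical key
with open('example.jpg', 'wb') as fh:
    fh.write(image_bytes)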
Example #13
def getBucketStats(bucketName, timeSlice):
    bucket_id = args.cluster + ":buckets:" + timeSlice
    rb = cb_bucket.get(bucket_id)
    if rb != '':
        for cur_bucket in rb.value:
            if cur_bucket["name"] == bucketName:
                bucket = Bucket(cur_bucket)
                docId = args.cluster + ":bucket:" + bucketName + ":stats:" + timeSlice
                rs = cb_bucket.get(docId)
                bucket.setStats(rs.value)
                return bucket
Example #14
 def __init__(self, *args, **kwargs):
   """"""
   
   self._n_bkts = kwargs.pop('n_bkts', None)
   super(Metabucket, self).__init__(*args, **kwargs)
   if self._n_bkts is None:
     self._n_bkts = super(Metabucket, self).n_bkts
   self._buckets = [Bucket(self._config, name='Sents-%d' % i) for i in xrange(self.n_bkts)]
   self._sizes = None
   self._data = None
   self._len2bkt = None
   return
Example #15
 def __init__(self, debug, samples, requiredAccQty, queueSize,
              tokenArrivalSpeed, bucketCapacity, generatorType, lamb, tOn,
              tOff):
     self.debug = debug
     self.samples = samples
     self.requiredAccQty = requiredAccQty
     self.bucket = Bucket(tokenArrivalSpeed, bucketCapacity, bucketCapacity)
     if generatorType == "poisson":
         self.generator = PoissonGenerator(lamb)
     elif generatorType == "onoff":
         self.generator = OnOffGenerator(lamb, tOn, tOff)
     self.queue = QueueWrapper(queueSize)
Example #16
def load_sync(path):
    """
    Loads image from S3
    :param string path: Path to load
    """
    bucket = os.environ.get('AWS_LOADER_BUCKET')
    region = os.environ.get('AWS_REGION', 'eu-west-1')
    accessKeyId = os.environ.get('AWS_ACCESS_KEY_ID')
    secretAccessKey = os.environ.get('AWS_SECRET_KEY_ID')
    bucket_loader = Bucket(bucket, region, accessKeyId, secretAccessKey)

    return bucket_loader.get(path)
Example #17
 def add_bucket(self):
     operation_time = time.time()
     self.buckets.append(
         Bucket(bucket_id=self.n_buckets,
                debug=self.debug,
                chrono=self.chrono,
                universe=self.universe,
                source=self.source,
                vectorizer=self.vectorizer,
                stemming=self.stemming))
     self.n_buckets += 1
     self.debug.log("\tFinished in: " + str(time.time() - operation_time))
Example #18
def register():
    error = None
    register_from = RegisterForm()

    if request.method == 'POST':
        # print(request.form)
        if register_from.validate_on_submit():
            # if True:
            ID = request.form.get('id')
            username = request.form.get('username')
            password = request.form.get('password1')
            password2 = request.form.get('password2')
            # f = form.photo.data

            filename = images.save(register_from.photo.data)

            print(ID, username, password, password2)
            # ID = request.form['ID']
            # username = request.form['username']
            # password = request.form['password']

            # f.save(f.filename)
            query = datastore_client.query(kind='user')

            for entity in query.fetch():
                # print('id',entity['id'], 'pd', entity['password'])
                if str(ID) == entity["id"]:
                    error = "The ID already exists"
                    return render_template('register.html',
                                           form=register_from,
                                           error=error)
                elif str(username) == entity['user_name']:
                    error = "The username already exists"
                    return render_template('register.html',
                                           form=register_from,
                                           error=error)

            temp_uploaded_url = Bucket(bucket_name,
                                       PATH_BASE + str(filename)).image_url
            store_user(ID, username, password, temp_uploaded_url)
            return render_template('index.html', error=error)
        else:
            return render_template('register.html',
                                   form=register_from,
                                   error=error)
    else:

        return render_template('register.html',
                               form=register_from,
                               error=error)
Example #19
    def test_bucket_drain(self):
        myBucket = Bucket(20000, drain_period=86400)

        user = _randomBytes()
        self.assertTrue(myBucket.userCanSyncAmount(user, 20000, 1234))
        self.assertEqual(myBucket.currentSizeForUser(user, 1234), 20000)
        self.assertEqual(myBucket.currentSizeForUser(user, 1234 + 21600), 5000)
        self.assertEqual(myBucket.currentSizeForUser(user, 1234 + 43200),
                         10000)
        self.assertEqual(myBucket.currentSizeForUser(user, 1234 + 64800),
                         15000)
        self.assertEqual(myBucket.currentSizeForUser(user, 1234 + 86400),
                         20000)
Example #20
    def create_bucket(self, bucket, zone=Zone.DEFAULT):
        """ Create a new bucket.

        Keyword arguments:
        bucket - The name of the bucket
        zone - The zone in which the bucket and its objects will be located.
            (Default: follow the service-side rule)
        """
        headers = {"Location": zone}
        response = self.make_request("PUT", bucket, headers=headers)
        if response.status in [200, 201]:
            return Bucket(self, bucket)
        else:
            raise get_response_error(response)
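
A hedged usage sketch, assuming conn is the same connection object used with get_bucket above and that Zone holds the service's zone identifiers:

try:
    bucket = conn.create_bucket("my-new-bucket", zone=Zone.DEFAULT)
except Exception as err:
    # the PUT returned a status other than 200 or 201; err carries the response details
    print("bucket creation failed:", err)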
Example #21
    def __init__(self, filename, vocabs, *args, **kwargs):

        super(Dataset, self).__init__(*args, **kwargs)
        self._train = (filename == self.train_file)
        self.vocabs = vocabs
        self.buckets = [
            Bucket(self._config, name='Sents-%d' % i)
            for i in range(self.n_bkts)
        ]
        self.id2position = []
        self.len2bkts = {}
        self.reading_dataset(filename)
        self._finalize()
Example #22
    def addDim(self):
        # TODO: Should probably implement self-factoring so that memory usage
        # doesn't become an issue.

        self._dim += 1

        arr = kroneckerDelta(self._dim, self._length)
        tens = Tensor(arr.shape, arr)
        self._tensor = tens

        if self._parent is not None:
            self._parent.delete()

        self._buckets.append(Bucket(self._network))
        self._buckets[-1].addNode(self)
Example #23
    def test_bucket_clean(self):
        myBucket = Bucket(20000, drain_period=86400)

        user1 = _randomBytes()
        user2 = _randomBytes()
        user3 = _randomBytes()

        self.assertTrue(myBucket.userCanSyncAmount(user1, 10000, 0))
        self.assertTrue(myBucket.userCanSyncAmount(user2, 15000, 0))
        self.assertTrue(myBucket.userCanSyncAmount(user3, 20000, 0))

        myBucket.cleanUsers(50000)
        self.assertEqual(myBucket.userCount(), 2)
        self.assertTrue(myBucket.userCanSyncAmount(user1, 20000, 50000))
        self.assertFalse(myBucket.userCanSyncAmount(user2, 20000, 50000))
        self.assertFalse(myBucket.userCanSyncAmount(user3, 15000, 50000))
Example #24
 def collectItemAndChange(self):
     """Collect the purchased item and any change in a Bucket after a purchase."""
     bucket = Bucket()
     if self._collectItem():
         self._totalSales = self._totalSales + self._currentItem.getPrice()
         bucket.setItem(self._currentItem)
         returnedChanges = self._collectChange()
         bucket.setChanges(returnedChanges)
     else:
         bucket.setItem(None)
         refund_changes = self._refund()
         bucket.setChanges(refund_changes)
     self._currentBalance = 0
     self._currentItem = None
     self._currentlyInsertedCoins = []
     return bucket
Example #25
def __generate_users():
    base_id = 's3803990'
    base_name = 'Qixiang_Cheng'

    base_image_post_name = '.jpg'

    # image_url = bucket(bucket_name, source_file_name).image_url

    for i in range(10):
        source_file_name = PATH_BASE + str(i) + base_image_post_name

        temp_list = []
        temp_list.append(base_id + str(i))
        temp_list.append(base_name + str(i))
        temp_list.append(__random_password())
        temp_list.append(Bucket(bucket_name, source_file_name).image_url)
        user_dict[i] = temp_list
Example #26
    def dictToBucket(client):
        eMin = client["eMin"]
        eMax = client["eMax"]
        pMin = client["pMin"]
        pMax = client["pMax"]
        e = client["e"]
        p = client["p"]
        CID = client["CID"]
        return Bucket(eMin, eMax, pMin, pMax, e, p, CID)


# pBucketsAvailable = 0.0
# for unit in unitGroups['Bucket']:
#     pBucketsAvailable += unit.pAvailable()

# totalPAvailable = pDispatch + pBucketsAvailable
# p = min(unit.pForced, totalPAvailable)
# totalPAvailable -= p
Example #27
    def test_bucket_sync_multiple(self):
        myBucket = Bucket(20000, drain_period=86400)

        user1 = _randomBytes()
        user2 = _randomBytes()
        user3 = _randomBytes()

        self.assertTrue(myBucket.userCanSyncAmount(user1, 10000, 0))
        self.assertTrue(myBucket.userCanSyncAmount(user2, 15000, 0))
        self.assertTrue(myBucket.userCanSyncAmount(user3, 20000, 0))

        self.assertTrue(myBucket.userCanSyncAmount(user1, 10000, 1000))
        self.assertFalse(myBucket.userCanSyncAmount(user2, 15000, 10000))
        self.assertTrue(myBucket.userCanSyncAmount(user3, 20000, 100000))

        self.assertFalse(myBucket.userCanSyncAmount(user1, 10000, 10000))
        self.assertTrue(myBucket.userCanSyncAmount(user2, 1000, 20000))
        self.assertFalse(myBucket.userCanSyncAmount(user3, 20000, 110000))
Example #28
    def __init__(self,
                 water_level_setpoint,
                 time_interval=0.1,
                 maximum_flowrate=10,
                 initial_water_amount=0,
                 radius=15):
        '''
        maximum_flowrate in litre/minute
        time_interval in seconds
        radius and water_level_setpoint are in cm
        '''
        self.bucket = Bucket(radius=self.cm_to_m(radius),
                             initial_water_amount=initial_water_amount)
        self.controller = Controller(
            set_point=self.cm_to_m(water_level_setpoint),
            water_level=self.bucket.water_level,
            maximum_flowrate=self.lpm_to_m3ps(maximum_flowrate))
        self.time_interval = time_interval
Example #29
def forum_save(filename, subject, message, name):
    temp_url = Bucket(bucket_name, PATH_BASE + str(filename)).image_url
    kind1 = 'user'

    parent_key = datastore_client.key(kind1, session['id'])
    task_key = datastore_client.key('message', parent=parent_key)

    task = datastore.Entity(task_key)
    dt = datetime.datetime.now()

    task.update({
        'name': name,
        'subject': subject,
        'message': message,
        'url': temp_url,
        'timestamp': dt
    })

    datastore_client.put(task)
Example #30
def forum_update(id, filename, subject, message, name, timestamp):
    temp_url = Bucket(bucket_name, PATH_BASE + str(filename)).image_url

    complete_key = datastore_client.key("message", int(id))

    task = datastore.Entity(key=complete_key)

    task.update({
        "message": message,
        "name": name,
        "subject": subject,
        "timestamp": timestamp,
        'url': temp_url
    })

    datastore_client.put(task)

    delete_key = datastore_client.key("message", int(id))
    datastore_client.delete(delete_key)