def update(self):
    """Recalculate the month's derived properties when items are modified or deleted."""
    self.spend = sum(item.price for item in self.items)
    self.average = self.spend * 1.0 / self.number_of_people
    self.put()
    money_usages = ndb.get_multi(self.money_usages)
    for mu in money_usages:
        mu.update(self)
    # NOTE: called outside a tasklet, ndb.sleep() merely returns a future;
    # it does not pause unless waited on (compare ndb.sleep(0).wait() below).
    ndb.sleep(0.1)
@classmethod
def new_month(cls, people_key_strings):
    """End the current month, then create and return a new month.

    :return: the new month
    """
    people_keys = [ndb.Key(urlsafe=url_string)
                   for url_string in people_key_strings]
    people = ndb.get_multi(people_keys)

    # get last month
    prev_month = cls.end_month()

    # new month
    new_month = Month(prev_month=prev_month.key if prev_month else None,
                      next_month=None,
                      people=people_keys,
                      money_usages=[],
                      items=[])
    new_month.put()

    # end prev month
    if prev_month:
        prev_month.next_month = new_month.key
        prev_month.put()

    # create money usages
    money_usages = []
    for person_key, person in zip(people_keys, people):
        lml = person.get_last_month_left()
        money_usage = MoneyUsage(person=person_key,
                                 money_to_pay=-lml,
                                 last_month_left=lml,
                                 month=new_month.key)
        money_usages.append(money_usage)
    ndb.put_multi(money_usages)
    money_usages = [money_usage.key for money_usage in money_usages]
    new_month.money_usages = money_usages
    new_month.put()

    # update each person's last money usage and chain next_money_usage
    for person, money_usage in zip(people, money_usages):
        if person.last_money_usage:
            lmu = person.last_money_usage.get()
            lmu.next_money_usage = money_usage
            lmu.put()
        person.last_money_usage = money_usage
    ndb.put_multi(people)
    ndb.sleep(0.7)
    return new_month
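# A hypothetical call site for the rollover above; start_new_month and the
# Person.query() fetch are assumptions based on the surrounding app code:
def start_new_month():
    people = Person.query().fetch()
    key_strings = [p.key.urlsafe() for p in people]
    return Month.new_month(key_strings)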
def edit_item(self):
    try:
        item = int(self.request.get("item"))
        month_key = self.request.get("key")
        month = Key(urlsafe=month_key).get()
        # date
        raw_date = self.request.get("date")
        date = datetime.strptime(raw_date, "%Y-%m-%d")
        # buyer
        buyer = Key(urlsafe=self.request.get("buyer"))
        if buyer not in month.people:
            raise ValueError
        # what
        what = self.request.get("what")
        if len(what) == 0:
            raise ValueError
        # price
        price = self.request.get("price")
        try:
            # NOTE: eval of user input; add_item guards this with a regex first
            price = eval(price)
            if price <= 0:
                raise ValueError
        except Exception:
            self.response.status = 409
            self.write("Invalid price field")
            return  # without this, the invalid price would still be saved below
        # write changes to item & sort month.items
        month.items[item].date = date
        month.items[item].buyer = buyer
        month.items[item].what = what
        month.items[item].price = price
        month.items.sort(key=lambda x: x.date, reverse=True)
        # save & respond
        month.put()
        ndb.sleep(0.1)
        month.update()
        self.write(month_key)
    except Exception as e:
        print(e)
        self.response.status = 409
        self.write("One of the item fields is invalid.")
def add_item(self):
    """Add an item to the database."""
    # --------------- get info ----------------
    price = self.request.get("price")
    what = self.request.get("what")
    buyer = self.request.get("buyer")
    month = Month.get_current_month()

    # ---------------- validate ---------------
    errors = []
    # check that a month exists
    if month is None:
        errors.append("Please create a month before adding items")
    # check for empty fields
    # NOTE: request.get() returns "" (not None) for missing parameters, so
    # this check never fires; the empty-string checks below do the real work.
    if None in [price, what, buyer]:
        errors.append("Please fill all information")
    # check for an empty "what"
    if what == "":
        errors.append("Please enter what has been bought")
    # check that the buyer exists
    try:
        buyer = Key(urlsafe=buyer)
        if buyer not in month.people:
            raise ValueError
    except Exception:
        errors.append("Invalid buyer")
    # evaluate price (the regex only lets simple arithmetic through to eval)
    try:
        if not re.match(r"^[0-9 \-+*/()]+$", price):
            raise SyntaxError
        price = eval(price)
        if price <= 0 or not isinstance(price, int):
            raise ValueError
    except (SyntaxError, ZeroDivisionError):
        errors.append("Invalid arithmetic expression in field price")
    except ValueError:
        errors.append("Price must be a positive integer")
    if len(errors) > 0:
        self.response.status = 409
        self.write(";".join(errors))
        return

    # ------------- put to database ----------------
    item = Item(buyer=buyer, price=price, what=what)
    month.items.append(item)
    month.update()
    ndb.sleep(0.5)
@ndb.tasklet  # required for the yield/ndb.Return protocol below
def gcs_read_async(path):
    """Asynchronously reads a file from GCS.

    NOTE: for large files (>10MB), this may return a truncated response due
    to urlfetch API limits. We don't want to read large files anyway, so this
    is fine.

    Args:
      path: the location of the object to read

    Returns:
      a Future that resolves to the file's data, or None if an error occurred.
    """
    context = ndb.get_context()
    url = GCS_API_URL + path
    headers = {'accept-encoding': 'gzip, *', 'x-goog-api-version': '2'}
    for retry in xrange(6):
        result = yield context.urlfetch(url, headers=headers)
        status = result.status_code
        if status == 429 or 500 <= status < 600:
            # back off exponentially on throttling and server errors
            yield ndb.sleep(2 ** retry)
            continue
        if status in (200, 206):
            raise ndb.Return(result.content)
        logging.error("unable to fetch '%s': status code %d" % (url, status))
        raise ndb.Return(None)
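# A minimal sketch of driving gcs_read_async, both from another tasklet and
# from a synchronous call site; read_both_async and read_blocking are
# illustrative names, not part of the original module:
from google.appengine.ext import ndb

@ndb.tasklet
def read_both_async(path_a, path_b):
    # Yielding a tuple of futures runs both fetches concurrently.
    data_a, data_b = yield gcs_read_async(path_a), gcs_read_async(path_b)
    raise ndb.Return((data_a, data_b))

def read_blocking(path):
    # get_result() spins the ndb event loop until the tasklet finishes.
    return gcs_read_async(path).get_result()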
@ndb.tasklet
def acquire(self, timeout=None):
    ctx = ndb.get_context()
    locked = False
    while not locked:
        value = yield ctx.memcache_get(self._key, for_cas=True)
        if value is None:
            # no counter yet: initialize it (treated as a successful acquire)
            locked = yield ctx.memcache_add(
                self._key,
                self._value,
                self.DEADLINE,
            )
        elif not isinstance(value, int):
            raise TimeoutError
        elif value > 0:
            # decrement the counter atomically via compare-and-set
            locked = yield ctx.memcache_cas(
                self._key,
                value - 1,
                self.DEADLINE,
            )
        if locked:
            raise ndb.Return(self)
        if timeout is not None:
            if timeout <= 0:
                raise TimeoutError
            timeout -= self.SLEEP
        yield ndb.sleep(self.SLEEP)
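# A hedged usage sketch for the memcache-backed semaphore above; the release()
# counterpart that re-increments the counter is an assumption inferred from
# acquire(), not a confirmed API:
from google.appengine.ext import ndb

@ndb.tasklet
def guarded(sem, work_async):
    yield sem.acquire(timeout=5.0)  # raises TimeoutError after ~5 seconds
    try:
        result = yield work_async()
    finally:
        yield sem.release()  # assumed counterpart to acquire()
    raise ndb.Return(result)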
@ndb.tasklet
def get(url):
    context = ndb.get_context()
    headers = {
        'accept-encoding': 'gzip, *',
        'x-goog-api-version': '2',
    }
    url_result = urlparse.urlparse(url)
    if url_result.netloc.endswith('.googleapis.com'):
        auth_token, _ = app_identity.get_access_token(
            'https://www.googleapis.com/auth/cloud-platform')
        if auth_token:
            headers['Authorization'] = 'OAuth %s' % auth_token
    for retry in xrange(6):
        result = yield context.urlfetch(url, headers=headers)
        status = result.status_code
        if status == 429 or 500 <= status < 600:
            yield ndb.sleep(2 ** retry)
            continue
        if status in (200, 206):
            content = result.content
            if result.headers.get('content-encoding') == 'gzip':
                dec = zlib.decompressobj(15 | 16)
                content = dec.decompress(result.content, MAX_SIZE)
                if dec.unconsumed_tail:
                    logging.warning(
                        'only decompressed %d KB, %d KB remain in buffer.',
                        len(content) / 1024, len(dec.unconsumed_tail) / 1024)
            raise ndb.Return(content)
        logging.error("unable to fetch '%s': status code %d", url, status)
        raise ndb.Return(None)
@ndb.tasklet
def AcquireAsync(self, blocking=True, max_acquire_attempts=None,
                 timeout=_USE_DEFAULT_TIMEOUT):
    """Acquires a lock asynchronously, blocking or non-blocking.

    If non-blocking, a single attempt will be made to acquire the lock;
    otherwise, max_acquire_attempts will be made.

    Args:
      blocking: Whether to block waiting for the lock.
      max_acquire_attempts: Maximum number of attempts to make in order to
        acquire the lock if blocking. If None, default_max_acquire_attempts
        will be used instead.
      timeout: Optional timeout for the lock in seconds, after which it will
        be assumed to be free (even if never explicitly released). Defaults
        to the timeout value set during initialization. If this value is set
        to None and the lock holder dies before releasing the lock, it will
        be in a perpetual acquired state.

    Returns:
      True if the lock was acquired, or False if the lock was not acquired
      and blocking=False.

    Raises:
      AcquireLockError: If the lock is already acquired via this lock object,
        or if max_acquire_attempts is exceeded.
      ValueError: If max_acquire_attempts < 1.
    """
    if self._acquired:
        raise AcquireLockError(u'Lock already acquired')
    if max_acquire_attempts is None:
        max_acquire_attempts = self.default_max_acquire_attempts
    if max_acquire_attempts < 1:
        raise ValueError(u'max_acquire_attempts must be >= 1')
    if timeout is _USE_DEFAULT_TIMEOUT:
        timeout = self.default_timeout
    self._lock_id = str(uuid.uuid4())
    self._acquired = yield self._AcquireAsync(timeout)
    if self._acquired:
        raise ndb.Return(True)
    elif not blocking:
        raise ndb.Return(False)
    intervals = [_INITIAL_DELAY + i for i in xrange(max_acquire_attempts - 1)]
    for sleep_time in intervals:
        yield ndb.sleep(sleep_time)
        self._acquired = yield self._AcquireAsync(timeout)
        if self._acquired:
            raise ndb.Return(True)
    raise AcquireLockError(
        u'Failed to acquire lock [{}] after {} tries.'.format(
            self._id, max_acquire_attempts))
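# A sketch of blocking acquisition against the lock above; ReleaseAsync() is
# an assumed counterpart and critical_section_async() is a placeholder:
from google.appengine.ext import ndb

@ndb.tasklet
def with_lock(lock):
    # With blocking=True this either returns True or raises AcquireLockError
    # once max_acquire_attempts is exhausted.
    yield lock.AcquireAsync(blocking=True)
    try:
        yield critical_section_async()
    finally:
        yield lock.ReleaseAsync()  # assumed counterpart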
@ndb.tasklet
def AcquireAsync(self, blocking=True, max_acquire_attempts=None,
                 timeout=_USE_DEFAULT_TIMEOUT):
    """Acquires a lock asynchronously, blocking or non-blocking.

    If non-blocking, a single attempt will be made to acquire the lock;
    otherwise, max_acquire_attempts will be made.

    Args:
      blocking: Whether to block waiting for the lock.
      max_acquire_attempts: Maximum number of attempts to make in order to
        acquire the lock if blocking. If None, default_max_acquire_attempts
        will be used instead.
      timeout: Optional timeout for the lock in seconds, after which it will
        be assumed to be free (even if never explicitly released). Defaults
        to the timeout value set during initialization. If this value is set
        to None and the lock holder dies before releasing the lock, it will
        be in a perpetual acquired state.

    Returns:
      True if the lock was acquired, or False if the lock was not acquired
      and blocking=False.

    Raises:
      AcquireLockError: If the lock is already acquired via this lock object,
        or if max_acquire_attempts is exceeded.
      ValueError: If max_acquire_attempts < 1.
    """
    if self._acquired:
        raise AcquireLockError(u'Lock already acquired')
    if max_acquire_attempts is None:
        max_acquire_attempts = self.default_max_acquire_attempts
    if max_acquire_attempts < 1:
        raise ValueError(u'max_acquire_attempts must be >= 1')
    if timeout is _USE_DEFAULT_TIMEOUT:
        timeout = self.default_timeout
    self._lock_id = str(uuid.uuid4())
    self._acquired = yield self._AcquireAsync(timeout)
    if self._acquired:
        raise ndb.Return(True)
    elif not blocking:
        raise ndb.Return(False)
    intervals = retry.FuzzedExponentialIntervals(_INITIAL_DELAY,
                                                 max_acquire_attempts - 1)
    for sleep_time in intervals:
        yield ndb.sleep(sleep_time)
        self._acquired = yield self._AcquireAsync(timeout)
        if self._acquired:
            raise ndb.Return(True)
    raise AcquireLockError(
        u'Failed to acquire lock [{}] after {} tries.'.format(
            self._id, max_acquire_attempts))
@ndb.tasklet
def wait(self, timeout=None):
    while not (yield self.is_set()):
        if timeout is not None:
            if timeout <= 0:
                raise TimeoutError
            timeout -= self.SLEEP
        yield ndb.sleep(self.SLEEP)
    raise ndb.Return(self)
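# A small sketch pairing wait() with an assumed set() method on the same
# event-like object (only wait() is shown above; set() is an assumption):
from google.appengine.ext import ndb

@ndb.tasklet
def consumer(event):
    yield event.wait(timeout=10.0)  # polls is_set() every SLEEP seconds
    raise ndb.Return('signalled')

@ndb.tasklet
def producer(event):
    yield ndb.sleep(0.5)  # simulate work
    yield event.set()     # assumed tasklet counterpart of wait()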
@ndb.tasklet
def AcquireAsync(self, blocking=True,
                 max_acquire_attempts=_MAX_ACQUIRE_ATTEMPTS, timeout=60):
    """Acquires a lock asynchronously, blocking or non-blocking.

    If non-blocking, a single attempt will be made to acquire the lock;
    otherwise, max_acquire_attempts will be made.

    Args:
      blocking: Whether to block waiting for the lock.
      max_acquire_attempts: Maximum number of attempts to make in order to
        acquire the lock if blocking.
      timeout: Optional timeout for the lock in seconds, after which it will
        be assumed to be free (even if never explicitly released). Defaults
        to 60. If this value is set to None and the lock holder dies before
        releasing the lock, it will be in a perpetual acquired state.

    Returns:
      True if the lock was acquired, or False if the lock was not acquired
      and blocking=False.

    Raises:
      AcquireLockError: If the lock is already acquired via this lock object,
        or if max_acquire_attempts is exceeded.
      ValueError: If max_acquire_attempts < 1.
    """
    if self._acquired:
        raise AcquireLockError(u'Lock already acquired')
    if max_acquire_attempts < 1:
        raise ValueError(u'max_acquire_attempts must be >= 1')
    self._lock_id = str(uuid.uuid4())
    self._acquired = yield self._AcquireAsync(timeout)
    if self._acquired:
        raise ndb.Return(True)
    elif not blocking:
        raise ndb.Return(False)
    intervals = retry.FuzzedExponentialIntervals(_INITIAL_DELAY,
                                                 max_acquire_attempts - 1)
    for sleep_time in intervals:
        yield ndb.sleep(sleep_time)
        self._acquired = yield self._AcquireAsync(timeout)
        if self._acquired:
            raise ndb.Return(True)
    raise AcquireLockError(
        u'Failed to acquire lock [{}] after {} tries.'.format(
            self._id, max_acquire_attempts))
@ndb.tasklet
def get(url):
    context = ndb.get_context()
    headers = {'accept-encoding': 'gzip, *', 'x-goog-api-version': '2'}
    for retry in xrange(6):
        result = yield context.urlfetch(url, headers=headers)
        status = result.status_code
        if status == 429 or 500 <= status < 600:
            yield ndb.sleep(2 ** retry)
            continue
        if status in (200, 206):
            content = result.content
            if result.headers.get('content-encoding') == 'gzip':
                content = zlib.decompress(result.content, 15 | 16)
            raise ndb.Return(content)
        logging.error("unable to fetch '%s': status code %d", url, status)
        raise ndb.Return(None)
@ndb.tasklet
def get(url):
    context = ndb.get_context()
    headers = {'accept-encoding': 'gzip, *', 'x-goog-api-version': '2'}
    for retry in xrange(6):
        result = yield context.urlfetch(url, headers=headers)
        status = result.status_code
        if status == 429 or 500 <= status < 600:
            yield ndb.sleep(2 ** retry)
            continue
        if status in (200, 206):
            content = result.content
            if result.headers.get('content-encoding') == 'gzip':
                content = zlib.decompress(result.content, 15 | 16)
            raise ndb.Return(content)
        logging.error("unable to fetch '%s': status code %d" % (url, status))
        raise ndb.Return(None)
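# Both retrying fetchers above are tasklets, so a caller can issue several
# URLs at once and let each back off independently; get_many is an
# illustrative helper, not part of the original module:
from google.appengine.ext import ndb

@ndb.tasklet
def get_many(urls):
    # Yielding a list of futures waits for all of them.
    contents = yield [get(url) for url in urls]
    raise ndb.Return(dict(zip(urls, contents)))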
def f():
    ndb.sleep(0).wait()  # Yield thread.
    return 1
@ndb.tasklet
def fn_async(x):
    log.append('%d started' % x)
    yield ndb.sleep(0.01)
    log.append('%d finishing' % x)
    raise ndb.Return(x + 10)
@ndb.tasklet
def fn_async(x):
    yield ndb.sleep(float(x) / 1000)
    raise ndb.Return(x)
@ndb.tasklet
def compute_async(x):
    log.append('compute_async(%r) started' % x)
    yield ndb.sleep(0.001)
    log.append('compute_async(%r) finishing' % x)
    raise ndb.Return(x)
@ndb.tasklet
def fn_async(x):
    log.append('%d started' % x)
    yield ndb.sleep(float(x) / 1000)
    log.append('%d finishing' % x)
    raise ndb.Return(x)
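# The test helpers above are generator tasklets; once decorated they can be
# launched together and awaited with Future.wait_all (a sketch for the last
# variant, where each input echoes back):
futures = [fn_async(x) for x in (1, 2, 3)]
ndb.Future.wait_all(futures)
results = [f.get_result() for f in futures]  # [1, 2, 3]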
def convert(file_name):
    # open file
    try:
        print "Opening file"
        f = open(file_name, "rb")
        # unpickle
        try:
            print "Unpickling"
            buyers = pickle.load(f)
            months = pickle.load(f)
            goods = pickle.load(f)
            # money_usages = pickle.load(f)
        except pickle.PickleError:
            print "ERROR occurred when unpickling"
            raise
        finally:
            f.close()
    except IOError:
        print "Error opening file"
        raise

    print "Start converting"

    # add all people
    print "Creating {} people".format(len(buyers))
    people = [Person(name=buyer.name) for buyer in buyers]
    ndb.put_multi(people)  # must be put so that they have keys

    # utilities for later parts
    buyer_id = [buyer.id for buyer in buyers]  # old id
    person_key_urlsafe = [person.key.urlsafe() for person in people]  # new key
    person_id_to_key_urlsafe = dict(
        zip(buyer_id, person_key_urlsafe))  # translate old id to new key

    # add month by month, from old to new
    print "Start creating months"
    for month in months:
        print "\nMonth: " + datetime.ctime(month.time_begin)
        # create new month
        param = {
            "action": "new",
            "people": person_key_urlsafe,  # add everyone to this month
        }
        status = send_request("/newmonth", param)
        # check if the month was created
        if status != 302:
            raise Exception("Cannot create new month")
        current_month_key = Month.get_current_month_key()

        # get goods in this month
        goods_in_month = filter(lambda x: x.month_id == month.id, goods)

        # add items to month
        print "Adding {} items to this month".format(len(goods_in_month))
        for good in goods_in_month:
            # new item
            param = {
                "action": "add",
                "price": str(good.price),
                "what": good.what,
                "buyer": person_id_to_key_urlsafe[good.buyer],
            }
            status = send_request("/month/" + current_month_key.urlsafe(), param)
            # check if the item was added
            if status != 200:
                raise Exception("Cannot add item")
            print "+",

        # change datetime of month
        print "\nChanging dates of month"
        current_month = current_month_key.get()
        current_month.time_begin = month.time_begin
        current_month.time_end = month.time_end

        # change datetime of items in month
        print "Changing dates of items"
        for item, good in zip(current_month.items, goods_in_month):
            item.date = good.date
            print "+",

        # update month
        current_month.put()
        ndb.sleep(0.5)
        print "\nFinished month"