def __init__(self, *, cfg, visible_feeds=None):
    self.cfg = copy.deepcopy(cfg)
    from .feeds import DEFAULT_VISIBLE_FEEDS
    self.visible_feeds = list(visible_feeds or DEFAULT_VISIBLE_FEEDS)
    # FIXME: deprecate self.feed_period
    try:
        self.feed_period = int(cfg['publish_strategy']['time_interval'] / cfg['check_time_interval'])
    except KeyError:
        self.feed_period = None
    self.check_time_interval = pendulum.interval(seconds=cfg.get('check_time_interval', 600))
    try:
        self.publish_time_interval = pendulum.interval(seconds=cfg['publish_strategy']['time_interval'])
    except KeyError:
        self.publish_time_interval = None
    self.feed_slot = cfg.get('publish_strategy', {}).get('time_slot', None)
    if self.feed_slot is not None:
        self.feed_slot = int(self.feed_slot)
    self.nfeed_checked = 0
    self.last_published = pendulum.utcnow().subtract(days=1)
    log.debug('successfully initialized {}'.format(self))
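# Illustrative sketch (not from the source project): a config dict with the shape the
# constructor above reads -- 'check_time_interval' in seconds plus an optional
# 'publish_strategy' block with 'time_interval' and 'time_slot'. The key names come from
# the lookups above; the values and the class name are made up.
example_cfg = {
    'check_time_interval': 600,      # seconds between feed checks
    'publish_strategy': {
        'time_interval': 3600,       # republish roughly every hour
        'time_slot': 15,             # or at minute 15 of the hour
    },
}
# checker = FeedChecker(cfg=example_cfg)   # hypothetical class name for the snippet above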
def should_publish(self):
    # TODO: update according to: https://bitsharestalk.org/index.php?topic=9348.0;all
    #return False
    if self.nfeed_checked == 0:
        log.debug('Should publish at least once at launch of the bts_tools')
        return True
    if self.feed_period is not None and self.nfeed_checked % self.feed_period == 0:
        log.debug('Should publish because time interval has passed: {} seconds'.format(self.publish_time_interval))
        return True
    now = pendulum.utcnow()
    if self.publish_time_interval and now - self.last_published > self.publish_time_interval:
        log.debug('Should publish because time interval has passed: {}'.format(self.publish_time_interval))
        return True
    if self.feed_slot:
        target = now.replace(minute=self.feed_slot, second=0, microsecond=0)
        targets = [target.subtract(hours=1), target, target.add(hours=1)]
        diff = [now - t for t in targets]
        # check if we just passed our time slot
        if any(pendulum.interval() < d and abs(d) < 1.1 * self.check_time_interval for d in diff):
            log.debug('Should publish because time slot has arrived: time {:02d}:{:02d}'.format(now.hour, now.minute))
            return True
    log.debug('No need to publish feeds')
    return False
def parse_interval(x):
    count, unit = x.split(None, 1)
    count = int(count)
    if not unit.endswith('s'):
        unit = '{}s'.format(unit)
    spec = {unit: count}
    return pendulum.interval(**spec)
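# Usage sketch for parse_interval() above (assuming pendulum 1.x, where pendulum.interval()
# builds an Interval/duration object): the unit word is pluralised so it matches
# pendulum's keyword arguments.
print(parse_interval('5 minute'))                 # equivalent to pendulum.interval(minutes=5)
print(parse_interval('2 hours').total_seconds())  # 7200.0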
def cast_to_python(self, value):
    if value is not None:
        if self.input_type == self._type_interval:
            value = pendulum.interval(milliseconds=int(value))
        else:
            value = pendulum.parse(value)
    return value
def start_event_loop(self):
    logger.info("Started loop.")
    while self.now <= self.end:
        index = self.__time_as_index(self.now)
        if 0 <= index < len(self.events):
            while len(self.events[index]) > 0:
                self.events[index].pop(0)(self.now)
        # noinspection PyArgumentList
        self.now += pendulum.interval(seconds=1)
def __init__(self, conf):
    self.now = get_time(conf, 'start', pendulum.now()).start_of('second')
    self.start_index = self.now.int_timestamp
    # noinspection PyArgumentList
    runtime = get_interval(conf, 'runtime', pendulum.interval(days=1))
    self.end = (self.now + runtime).start_of('second')
    logger.info("Created stepper engine from {start} to {end}.".format(start=self.now, end=self.end))
    self.events = []
def __init__(self, conf, engine, client):
    super(Delay, self).__init__(conf, engine, client)
    self.engine = engine
    self.client = client
    delay = get_interval(conf, 'delay', pendulum.interval())
    device_conf = conf['device']
    logger.info("Delaying creation of device {conf} until {delay}.".format(conf=conf, delay=delay))
    self.engine.register_event_in(self.__build_create(device_conf), delay, None, self)
def __init__(self, *, cfg, visible_feeds=DEFAULT_VISIBLE_FEEDS):
    self.cfg = dict(cfg)
    self.visible_feeds = list(visible_feeds)
    # FIXME: deprecate self.feed_period
    try:
        self.feed_period = int(cfg['publish_time_interval'] / cfg['check_time_interval'])
    except KeyError:
        self.feed_period = None
    self.check_time_interval = pendulum.interval(seconds=cfg.get('check_time_interval', 300))
    self.publish_time_interval = None
    if 'publish_time_interval' in cfg:
        self.publish_time_interval = pendulum.interval(seconds=cfg['publish_time_interval'])
    self.feed_slot = cfg.get('publish_time_slot', None)
    if self.feed_slot is not None:
        self.feed_slot = int(self.feed_slot)
    self.nfeed_checked = 0
    self.last_published = pendulum.utcnow().subtract(days=1)
def measure_light(self, time):
    if self.battery > 0:
        self.light = sun_bright(time.int_timestamp, (self.longitude, self.latitude))
        # sun_bright(synth.simulation.sim.get_time(),
        #            (float(Device.get_property(self, "longitude")),
        #             float(Device.get_property(self, "latitude"))))
        self.client.update_device(self.id, time, {'light': self.light})
        # noinspection PyArgumentList
        self.engine.register_event_in(self.measure_light, pendulum.interval(hours=1), None, self)
def seconds_to_hours_and_minutes(seconds):
    ceil_seconds = math.ceil(seconds / 60) * 60
    interval = pendulum.interval(seconds=ceil_seconds)
    if interval.total_hours() > 24:
        hours = math.floor(interval.total_hours())
        minutes = interval.minutes
        if minutes > 1:
            return '{} hours {} minutes'.format(hours, minutes)
        else:
            return '{} hours {} minute'.format(hours, minutes)
    return interval.__str__()
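# Rough behaviour sketch for seconds_to_hours_and_minutes() above (assuming pendulum 1.x,
# where Interval is a timedelta subclass): the input is rounded up to whole minutes,
# durations over 24 hours get the custom "H hours M minute(s)" wording, shorter ones fall
# back to Interval.__str__().
print(seconds_to_hours_and_minutes(90))               # under 24 hours -> Interval.__str__()
print(seconds_to_hours_and_minutes(26 * 3600 + 300))  # e.g. "26 hours 5 minutes"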
def set_connected(self, time):
    if self.use_rssi:
        self.is_connected = True
        current_reliability = 1.0  # TODO: synth.simulation.helpers.timewave.interp(self.commsReliability, rel_time)
        self.is_connected = (current_reliability * self.radio_strength) > random.random()
    else:
        # simple probability.
        self.is_connected = self.reliability > random.random()
    next = min(
        random.expovariate(1 / self.mean_up_down_period.total_seconds()),
        self.mean_up_down_period.total_seconds() * 100
    )
    self.engine.register_event_in(self.set_connected, pendulum.interval(seconds=next), None, self)
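# Sketch of the reconnection-delay draw in set_connected() above: the next state flip is
# exponentially distributed around the mean up/down period and capped at 100x the mean
# (the one-day mean mirrors the default set in the constructor below; values are illustrative).
import random
import pendulum

mean_seconds = pendulum.interval(days=1).total_seconds()   # 86400.0
delay = min(random.expovariate(1 / mean_seconds), mean_seconds * 100)
print(pendulum.interval(seconds=delay))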
def should_publish(self):
    # TODO: update according to: https://bitsharestalk.org/index.php?topic=9348.0;all
    #return False
    if self.nfeed_checked == 0:
        log.debug('Should publish at least once at launch of the bts_tools')
        return True
    if self.feed_period is not None and self.nfeed_checked % self.feed_period == 0:
        log.debug('Should publish because time interval has passed: {} seconds'.format(self.publish_time_interval))
        return True
    now = pendulum.utcnow()
    if self.publish_time_interval and now - self.last_published > self.publish_time_interval:
        log.debug('Should publish because time interval has passed: {}'.format(self.publish_time_interval))
        return True
    if self.feed_slot:
        target = now.replace(minute=self.feed_slot, second=0, microsecond=0)
        targets = [target.subtract(hours=1), target, target.add(hours=1)]
        diff = [now - t for t in targets]
        # check if we just passed our time slot
        if any(pendulum.interval() < d and abs(d) < 1.1 * self.check_time_interval for d in diff):
            log.debug('Should publish because time slot has arrived: time {:02d}:{:02d}'.format(now.hour, now.minute))
            return True
    # check_time_interval_minutes = cfg['check_time_interval'] // 60 + 1
    # if self.feed_slot is not None:
    #     start_slot = self.feed_slot
    #     end_slot = self.feed_slot + check_time_interval_minutes
    #     if (((start_slot <= now.minute <= end_slot) or
    #          (end_slot >= 60 and now.minute <= end_slot % 60)) and
    #             now - self.last_published > timedelta(minutes=max(3 * check_time_interval_minutes, 50))):
    #         log.debug('Should publish because time slot has arrived: time {:02d}:{:02d} - target'.format(now.hour, now.minute))
    #         return True
    log.debug('No need to publish feeds')
    return False
def press_button(self, time):
    if self.battery > 0:
        self.button_press_count += 1
        self.client.update_device(self.id, time, {'buttonPress': self.button_press_count})
        # noinspection PyArgumentList
        next_press_interval = pendulum.interval(hours=1)
        # TODO: timewave?
        # timewave.next_usage_time(
        #     synth.simulation.sim.get_time(),
        #     ["Mon", "Tue", "Wed", "Thu", "Fri"], "06:00-09:00")
        logger.info("{id}: Pressed button for the {nth} time.".format(
            id=self.id,
            nth=as_ordinal(self.button_press_count),
        ))
        self.engine.register_event_in(self.press_button, next_press_interval, None, self)
def get(cur, base):
    log.debug('checking feeds for %s/%s at %s' % (cur, base, NAME))
    prices = []
    for dataset in DATASETS[(cur, base)]:
        url = 'https://www.quandl.com/api/v3/datasets/{dataset}.json?start_date={date}'.format(
            dataset=dataset,
            date=(pendulum.utcnow() - pendulum.interval(days=3)).strftime('%Y-%m-%d'))
        data = requests.get(url=url, timeout=TIMEOUT).json()
        if 'dataset' not in data:
            raise RuntimeError('Quandl: no dataset found for url: %s' % url)
        d = data['dataset']
        if len(d['data']):
            prices.append(d['data'][0][1])
    return FeedPrice(sum(prices) / len(prices), cur, base)
def __init__(self, conf, engine, client):
    self.engine = engine
    self.client = client
    self.mean_up_down_period = pendulum.interval(days=1)
    if 'rssi' in conf and 'reliabilitySchedule' in conf:
        self.use_rssi = True
        # use a normalised radio signal strength heavily skewed to 'strong'.
        strong_rssi = -50.0
        weak_rssi = -120.0
        normalised_rssi = (conf['rssi'] - strong_rssi) / (weak_rssi - strong_rssi)
        self.radio_strength = 1.0 - math.pow((1.0 - normalised_rssi), 4)
    else:
        self.use_rssi = False
        self.reliability = conf.get('reliability', 1.0)
    self.is_connected = True
    self.set_connected(engine.get_now())
def should_publish_steem(self, node, price):
    # check whether we need to publish again:
    # - if published more than 12 hours ago, publish again
    # - if published price different by more than 3%, publish again
    if 'last_price' not in node.opts:  # make sure we have already published once
        log.debug('Steem should publish for the first time since launch of bts_tools')
        return True
    last_published_interval = pendulum.interval(hours=12)
    variance_trigger = 0.03
    if pendulum.utcnow() - node.opts['last_published'] > last_published_interval:  # FIXME: replace node.opts['last_published'] with self.last_published[node]
        log.debug('Steem should publish as it has not been published for {}'.format(last_published_interval))
        return True
    if abs(price - node.opts['last_price']) / node.opts['last_price'] >= variance_trigger:
        log.debug('Steem should publish as price has moved more than {}%'.format(100 * variance_trigger))
        return True
    log.debug('No need for Steem to publish')
    return False
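# Worked example of the two republish triggers in should_publish_steem() above
# (the prices are illustrative, not from the source):
import pendulum

last_price, new_price = 0.250, 0.259
print(abs(new_price - last_price) / last_price)  # 0.036 -> above the 0.03 variance trigger, so publish
print(pendulum.interval(hours=12))               # the "publish at least every 12 hours" threshold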
def read_chunked(
    self,
    getter: Callable,
    params: Mapping[str, Any] = None,
    chunk_size: pendulum.Interval = pendulum.interval(days=1),
) -> Iterator:
    params = {**params} if params else {}
    now_ts = int(pendulum.now().timestamp() * 1000)
    start_ts = int(self._start_date.timestamp() * 1000)
    chunk_size = int(chunk_size.total_seconds() * 1000)
    for ts in range(start_ts, now_ts, chunk_size):
        end_ts = ts + chunk_size
        params["startTimestamp"] = ts
        params["endTimestamp"] = end_ts
        logger.info(
            f"Reading chunk from stream {self.name} between "
            f"{pendulum.from_timestamp(ts / 1000)} and {pendulum.from_timestamp(end_ts / 1000)}"
        )
        yield from super().read(getter, params)
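# Standalone sketch of the chunking arithmetic used in read_chunked() above (the start
# timestamp is made up): a 1-day pendulum interval becomes an 86,400,000 ms step and the
# [start, now) range is walked window by window.
import pendulum

chunk_ms = int(pendulum.interval(days=1).total_seconds() * 1000)  # 86400000
start_ts = 1609459200000  # e.g. 2021-01-01T00:00:00Z in epoch milliseconds
for ts in range(start_ts, start_ts + 3 * chunk_ms, chunk_ms):
    print(pendulum.from_timestamp(ts / 1000), '->', pendulum.from_timestamp((ts + chunk_ms) / 1000))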
def test_pickle(self):
    it = pendulum.interval(days=3, seconds=2456, microseconds=123456)
    s = pickle.dumps(it)
    it2 = pickle.loads(s)
    self.assertEqual(it, it2)
def __init__(self, conf, engine, client):
    super(Device, self).__init__()
    self.engine = engine
    self.client = client

    # generate identifiers
    self.id = "-".join([format(random.randrange(0, 255), '02x') for i in range(6)])  # i.e. a MAC address
    self.is_demo_device = conf.get('isDemoDevice', True)  # identifier for later deletion.
    self.label = conf.get('name', 'Thing ' + self.id)
    self.firmware = random.choice(["0.51", "0.52", "0.6", "0.6", "0.6", "0.7", "0.7", "0.7", "0.7"])
    self.factory_firmware = self.firmware
    self.operator = random.choice(["O2", "O2", "O2", "EE", "EE", "EE", "EE", "EE"])

    # setup battery
    self.battery = 100
    if 'batteryLifeMu' in conf and 'batteryLifeSigma' in conf:
        battery_life_mu = get_interval(conf, 'batteryLifeMu', None).total_seconds()
        battery_life_sigma = get_interval(conf, 'batteryLifeSigma', None).total_seconds()
        battery_life_min = battery_life_mu - (2 * battery_life_sigma)
        battery_life_max = battery_life_mu + (2 * battery_life_sigma)
        battery_life = random.normalvariate(battery_life_mu, battery_life_sigma)
        # clamp the sampled lifetime to within +/- 2 sigma of the mean
        battery_life = min(max(battery_life, battery_life_min), battery_life_max)
        # noinspection PyArgumentList
        self.battery_life = pendulum.interval(seconds=battery_life)
    else:
        # noinspection PyArgumentList
        self.battery_life = get_interval(conf, 'batteryLife', pendulum.interval(minutes=5))
    self.battery_auto_replace = conf.get('batteryAutoReplace', False)
    self.engine.register_event_in(self.battery_decay, self.battery_life / 100, None, self)

    # setup button press counter
    self.button_press_count = 0
    self.engine.register_event_in(self.press_button, pendulum.interval(), None, self)

    # setup light measurement
    self.longitude = conf.get('longitude', 0)
    self.latitude = conf.get('latitude', 0)
    self.light = 0.0
    # noinspection PyArgumentList
    self.engine.register_event_in(self.measure_light, pendulum.interval(hours=12), None, self)

    self.client.add_device(self.id, engine.get_now(), {
        'battery': self.battery,
        'longitude': self.longitude,
        'latitude': self.latitude,
    })
def setup_dummy_suppliers_with_old_and_new_domains(self, n):
    with self.app.app_context():
        framework = Framework.query.filter_by(slug='digital-outcomes-and-specialists').first()
        self.set_framework_status(framework.slug, 'open')
        for i in range(1, n + 1):
            if i == 2:
                ps = PriceSchedule.from_json({
                    'serviceRole': {
                        'category': 'Technical Architecture, Development, Ethical Hacking and Web Operations',
                        'role': 'Senior Ethical Hacker'},
                    'hourlyRate': 999,
                    'dailyRate': 9999,
                    'gstIncluded': True
                })
                prices = [ps]
            else:
                prices = []
            NON_MATCHING_STRING = 'aaaaaaaaaaaaaaaaa'
            name = "Supplier {}".format(i - 1)
            summary = "suppliers of supplies" if name != 'Supplier 3' else NON_MATCHING_STRING
            name = name if name != 'Supplier 3' else NON_MATCHING_STRING
            t = pendulum.now('UTC')
            s = Supplier(
                code=i,
                name=name,
                abn='1',
                description="",
                summary=summary,
                data={
                    'seller_type': {'sme': True, 'start_up': True}
                } if i == 2 else {'sme': True, 'start_up': False},
                addresses=[
                    Address(
                        address_line="{} Dummy Street".format(i),
                        suburb="Dummy",
                        state="ZZZ",
                        postal_code="0000",
                        country='Australia'
                    )
                ],
                contacts=[],
                references=[],
                prices=prices,
                last_update_time=t + pendulum.interval(seconds=(i % 3))
            )
            if i == 2:
                s.add_unassessed_domain('Data science')
            if i == 4:
                s.add_unassessed_domain('Content and Publishing')
            if i == 3:
                s.add_unassessed_domain('Content and Publishing')
                s.add_unassessed_domain('Data science')
                s.update_domain_assessment_status('Data science', 'assessed')
            p1 = Product(name='zzz {}'.format(i), summary='summary {}'.format(i))
            p2 = Product(name='otherproduct {}'.format(i), summary='othersummary {}'.format(i))
            s.products = [p1, p2]
            sf = SupplierFramework(
                supplier_code=s.code,
                framework_id=framework.id,
                declaration={}
            )
            db.session.add(s)
            db.session.add(sf)

        ds = Supplier(
            name=u"Dummy Supplier",
            abn=Supplier.DUMMY_ABN,
            description="",
            summary="",
            addresses=[
                Address(
                    address_line="{} Dummy Street".format(i),
                    suburb="Dummy",
                    state="ZZZ",
                    postal_code="0000",
                    country='Australia'
                )
            ],
            contacts=[],
            references=[],
            prices=prices,
        )
        ds.add_unassessed_domain('Content and Publishing')
        ds.add_unassessed_domain('Data science')
        ds.update_domain_assessment_status('Data science', 'assessed')
        db.session.add(ds)
        sf = SupplierFramework(
            supplier_code=ds.code,
            framework_id=framework.id,
            declaration={}
        )
        db.session.add(sf)
        db.session.commit()
async def test_organization_bootstrap_bad_data(
    backend_data_binder,
    apiv1_backend_sock_factory,
    organization_factory,
    local_device_factory,
    backend,
    coolorg,
):
    neworg = organization_factory("NewOrg")
    newalice = local_device_factory("alice@dev1", neworg)
    await backend_data_binder.bind_organization(neworg)

    bad_organization_id = coolorg.organization_id
    good_organization_id = neworg.organization_id
    root_signing_key = neworg.root_signing_key
    bad_root_signing_key = coolorg.root_signing_key
    good_bootstrap_token = neworg.bootstrap_token
    bad_bootstrap_token = coolorg.bootstrap_token
    good_rvk = neworg.root_verify_key
    bad_rvk = coolorg.root_verify_key
    good_device_id = newalice.device_id
    good_user_id = newalice.user_id
    bad_user_id = UserID("dummy")
    public_key = newalice.public_key
    verify_key = newalice.verify_key

    now = pendulum.now()
    bad_now = now - pendulum.interval(seconds=1)

    good_cu = UserCertificateContent(
        author=None,
        timestamp=now,
        user_id=good_user_id,
        public_key=public_key,
        profile=UserProfile.ADMIN,
        human_handle=newalice.human_handle,
    )
    good_redacted_cu = good_cu.evolve(human_handle=None)
    good_cd = DeviceCertificateContent(
        author=None,
        timestamp=now,
        device_id=good_device_id,
        device_label=newalice.device_label,
        verify_key=verify_key,
    )
    good_redacted_cd = good_cd.evolve(device_label=None)
    bad_now_cu = good_cu.evolve(timestamp=bad_now)
    bad_now_cd = good_cd.evolve(timestamp=bad_now)
    bad_now_redacted_cu = good_redacted_cu.evolve(timestamp=bad_now)
    bad_now_redacted_cd = good_redacted_cd.evolve(timestamp=bad_now)
    bad_id_cu = good_cu.evolve(user_id=bad_user_id)
    bad_not_admin_cu = good_cu.evolve(profile=UserProfile.STANDARD)

    bad_key_cu = good_cu.dump_and_sign(bad_root_signing_key)
    bad_key_cd = good_cd.dump_and_sign(bad_root_signing_key)
    good_cu = good_cu.dump_and_sign(root_signing_key)
    good_redacted_cu = good_redacted_cu.dump_and_sign(root_signing_key)
    good_cd = good_cd.dump_and_sign(root_signing_key)
    good_redacted_cd = good_redacted_cd.dump_and_sign(root_signing_key)
    bad_now_cu = bad_now_cu.dump_and_sign(root_signing_key)
    bad_now_cd = bad_now_cd.dump_and_sign(root_signing_key)
    bad_now_redacted_cu = bad_now_redacted_cu.dump_and_sign(root_signing_key)
    bad_now_redacted_cd = bad_now_redacted_cd.dump_and_sign(root_signing_key)
    bad_id_cu = bad_id_cu.dump_and_sign(root_signing_key)
    bad_not_admin_cu = bad_not_admin_cu.dump_and_sign(root_signing_key)

    for i, (status, organization_id, *params) in enumerate([
        ("not_found", good_organization_id, bad_bootstrap_token, good_cu, good_cd, good_rvk),
        ("already_bootstrapped", bad_organization_id, good_bootstrap_token, good_cu, good_cd, good_rvk),
        ("invalid_certification", good_organization_id, good_bootstrap_token, good_cu, good_cd, bad_rvk),
        ("invalid_data", good_organization_id, good_bootstrap_token, bad_now_cu, good_cd, good_rvk),
        ("invalid_data", good_organization_id, good_bootstrap_token, bad_id_cu, good_cd, good_rvk),
        ("invalid_certification", good_organization_id, good_bootstrap_token, bad_key_cu, good_cd, good_rvk),
        ("invalid_data", good_organization_id, good_bootstrap_token, good_cu, bad_now_cd, good_rvk),
        ("invalid_certification", good_organization_id, good_bootstrap_token, good_cu, bad_key_cd, good_rvk),
        ("invalid_data", good_organization_id, good_bootstrap_token, bad_not_admin_cu, good_cd, good_rvk),
        # Tests with redacted certificates
        (
            "invalid_data", good_organization_id, good_bootstrap_token,
            good_cu, good_cd, good_rvk,
            good_cu,  # Not redacted !
            good_redacted_cd,
        ),
        (
            "invalid_data", good_organization_id, good_bootstrap_token,
            good_cu, good_cd, good_rvk,
            good_redacted_cu,
            good_cd,  # Not redacted !
        ),
        (
            "bad_message", good_organization_id, good_bootstrap_token,
            good_cu, good_cd, good_rvk,
            None,  # None not allowed
            good_redacted_cd,
        ),
        (
            "bad_message", good_organization_id, good_bootstrap_token,
            good_cu, good_cd, good_rvk,
            good_redacted_cu,
            None,  # None not allowed
        ),
        (
            "invalid_data", good_organization_id, good_bootstrap_token,
            good_cu, good_cd, good_rvk,
            bad_now_redacted_cu,
            good_redacted_cd,
        ),
        (
            "invalid_data", good_organization_id, good_bootstrap_token,
            good_cu, good_cd, good_rvk,
            good_redacted_cu,
            bad_now_redacted_cd,
        ),
        (
            "invalid_data", good_organization_id, good_bootstrap_token,
            good_cu, good_cd, good_rvk,
            good_redacted_cu,
            _missing,  # Must provide redacted_device if redacted user is present
        ),
        (
            "invalid_data", good_organization_id, good_bootstrap_token,
            good_cu, good_cd, good_rvk,
            _missing,  # Must provide redacted_user if redacted device is present
            good_redacted_cd,
        ),
    ]):
        print(f"sub test {i}")
        async with apiv1_backend_sock_factory(backend, organization_id) as sock:
            rep = await organization_bootstrap(sock, *params)
            assert rep["status"] == status

    # Finally cheap test to make sure our "good" data were really good
    async with apiv1_backend_sock_factory(backend, good_organization_id) as sock:
        rep = await organization_bootstrap(
            sock,
            good_bootstrap_token,
            good_cu,
            good_cd,
            good_rvk,
            good_redacted_cu,
            good_redacted_cd,
        )
        assert rep["status"] == "ok"
import datetime
import math

import pendulum
import pytz
import pytest

from swimlane.core.fields.datetime import DatetimeField
from swimlane.exceptions import ValidationError

UTC = pendulum.timezone('UTC')

datetime_now = datetime.datetime.now(pytz.timezone('MST'))
# Mongo drops portion of microsecond, field truncates automatically for consistency
datetime_now = datetime_now.replace(microsecond=int(math.floor(datetime_now.microsecond / 1000) * 1000))
pendulum_now = pendulum.instance(datetime_now)
pendulum_interval = pendulum.interval(minutes=5)


@pytest.mark.parametrize('field_name,dt,expected_raw', [
    ('Incident Closed', pendulum_now, DatetimeField.format_datetime(pendulum_now)),
    (
        'Date Field',
        pendulum_now,
        DatetimeField.format_datetime(pendulum.Pendulum(pendulum_now.year, pendulum_now.month, pendulum_now.day))
    ),
    ('Time Field', pendulum_now, DatetimeField.format_datetime(pendulum_now)),
    ('Incident Duration', pendulum_interval, pendulum_interval.in_seconds() * 1000)
])
def test_raw_serialization(mock_record, field_name, dt, expected_raw):
    """Test that datetime field values are appropriately serialized to raw"""
    mock_record[field_name] = dt
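# Side note on the 'Incident Duration' expectation above (assuming pendulum 1.x):
# Interval.in_seconds() returns whole seconds, so a 5-minute interval serialises to
# 300 * 1000 = 300000 milliseconds in the raw record.
assert pendulum.interval(minutes=5).in_seconds() * 1000 == 300000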
async def test_organization_bootstrap_bad_data(
    backend_data_binder,
    backend_sock_factory,
    organization_factory,
    local_device_factory,
    backend,
    coolorg,
    alice,
):
    neworg = organization_factory("NewOrg")
    newalice = local_device_factory("alice@dev1", neworg)
    await backend_data_binder.bind_organization(neworg)

    bad_organization_id = coolorg.organization_id
    good_organization_id = neworg.organization_id
    root_signing_key = neworg.root_signing_key
    bad_root_signing_key = coolorg.root_signing_key
    good_bootstrap_token = neworg.bootstrap_token
    bad_bootstrap_token = coolorg.bootstrap_token
    good_rvk = neworg.root_verify_key
    bad_rvk = coolorg.root_verify_key
    good_device_id = newalice.device_id
    good_user_id = newalice.user_id
    bad_user_id = UserID("dummy")
    public_key = newalice.public_key
    verify_key = newalice.verify_key

    now = pendulum.now()
    good_cu = UserCertificateContent(
        author=None, timestamp=now, user_id=good_user_id, public_key=public_key, is_admin=False
    ).dump_and_sign(root_signing_key)
    good_cd = DeviceCertificateContent(
        author=None, timestamp=now, device_id=good_device_id, verify_key=verify_key
    ).dump_and_sign(root_signing_key)

    bad_now = now - pendulum.interval(seconds=1)
    bad_now_cu = UserCertificateContent(
        author=None, timestamp=bad_now, user_id=good_user_id, public_key=public_key, is_admin=False
    ).dump_and_sign(root_signing_key)
    bad_now_cd = DeviceCertificateContent(
        author=None, timestamp=bad_now, device_id=good_device_id, verify_key=verify_key
    ).dump_and_sign(root_signing_key)
    bad_id_cu = UserCertificateContent(
        author=None, timestamp=now, user_id=bad_user_id, public_key=public_key, is_admin=False
    ).dump_and_sign(root_signing_key)
    bad_key_cu = UserCertificateContent(
        author=None, timestamp=now, user_id=good_user_id, public_key=public_key, is_admin=False
    ).dump_and_sign(bad_root_signing_key)
    bad_key_cd = DeviceCertificateContent(
        author=None, timestamp=now, device_id=good_device_id, verify_key=verify_key
    ).dump_and_sign(bad_root_signing_key)

    for i, (status, organization_id, *params) in enumerate([
        ("not_found", good_organization_id, bad_bootstrap_token, good_cu, good_cd, good_rvk),
        ("already_bootstrapped", bad_organization_id, bad_bootstrap_token, bad_key_cu, bad_key_cd, bad_rvk),
        ("invalid_certification", good_organization_id, good_bootstrap_token, good_cu, good_cd, bad_rvk),
        ("invalid_data", good_organization_id, good_bootstrap_token, bad_now_cu, good_cd, good_rvk),
        ("invalid_data", good_organization_id, good_bootstrap_token, bad_id_cu, good_cd, good_rvk),
        ("invalid_certification", good_organization_id, good_bootstrap_token, bad_key_cu, good_cd, good_rvk),
        ("invalid_data", good_organization_id, good_bootstrap_token, good_cu, bad_now_cd, good_rvk),
        ("invalid_certification", good_organization_id, good_bootstrap_token, good_cu, bad_key_cd, good_rvk),
    ]):
        async with backend_sock_factory(backend, organization_id) as sock:
            rep = await organization_bootstrap(sock, *params)
            assert rep["status"] == status

    # Finally cheap test to make sure our "good" data were really good
    async with backend_sock_factory(backend, good_organization_id) as sock:
        rep = await organization_bootstrap(sock, good_bootstrap_token, good_cu, good_cd, good_rvk)
        assert rep["status"] == "ok"