class DocumentDigestConfig(models.Model):
    """Configuration of a periodically emailed digest of documents.

    Each config picks a document type and a document filter, an addressee
    (a role and/or a single user), Jinja2 templates for the message parts,
    and a cron-like schedule (month / day-of-month / day-of-week / hour /
    minute fields, each a comma-separated list).
    """

    # Name of the notification template used when rendering the digest email.
    template_name = 'document_digest'

    # ISO week day numbers; referenced by the run_at_day_of_week help text.
    DAY_OF_WEEK_CHOICES = ((1, 'Monday'), (2, 'Tuesday'), (3, 'Wednesday'),
                           (4, 'Thursday'), (5, 'Friday'), (6, 'Saturday'),
                           (7, 'Sunday'))

    # Master switch: disabled configs are never sent.
    enabled = models.BooleanField(null=False, blank=False, default=False)

    document_type = models.ForeignKey(DocumentType, blank=False, null=False,
                                      on_delete=CASCADE)

    # Which documents go into the digest (choices defined at module level).
    documents_filter = models.CharField(max_length=100, blank=False,
                                        null=False,
                                        choices=DOC_FILTER_CHOICES)

    # If True, an email is sent even when the filter matched nothing.
    still_send_if_no_docs = models.BooleanField(null=False, blank=False,
                                                default=False)

    message_if_no_docs = models.CharField(
        max_length=1024, null=True, blank=True,
        help_text='''Template of the message replacing the header and the document table in case the document filter returned no documents. Jinja2 syntax. Leave empty for using the default. Example: {0}'''.
            format(DocFilterLoadedDocuments.message_if_no_docs))

    # Reporting period the filter operates on (e.g. daily/weekly choices).
    period = models.CharField(max_length=100, blank=True, null=True,
                              choices=DIGEST_PERIOD_CHOICES)

    # Recipients: a whole role and/or one specific user may be targeted.
    for_role = models.ForeignKey(Role, null=True, blank=True,
                                 on_delete=CASCADE)
    for_user = models.ForeignKey(User, null=True, blank=True,
                                 on_delete=CASCADE)

    subject = models.CharField(
        max_length=1024, null=True, blank=True,
        help_text='''Template of the email subject in Jinja2 syntax. Leave empty for using the default. Example: {0}'''.format(
            DocFilterLoadedDocuments.subject))

    header = models.CharField(
        max_length=2048, null=True, blank=True,
        help_text='''Template of the header in Jinja2 syntax. Leave empty for using the default. Example: {0}'''.
            format(DocFilterLoadedDocuments.header))

    # Built-in (non user-defined) fields to render; default comes from a
    # module-level callable so each row gets its own copy.
    generic_fields = JSONField(
        encoder=ImprovedDjangoJSONEncoder,
        default=document_digest_config_generic_fields_default)

    user_fields = models.ManyToManyField(
        DocumentField, blank=True,
        help_text='''Fields of the documents to render in the email. Should match the specified document type. Leave empty for rendering all fields. 
''')

    # --- Schedule fields. Empty means "every month/day/hour"; minute is
    # --- mandatory so load can be spread across the hour.
    run_at_month = models.CharField(
        null=True, blank=True, max_length=100,
        help_text='''One or more comma separated month numbers (1 - 12). Leave empty to run at every month. Example: 1,3,5.''')

    run_at_day_of_month = models.CharField(
        null=True, blank=True, max_length=100,
        help_text='''One or more comma separated day of month numbers (1 - 31). Leave empty to run at every day. Days missing in a particular month are ignored. Example: 1, 10, 20, 30''')

    run_at_day_of_week = models.CharField(
        null=True, blank=True, max_length=100,
        help_text='''One or more comma separated day of ISO week day numbers (1 Mon - 7 Sunday). Example: 1,3,5 (Monday, Wednesday, Friday)''')

    run_at_hour = models.CharField(
        null=True, blank=True, max_length=100,
        help_text='''One or more comma separated hours (0 - 23). Leave empty for running every hour. Example: 9''')

    run_at_minute = models.CharField(
        null=False, blank=False, max_length=2, default='0',
        help_text='''Minute of hour to run at (0 - 59). Should be used to shuffle server load. Example: 30''')

    def __str__(self):
        # Human-readable identification used in admin/debug output.
        return '{document_type}: {documents_filter} for period {period} (#{pk})' \
            .format(pk=self.pk,
                    document_type=self.document_type.code,
                    documents_filter=self.documents_filter,
                    period=self.period)
class Media(models.Model):
    """A media item: a short description plus its file URL data."""

    description = models.CharField(max_length=50)
    # NOTE(review): despite the name this is a JSONField, not a URLField —
    # presumably it can hold several URLs/variants; confirm against callers.
    fileURL = JSONField()
class FindTheDifferences(Entertainment):
    """A find-the-differences puzzle based on a single picture."""

    pictureURL = models.TextField()
    differences = JSONField()  # Should be list of x's and y's (Location of each difference)
class Person(models.Model):
    """Test model exercising (nearly) every Django field type.

    Fields that only exist on newer Django versions are declared inside
    try/except blocks at class-body level, so the module still imports on
    older releases; GIS fields are gated behind the MOMMY_GIS flag.
    """

    gender = models.CharField(max_length=1, choices=GENDER_CH)
    happy = models.BooleanField(default=True)
    unhappy = models.BooleanField(default=False)
    bipolar = models.BooleanField(default=False)
    name = models.CharField(max_length=30)
    nickname = models.SlugField(max_length=36)
    age = models.IntegerField()
    bio = models.TextField()
    birthday = models.DateField()
    birth_time = models.TimeField()
    appointment = models.DateTimeField()
    blog = models.URLField()
    # NOTE(review): OCCUPATION_CHOCIES is spelled this way at its definition
    # site (module level, outside this view); renaming must happen there too.
    occupation = models.CharField(max_length=10, choices=OCCUPATION_CHOCIES)
    try:
        uuid = models.UUIDField(primary_key=False)
    except AttributeError:
        # New at Django 1.9
        pass
    try:
        name_hash = models.BinaryField(max_length=16)
    except AttributeError:
        # We can't test the binary field if it is not supported
        # (django < 1,6)
        pass
    try:
        from django.contrib.postgres.fields import ArrayField
        acquaintances = ArrayField(models.IntegerField())
    except ImportError:
        # New at Django 1.9
        pass
    try:
        from django.contrib.postgres.fields import JSONField
        data = JSONField()
    except ImportError:
        # New at Django 1.9
        pass
    try:
        from django.contrib.postgres.fields import HStoreField
        hstore_data = HStoreField()
    except ImportError:
        # New at Django 1.8
        pass
    # backward compatibility with Django 1.1
    try:
        wanted_games_qtd = models.BigIntegerField()
    except AttributeError:
        wanted_games_qtd = models.IntegerField()
    try:
        duration_of_sleep = models.DurationField()
    except AttributeError:
        pass
    # GIS fields are only declared when GeoDjango support is enabled.
    if MOMMY_GIS:
        geom = models.GeometryField()
        point = models.PointField()
        line_string = models.LineStringField()
        polygon = models.PolygonField()
        multi_point = models.MultiPointField()
        multi_line_string = models.MultiLineStringField()
        multi_polygon = models.MultiPolygonField()
        geom_collection = models.GeometryCollectionField()
class Dataset(Model):
    """A published dataset: descriptive metadata plus pointers to where and
    how the actual table is stored.

    Fixes applied in review:
    * ``JSONField(default={})`` replaced with ``default=dict`` — a literal
      default is shared between all instances (Django check fields.E010).
    * The Python 2 ``long`` builtin was used in the properties below; under
      Python 3 it raised ``NameError``, which the bare ``except`` swallowed,
      so ``number_of_rows_formated`` always returned ``'undefined'`` and the
      timestamp properties always returned ``''``.  Replaced with ``int``
      (and floor division to keep the Py2 integer-division semantics).
    * Bare ``except:`` narrowed to ``except Exception:`` — same fallback
      behavior, but no longer traps ``KeyboardInterrupt``/``SystemExit``.
    """

    title = TextField()
    source = TextField()
    description = TextField()
    # Sort position when listing datasets; 999 pushes unranked items last.
    order = IntegerField(default=999)
    references = ArrayField(TextField(), null=True)
    stored_at = CharField(max_length=32, choices=DATASET_STORAGES,
                          default='LOCAL_POSTGRES')
    table_name = CharField(max_length=200)
    private = BooleanField(default=False)
    # Bounding box of the dataset's spatial coverage (stored as text).
    spatialEast = CharField(max_length=200, null=True)
    spatialSouth = CharField(max_length=200, null=True)
    spatialNorth = CharField(max_length=200, null=True)
    spatialWest = CharField(max_length=200, null=True)
    temporalCoverageBegin = DateTimeField(null=True)
    temporalCoverageEnd = DateTimeField(null=True)
    license = CharField(max_length=200, null=True)
    observations = CharField(max_length=200, null=True)
    publisher = TextField()
    category = CharField(max_length=200, null=True)
    image_uri = TextField(default='/static/img/logo.png')
    sample_rows = JSONField(null=True)
    # Stored as text; parsed by number_of_rows_formated below.
    number_of_rows = CharField(max_length=200, null=True)
    size_in_gb = FloatField(null=True)
    update_frequency = CharField(max_length=200, default='-')
    last_updated = DateTimeField(null=True)
    owner = ForeignKey(User, related_name='dataset_owner', null=True,
                       on_delete=CASCADE, default=None)
    # Callable default: each row gets its own fresh dict.
    metadata = JSONField(default=dict)
    hascoverage_img = BooleanField(default=False)
    arguments = JSONField(default=dict)
    joined_with_dataset = models.ManyToManyField(
        "self", through='JoinOfDatasets', symmetrical=False,
        related_name='joined_to')
    organization = ForeignKey(Organization, related_name='datasets',
                              null=True, default=None, on_delete=CASCADE)

    def __str__(self):
        return self.title

    class Meta:
        ordering = ['-id']

    def to_json(self):
        """Return a minimal JSON-serializable summary of the dataset."""
        return {
            '_id': str(self.pk),
            'title': self.title,
            'source': self.source,
            'description': self.description,
            'references': self.references,
        }

    @property
    def number_of_rows_formated(self):
        """Human-readable row count, e.g. ``'1.5 thousand'``.

        Falls back to ``'undefined'`` when ``number_of_rows`` is missing or
        not numeric.
        """
        try:
            size = int(self.number_of_rows)
            reminder = 0
            power = 1000
            n = 0
            power_names = {0: '', 1: 'thousand', 2: 'million', 3: 'billion'}
            while size > power:
                reminder = size % power
                # Floor division keeps ``size`` an int (original Py2 ``/``).
                size //= power
                n += 1
            return str(float(int(size) + round(float(reminder * 0.001), 1))) \
                + " " + power_names[n]
        except Exception:
            return 'undefined'

    @property
    def temporalCoverageBeginTimestamp(self):
        """Coverage start as epoch milliseconds, or ``''`` when unset."""
        try:
            temporalCoverageBegin = self.temporalCoverageBegin
            # Epoch delta computed naively (tzinfo stripped) on purpose.
            td = temporalCoverageBegin.replace(tzinfo=None) \
                - datetime(1970, 1, 1)
            return int(td.total_seconds()) * 1000
        except Exception:
            return ''

    @property
    def temporalCoverageEndTimestamp(self):
        """Coverage end as epoch milliseconds, or ``''`` when unset."""
        try:
            temporalCoverageEnd = self.temporalCoverageEnd
            td = temporalCoverageEnd.replace(tzinfo=None) \
                - datetime(1970, 1, 1)
            return int(td.total_seconds()) * 1000
        except Exception:
            return ''

    def __unicode__(self):
        # Python 2 leftover; ignored under Python 3 where __str__ is used.
        return self.title

    access_list = ManyToManyField(User, through='DatasetAccess')
class AbstractContractDetailsICO(CommonDetails):
    """Details and deployment workflow for an ICO (token + crowdsale) contract.

    Deployment is event-driven: ``compile`` builds both contracts, then the
    blockchain listener calls ``msg_deployed`` -> ``ownershipTransferred`` ->
    ``initialized`` as on-chain confirmations arrive, advancing
    ``self.contract.state``.
    """

    class Meta:
        abstract = True

    # Solidity template used by compile().
    sol_path = 'lastwill/contracts/contracts/ICO.sol'
    soft_cap = models.DecimalField(max_digits=MAX_WEI_DIGITS,
                                   decimal_places=0, null=True)
    hard_cap = models.DecimalField(max_digits=MAX_WEI_DIGITS,
                                   decimal_places=0, null=True)
    token_name = models.CharField(max_length=512)
    token_short_name = models.CharField(max_length=64)
    admin_address = models.CharField(max_length=50)
    is_transferable_at_once = models.BooleanField(default=False)
    # Unix timestamps for the crowdsale window.
    start_date = models.IntegerField()
    stop_date = models.IntegerField()
    rate = models.DecimalField(max_digits=MAX_WEI_DIGITS, decimal_places=0,
                               null=True)
    decimals = models.IntegerField()
    platform_as_admin = models.BooleanField(default=False)
    # UUID of the scratch build directory; non-empty means already compiled.
    temp_directory = models.CharField(max_length=36)
    time_bonuses = JSONField(null=True, default=None)
    amount_bonuses = JSONField(null=True, default=None)
    continue_minting = models.BooleanField(default=False)
    cold_wallet_address = models.CharField(max_length=50, default='')
    allow_change_dates = models.BooleanField(default=False)
    whitelist = models.BooleanField(default=False)
    verification = models.BooleanField(default=False)
    verification_status = models.CharField(max_length=100,
                                           default='NOT_VERIFIED')
    verification_date_payment = models.DateField(null=True, default=None)
    eth_contract_token = models.ForeignKey(
        EthContract, null=True, default=None,
        related_name='ico_details_token', on_delete=models.SET_NULL)
    eth_contract_crowdsale = models.ForeignKey(
        EthContract, null=True, default=None,
        related_name='ico_details_crowdsale', on_delete=models.SET_NULL)
    # True when the crowdsale reuses a previously deployed token contract.
    reused_token = models.BooleanField(default=False)
    token_type = models.CharField(max_length=32, default='ERC20')
    min_wei = models.DecimalField(max_digits=MAX_WEI_DIGITS,
                                  decimal_places=0, default=None, null=True)
    max_wei = models.DecimalField(max_digits=MAX_WEI_DIGITS,
                                  decimal_places=0, default=None, null=True)

    def predeploy_validate(self):
        """Reject deployment if the start date is already in the past or any
        token holder freeze date is less than 10 minutes away.

        Raises ValidationError with a numeric result code the API returns.
        """
        now = timezone.now()
        if self.start_date < now.timestamp():
            raise ValidationError({'result': 1}, code=400)
        token_holders = self.contract.tokenholder_set.all()
        for th in token_holders:
            if th.freeze_date:
                # 600 s safety margin so the freeze isn't already expiring.
                if th.freeze_date < now.timestamp() + 600:
                    raise ValidationError({'result': 2}, code=400)

    @classmethod
    def min_cost(cls):
        """Minimum price of this contract type, computed for ETH mainnet."""
        network = Network.objects.get(name='ETHEREUM_MAINNET')
        cost = cls.calc_cost({}, network)
        return cost

    @staticmethod
    def calc_cost(kwargs, network):
        """Price (in USDT base units) for deploying on ``network``.

        Free networks cost 0; requesting source verification adds a surcharge.
        """
        if NETWORKS[network.name]['is_free']:
            return 0
        price = CONTRACT_PRICE_USDT['ETH_ICO']
        if 'verification' in kwargs and kwargs['verification']:
            price += VERIFICATION_PRICE_USDT
        return price * NET_DECIMALS['USDT']

    def compile(self, eth_contract_attr_name='eth_contract_token'):
        """Preprocess and compile the crowdsale (and, unless the token is
        reused, the token) contract, storing results as EthContract rows.
        """
        print('ico_contract compile')
        # temp_directory doubles as the "already compiled" marker.
        if self.temp_directory:
            print('already compiled')
            return
        dest, preproc_config = create_directory(self)
        token_holders = self.contract.tokenholder_set.all()
        amount_bonuses = add_amount_bonuses(self)
        time_bonuses = add_time_bonuses(self)
        # Build the preprocessor constants for both token and crowdsale.
        preproc_params = {'constants': {}}
        preproc_params['constants'] = add_token_params(
            preproc_params['constants'], self, token_holders,
            not self.is_transferable_at_once, self.continue_minting)
        preproc_params['constants'] = add_crowdsale_params(
            preproc_params['constants'], self, time_bonuses, amount_bonuses)
        if self.min_wei:
            preproc_params["constants"]["D_MIN_VALUE_WEI"] = str(
                int(self.min_wei))
        if self.max_wei:
            preproc_params["constants"]["D_MAX_VALUE_WEI"] = str(
                int(self.max_wei))
        # Dry-run with test params before substituting the real addresses.
        test_crowdsale_params(preproc_config, preproc_params, dest)
        address = NETWORKS[self.contract.network.name]['address']
        preproc_params = add_real_params(preproc_params, self.admin_address,
                                         address, self.cold_wallet_address)
        with open(preproc_config, 'w') as f:
            f.write(json.dumps(preproc_params))
        # Non-zero exit status from yarn means compilation failed.
        if os.system(
                "/bin/bash -c 'cd {dest} && yarn compile-crowdsale'".format(
                    dest=dest)):
            raise Exception('compiler error while deploying')
        # utf-8-sig strips the BOM the toolchain emits.
        with open(path.join(dest, 'build/contracts/TemplateCrowdsale.json'),
                  'rb') as f:
            crowdsale_json = json.loads(f.read().decode('utf-8-sig'))
        with open(path.join(dest, 'build/TemplateCrowdsale.sol'), 'rb') as f:
            source_code = f.read().decode('utf-8-sig')
        # bytecode[2:] drops the '0x' prefix.
        self.eth_contract_crowdsale = create_ethcontract_in_compile(
            crowdsale_json['abi'], crowdsale_json['bytecode'][2:],
            crowdsale_json['compiler']['version'], self.contract, source_code)
        if not self.reused_token:
            with open(path.join(dest, 'build/contracts/MainToken.json'),
                      'rb') as f:
                token_json = json.loads(f.read().decode('utf-8-sig'))
            with open(path.join(dest, 'build/MainToken.sol'), 'rb') as f:
                source_code = f.read().decode('utf-8-sig')
            self.eth_contract_token = create_ethcontract_in_compile(
                token_json['abi'], token_json['bytecode'][2:],
                token_json['compiler']['version'], self.contract, source_code)
        self.save()

    @blocking
    @postponable
    @check_transaction
    def msg_deployed(self, message):
        """Handle an on-chain deployment confirmation.

        Token confirmed -> deploy the crowdsale; crowdsale confirmed ->
        transfer token ownership to the crowdsale contract; with a reused
        token the contract just waits for manual activation.
        """
        print('msg_deployed method of the ico contract')
        address = NETWORKS[self.contract.network.name]['address']
        if self.contract.state != 'WAITING_FOR_DEPLOYMENT':
            take_off_blocking(self.contract.network.name)
            return
        if self.reused_token:
            self.contract.state = 'WAITING_ACTIVATION'
            self.contract.save()
            self.eth_contract_crowdsale.address = message['address']
            self.eth_contract_crowdsale.save()
            take_off_blocking(self.contract.network.name)
            print('status changed to waiting activation')
            return
        if self.eth_contract_token.id == message['contractId']:
            # First confirmation: the token is live; now deploy the crowdsale.
            self.eth_contract_token.address = message['address']
            self.eth_contract_token.save()
            self.deploy(eth_contract_attr_name='eth_contract_crowdsale')
        else:
            # Second confirmation: crowdsale is live; hand token ownership
            # over to it via transferOwnership.
            self.eth_contract_crowdsale.address = message['address']
            self.eth_contract_crowdsale.save()
            tr = abi.ContractTranslator(self.eth_contract_token.abi)
            eth_int = EthereumProvider().get_provider(
                network=self.contract.network.name)
            nonce = int(eth_int.eth_getTransactionCount(address, "pending"),
                        16)
            chain_id = int(eth_int.eth_chainId(), 16)
            # Use current gas price (+10%) but cap it at the fixed price.
            gas_price_current = int(1.1 * int(eth_int.eth_gasPrice(), 16))
            gas_price_fixed = ETH_COMMON_GAS_PRICES[
                self.contract.network.name] * NET_DECIMALS['ETH_GAS_PRICE']
            gas_price = gas_price_current \
                if gas_price_current < gas_price_fixed else gas_price_fixed
            print('nonce', nonce)
            print('transferOwnership message signed')
            signed_data = sign_transaction(
                address, nonce, 1000000,
                dest=self.eth_contract_token.address,
                contract_data=binascii.hexlify(
                    tr.encode_function_call(
                        'transferOwnership',
                        [self.eth_contract_crowdsale.address])).decode(),
                gas_price=int(gas_price * 1.2), chain_id=chain_id)
            self.eth_contract_token.tx_hash = eth_int.eth_sendRawTransaction(
                signed_data)
            self.eth_contract_token.save()
            print('transferOwnership message sended')

    def get_gaslimit(self):
        """Gas limit to use when deploying the ICO contract."""
        return CONTRACT_GAS_LIMIT['ICO']

    @blocking
    @postponable
    def deploy(self, eth_contract_attr_name='eth_contract_token', attempts=1):
        # A reused token means only the crowdsale needs deploying.
        if self.reused_token:
            eth_contract_attr_name = 'eth_contract_crowdsale'
        return super().deploy(eth_contract_attr_name, attempts=attempts)

    def get_arguments(self, eth_contract_attr_name):
        """Constructor arguments per contract: the crowdsale takes the
        deployed token's address; the token takes none."""
        return {
            'eth_contract_token': [],
            'eth_contract_crowdsale': [self.eth_contract_token.address],
        }[eth_contract_attr_name]

    # token
    @blocking
    @postponable
    # @check_transaction
    def ownershipTransferred(self, message):
        """Token ownership moved to the crowdsale: send the ``init`` call."""
        address = NETWORKS[self.contract.network.name]['address']
        if self.contract.state in ('ACTIVE', 'ENDED'):
            take_off_blocking(self.contract.network.name)
            return
        if message['contractId'] != self.eth_contract_token.id:
            if self.contract.state == 'WAITING_FOR_DEPLOYMENT':
                take_off_blocking(self.contract.network.name)
            print('ignored', flush=True)
            return
        if self.contract.state == 'WAITING_ACTIVATION':
            self.contract.state = 'WAITING_FOR_DEPLOYMENT'
            self.contract.save()
        # continue deploy: call init
        tr = abi.ContractTranslator(self.eth_contract_crowdsale.abi)
        eth_int = EthereumProvider().get_provider(
            network=self.contract.network.name)
        nonce = int(eth_int.eth_getTransactionCount(address, "pending"), 16)
        chain_id = int(eth_int.eth_chainId(), 16)
        # init() distributes to each token holder, so gas scales with count.
        gas_limit = 100000 + 80000 * self.contract.tokenholder_set.all(
        ).count()
        gas_price = ETH_COMMON_GAS_PRICES[
            self.contract.network.name] * NET_DECIMALS['ETH_GAS_PRICE']
        print('nonce', nonce)
        print('init message signed')
        signed_data = sign_transaction(
            address, nonce, gas_limit,
            dest=self.eth_contract_crowdsale.address,
            contract_data=binascii.hexlify(tr.encode_function_call(
                'init', [])).decode(),
            gas_price=int(gas_price * 1.2), chain_id=chain_id)
        self.eth_contract_crowdsale.tx_hash = eth_int.eth_sendRawTransaction(
            signed_data)
        self.eth_contract_crowdsale.save()
        print('init message sended')

    # crowdsale
    @postponable
    @check_transaction
    def initialized(self, message):
        """Final confirmation: mark the contract ACTIVE and notify the user
        by email (plus optional source verification and bot message)."""
        if self.contract.state != 'WAITING_FOR_DEPLOYMENT':
            return
        take_off_blocking(self.contract.network.name)
        if message['contractId'] != self.eth_contract_crowdsale.id:
            print('ignored', flush=True)
            return
        self.contract.state = 'ACTIVE'
        self.contract.deployed_at = datetime.datetime.now()
        self.contract.save()
        # Contract type 5 is a stand-alone token now consumed by this ICO.
        if self.eth_contract_token.original_contract.contract_type == 5:
            self.eth_contract_token.original_contract.state = \
                'UNDER_CROWDSALE'
            self.eth_contract_token.original_contract.save()
        network_link = NETWORKS[self.contract.network.name]['link_address']
        network_name = MAIL_NETWORK[self.contract.network.name]
        if self.contract.user.email:
            send_mail(
                ico_subject,
                ico_text.format(
                    link1=network_link.format(
                        address=self.eth_contract_token.address, ),
                    link2=network_link.format(
                        address=self.eth_contract_crowdsale.address),
                    network_name=network_name), DEFAULT_FROM_EMAIL,
                [self.contract.user.email])
            if not 'MAINNET' in self.contract.network.name:
                send_testnet_gift_emails.delay(self.contract.user.profile.id)
            else:
                send_promo_mainnet.delay(self.contract.user.email)
        if self.verification:
            send_verification_mail(
                network=self.contract.network.name,
                addresses=(
                    self.eth_contract_token.address,
                    self.eth_contract_crowdsale.address,
                ),
                compiler=self.eth_contract_token.compiler_version,
                files={
                    'token.sol': self.eth_contract_token.source_code,
                    'ico.sol': self.eth_contract_crowdsale.source_code,
                },
            )
            self.verification_date_payment = datetime.datetime.now().date()
            self.verification_status = 'IN_PROCESS'
            self.save()
        msg = self.bot_message
        # Capture msg now; the lambda runs after the surrounding transaction
        # commits.
        transaction.on_commit(lambda: send_message_to_subs.delay(msg, True))

    def finalized(self, message):
        """Crowdsale finished: mark token (unless still minting) and
        crowdsale contracts as ENDED."""
        if not self.continue_minting and \
                self.eth_contract_token.original_contract.state != 'ENDED':
            self.eth_contract_token.original_contract.state = 'ENDED'
            self.eth_contract_token.original_contract.save()
        if self.eth_contract_crowdsale.contract.state != 'ENDED':
            self.eth_contract_crowdsale.contract.state = 'ENDED'
            self.eth_contract_crowdsale.contract.save()

    def check_contract(self):
        # No periodic health check for ICO contracts.
        pass

    def timesChanged(self, message):
        """Mirror an on-chain change of the crowdsale start/stop times."""
        if 'startTime' in message and message['startTime']:
            self.start_date = message['startTime']
        if 'endTime' in message and message['endTime']:
            self.stop_date = message['endTime']
        self.save()
class BeltFishSESQLModel(BaseSQLModel):
    """Unmanaged model over a SQL view aggregating belt-fish sample units
    (SU) up to sample events (SE): averaged biomass overall, per trophic
    group, and per fish family.
    """

    # SE-level columns, qualified with the CTE alias for the outer SELECT.
    _se_fields = ", ".join(
        [f"beltfish_su.{f}" for f in BaseSQLModel.se_fields])
    _su_aggfields_sql = BaseSQLModel.su_aggfields_sql

    sql = f"""
        WITH beltfish_su AS (
            {BeltFishSUSQLModel.sql}
        )
        -- For each SE, summarize biomass by 1) avg
        -- of transects and 2) avg of transects' trophic groups
        SELECT beltfish_su.sample_event_id AS id,
        {_se_fields},
        data_policy_beltfish,
        {_su_aggfields_sql},
        COUNT(beltfish_su.pseudosu_id) AS sample_unit_count,
        ROUND(AVG(beltfish_su.biomass_kgha), 2) AS biomass_kgha_avg,
        biomass_kgha_by_trophic_group_avg,
        biomass_kgha_by_fish_family_avg
        FROM beltfish_su
        INNER JOIN (
            SELECT sample_event_id,
            jsonb_object_agg(
                tg,
                ROUND(biomass_kgha::numeric, 2)
            ) AS biomass_kgha_by_trophic_group_avg
            FROM (
                SELECT meta_su_tgs.sample_event_id, tg,
                AVG(biomass_kgha) AS biomass_kgha
                FROM (
                    SELECT sample_event_id, pseudosu_id, tgdata.key AS tg,
                    SUM(tgdata.value::double precision) AS biomass_kgha
                    FROM beltfish_su,
                    LATERAL jsonb_each_text(biomass_kgha_by_trophic_group)
                    tgdata(key, value)
                    GROUP BY sample_event_id, pseudosu_id, tgdata.key
                ) meta_su_tgs
                GROUP BY meta_su_tgs.sample_event_id, tg
            ) beltfish_su_tg
            GROUP BY sample_event_id
        ) AS beltfish_se_tg
        ON beltfish_su.sample_event_id = beltfish_se_tg.sample_event_id
        INNER JOIN (
            SELECT sample_event_id,
            jsonb_object_agg(
                ff,
                ROUND(biomass_kgha::numeric, 2)
            ) AS biomass_kgha_by_fish_family_avg
            FROM (
                SELECT meta_su_ffs.sample_event_id, ff,
                AVG(biomass_kgha) AS biomass_kgha
                FROM (
                    SELECT sample_event_id, pseudosu_id, ffdata.key AS ff,
                    SUM(ffdata.value::double precision) AS biomass_kgha
                    FROM beltfish_su,
                    LATERAL jsonb_each_text(biomass_kgha_by_fish_family)
                    ffdata(key, value)
                    GROUP BY sample_event_id, pseudosu_id, ffdata.key
                ) meta_su_ffs
                GROUP BY meta_su_ffs.sample_event_id, ff
            ) beltfish_su_ff
            GROUP BY sample_event_id
        ) AS beltfish_se_fish_families
        ON beltfish_su.sample_event_id =
            beltfish_se_fish_families.sample_event_id
        GROUP BY
        {_se_fields},
        data_policy_beltfish,
        biomass_kgha_by_trophic_group_avg,
        biomass_kgha_by_fish_family_avg
    """

    # The view is parameterized by project; the manager substitutes this arg.
    sql_args = dict(project_id=SQLTableArg(required=True))

    objects = SQLTableManager()

    # Columns produced by the SQL above.
    sample_unit_count = models.PositiveSmallIntegerField()
    depth_avg = models.DecimalField(max_digits=4, decimal_places=2,
                                    verbose_name=_("depth (m)"))
    current_name = models.CharField(max_length=100)
    tide_name = models.CharField(max_length=100)
    visibility_name = models.CharField(max_length=100)
    biomass_kgha_avg = models.DecimalField(
        max_digits=8,
        decimal_places=2,
        verbose_name=_("biomass (kg/ha)"),
        null=True,
        blank=True,
    )
    biomass_kgha_by_trophic_group_avg = JSONField(null=True, blank=True)
    biomass_kgha_by_fish_family_avg = JSONField(null=True, blank=True)
    data_policy_beltfish = models.CharField(max_length=50)

    class Meta:
        db_table = "belt_fish_se_sm"
        # Backed by a view/materialized table; Django must not migrate it.
        managed = False
class Scanner(models.Model):
    """A scanner, i.e. a template for actual scanning jobs."""

    # InheritanceManager lets querysets resolve to the concrete subclass.
    objects = InheritanceManager()

    linkable = False

    name = models.CharField(max_length=256, unique=True, null=False,
                            db_index=True, verbose_name='Navn')
    organization = models.ForeignKey(Organization, null=False,
                                     verbose_name='Organisation',
                                     on_delete=models.PROTECT)
    group = models.ForeignKey(Group, null=True, blank=True,
                              verbose_name='Gruppe',
                              on_delete=models.SET_NULL)
    schedule = RecurrenceField(max_length=1024,
                               verbose_name='Planlagt afvikling')
    do_ocr = models.BooleanField(default=False, verbose_name='Scan billeder')
    do_last_modified_check = models.BooleanField(
        default=True,
        verbose_name='Tjek dato for sidste ændring',
    )
    columns = models.CharField(
        validators=[validate_comma_separated_integer_list],
        max_length=128, null=True, blank=True)
    rules = models.ManyToManyField(Rule, blank=True, verbose_name='Regler',
                                   related_name='scanners')
    recipients = models.ManyToManyField(UserProfile, blank=True,
                                        verbose_name='Modtagere')

    # Spreadsheet annotation and replacement parameters

    # Save a copy of any spreadsheets scanned with annotations
    # in each row where matches were found. If this is enabled and any of
    # the replacement parameters are enabled (e.g. do_cpr_replace), matches
    # will also be replaced with the specified text (e.g. cpr_replace_text).
    output_spreadsheet_file = models.BooleanField(default=False)

    # Replace CPRs?
    do_cpr_replace = models.BooleanField(default=False)

    # Text to replace CPRs with
    cpr_replace_text = models.CharField(max_length=2048, null=True,
                                        blank=True)

    # Replace names?
    do_name_replace = models.BooleanField(default=False)

    # Text to replace names with
    name_replace_text = models.CharField(max_length=2048, null=True,
                                         blank=True)

    # Replace addresses?
    do_address_replace = models.BooleanField(default=False)

    # Text to replace addresses with
    address_replace_text = models.CharField(max_length=2048, null=True,
                                            blank=True)

    # Validation state values for validation_status below.
    VALID = 1
    INVALID = 0

    validation_choices = (
        (INVALID, "Ugyldig"),
        (VALID, "Gyldig"),
    )

    url = models.CharField(max_length=2048, blank=False, verbose_name='URL')

    authentication = models.OneToOneField(
        Authentication,
        null=True,
        related_name='%(app_label)s_%(class)s_authentication',
        verbose_name='Brugernavn',
        on_delete=models.SET_NULL)

    validation_status = models.IntegerField(
        choices=validation_choices,
        default=INVALID,
        verbose_name='Valideringsstatus')

    # One exclusion rule per line; lines prefixed "regex:" are regexes.
    exclusion_rules = models.TextField(blank=True, default="",
                                       verbose_name='Ekskluderingsregler')

    # Timestamp of the last engine2 run, used for last-modified filtering.
    e2_last_run_at = models.DateTimeField(null=True)

    def exclusion_rule_list(self):
        """Return the exclusion rules as a list of strings or regexes."""
        REGEX_PREFIX = "regex:"
        rules = []
        for line in self.exclusion_rules.splitlines():
            line = line.strip()
            if line.startswith(REGEX_PREFIX):
                # Strip the prefix and compile the remainder.
                rules.append(
                    re.compile(line[len(REGEX_PREFIX):], re.IGNORECASE))
            else:
                rules.append(line)
        return rules

    @property
    def is_running(self) -> bool:
        '''Are any scans currently running against this scanner?'''
        # using a string for the status is kind of ugly, but necessary
        # to avoid circular imports
        return self.webscans.filter(status="STARTED").exists()

    @property
    def schedule_description(self):
        """A lambda for creating schedule description strings."""
        # "Ja"/"Nej": whether the recurrence rule yields any occurrences.
        if any(self.schedule.occurrences()):
            return u"Ja"
        else:
            return u"Nej"

    # Run error messages
    ALREADY_RUNNING = ("Scanneren kunne ikke startes," +
                       " fordi der allerede er en scanning i gang for den.")
    EXCHANGE_EXPORT_IS_RUNNING = ("Scanneren kunne ikke startes," +
                                  " fordi der er en exchange export igang.")

    process_urls = JSONField(null=True, blank=True)

    # Booleans for control of scanners run from web service.
    do_run_synchronously = models.BooleanField(default=False)
    is_visible = models.BooleanField(default=True)

    # First possible start time
    FIRST_START_TIME = datetime.time(18, 0)
    # Amount of quarter-hours that can be added to the start time
    STARTTIME_QUARTERS = 6 * 4

    def get_start_time(self):
        """The time of day the Scanner should be automatically started."""
        # Spread start times over the evening, keyed on the primary key.
        added_minutes = 15 * (self.pk % Scanner.STARTTIME_QUARTERS)
        added_hours = int(added_minutes / 60)
        added_minutes -= added_hours * 60
        return Scanner.FIRST_START_TIME.replace(
            hour=Scanner.FIRST_START_TIME.hour + added_hours,
            minute=Scanner.FIRST_START_TIME.minute + added_minutes)

    @classmethod
    def modulo_for_starttime(cls, time):
        """Convert a datetime.time object to the corresponding modulo value.

        The modulo value can be used to search the database for scanners
        that should be started at the given time by filtering a query with:
        (WebScanner.pk % WebScanner.STARTTIME_QUARTERS) == <modulo_value>
        """
        if (time < cls.FIRST_START_TIME):
            return None
        hours = time.hour - cls.FIRST_START_TIME.hour
        minutes = 60 * hours + time.minute - cls.FIRST_START_TIME.minute
        return int(minutes / 15)

    @property
    def display_name(self):
        """The name used when displaying the scanner on the web page."""
        return "WebScanner '%s'" % self.name

    def __str__(self):
        """Return the name of the scanner."""
        return self.name

    def run(self, type, blocking=False, user=None):
        """Run a scan with the Scanner.

        Return the Scan object if we started the scanner.
        Return None if there is already a scanner running,
        or if there was a problem running the scanner.
        """
        local_tz = tz.gettz()
        now = datetime.datetime.now().replace(microsecond=0)

        # Check that this source is accessible, and return the resulting
        # error if it isn't
        source = self.make_engine2_source()
        with SourceManager() as sm, closing(source.handles(sm)) as handles:
            try:
                # Pull a single handle to probe reachability; the True
                # default keeps next() from raising on an empty source.
                print(next(handles, True))
            except ResourceUnavailableError as ex:
                # args[0] is presumably the failing source itself; only the
                # detail args are user-relevant.
                return ", ".join([str(a) for a in ex.args[1:]])

        # Create a new engine2 scan specification and submit it to the
        # pipeline
        rule = OrRule.make(*[
            r.make_engine2_rule()
            for r in self.rules.all().select_subclasses()
        ])

        prerules = []
        if self.do_last_modified_check:
            # Make sure that the timestamp we give to LastModifiedRule is
            # timezone-aware; engine2's serialisation code requires this
            # for all datetime.datetimes, so LastModifiedRule will raise a
            # ValueError if we try to give it a naive one
            last = self.e2_last_run_at
            if last:
                if not last.tzinfo or last.tzinfo.utcoffset(last) is None:
                    last = last.replace(tzinfo=local_tz)
                prerules.append(LastModifiedRule(last))

        if self.do_ocr:
            # Only OCR images whose dimensions make text plausible.
            cr = make_if(
                HasConversionRule(OutputType.ImageDimensions),
                DimensionsRule(width_range=range(32, 16385),
                               height_range=range(32, 16385),
                               min_dim=128),
                True)
            prerules.append(cr)

        rule = AndRule.make(*prerules, rule)

        message = {
            'scan_tag': now.isoformat(),
            'source': source.to_json_object(),
            'rule': rule.to_json_object()
        }
        queue_name = settings.AMQP_PIPELINE_TARGET

        self.e2_last_run_at = now
        self.save()

        # The scan tag doubles as the return value identifying this run.
        scan = now.isoformat()

        print(queue_name, json.dumps(message))
        amqp_connection_manager.start_amqp(queue_name)
        amqp_connection_manager.send_message(queue_name, json.dumps(message))
        amqp_connection_manager.close_connection()

        return scan

    def create_scan(self):
        """ Creates a file scan.

        :return: A file scan object
        """
        # Local import avoids a circular dependency with the scans app.
        from ..scans.scan_model import Scan
        scan = Scan()
        return scan.create(self)

    def path_for(self, uri):
        # Identity mapping; subclasses may translate URIs to local paths.
        return uri

    def make_engine2_source(self) -> Source:
        """Construct an engine2 Source corresponding to the target of
        this Scanner."""
        # (this can't use the @abstractmethod decorator because of metaclass
        # conflicts with Django, but subclasses should override this method!)
        raise NotImplementedError("Scanner.make_engine2_source")

    class Meta:
        abstract = False
        ordering = ['name']
class RpmRepository(Repository):
    """
    Repository for "rpm" content.

    Fields:
        sub_repo (Boolean): Whether is sub_repo or not
        last_sync_revision_number (Text): The revision number
        last_sync_remote (Remote): The remote used for the last sync
        last_sync_repo_version (Integer): The repo version number of the last sync
        last_sync_repomd_checksum (Text): The repo version repomd.xml file sha256
        original_checksum_types (JSON): Checksum for each metadata type
    """

    TYPE = "rpm"
    CONTENT_TYPES = [
        Package,
        UpdateRecord,
        PackageCategory,
        PackageGroup,
        PackageEnvironment,
        PackageLangpacks,
        RepoMetadataFile,
        DistributionTree,
        Modulemd,
        ModulemdDefaults,
    ]
    REMOTE_TYPES = [RpmRemote, UlnRemote]
    # gpgcheck values are the literal 0/1 integers used in repo config files.
    GPGCHECK_CHOICES = [(0, 0), (1, 1)]

    metadata_signing_service = models.ForeignKey(
        AsciiArmoredDetachedSigningService, on_delete=models.SET_NULL, null=True
    )
    sub_repo = models.BooleanField(default=False)
    last_sync_revision_number = models.CharField(max_length=20, null=True)
    last_sync_remote = models.ForeignKey(Remote, null=True, on_delete=models.SET_NULL)
    last_sync_repo_version = models.PositiveIntegerField(default=0)
    last_sync_repomd_checksum = models.CharField(max_length=64, null=True)
    original_checksum_types = JSONField(default=dict)
    # 0 disables the retention policy (see _apply_retention_policy).
    retain_package_versions = models.PositiveIntegerField(default=0)
    autopublish = models.BooleanField(default=False)
    metadata_checksum_type = models.CharField(
        default=CHECKSUM_TYPES.SHA256, choices=CHECKSUM_CHOICES, max_length=10
    )
    package_checksum_type = models.CharField(
        default=CHECKSUM_TYPES.SHA256, choices=CHECKSUM_CHOICES, max_length=10
    )
    gpgcheck = models.IntegerField(default=0, choices=GPGCHECK_CHOICES)
    repo_gpgcheck = models.IntegerField(default=0, choices=GPGCHECK_CHOICES)
    sqlite_metadata = models.BooleanField(default=False)

    def new_version(self, base_version=None):
        """
        Create a new RepositoryVersion for this Repository.

        Creation of a RepositoryVersion should be done in a RQ Job.

        Args:
            base_version (pulpcore.app.models.RepositoryVersion): an optional repository version
                whose content will be used as the set of content for the new version

        Returns:
            pulpcore.app.models.RepositoryVersion: The Created RepositoryVersion
        """
        with transaction.atomic():
            # Discard an abandoned, incomplete version before starting a
            # new one so version numbers stay monotonic.
            latest_version = self.versions.latest()
            if not latest_version.complete:
                latest_version.delete()

            version = RepositoryVersion(
                repository=self, number=int(self.next_version), base_version=base_version
            )
            version.save()

            if base_version:
                # first remove the content that isn't in the base version
                version.remove_content(version.content.exclude(pk__in=base_version.content))
                # now add any content that's in the base_version but not in version
                version.add_content(base_version.content.exclude(pk__in=version.content))

        # Sub-repos (e.g. distribution-tree children) are internal and
        # should not be reported as created task resources.
        if Task.current() and not self.sub_repo:
            resource = CreatedResource(content_object=version)
            resource.save()
        return version

    def on_new_version(self, version):
        """
        Called when new repository versions are created.

        Publishes (and re-points distributions) automatically when
        ``autopublish`` is enabled.

        Args:
            version: The new repository version.
        """
        super().on_new_version(version)

        # avoid circular import issues
        from pulp_rpm.app import tasks

        if self.autopublish:
            publication = tasks.publish(
                repository_version_pk=version.pk,
                gpgcheck_options={"gpgcheck": self.gpgcheck, "repo_gpgcheck": self.repo_gpgcheck},
                metadata_signing_service=self.metadata_signing_service,
                checksum_types={
                    "metadata": self.metadata_checksum_type,
                    "package": self.package_checksum_type,
                },
                sqlite_metadata=self.sqlite_metadata,
            )
            distributions = self.distributions.all()

            if publication and distributions:
                for distribution in distributions:
                    distribution.publication = publication
                    distribution.save()

    @staticmethod
    def artifacts_for_version(version):
        """
        Return the artifacts for an RpmRepository version.

        Override the default behavior to include DistributionTree artifacts
        from nested repos.

        Args:
            version (pulpcore.app.models.RepositoryVersion): to get the artifacts for

        Returns:
            django.db.models.QuerySet: The artifacts that are contained within this version.
        """
        qs = Artifact.objects.filter(content__pk__in=version.content)
        for tree in DistributionTree.objects.filter(pk__in=version.content):
            qs |= tree.artifacts()
        return qs

    class Meta:
        default_related_name = "%(app_label)s_%(model_name)s"

    def finalize_new_version(self, new_version):
        """
        Ensure there are no duplicates in a repo version and content is not broken.

        Remove duplicates based on repo_key_fields. Ensure that modulemd is
        added with all its RPMs. Ensure that modulemd is removed with all its
        RPMs. Resolve advisory conflicts when there is more than one advisory
        with the same id.

        Args:
            new_version (pulpcore.app.models.RepositoryVersion): The incomplete RepositoryVersion
                to finalize.
        """
        if new_version.base_version:
            previous_version = new_version.base_version
        else:
            try:
                previous_version = new_version.previous()
            except RepositoryVersion.DoesNotExist:
                # This is the very first version; nothing to diff against.
                previous_version = None

        remove_duplicates(new_version)
        self._resolve_distribution_trees(new_version, previous_version)

        from pulp_rpm.app.modulemd import resolve_module_packages  # avoid circular import

        resolve_module_packages(new_version, previous_version)

        self._apply_retention_policy(new_version)

        from pulp_rpm.app.advisory import resolve_advisories  # avoid circular import

        resolve_advisories(new_version, previous_version)
        validate_repo_version(new_version)

    def _apply_retention_policy(self, new_version):
        """Apply the repository's "retain_package_versions" settings to the new version.

        Remove all non-modular packages that are older than the retention policy. A value of 0
        for the package retention policy represents disabled. A value of 3 would mean that the
        3 most recent versions of each package would be kept while older versions are discarded.

        Args:
            new_version (models.RepositoryVersion): Repository version to filter
        """
        assert (
            not new_version.complete
        ), "Cannot apply retention policy to completed repository versions"

        if self.retain_package_versions > 0:
            # It would be more ideal if, instead of annotating with an age and filtering manually,
            # we could use Django to filter the particular Package content we want to delete.
            # Something like ".filter(F('age') > self.retain_package_versions)" would be better
            # however this is not currently possible with Django. It would be possible with raw
            # SQL but the repository version content membership subquery is currently
            # django-managed and would be difficult to share.
            #
            # Instead we have to do the filtering manually.
            nonmodular_packages = (
                Package.objects.with_age()
                .filter(
                    pk__in=new_version.content.filter(pulp_type=Package.get_pulp_type()),
                    is_modular=False,  # don't want to filter out modular RPMs
                )
                .only("pk")
            )

            old_packages = []
            for package in nonmodular_packages:
                if package.age > self.retain_package_versions:
                    old_packages.append(package.pk)

            new_version.remove_content(Content.objects.filter(pk__in=old_packages))

    def _resolve_distribution_trees(self, new_version, previous_version):
        """
        There can be only one distribution tree in a repo version.

        Args:
            new_version (pulpcore.app.models.RepositoryVersion): current incomplete repository
                version
            previous_version (pulpcore.app.models.RepositoryVersion): a version preceding
                the current incomplete one
        """
        disttree_pulp_type = DistributionTree.get_pulp_type()
        current_disttrees = new_version.content.filter(pulp_type=disttree_pulp_type)

        if len(current_disttrees) < 2:
            return

        if previous_version:
            # Prefer the incoming tree: drop the one carried over from the
            # previous version.
            previous_disttree = previous_version.content.get(pulp_type=disttree_pulp_type)
            new_version.remove_content(Content.objects.filter(pk=previous_disttree.pk))

        incoming_disttrees = new_version.content.filter(pulp_type=disttree_pulp_type)
        if len(incoming_disttrees) != 1:
            raise DistributionTreeConflict(
                _("More than one distribution tree cannot be added to a " "repository version.")
            )
class AdvancedDataType(models.Model):
    """A named container for an arbitrary JSON payload."""

    # Short human-readable label; also used as the string representation.
    name = models.CharField(max_length=30)
    # Free-form JSON payload; no schema is enforced here.
    json_data = JSONField()

    def __str__(self):
        return self.name
class Place(UUIDModel, TimeStampedModel, SoftDeletableModel, Utility):
    """A physical place (address) attached to an arbitrary subject via a
    generic foreign key, scoped to an Organization."""

    content_type = models.ForeignKey(ContentType,
                                     on_delete=models.CASCADE,
                                     default=0,
                                     null=False,
                                     blank=False)
    # object_id is a CharField (max 36) so it can hold UUID primary keys.
    object_id = models.CharField(max_length=36,
                                 null=False,
                                 blank=False,
                                 default='0')
    subject = GenericForeignKey('content_type', 'object_id')
    organization = models.ForeignKey('whereabouts.Organization',
                                     default=0,
                                     null=False,
                                     blank=False,
                                     on_delete=models.SET(0))
    address = AddressField(related_name='place', blank=True, null=True)
    address_extra = models.CharField(max_length=50,
                                     blank=True,
                                     null=True,
                                     help_text='i.e. Apartment number')
    address_type = models.CharField(
        max_length=20,
        default='street',
        blank=True,
        null=True,
        help_text='mailing, remote or street address')
    display_name = models.CharField(
        db_index=True,
        max_length=50,
        default='main',
        blank=False,
        null=False,
        help_text='main, resident, etc (main will be displayed first)')
    display_order = models.SmallIntegerField(default=0, blank=False, null=False)
    start = models.DateField(null=True,
                             blank=True,
                             help_text='optional, moved in date')
    finish = models.DateField(null=True,
                              blank=True,
                              help_text='optional, moved out date')
    infos = JSONField(default=Utility.default_infos,
                      null=True,
                      blank=True,
                      help_text="please keep {} here even there's no data")

    # need to validate there only one 'main' for display_name

    class Meta:
        db_table = 'whereabouts_places'
        ordering = (
            'organization',
            'content_type',
            'object_id',
            'display_order',
        )
        # Uniqueness only applies to non-removed rows (soft delete aware).
        constraints = [
            models.UniqueConstraint(fields=[
                'organization', 'content_type', 'object_id', 'address',
                'address_extra'
            ],
                                    condition=models.Q(is_removed=False),
                                    name="address_object")
        ]
        indexes = [
            GinIndex(
                fields=['infos'],
                name='place_infos_gin',
            ),
        ]

    @property
    def street(self):
        """Best-effort human-readable street line for this place.

        Falls back through: formatted address -> assembled
        number/route/extra/locality -> raw address -> address_extra.
        """
        if self.address:
            if Utility.blank_check(
                    self.address_extra) and Utility.present_check(
                        self.address.formatted):
                txt = '%s' % self.address.formatted
            elif self.address.locality:
                txt = ''
                if self.address.street_number:
                    txt = '%s' % self.address.street_number
                if self.address.route:
                    # NOTE(review): the route is only appended when a street
                    # number already produced text -- looks like the append
                    # should happen unconditionally; confirm intent.
                    if txt:
                        txt += ' %s' % self.address.route
                if self.address_extra:
                    txt = txt + ' %s' % self.address_extra if txt else ' %s' % self.address_extra
                locality = '%s' % self.address.locality
                if txt and locality:
                    txt += ', '
                txt += locality
            else:
                txt = '%s' % self.address.raw
            return txt
        else:
            return self.address_extra

    def __str__(self):
        return '%s %s' % (self.address, self.subject)
class Experiment(DiffModel, DescribableModel):
    """A model that represents experiments."""
    uuid = models.UUIDField(default=uuid.uuid4,
                            editable=False,
                            unique=True,
                            null=False)
    sequence = models.IntegerField(
        editable=False,
        null=False,
        help_text='The sequence number of this experiment within the project.',
    )
    project = models.ForeignKey('projects.Project', related_name='experiments')
    user = models.ForeignKey(settings.AUTH_USER_MODEL,
                             related_name='experiments')
    experiment_group = models.ForeignKey(
        'projects.ExperimentGroup',
        blank=True,
        null=True,
        related_name='experiments',
        help_text='The experiment group that generate this experiment.')
    content = models.TextField(
        blank=True,
        null=True,
        help_text='The yaml content of the polyaxonfile/specification.',
        validators=[validate_spec_content])
    config = JSONField(
        help_text=
        'The compiled polyaxon with specific values for this experiment.',
        validators=[validate_spec_content])
    original_experiment = models.ForeignKey(
        'self',
        null=True,
        blank=True,
        related_name='clones',
        help_text='The original experiment that was cloned from.')
    # Latest status/metric snapshots; '+' disables the reverse relation.
    experiment_status = models.OneToOneField('ExperimentStatus',
                                             related_name='+',
                                             blank=True,
                                             null=True,
                                             editable=True)
    experiment_metric = models.OneToOneField('ExperimentMetric',
                                             related_name='+',
                                             blank=True,
                                             null=True,
                                             editable=True)
    commit = models.CharField(max_length=40, blank=True, null=True)

    class Meta:
        ordering = ['sequence']
        unique_together = (('project', 'sequence'), )

    def save(self, *args, **kwargs):
        """Assign the next per-project sequence number on first save.

        NOTE(review): read-then-increment is not guarded by a lock, so two
        concurrent creates can race on the unique (project, sequence) pair.
        """
        if self.pk is None:
            last = Experiment.objects.filter(project=self.project).last()
            self.sequence = 1
            if last:
                self.sequence = last.sequence + 1

        super(Experiment, self).save(*args, **kwargs)

    def __str__(self):
        return self.unique_name

    @property
    def unique_name(self):
        # Grouped experiments are namespaced by their group, otherwise by
        # the project.
        if self.experiment_group:
            return '{}.{}'.format(self.experiment_group.unique_name,
                                  self.sequence)
        return '{}.{}'.format(self.project.unique_name, self.sequence)

    @cached_property
    def compiled_spec(self):
        return Specification(experiment=self.uuid, values=self.config)

    @cached_property
    def declarations(self):
        return self.compiled_spec.declarations

    @cached_property
    def resources(self):
        return self.compiled_spec.total_resources

    @property
    def last_job_statuses(self):
        """The statuses of the job in this experiment."""
        statuses = []
        for job in self.jobs.all():
            status = job.last_status
            if status is not None:
                statuses.append(status)
        return statuses

    @property
    def calculated_status(self):
        """Derive the experiment status from the master job, then all jobs.

        NOTE(review): indexing ``[0]`` raises IndexError if no master job
        exists yet -- confirm callers guarantee one.
        """
        master_status = self.jobs.filter(role=TaskType.MASTER)[0].last_status
        calculated_status = master_status if JobLifeCycle.is_done(
            master_status) else None
        if calculated_status is None:
            calculated_status = ExperimentLifeCycle.jobs_status(
                self.last_job_statuses)
        if calculated_status is None:
            return self.last_status
        return calculated_status

    @property
    def last_status(self):
        return self.experiment_status.status if self.experiment_status else None

    @property
    def last_metric(self):
        return self.experiment_metric.values if self.experiment_metric else None

    @property
    def is_running(self):
        return ExperimentLifeCycle.is_running(self.last_status)

    @property
    def is_done(self):
        return ExperimentLifeCycle.is_done(self.last_status)

    @property
    def finished_at(self):
        status = self.statuses.filter(
            status__in=ExperimentLifeCycle.DONE_STATUS).first()
        if status:
            return status.created_at
        return None

    @property
    def started_at(self):
        status = self.statuses.filter(
            status=ExperimentLifeCycle.STARTING).first()
        if status:
            return status.created_at
        return None

    @property
    def is_clone(self):
        return self.original_experiment is not None

    @property
    def is_independent(self):
        """If the experiment belongs to a experiment_group or is independently created."""
        return self.experiment_group is None

    def update_status(self):
        """Recompute the status and persist it if it changed.

        Returns True when a new status record was created.
        """
        current_status = self.last_status
        calculated_status = self.calculated_status
        if calculated_status != current_status:
            # Add new status to the experiment
            self.set_status(calculated_status)
            return True
        return False

    def set_status(self, status, message=None):
        ExperimentStatus.objects.create(experiment=self,
                                        status=status,
                                        message=message)
class ExperimentJob(DiffModel):
    """A model that represents job related to an experiment"""
    uuid = models.UUIDField(default=uuid.uuid4,
                            editable=False,
                            unique=True,
                            null=False)
    experiment = models.ForeignKey(Experiment, related_name='jobs')
    definition = JSONField(help_text='The specific values for this job.')
    role = models.CharField(max_length=64, default=TaskType.MASTER)
    sequence = models.IntegerField(
        editable=False,
        null=False,
        help_text='The sequence number of this job within the experiment.',
    )
    # Latest status snapshot; '+' disables the reverse relation.
    job_status = models.OneToOneField('ExperimentJobStatus',
                                      related_name='+',
                                      blank=True,
                                      null=True,
                                      editable=True)
    resources = models.OneToOneField(JobResources,
                                     related_name='+',
                                     blank=True,
                                     null=True,
                                     editable=True)

    class Meta:
        ordering = ['sequence']
        unique_together = (('experiment', 'sequence'), )

    def __str__(self):
        return self.unique_name

    @property
    def unique_name(self):
        return '{}.{}.{}'.format(self.experiment.unique_name, self.sequence,
                                 self.role)

    def save(self, *args, **kwargs):
        """Assign the next per-experiment sequence number on first save.

        NOTE(review): read-then-increment is not lock-protected; concurrent
        creates can race on the unique (experiment, sequence) pair.
        """
        if self.pk is None:
            last = ExperimentJob.objects.filter(
                experiment=self.experiment).last()
            self.sequence = 1
            if last:
                self.sequence = last.sequence + 1

        super(ExperimentJob, self).save(*args, **kwargs)

    @property
    def last_status(self):
        return self.job_status.status if self.job_status else None

    @property
    def is_running(self):
        return JobLifeCycle.is_running(self.last_status)

    @property
    def is_done(self):
        return JobLifeCycle.is_done(self.last_status)

    @property
    def started_at(self):
        # A job counts as started at its BUILDING status, falling back to
        # RUNNING if it never reported BUILDING.
        status = self.statuses.filter(status=JobLifeCycle.BUILDING).first()
        if not status:
            status = self.statuses.filter(status=JobLifeCycle.RUNNING).first()
        if status:
            return status.created_at
        return None

    @property
    def finished_at(self):
        status = self.statuses.filter(
            status__in=JobLifeCycle.DONE_STATUS).last()
        if status:
            return status.created_at
        return None

    def set_status(self, status, message=None, details=None):
        """Record a new status unless the job has already reached a terminal one.

        Returns True when a new status record was created.
        """
        current_status = self.last_status
        # We should not update statuses anymore once the job is done.
        if JobLifeCycle.is_done(current_status):
            logger.info('Received a new status `{}` for job `{}`. '
                        'But the job is already done with status `{}`'.format(
                            status, self.unique_name, current_status))
            return False
        if status != current_status:
            # Add new status to the job
            ExperimentJobStatus.objects.create(job=self,
                                               status=status,
                                               message=message,
                                               details=details)
            return True
        return False
class DocumentNotificationSubscription(models.Model):
    """Subscription describing who gets emailed when a document event fires,
    and how the notification is rendered."""

    # Name of the email template used for rendering.
    template_name = 'document_notification'

    enabled = models.BooleanField(null=False, blank=False, default=False)

    document_type = models.ForeignKey(DocumentType,
                                      blank=False,
                                      null=False,
                                      on_delete=CASCADE)

    event = models.CharField(max_length=100,
                             blank=False,
                             null=False,
                             choices=DOCUMENT_EVENTS_CHOICES)

    recipients = models.CharField(max_length=100,
                                  blank=False,
                                  null=False,
                                  choices=NOTIFICATION_RECIPIENTS_CHOICES)

    specified_role = models.ForeignKey(Role,
                                       blank=True,
                                       null=True,
                                       on_delete=CASCADE)

    specified_user = models.ForeignKey(User,
                                       blank=True,
                                       null=True,
                                       on_delete=CASCADE)

    recipients_cc = models.CharField(max_length=1024,
                                     blank=True,
                                     null=True,
                                     help_text='''Semi-colon separated list of emails to add as CC to each notification email.''')

    subject = models.CharField(max_length=1024,
                               null=True,
                               blank=True,
                               help_text='''Template of the email subject in Jinja2 syntax. Leave empty for using the default. Example: {0}'''.
                               format(DocumentLoadedEvent.default_subject))

    header = models.CharField(
        max_length=2048,
        null=True,
        blank=True,
        help_text=
        '''Template of the header in Jinja2 syntax. Leave empty for using the default. Example: {0}'''.format(
            DocumentLoadedEvent.default_header))

    generic_fields = JSONField(
        encoder=ImprovedDjangoJSONEncoder,
        default=document_notification_subscription_generic_fields_default)

    user_fields = models.ManyToManyField(
        DocumentField,
        blank=True,
        help_text='''Fields of the documents to render in the email. Should match the specified document type. Leave empty for rendering all fields. 
''')

    max_stack = models.IntegerField(blank=True,
                                    default=1,
                                    null=False,
                                    help_text='Messages limit per email.')

    def get_recipients_info(self) -> Optional[NotificationRecipients]:
        """Resolve the configured recipients code to its descriptor, or None."""
        if not self.recipients:
            return None
        return NOTIFICATION_RECIPIENTS_BY_CODE.get(self.recipients)

    def resolve_recipients(
            self, document_fields: Dict[str, Any]) -> Optional[Set[User]]:
        """Return the concrete set of Users to notify for a document, or None."""
        recipients_info = self.get_recipients_info()
        if not recipients_info:
            return None
        return recipients_info.resolve(self, document_fields)

    @classmethod
    def get_addrs(cls, semicolon_separated) -> Optional[Set[str]]:
        """Split a semicolon-separated address string into a trimmed set."""
        if not semicolon_separated:
            return None
        return {addr.strip() for addr in semicolon_separated.split(';')}

    def get_cc_addrs(self):
        return self.get_addrs(self.recipients_cc)

    def get_event_info(self) -> Optional[DocumentEvent]:
        """Resolve the configured event code to its descriptor, or None."""
        if not self.event:
            return None
        return DOCUMENT_EVENTS_BY_CODE.get(self.event)
class Annotation(models.Model):
    """An image annotation: a bounding box and/or a segmentation polygon
    linking an Image to a Category (COCO-style)."""

    identifier = models.CharField(max_length=256, blank=True, null=True)
    created_at = models.DateTimeField(auto_now_add=True)
    # BUGFIX: was auto_now_add=True, which froze the value at creation time;
    # auto_now=True refreshes it on every save, matching the field's name.
    updated_at = models.DateTimeField(auto_now=True)
    image = models.ForeignKey(Image, on_delete=models.CASCADE)
    category = models.ForeignKey(Category, on_delete=models.CASCADE)

    # boundingbox and segmentation
    width = models.IntegerField(null=True)
    height = models.IntegerField(null=True)
    area = models.FloatField(default=0)
    is_crowd = models.BooleanField(default=False)

    # only boundingboxes part
    x_min = models.FloatField(null=True)
    x_max = models.FloatField(null=True)
    y_min = models.FloatField(null=True)
    y_max = models.FloatField(null=True)

    # only segmentation part
    mask = models.TextField(null=True)
    segmentation = JSONField(blank=True, null=True)

    # query scopes
    objects = models.Manager()
    boundingbox_objects = AnnotationBoundingboxManager()
    segmentation_objects = AnnotationSegmentationManager()

    @staticmethod
    def flatten(lst):
        """Flatten arbitrarily nested lists into a single flat list."""
        # @staticmethod added so the helper also works when called on an
        # instance; Annotation.flatten(...) call sites are unaffected.
        result = []
        for i in lst:
            if not isinstance(i, list):
                result.append(i)
            else:
                result.extend(Annotation.flatten(i))
        return result

    @staticmethod
    def get_coords_from_segmentation(segmentation):
        """Return (x_min, x_max, y_min, y_max) of a segmentation polygon.

        The segmentation is a (possibly nested) list of alternating x/y
        coordinates, COCO-style.
        """
        segmentation = list(Annotation.flatten(segmentation))
        # BUGFIX: np.array_split requires an integer section count; "/"
        # produced a float and raised TypeError. Coordinates come in x/y
        # pairs, so floor division is exact.
        points = np.array_split(segmentation, len(segmentation) // 2)
        x_min = min(point[0] for point in points)
        x_max = max(point[0] for point in points)
        y_min = min(point[1] for point in points)
        y_max = max(point[1] for point in points)
        return (x_min, x_max, y_min, y_max)

    def save(self, *args, **kwargs):
        """Derive box coordinates and size from the segmentation, or complete
        partially-specified box fields, before saving."""
        if self.segmentation and len(self.segmentation) > 0 and isinstance(
                self.segmentation, list):
            self.x_min, self.x_max, self.y_min, self.y_max = \
                Annotation.get_coords_from_segmentation(self.segmentation)
            self.height = self.y_max - self.y_min
            self.width = self.x_max - self.x_min
            #self.area = Annotation.area_from_segmentation(self.segmentation)
        elif self.x_min is not None:
            # Fill in whichever of (x_max, width) / (y_max, height) is missing.
            if self.x_max is None and self.width is not None:
                self.x_max = self.x_min + self.width
            else:
                self.width = self.x_max - self.x_min
            if self.y_max is None and self.y_min is not None and self.height is not None:
                self.y_max = self.y_min + self.height
            else:
                self.height = self.y_max - self.y_min
        super(Annotation, self).save(*args, **kwargs)

    def __str__(self):
        return str(self.id)

    def types(self):
        """Return the annotation kinds this row carries
        ('annotation' plus 'boundingbox' and/or 'segmentation')."""
        types = ['annotation']
        if self.x_min is not None:
            types.append('boundingbox')
        if self.segmentation is not None:
            types.append('segmentation')
        return types

    def category_name(self):
        return self.category.name

    def image_name(self):
        return self.image.name

    @staticmethod
    def boundingbox_distance(a, b):
        """Return the intersection-over-union (IoU) of two boxes.

        Boxes are dicts with 'left', 'top', 'width' and 'height' keys.
        Despite the name, a larger value means MORE overlap.
        """
        xA = max(a['left'], b['left'])
        yA = max(a['top'], b['top'])
        xB = min(a['left'] + a['width'], b['left'] + b['width'])
        yB = min(a['top'] + a['height'], b['top'] + b['height'])

        # Intersection of the two areas.
        area_of_intersection = (xB - xA + 1) * (yB - yA + 1)

        # Areas of the individual bounding boxes.
        a_area = (a['width'] + 1) * (a['height'] + 1)
        b_area = (b['width'] + 1) * (b['height'] + 1)

        # The combined area of both bounding boxes.
        area_of_union = float(a_area + b_area - area_of_intersection)

        distance = area_of_intersection / area_of_union
        return distance
class Collection(models.Model):
    '''A container collection is a build (multiple versions of the same image)
    created by an owner, with other possible contributors
    '''

    # Container Collection Descriptors
    name = models.CharField(
        max_length=250,  # name of registry collection
        unique=True,  # eg, tensorflow <-- /tensorflow
        blank=False,  # corresponding to a folder, eg tensorflow
        null=False)
    add_date = models.DateTimeField('date published', auto_now_add=True)
    modify_date = models.DateTimeField('date modified', auto_now=True)
    secret = models.CharField(max_length=200,
                              null=False,
                              verbose_name="Collection secret for webhook")
    # BUGFIX: default={} was a single dict instance shared by every row;
    # a callable default gives each collection its own dict.
    metadata = JSONField(
        default=dict)  # open field for metadata about a collection

    # Users
    owner = models.ForeignKey('users.User',
                              blank=True,
                              default=None,
                              null=True)
    contributors = models.ManyToManyField(
        'users.User',
        related_name="container_collection_contributors",
        related_query_name="contributor",
        blank=True,
        help_text="users with edit permission to the collection",
        verbose_name="Contributors")

    # By default, collections are public
    private = models.BooleanField(choices=PRIVACY_CHOICES,
                                  default=get_privacy_default,
                                  verbose_name="Accessibility")

    def get_absolute_url(self):
        return_cid = self.id
        return reverse('collection_details', args=[str(return_cid)])

    def __str__(self):
        return self.get_uri()

    def __unicode__(self):
        return self.get_uri()

    def sizes(self, container_name=None):
        '''return list of sizes for containers across collection.

        Optionally limited to container name'''
        if container_name is not None:
            queryset = self.containers.filter(name=container_name)
        else:
            queryset = self.containers.all()
        return [
            x.metadata['size_mb'] for x in queryset if 'size_mb' in x.metadata
        ]

    def mean_size(self, container_name=None):
        '''return the mean container size in MB, or 0 when no sizes exist.

        BUGFIX: previously referenced an undefined local ``sizes`` and raised
        NameError whenever the total size was non-zero.
        '''
        sizes = self.sizes(container_name=container_name)
        total = sum(sizes)
        if total == 0:
            return total
        return total / len(sizes)

    def total_size(self, container_name=None):
        sizes = self.sizes(container_name=container_name)
        return sum(sizes)

    def get_uri(self):
        return "%s:%s" % (self.name, self.containers.count())

    def get_label(self):
        return "collection"

    def labels(self):
        '''return common *shared* collection labels'''
        return Label.objects.filter(
            containers__in=self.containers.all()).distinct()

    def container_names(self):
        '''return distinct container names'''
        return list([
            x[0] for x in self.containers.values_list('name').distinct()
            if len(x) > 0
        ])

    # Permissions
    def has_edit_permission(self, request):
        '''can the user of the request edit the collection
        '''
        return has_edit_permission(request=request, instance=self)

    def has_view_permission(self, request):
        '''can the user of the request view the collection
        '''
        return has_view_permission(request=request, instance=self)

    def has_collection_star(self, request):
        '''returns true or false to indicate
        if a user has starred a collection'''
        # BUGFIX: the old bare ``except: pass`` around Star.objects.get also
        # swallowed MultipleObjectsReturned (reporting False despite existing
        # stars); exists() answers the question directly.
        if request.user.is_authenticated():
            return Star.objects.filter(user=request.user,
                                       collection=self).exists()
        return False

    class Meta:
        app_label = 'main'
        permissions = (('del_container_collection',
                        'Delete container collection'),
                       ('edit_container_collection',
                        'Edit container collection'))
class Grant(SuperModel):
    """Define the structure of a Grant."""

    class Meta:
        """Define the metadata for Grant."""

        ordering = ['-created_on']

    active = models.BooleanField(default=True, help_text=_('Whether or not the Grant is active.'))
    title = models.CharField(default='', max_length=255, help_text=_('The title of the Grant.'))
    slug = AutoSlugField(populate_from='title')
    description = models.TextField(default='', blank=True, help_text=_('The description of the Grant.'))
    reference_url = models.URLField(blank=True, help_text=_('The associated reference URL of the Grant.'))
    logo = models.ImageField(
        upload_to=get_upload_filename,
        null=True,
        blank=True,
        help_text=_('The Grant logo image.'),
    )
    logo_svg = models.FileField(
        upload_to=get_upload_filename,
        null=True,
        blank=True,
        help_text=_('The Grant logo SVG.'),
    )
    admin_address = models.CharField(
        max_length=255,
        default='0x0',
        help_text=_('The wallet address for the administrator of this Grant.'),
    )
    amount_goal = models.DecimalField(
        default=1,
        decimal_places=4,
        max_digits=50,
        help_text=_('The contribution goal amount for the Grant in DAI.'),
    )
    amount_received = models.DecimalField(
        default=0,
        decimal_places=4,
        max_digits=50,
        help_text=_('The total amount received for the Grant in USDT/DAI.'),
    )
    token_address = models.CharField(
        max_length=255,
        default='0x0',
        help_text=_('The token address to be used with the Grant.'),
    )
    token_symbol = models.CharField(
        max_length=255,
        default='',
        help_text=_('The token symbol to be used with the Grant.'),
    )
    contract_address = models.CharField(
        max_length=255,
        default='0x0',
        help_text=_('The contract address of the Grant.'),
    )
    contract_version = models.DecimalField(
        default=0,
        decimal_places=0,
        max_digits=3,
        help_text=_('The contract version the Grant.'),
    )
    transaction_hash = models.CharField(
        max_length=255,
        default='0x0',
        help_text=_('The transaction hash of the Grant.'),
    )
    metadata = JSONField(
        default=dict,
        blank=True,
        help_text=_('The Grant metadata. Includes creation and last synced block numbers.'),
    )
    network = models.CharField(
        max_length=8,
        default='mainnet',
        help_text=_('The network in which the Grant contract resides.'),
    )
    required_gas_price = models.DecimalField(
        default='0',
        decimal_places=0,
        max_digits=50,
        help_text=_('The required gas price for the Grant.'),
    )
    admin_profile = models.ForeignKey(
        'dashboard.Profile',
        related_name='grant_admin',
        on_delete=models.CASCADE,
        help_text=_('The Grant administrator\'s profile.'),
        null=True,
    )
    team_members = models.ManyToManyField(
        'dashboard.Profile',
        related_name='grant_teams',
        help_text=_('The team members contributing to this Grant.'),
    )

    # Grant Query Set used as manager.
    objects = GrantQuerySet.as_manager()

    def __str__(self):
        """Return the string representation of a Grant."""
        return f"id: {self.pk}, active: {self.active}, title: {self.title}, description: {self.description}"

    def percentage_done(self):
        """Return the percentage of token received based on the token goal.

        NOTE(review): raises on division by zero if ``amount_goal`` is ever
        set to 0 (its default is 1) -- confirm whether a guard is wanted.
        """
        return ((self.amount_received / self.amount_goal) * 100)

    @property
    def abi(self):
        """Return grants abi."""
        # Local import to avoid a circular dependency at module load time.
        from grants.abi import abi_v0
        return abi_v0

    @property
    def contract(self):
        """Return grants contract."""
        from dashboard.utils import get_web3
        web3 = get_web3(self.network)
        grant_contract = web3.eth.contract(self.contract_address, abi=self.abi)
        return grant_contract
class Container(models.Model):
    '''A container is a base (singularity) container, stored as a file
    (image) with a unique id and name
    '''

    # NOTE(review): auto_now=True refreshes this on *every* save (and save()
    # below runs on each secret rotation) -- if "date added" semantics are
    # intended, this should be auto_now_add=True; left unchanged pending
    # confirmation.
    add_date = models.DateTimeField('date container added', auto_now=True)
    collection = models.ForeignKey('main.Collection',
                                   null=False,
                                   blank=False,
                                   related_name="containers")
    image = models.ForeignKey(
        ImageFile, null=True,
        blank=False)  # an image upload, or maybe change it?
    # BUGFIX: default={} was a single dict instance shared by every row;
    # a callable default gives each container its own dict.
    metadata = JSONField(default=dict, blank=True)
    metrics = JSONField(default=dict, blank=True)
    name = models.CharField(max_length=250, null=False, blank=False)
    tag = models.CharField(max_length=250,
                           null=False,
                           blank=False,
                           default="latest")
    secret = models.CharField(max_length=250, null=True, blank=True)
    version = models.CharField(max_length=250, null=True, blank=True)
    tags = TaggableManager()
    frozen = models.BooleanField(
        choices=FROZEN_CHOICES,
        default=False,
        verbose_name=
        "is the container frozen, meaning builds will not be replaced?")

    # A container only gets a version when it's frozen, otherwise known by tag
    def get_short_uri(self):
        '''return the collection/name:tag form of the uri'''
        return "%s/%s:%s" % (self.collection.name, self.name, self.tag)

    def get_uri(self):  # shub://username/reponame:branch@tag
        '''return the full uri; frozen containers carry tag@version'''
        if self.frozen is False:
            return self.get_short_uri()
        version = "%s@%s" % (self.tag, self.version)
        return "%s/%s:%s" % (self.collection.name, self.name, version)

    def update_secret(self, save=True):
        '''secret exists to make brute force download not possible'''
        self.secret = str(uuid.uuid4())
        if save:
            self.save()

    def save(self, *args, **kwargs):
        '''update secret on each save'''
        self.update_secret(save=False)
        super(Container, self).save(*args, **kwargs)

    def get_image_path(self):
        '''return the filesystem path of the uploaded image, or None'''
        if self.image not in [None, ""]:
            return self.image.datafile.path
        return None

    def get_download_name(self):
        '''derive a download filename from the uri, preserving gz extension'''
        extension = "img"
        image_path = self.get_image_path()
        if image_path is not None:
            if image_path.endswith('gz'):
                extension = "img.gz"
        return "%s.%s" % (self.get_uri().replace('/', '-'), extension)

    def get_download_url(self):
        '''return the file object backing the image, or None'''
        if self.image not in [None, ""]:
            return self.image.datafile.file
        return None

    def get_label(self):
        return "container"

    def __str__(self):
        return self.get_uri()

    def __unicode__(self):
        return self.get_uri()

    class Meta:
        ordering = ['name']
        app_label = 'main'
        unique_together = (("name", "tag", "collection"), )

    def get_absolute_url(self):
        return_cid = self.id
        return reverse('container_details', args=[str(return_cid)])

    def labels(self):
        return Label.objects.filter(containers=self)

    def has_edit_permission(self, request):
        # Permissions are delegated to the parent collection.
        return has_edit_permission(request=request, instance=self.collection)

    def has_view_permission(self, request):
        return has_view_permission(request=request, instance=self.collection)
class BeltFishSUSQLModel(BaseSUSQLModel):
    """Pseudo-sample-unit (SU) level aggregation of belt-fish observations.

    Backed by the raw SQL below (``managed = False``); each row aggregates
    every observation sharing the same ``pseudosu_id``.
    """

    # Unique combination of these fields defines a single (pseudo) sample unit.
    # All other fields are aggregated.
    su_fields = BaseSUSQLModel.se_fields + [
        "depth",
        "transect_number",
        "transect_len_surveyed",
        "data_policy_beltfish",
    ]

    # Pre-rendered column lists spliced into the f-string SQL below.
    _su_fields = ", ".join(su_fields)
    _su_fields_qualified = ", ".join([f"beltfish_obs.{f}" for f in su_fields])
    _agg_su_fields = ", ".join(BaseSUSQLModel.agg_su_fields)
    _su_aggfields_sql = BaseSUSQLModel.su_aggfields_sql

    # Observation-level rows (BeltFishObsSQLModel.sql) are grouped by
    # pseudosu_id in four parallel subqueries: per-SU totals, biomass by
    # trophic group, biomass by fish family, and distinct observers.
    sql = f"""
        WITH beltfish_obs AS (
            {BeltFishObsSQLModel.sql}
        )
        SELECT NULL AS id,
        beltfish_su.pseudosu_id,
        {_su_fields},
        beltfish_su.{_agg_su_fields},
        reef_slope, transect_width_name, size_bin,
        total_abundance,
        biomass_kgha,
        biomass_kgha_by_trophic_group,
        biomass_kgha_by_fish_family
        FROM (
            SELECT pseudosu_id,
            jsonb_agg(DISTINCT sample_unit_id) AS sample_unit_ids,
            SUM(beltfish_obs.count) AS total_abundance,
            {_su_fields_qualified},
            {_su_aggfields_sql},
            string_agg(DISTINCT reef_slope::text, ', '::text ORDER BY (reef_slope::text)) AS reef_slope,
            string_agg(DISTINCT transect_width_name::text, ', '::text ORDER BY (transect_width_name::text)) AS transect_width_name,
            string_agg(DISTINCT size_bin::text, ', '::text ORDER BY (size_bin::text)) AS size_bin
            FROM beltfish_obs
            GROUP BY pseudosu_id,
            {_su_fields_qualified}
        ) beltfish_su
        INNER JOIN (
            SELECT pseudosu_id,
            SUM(biomass_kgha) AS biomass_kgha,
            jsonb_object_agg(
                CASE
                    WHEN trophic_group IS NULL THEN 'other'::character varying
                    ELSE trophic_group
                END,
                ROUND(biomass_kgha, 2)
            ) AS biomass_kgha_by_trophic_group
            FROM (
                SELECT pseudosu_id,
                COALESCE(SUM(biomass_kgha), 0::numeric) AS biomass_kgha,
                trophic_group
                FROM beltfish_obs
                GROUP BY pseudosu_id, trophic_group
            ) beltfish_obs_tg
            GROUP BY pseudosu_id
        ) beltfish_tg
        ON (beltfish_su.pseudosu_id = beltfish_tg.pseudosu_id)
        INNER JOIN (
            SELECT pseudosu_id,
            jsonb_object_agg(
                CASE
                    WHEN fish_family IS NULL THEN 'other'::character varying
                    ELSE fish_family
                END,
                ROUND(biomass_kgha, 2)
            ) AS biomass_kgha_by_fish_family
            FROM (
                SELECT pseudosu_id,
                COALESCE(SUM(biomass_kgha), 0::numeric) AS biomass_kgha,
                fish_family
                FROM beltfish_obs
                GROUP BY pseudosu_id, fish_family
            ) beltfish_obs_fam
            GROUP BY pseudosu_id
        ) beltfish_families
        ON (beltfish_su.pseudosu_id = beltfish_families.pseudosu_id)
        INNER JOIN (
            SELECT pseudosu_id,
            jsonb_agg(DISTINCT observer) AS observers
            FROM (
                SELECT pseudosu_id,
                jsonb_array_elements(observers) AS observer
                FROM beltfish_obs
                GROUP BY pseudosu_id, observers
            ) beltfish_obs_obs
            GROUP BY pseudosu_id
        ) beltfish_observers
        ON (beltfish_su.pseudosu_id = beltfish_observers.pseudosu_id)
    """

    # The SQL requires a project_id to be supplied at query time.
    sql_args = dict(project_id=SQLTableArg(required=True))

    objects = SQLTableManager()

    # Columns exposed by the SQL above.
    sample_unit_ids = JSONField()
    total_abundance = models.PositiveIntegerField()
    transect_number = models.PositiveSmallIntegerField()
    transect_len_surveyed = models.PositiveSmallIntegerField(
        verbose_name=_("transect length surveyed (m)"))
    transect_width_name = models.CharField(max_length=100, null=True, blank=True)
    reef_slope = models.CharField(max_length=50)
    size_bin = models.CharField(max_length=100)
    biomass_kgha = models.DecimalField(
        max_digits=8,
        decimal_places=2,
        verbose_name=_("biomass (kg/ha)"),
        null=True,
        blank=True,
    )
    biomass_kgha_by_trophic_group = JSONField(null=True, blank=True)
    biomass_kgha_by_fish_family = JSONField(null=True, blank=True)
    data_policy_beltfish = models.CharField(max_length=50)

    class Meta:
        db_table = "belt_fish_su_sm"
        managed = False  # view-like model; Django does not create this table
class Job(AbstractJob): """A model that represents the configuration for run job.""" user = models.ForeignKey( settings.AUTH_USER_MODEL, on_delete=models.CASCADE, related_name='+') project = models.ForeignKey( 'db.Project', on_delete=models.CASCADE, related_name='jobs') config = JSONField( help_text='The compiled polyaxonfile for the run job.', validators=[validate_job_spec_config]) code_reference = models.ForeignKey( 'db.CodeReference', on_delete=models.SET_NULL, blank=True, null=True, related_name='+') build_job = models.ForeignKey( 'db.BuildJob', on_delete=models.SET_NULL, blank=True, null=True, related_name='+') status = models.OneToOneField( 'db.JobStatus', related_name='+', blank=True, null=True, editable=True, on_delete=models.SET_NULL) class Meta: app_label = 'db' def __str__(self): return self.unique_name def save(self, *args, **kwargs): # pylint:disable=arguments-differ if self.pk is None: last = Job.objects.filter(project=self.project).last() self.sequence = 1 if last: self.sequence = last.sequence + 1 super(Job, self).save(*args, **kwargs) @property def unique_name(self): return '{}.jobs.{}'.format(self.project.unique_name, self.sequence) @cached_property def specification(self): return BuildSpecification(values=self.config) @cached_property def image(self): return self.specification.build.image @cached_property def build_steps(self): return self.specification.build.build_steps @cached_property def resources(self): return None @cached_property def node_selectors(self): return None @cached_property def env_vars(self): return self.specification.build.env_vars def set_status(self, status, message=None, details=None): # pylint:disable=arguments-differ return self._set_status(status_model=JobStatus, logger=logger, status=status, message=message, details=details)
class Facility(TimeStampedModel, InsertableModel, UpdateableModel,
               PrintableModel, RankableModel):
    """A credit facility (arrangement) record.

    NOTE(review): nearly every attribute is a nullable ``JSONField`` rather
    than a typed column — presumably so each value can carry source/audit
    metadata alongside the raw figure; confirm against the code that
    populates this table.
    """

    # --- identity and relationships --------------------------------------
    FacilityId = models.AutoField(primary_key=True, null=False, blank=False)
    BasePartyId = models.ForeignKey(BaseParty, on_delete=models.CASCADE,
                                    default=None, null=True, blank=True)
    HostId = models.ForeignKey(Host, on_delete=models.CASCADE, default=None,
                               null=True, blank=True)
    DataView = models.IntegerField(null=True, blank=True)
    RequestType = models.ForeignKey(RequestType, on_delete=models.CASCADE,
                                    default=None, null=True, blank=True)
    # External/source-system identifiers for this facility.
    FacilityIdHost = models.CharField(max_length=50, null=True, blank=True)
    FacilityIdOther1 = models.CharField(max_length=50, null=True, blank=True)
    FacilityIdOther2 = models.CharField(max_length=50, null=True, blank=True)
    # Self-reference: facilities may be nested under a parent facility.
    FacilityIdParent = models.ForeignKey('self', on_delete=models.CASCADE,
                                         default=None, null=True, blank=True)
    ArrangementClass = models.ForeignKey(ArrangementClass,
                                         on_delete=models.CASCADE,
                                         default=None, null=True, blank=True)
    ArrangementTypeHost = models.CharField(max_length=20, null=True, blank=True)
    ArrangementType = models.ForeignKey(ArrangementType,
                                        on_delete=models.CASCADE,
                                        default=None, null=True, blank=True)
    ActionableFlag = models.BooleanField(default=False)
    CreditApplicationId = models.ForeignKey(CreditApplication,
                                            on_delete=models.CASCADE,
                                            default=None, null=True, blank=True)

    # --- descriptive attributes ------------------------------------------
    Description1 = JSONField(null=True, blank=True)
    Description2 = JSONField(null=True, blank=True)
    Purpose = JSONField(null=True, blank=True)
    Currency = JSONField(null=True, blank=True)
    NominalFXRate = JSONField(null=True, blank=True)
    CurrentFXRate = JSONField(null=True, blank=True)
    NegotiatedFXRate = JSONField(null=True, blank=True)
    NegotiatedFXDealRef = JSONField(null=True, blank=True)
    Sector = JSONField(null=True, blank=True)
    Industry = JSONField(null=True, blank=True)
    FrequencyTenor = JSONField(null=True, blank=True)
    FrequencyRepayment = JSONField(null=True, blank=True)
    TenorOriginal = JSONField(null=True, blank=True)
    TenorRemaining = JSONField(null=True, blank=True)
    DisbursementsCount = JSONField(null=True, blank=True)
    RelationshipOfficer1 = JSONField(null=True, blank=True)
    RelationshipOfficer2 = JSONField(null=True, blank=True)
    RelationshipOfficer3 = JSONField(null=True, blank=True)
    Account1 = JSONField(null=True, blank=True)
    Account2 = JSONField(null=True, blank=True)
    Account3 = JSONField(null=True, blank=True)
    Account4 = JSONField(null=True, blank=True)
    ParameterSummary1 = JSONField(null=True, blank=True)
    ParameterSummary2 = JSONField(null=True, blank=True)
    ParameterSummary3 = JSONField(null=True, blank=True)

    # --- key dates ---------------------------------------------------------
    ApprovalDate = JSONField(null=True, blank=True)
    OriginationDate = JSONField(null=True, blank=True)
    LastDisbursementDate = JSONField(null=True, blank=True)
    BalanceDate = JSONField(null=True, blank=True)
    NegotiatedFXRateDate = JSONField(null=True, blank=True)
    MaturityDate = JSONField(null=True, blank=True)
    ExpiryDate = JSONField(null=True, blank=True)
    FirstPaymentDate = JSONField(null=True, blank=True)
    LastPaymentDate = JSONField(null=True, blank=True)
    NextPaymentDate = JSONField(null=True, blank=True)
    PrincipalDueDate = JSONField(null=True, blank=True)
    InterestDueDate = JSONField(null=True, blank=True)
    DefaultDate = JSONField(null=True, blank=True)

    # --- monetary values ----------------------------------------------------
    ApprovalValue = JSONField(null=True, blank=True)
    OriginalValue = JSONField(null=True, blank=True)
    CommitmentValue = JSONField(null=True, blank=True)
    BalanceValue = JSONField(null=True, blank=True)
    # NOTE(review): the only plain-float amount in the model — confirm it is
    # intentional that it differs from the JSONField amounts around it.
    ProposedValue = models.FloatField(null=True, blank=True)
    DisbursedValue = JSONField(null=True, blank=True)
    UndisbursedValue = JSONField(null=True, blank=True)
    UtilisedValue = JSONField(null=True, blank=True)
    UnutilizedValue = JSONField(null=True, blank=True)
    ExposureTotal = JSONField(null=True, blank=True)
    ExposureUnused = JSONField(null=True, blank=True)
    RepaymentAmount = JSONField(null=True, blank=True)
    TotalPrincipalPaid = JSONField(null=True, blank=True)
    TotalInterestPaid = JSONField(null=True, blank=True)
    TotalFeesPaid = JSONField(null=True, blank=True)

    # --- rates and fees -----------------------------------------------------
    ApprovedBaseRate = JSONField(null=True, blank=True)
    ApprovedSpreadFloor = JSONField(null=True, blank=True)
    ApprovedSpreadCeiling = JSONField(null=True, blank=True)
    ApprovedSpreadRate = JSONField(null=True, blank=True)
    EffectiveBaseRate = JSONField(null=True, blank=True)
    EffectiveSpreadFloor = JSONField(null=True, blank=True)
    EffectiveSpreadCeiling = JSONField(null=True, blank=True)
    EffectiveSpreadRate = JSONField(null=True, blank=True)
    CostOfFunds = JSONField(null=True, blank=True)
    Fee1 = JSONField(null=True, blank=True)
    Fee2 = JSONField(null=True, blank=True)
    Fee3 = JSONField(null=True, blank=True)
    Fee4 = JSONField(null=True, blank=True)
    Fee5 = JSONField(null=True, blank=True)
    Fee6 = JSONField(null=True, blank=True)
    Fee7 = JSONField(null=True, blank=True)
    Fee8 = JSONField(null=True, blank=True)
    Fee9 = JSONField(null=True, blank=True)
    Fee10 = JSONField(null=True, blank=True)
    Fee11 = JSONField(null=True, blank=True)
    Fee12 = JSONField(null=True, blank=True)
    Fee13 = JSONField(null=True, blank=True)
    Fee14 = JSONField(null=True, blank=True)
    Fee15 = JSONField(null=True, blank=True)
    Fee16 = JSONField(null=True, blank=True)
    Fee17 = JSONField(null=True, blank=True)
    Fee18 = JSONField(null=True, blank=True)
    Fee19 = JSONField(null=True, blank=True)
    Fee20 = JSONField(null=True, blank=True)

    # --- collateral and coverage -------------------------------------------
    IsSecured = JSONField(null=True, blank=True)
    IsGuaranteed = JSONField(null=True, blank=True)
    OpenMarketValue = JSONField(null=True, blank=True)
    DiscountedValue = JSONField(null=True, blank=True)
    ForcedSaleValue = JSONField(null=True, blank=True)
    CoverageByMV = JSONField(null=True, blank=True)
    CoverageByDV = JSONField(null=True, blank=True)
    CoverageByFSV = JSONField(null=True, blank=True)

    # --- delinquency --------------------------------------------------------
    PastDueBucket = JSONField(null=True, blank=True)
    PastDueExposure = JSONField(null=True, blank=True)
    PastDuePrincipal = JSONField(null=True, blank=True)
    PastDueInterest = JSONField(null=True, blank=True)
    PenaltyInterestDue = JSONField(null=True, blank=True)
    PastDueBucket1 = JSONField(null=True, blank=True)
    PastDueBucket2 = JSONField(null=True, blank=True)
    PastDueBucket3 = JSONField(null=True, blank=True)
    PastDueBucket4 = JSONField(null=True, blank=True)
    PastDueBucket5 = JSONField(null=True, blank=True)
    PastDueBucket6 = JSONField(null=True, blank=True)

    # --- provisioning and recovery ------------------------------------------
    SpecificProvisions = JSONField(null=True, blank=True)
    InterestProvisions = JSONField(null=True, blank=True)
    TotalProvisions = JSONField(null=True, blank=True)
    ExcessProvisions = JSONField(null=True, blank=True)
    MinimumProvisions = JSONField(null=True, blank=True)
    WriteDown = JSONField(null=True, blank=True)
    WriteBack = JSONField(null=True, blank=True)
    NetDebt = JSONField(null=True, blank=True)
    RecoveredAmount = JSONField(null=True, blank=True)
    ProfessionalFeesPaid = JSONField(null=True, blank=True)
    NetBalance = JSONField(null=True, blank=True)

    # --- risk metrics -------------------------------------------------------
    BISAssetClass = JSONField(null=True, blank=True)
    RatingInternalPD = JSONField(null=True, blank=True)
    RatingInternalLGD = JSONField(null=True, blank=True)
    ProbabilityOfDefault = JSONField(null=True, blank=True)
    LossGivenDefault = JSONField(null=True, blank=True)
    ExposureAtDefault = JSONField(null=True, blank=True)
    ExpectedLoss = JSONField(null=True, blank=True)
    UnexpectedLoss = JSONField(null=True, blank=True)
    OriginationCost = JSONField(null=True, blank=True)
    ServicingCost = JSONField(null=True, blank=True)
    AverageFees = JSONField(null=True, blank=True)
    OperationCost = JSONField(null=True, blank=True)
    OperationCapital = JSONField(null=True, blank=True)
    RegulatoryCapital = JSONField(null=True, blank=True)
    EconomicCapital = JSONField(null=True, blank=True)
    RAROC = JSONField(null=True, blank=True)
    RWADrawn = JSONField(null=True, blank=True)
    RWAUndrawn = JSONField(null=True, blank=True)
    RWATotal = JSONField(null=True, blank=True)

    # --- generic extension slots (codes / values / texts / dates) -----------
    Code1 = JSONField(null=True, blank=True)
    Code2 = JSONField(null=True, blank=True)
    Code3 = JSONField(null=True, blank=True)
    Code4 = JSONField(null=True, blank=True)
    Code5 = JSONField(null=True, blank=True)
    Code6 = JSONField(null=True, blank=True)
    Code7 = JSONField(null=True, blank=True)
    Code8 = JSONField(null=True, blank=True)
    Code9 = JSONField(null=True, blank=True)
    Code10 = JSONField(null=True, blank=True)
    Code11 = JSONField(null=True, blank=True)
    Code12 = JSONField(null=True, blank=True)
    Code13 = JSONField(null=True, blank=True)
    Code14 = JSONField(null=True, blank=True)
    Code15 = JSONField(null=True, blank=True)
    Code16 = JSONField(null=True, blank=True)
    Code17 = JSONField(null=True, blank=True)
    Code18 = JSONField(null=True, blank=True)
    Code19 = JSONField(null=True, blank=True)
    Code20 = JSONField(null=True, blank=True)
    Code21 = JSONField(null=True, blank=True)
    Code22 = JSONField(null=True, blank=True)
    Code23 = JSONField(null=True, blank=True)
    Code24 = JSONField(null=True, blank=True)
    Code25 = JSONField(null=True, blank=True)
    Code26 = JSONField(null=True, blank=True)
    Code27 = JSONField(null=True, blank=True)
    Code28 = JSONField(null=True, blank=True)
    Code29 = JSONField(null=True, blank=True)
    Code30 = JSONField(null=True, blank=True)
    Value1 = JSONField(null=True, blank=True)
    Value2 = JSONField(null=True, blank=True)
    Value3 = JSONField(null=True, blank=True)
    Value4 = JSONField(null=True, blank=True)
    Value5 = JSONField(null=True, blank=True)
    Value6 = JSONField(null=True, blank=True)
    Value7 = JSONField(null=True, blank=True)
    Value8 = JSONField(null=True, blank=True)
    Value9 = JSONField(null=True, blank=True)
    Value10 = JSONField(null=True, blank=True)
    Value11 = JSONField(null=True, blank=True)
    Value12 = JSONField(null=True, blank=True)
    Value13 = JSONField(null=True, blank=True)
    Value14 = JSONField(null=True, blank=True)
    Value15 = JSONField(null=True, blank=True)
    Value16 = JSONField(null=True, blank=True)
    Value17 = JSONField(null=True, blank=True)
    Value18 = JSONField(null=True, blank=True)
    Value19 = JSONField(null=True, blank=True)
    Value20 = JSONField(null=True, blank=True)
    Value21 = JSONField(null=True, blank=True)
    Value22 = JSONField(null=True, blank=True)
    Value23 = JSONField(null=True, blank=True)
    Value24 = JSONField(null=True, blank=True)
    Value25 = JSONField(null=True, blank=True)
    Value26 = JSONField(null=True, blank=True)
    Value27 = JSONField(null=True, blank=True)
    Value28 = JSONField(null=True, blank=True)
    Value29 = JSONField(null=True, blank=True)
    Value30 = JSONField(null=True, blank=True)
    Text1 = JSONField(null=True, blank=True)
    Text2 = JSONField(null=True, blank=True)
    Text3 = JSONField(null=True, blank=True)
    Text4 = JSONField(null=True, blank=True)
    Text5 = JSONField(null=True, blank=True)
    Text6 = JSONField(null=True, blank=True)
    Text7 = JSONField(null=True, blank=True)
    Text8 = JSONField(null=True, blank=True)
    Text9 = JSONField(null=True, blank=True)
    Text10 = JSONField(null=True, blank=True)
    Text11 = JSONField(null=True, blank=True)
    Text12 = JSONField(null=True, blank=True)
    Text13 = JSONField(null=True, blank=True)
    Text14 = JSONField(null=True, blank=True)
    Text15 = JSONField(null=True, blank=True)
    Text16 = JSONField(null=True, blank=True)
    Text17 = JSONField(null=True, blank=True)
    Text18 = JSONField(null=True, blank=True)
    Text19 = JSONField(null=True, blank=True)
    Text20 = JSONField(null=True, blank=True)
    Text21 = JSONField(null=True, blank=True)
    Text22 = JSONField(null=True, blank=True)
    Text23 = JSONField(null=True, blank=True)
    Text24 = JSONField(null=True, blank=True)
    Text25 = JSONField(null=True, blank=True)
    Text26 = JSONField(null=True, blank=True)
    Text27 = JSONField(null=True, blank=True)
    Text28 = JSONField(null=True, blank=True)
    Text29 = JSONField(null=True, blank=True)
    Text30 = JSONField(null=True, blank=True)
    Date1 = JSONField(null=True, blank=True)
    Date2 = JSONField(null=True, blank=True)
    Date3 = JSONField(null=True, blank=True)
    Date4 = JSONField(null=True, blank=True)
    Date5 = JSONField(null=True, blank=True)
    Date6 = JSONField(null=True, blank=True)
    Date7 = JSONField(null=True, blank=True)
    Date8 = JSONField(null=True, blank=True)
    Date9 = JSONField(null=True, blank=True)
    Date10 = JSONField(null=True, blank=True)
    Date11 = JSONField(null=True, blank=True)
    Date12 = JSONField(null=True, blank=True)
    Date13 = JSONField(null=True, blank=True)
    Date14 = JSONField(null=True, blank=True)
    Date15 = JSONField(null=True, blank=True)
    Date16 = JSONField(null=True, blank=True)
    Date17 = JSONField(null=True, blank=True)
    Date18 = JSONField(null=True, blank=True)
    Date19 = JSONField(null=True, blank=True)
    Date20 = JSONField(null=True, blank=True)
    Date21 = JSONField(null=True, blank=True)
    Date22 = JSONField(null=True, blank=True)
    Date23 = JSONField(null=True, blank=True)
    Date24 = JSONField(null=True, blank=True)
    Date25 = JSONField(null=True, blank=True)
    Date26 = JSONField(null=True, blank=True)
    Date27 = JSONField(null=True, blank=True)
    Date28 = JSONField(null=True, blank=True)
    Date29 = JSONField(null=True, blank=True)
    Date30 = JSONField(null=True, blank=True)

    objects = models.Manager()

    def __str__(self):
        return "{} - {}".format(self.FacilityId, self.ArrangementType)

    class Meta:
        verbose_name = 'Facility'
        verbose_name_plural = 'Facility'
        db_table = 'Facility'
class Project(models.Model):
    """A hosted simulation/model project with its cost accounting helpers."""

    SECS_IN_HOUR = 3600.0

    title = models.CharField(max_length=255)
    description = models.CharField(max_length=1000)
    # TODO: profile --> owner
    # TODO: profile --> many to many relationship?
    owner = models.ForeignKey(
        Profile, null=True, related_name="projects", on_delete=models.CASCADE
    )
    # SET_NULL so deleting a sponsor leaves the project unsponsored.
    sponsor = models.ForeignKey(
        Profile, null=True, related_name="sponsored_projects",
        on_delete=models.SET_NULL
    )
    is_public = models.BooleanField(default=True)
    status = models.CharField(
        choices=(
            ("live", "live"),
            ("pending", "pending"),
            ("requires fixes", "requires fixes"),
        ),
        default="live",
        max_length=32,
    )

    # functions
    meta_parameters = JSONField(default=None, blank=True, null=True)
    package_defaults = models.CharField(max_length=1000)
    parse_user_adjustments = models.CharField(max_length=1000)
    run_simulation = models.CharField(max_length=1000)

    # install
    installation = models.CharField(max_length=1000)

    # server resources
    server_cost = models.DecimalField(max_digits=6, decimal_places=3, null=True)

    # ram, vcpus
    def callabledefault():
        # Callable default for server_size. NOTE(review): returns ints for an
        # ArrayField of CharField — confirm ["4", "2"] was not intended.
        return [4, 2]

    server_size = ArrayField(
        models.CharField(max_length=5), default=callabledefault, size=2
    )
    exp_task_time = models.IntegerField(null=True)
    exp_num_tasks = models.IntegerField(null=True)

    # model parameter type
    inputs_style = models.CharField(
        choices=(("paramtools", "paramtools"), ("taxcalc", "taxcalc")),
        max_length=32
    )

    # permission type of the model
    permission_type = models.CharField(
        choices=(("default", "default"), ("sponsored", "sponsored")),
        default="default",
        max_length=32,
    )

    @staticmethod
    def get_or_none(**kwargs):
        # Like objects.get(), but returns None instead of raising.
        try:
            res = Project.objects.get(**kwargs)
        except Project.DoesNotExist:
            res = None
        return res

    def exp_job_info(self, adjust=False):
        """Return (expected cost, expected run time in seconds) for a job.

        If adjust is true, the cost is floored at one penny.
        """
        # server_cost is a Decimal; dividing by the int literal keeps Decimal
        # arithmetic (dividing by the float SECS_IN_HOUR would raise).
        rate_per_sec = self.server_cost / 3600
        job_time = self.exp_task_time * (self.exp_num_tasks or 1)
        cost = round(rate_per_sec * job_time, 4)
        if adjust:
            return max(cost, 0.01), job_time
        else:
            return cost, job_time

    def run_cost(self, run_time, adjust=False):
        """
        Calculate the cost of a project run. The run time is scaled by the
        time required for it to cost one penny. If adjust is true and the
        cost is less than one penny, then it is rounded up to a penny.
        """
        cost = round(run_time / self.n_secs_per_penny) / 100
        if adjust:
            return max(cost, 0.01)
        else:
            return cost

    @property
    def n_secs_per_penny(self):
        """
        Calculate the number of seconds a project sim needs to run such that
        the cost of that run is one penny.
        """
        return 0.01 / self.server_cost_in_secs

    @property
    def server_cost_in_secs(self):
        """
        Convert server cost from $P/hr to $P/sec.
        """
        return float(self.server_cost) / self.SECS_IN_HOUR

    @staticmethod
    def dollar_to_penny(c):
        # Round a dollar amount to an integer number of pennies.
        return int(round(c * 100, 0))

    @property
    def app_url(self):
        # URL of the app page: /<owner username>/<title>.
        return reverse(
            "app", kwargs={"title": self.title, "username": self.owner.user.username}
        )

    def worker_ext(self, action):
        # Path suffix used when talking to the worker service.
        return f"{self.owner.user.username}/{self.title}/{action}"

    @property
    def display_sponsor(self):
        if self.sponsor is not None:
            return self.sponsor.user.username
        else:
            return "Not sponsored"

    @property
    def number_runs(self):
        return Inputs.objects.filter(project=self).count()

    @cached_property
    def parsed_meta_parameters(self):
        # meta_parameters may be stored as a JSON string or a dict; normalize
        # before translating to Django form machinery.
        if isinstance(self.meta_parameters, str):
            meta_params = json.loads(self.meta_parameters)
        else:
            meta_params = self.meta_parameters
        return translate_to_django(meta_params)

    @property
    def safe_description(self):
        # Rendered markdown is marked safe for templates; description is
        # assumed to come from trusted project owners.
        return mark_safe(markdown.markdown(self.description))

    class Meta:
        permissions = (("write_project", "Write project"),)
class Vessel_Identifier(Model): dataset = ForeignKey('Dataset', related_name='vessel_identifiers', on_delete=CASCADE) column_name = CharField(max_length=200) values_list = JSONField()
class WorkItem(UUIDModel):
    """A single unit of work within a workflow case, derived from a Task."""

    # Lifecycle states.
    STATUS_READY = "ready"
    STATUS_COMPLETED = "completed"
    STATUS_CANCELED = "canceled"
    STATUS_SKIPPED = "skipped"
    STATUS_SUSPENDED = "suspended"

    STATUS_CHOICE_TUPLE = (
        (STATUS_READY, "Work item is ready to be processed."),
        (STATUS_COMPLETED, "Work item is done."),
        (STATUS_CANCELED, "Work item is canceled."),
        (STATUS_SKIPPED, "Work item is skipped."),
        (STATUS_SUSPENDED, "Work item is suspended."),
    )

    name = LocalizedField(
        blank=False,
        null=False,
        required=False,
        help_text="Will be set from Task, if not provided.",
    )
    description = LocalizedField(
        blank=True,
        null=True,
        required=False,
        help_text="Will be set from Task, if not provided.",
    )
    closed_at = models.DateTimeField(
        blank=True,
        null=True,
        help_text="Time when work item has either been canceled or completed",
    )
    # Identifiers (not FKs) of whoever/whatever closed the item.
    closed_by_user = models.CharField(max_length=150, blank=True, null=True)
    closed_by_group = models.CharField(max_length=150, blank=True, null=True)
    deadline = models.DateTimeField(blank=True, null=True)
    task = models.ForeignKey(
        Task, on_delete=models.DO_NOTHING, related_name="work_items"
    )
    status = ChoicesCharField(choices=STATUS_CHOICE_TUPLE, max_length=50, db_index=True)
    meta = JSONField(default=dict)
    addressed_groups = ArrayField(
        models.CharField(max_length=150),
        default=list,
        help_text=(
            "Offer work item to be processed by a group of users, "
            "such are not committed to process it though."
        ),
    )
    controlling_groups = ArrayField(
        models.CharField(max_length=150),
        default=list,
        help_text="List of groups this work item is assigned to for controlling.",
    )
    assigned_users = ArrayField(
        models.CharField(max_length=150),
        default=list,
        help_text="Users responsible to undertake given work item.",
    )
    case = models.ForeignKey(Case, related_name="work_items", on_delete=models.CASCADE)
    child_case = models.OneToOneField(
        Case,
        related_name="parent_work_item",
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        help_text="Defines case of a sub-workflow",
    )
    document = models.OneToOneField(
        "caluma_form.Document",
        on_delete=models.PROTECT,
        related_name="work_item",
        blank=True,
        null=True,
    )
    previous_work_item = models.ForeignKey(
        "self",
        on_delete=models.SET_NULL,
        related_name="succeeding_work_items",
        blank=True,
        null=True,
    )

    class Meta:
        # GIN indexes speed up containment lookups on the array/JSON columns.
        indexes = [
            GinIndex(fields=["addressed_groups"]),
            GinIndex(fields=["assigned_users"]),
            GinIndex(fields=["meta"]),
        ]
class Order(models.Model):
    """A ticket order.

    Orders are created 'pending' with the requested tickets stored in
    unconfirmed_details, then move to 'successful', 'failed', 'errored', or
    'refunded' as the Stripe charge lifecycle progresses. Tickets are only
    materialised on confirmation.
    """

    purchaser = models.ForeignKey(settings.AUTH_USER_MODEL,
                                  related_name='orders',
                                  on_delete=models.CASCADE)
    rate = models.CharField(max_length=40)
    company_name = models.CharField(max_length=200, null=True)
    company_addr = models.TextField(null=True)
    status = models.CharField(max_length=10)
    stripe_charge_id = models.CharField(max_length=80)
    stripe_charge_created = models.DateTimeField(null=True)
    stripe_charge_failure_reason = models.CharField(max_length=400, blank=True)
    unconfirmed_details = JSONField()
    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)

    # Scrambles the numeric pk into the public-facing order id.
    id_scrambler = Scrambler(1000)

    class Manager(models.Manager):
        def get_by_order_id_or_404(self, order_id):
            """Resolve a public (scrambled) order id to an Order or 404."""
            id = self.model.id_scrambler.backward(order_id)
            return get_object_or_404(self.model, pk=id)

        def create_pending(self, purchaser, rate, days_for_self=None,
                           email_addrs_and_days_for_others=None,
                           company_details=None):
            """Create a 'pending' order; ticket details stay unconfirmed.

            company_details is required for the corporate rate and forbidden
            for individual/education rates.
            """
            assert days_for_self is not None or email_addrs_and_days_for_others is not None
            if rate == 'corporate':
                assert company_details is not None
                company_name = company_details['name']
                company_addr = company_details['addr']
            elif rate in ['individual', 'education']:
                assert company_details is None
                company_name = None
                company_addr = None
            else:
                assert False
            unconfirmed_details = {
                'days_for_self': days_for_self,
                'email_addrs_and_days_for_others': email_addrs_and_days_for_others,
            }
            return self.create(
                purchaser=purchaser,
                rate=rate,
                company_name=company_name,
                company_addr=company_addr,
                status='pending',
                unconfirmed_details=unconfirmed_details,
            )

    objects = Manager()

    def __str__(self):
        return self.order_id

    @property
    def order_id(self):
        """Public scrambled id; None until the order is saved."""
        if self.id is None:
            return None
        return self.id_scrambler.forward(self.id)

    def get_absolute_url(self):
        return reverse('tickets:order', args=[self.order_id])

    def update(self, rate, days_for_self=None,
               email_addrs_and_days_for_others=None, company_details=None):
        """Replace the unconfirmed details of a not-yet-paid order."""
        assert self.payment_required()
        assert days_for_self is not None or email_addrs_and_days_for_others is not None
        if rate == 'corporate':
            assert company_details is not None
            self.company_name = company_details['name']
            self.company_addr = company_details['addr']
        elif rate in ['individual', 'education']:
            assert company_details is None
            self.company_name = None
            self.company_addr = None
        else:
            assert False
        self.rate = rate
        self.unconfirmed_details = {
            'days_for_self': days_for_self,
            'email_addrs_and_days_for_others': email_addrs_and_days_for_others,
        }
        self.save()

    def confirm(self, charge_id, charge_created):
        """Materialise tickets and mark the order 'successful' after a
        successful Stripe charge (charge_created is a unix timestamp)."""
        assert self.payment_required()

        days_for_self = self.unconfirmed_details['days_for_self']
        if days_for_self is not None:
            self.tickets.create_for_user(self.purchaser, days_for_self)

        email_addrs_and_days_for_others = self.unconfirmed_details[
            'email_addrs_and_days_for_others']
        if email_addrs_and_days_for_others is not None:
            for email_addr, days in email_addrs_and_days_for_others:
                self.tickets.create_with_invitation(email_addr, days)

        self.stripe_charge_id = charge_id
        self.stripe_charge_created = datetime.fromtimestamp(charge_created,
                                                            tz=timezone.utc)
        self.stripe_charge_failure_reason = ''
        self.status = 'successful'
        self.save()

    def mark_as_failed(self, charge_failure_reason):
        """Record a failed charge; the order can still be retried."""
        self.stripe_charge_failure_reason = charge_failure_reason
        self.status = 'failed'
        self.save()

    def mark_as_errored_after_charge(self, charge_id):
        """Record that the charge succeeded but post-charge processing
        failed, leaving the order in the terminal 'errored' state."""
        self.stripe_charge_id = charge_id
        self.stripe_charge_failure_reason = ''
        self.status = 'errored'
        self.save()

    # Backward-compatible alias for the historical misspelling; existing
    # callers of march_as_errored_after_charge keep working.
    march_as_errored_after_charge = mark_as_errored_after_charge

    def delete_tickets_and_mark_as_refunded(self):
        self.tickets.all().delete()
        self.status = 'refunded'
        self.save()

    def all_tickets(self):
        """All tickets for this order: UnconfirmedTicket placeholders while
        payment is still required, otherwise the real ticket rows."""
        if self.payment_required():
            tickets = []

            days_for_self = self.unconfirmed_details['days_for_self']
            if days_for_self is not None:
                ticket = UnconfirmedTicket(
                    order=self,
                    owner=self.purchaser,
                    days=days_for_self,
                )
                tickets.append(ticket)

            email_addrs_and_days_for_others = self.unconfirmed_details[
                'email_addrs_and_days_for_others']
            if email_addrs_and_days_for_others is not None:
                for email_addr, days in email_addrs_and_days_for_others:
                    ticket = UnconfirmedTicket(
                        order=self,
                        email_addr=email_addr,
                        days=days,
                    )
                    tickets.append(ticket)
            return tickets
        else:
            return self.tickets.order_by('id')

    def form_data(self):
        """Initial data for the order form ('who' derived from details)."""
        assert self.payment_required()

        data = {'rate': self.rate}

        days_for_self = self.unconfirmed_details['days_for_self']
        email_addrs_and_days_for_others = self.unconfirmed_details[
            'email_addrs_and_days_for_others']

        if days_for_self is None:
            assert email_addrs_and_days_for_others is not None
            data['who'] = 'others'
        elif email_addrs_and_days_for_others is None:
            assert days_for_self is not None
            data['who'] = 'self'
        else:
            data['who'] = 'self and others'

        return data

    def self_form_data(self):
        """Initial data for the purchaser's own ticket form, or None."""
        assert self.payment_required()

        days_for_self = self.unconfirmed_details['days_for_self']
        if days_for_self is None:
            return None
        return {'days': days_for_self}

    def others_formset_data(self):
        """Initial formset data for tickets bought for others, or None."""
        assert self.payment_required()

        email_addrs_and_days_for_others = self.unconfirmed_details[
            'email_addrs_and_days_for_others']
        if email_addrs_and_days_for_others is None:
            return None

        data = {
            'form-TOTAL_FORMS': str(len(email_addrs_and_days_for_others)),
            'form-INITIAL_FORMS': str(len(email_addrs_and_days_for_others)),
        }
        for ix, (email_addr, days) in enumerate(email_addrs_and_days_for_others):
            data[f'form-{ix}-email_addr'] = email_addr
            data[f'form-{ix}-days'] = days
        return data

    def company_details_form_data(self):
        if self.rate == 'corporate':
            return {
                'company_name': self.company_name,
                'company_addr': self.company_addr,
            }
        else:
            return None

    def ticket_details(self):
        return [ticket.details() for ticket in self.all_tickets()]

    def ticket_summary(self):
        """Per-num-days summary rows with per-item and total costs."""
        num_tickets_by_num_days = defaultdict(int)

        for ticket in self.all_tickets():
            num_tickets_by_num_days[ticket.num_days()] += 1

        summary = []
        # Tickets cover between 1 and 5 days.
        for ix in range(5):
            num_days = ix + 1
            if num_tickets_by_num_days[num_days]:
                num_tickets = num_tickets_by_num_days[num_days]
                summary.append({
                    'num_days': num_days,
                    'num_tickets': num_tickets,
                    'per_item_cost_excl_vat': cost_excl_vat(self.rate, num_days),
                    'per_item_cost_incl_vat': cost_incl_vat(self.rate, num_days),
                    'total_cost_excl_vat': cost_excl_vat(self.rate, num_days) * num_tickets,
                    'total_cost_incl_vat': cost_incl_vat(self.rate, num_days) * num_tickets,
                })
        return summary

    def brief_summary(self):
        """E.g. "3 corporate-rate tickets"."""
        summary = f'{self.num_tickets()} {self.rate}-rate ticket'
        if self.num_tickets() > 1:
            summary += 's'
        return summary

    def cost_excl_vat(self):
        return sum(ticket.cost_excl_vat() for ticket in self.all_tickets())

    def cost_incl_vat(self):
        return sum(ticket.cost_incl_vat() for ticket in self.all_tickets())

    def vat(self):
        return self.cost_incl_vat() - self.cost_excl_vat()

    def cost_pence_incl_vat(self):
        # Stripe charges are denominated in pence.
        return 100 * self.cost_incl_vat()

    def num_tickets(self):
        return len(self.all_tickets())

    def unclaimed_tickets(self):
        return self.tickets.filter(owner=None)

    def ticket_for_self(self):
        """The purchaser's own ticket, or None; at most one may exist."""
        tickets = [
            ticket for ticket in self.all_tickets()
            if ticket.owner == self.purchaser
        ]
        if len(tickets) == 0:
            return None
        elif len(tickets) == 1:
            return tickets[0]
        else:
            assert False

    def tickets_for_others(self):
        return [
            ticket for ticket in self.all_tickets()
            if ticket.owner != self.purchaser
        ]

    def payment_required(self):
        """True while the order still needs a (re)charge."""
        return self.status in ['pending', 'failed']

    def company_addr_formatted(self):
        """Company address collapsed to a single comma-separated line."""
        if self.rate == 'corporate':
            lines = [
                line.strip(',') for line in self.company_addr.splitlines() if line
            ]
            return ', '.join(lines)
        else:
            return None
class Rate(models.Model): created_at = models.DateTimeField(auto_now_add=True) data = JSONField(default={})
class Settings(models.Model): boundaries = JSONField() loginHours = models.IntegerField(default=36) scoreRules = JSONField()
class ExperimentChangeLog(models.Model):
    """Audit-log entry recording a status transition (or edit) of an Experiment."""

    # Human-readable labels for each kind of transition.
    STATUS_NONE_DRAFT = "Created Experiment"
    STATUS_DRAFT_DRAFT = "Edited Experiment"
    STATUS_DRAFT_REVIEW = "Ready for Sign-Off"
    STATUS_REVIEW_DRAFT = "Return to Draft"
    STATUS_REVIEW_REVIEW = "Edited Experiment"
    STATUS_REVIEW_SHIP = "Marked as Ready to Ship"
    STATUS_SHIP_ACCEPTED = "Accepted by Normandy"
    STATUS_SHIP_REVIEW = "Canceled Ready to Ship"
    STATUS_ACCEPTED_LIVE = "Launched Experiment"
    STATUS_LIVE_COMPLETE = "Completed Experiment"
    STATUS_ADDED_RESULTS = "Added Results"

    # Two-level map: old_status -> {new_status: pretty label}.
    # The None key covers creation (no previous status).
    PRETTY_STATUS_LABELS = {
        None: {
            Experiment.STATUS_DRAFT: STATUS_NONE_DRAFT
        },
        Experiment.STATUS_DRAFT: {
            Experiment.STATUS_DRAFT: STATUS_DRAFT_DRAFT,
            Experiment.STATUS_REVIEW: STATUS_DRAFT_REVIEW,
        },
        Experiment.STATUS_REVIEW: {
            Experiment.STATUS_DRAFT: STATUS_REVIEW_DRAFT,
            Experiment.STATUS_REVIEW: STATUS_REVIEW_REVIEW,
            Experiment.STATUS_SHIP: STATUS_REVIEW_SHIP,
        },
        Experiment.STATUS_SHIP: {
            Experiment.STATUS_REVIEW: STATUS_SHIP_REVIEW,
            Experiment.STATUS_ACCEPTED: STATUS_SHIP_ACCEPTED,
        },
        Experiment.STATUS_ACCEPTED: {
            Experiment.STATUS_LIVE: STATUS_ACCEPTED_LIVE
        },
        Experiment.STATUS_LIVE: {
            Experiment.STATUS_COMPLETE: STATUS_LIVE_COMPLETE,
            Experiment.STATUS_LIVE: STATUS_ADDED_RESULTS,
        },
        Experiment.STATUS_COMPLETE: {
            Experiment.STATUS_COMPLETE: STATUS_ADDED_RESULTS
        },
    }

    def current_datetime():
        # Deliberately takes no `self`: used below as a named default
        # callable so Django migrations can serialize it by reference.
        return timezone.now()

    experiment = models.ForeignKey(
        Experiment,
        blank=False,
        null=False,
        related_name="changes",
        on_delete=models.CASCADE,
    )
    changed_on = models.DateTimeField(default=current_datetime)
    changed_by = models.ForeignKey(get_user_model(), on_delete=models.CASCADE)
    old_status = models.CharField(max_length=255,
                                  blank=True,
                                  null=True,
                                  choices=Experiment.STATUS_CHOICES)
    new_status = models.CharField(max_length=255,
                                  blank=False,
                                  null=False,
                                  choices=Experiment.STATUS_CHOICES)
    message = models.TextField(blank=True, null=True)
    # Snapshot of the fields changed by this log entry, if any.
    changed_values = JSONField(encoder=DjangoJSONEncoder,
                               blank=True,
                               null=True)

    objects = ExperimentChangeLogManager()

    class Meta:
        verbose_name = "Experiment Change Log"
        verbose_name_plural = "Experiment Change Logs"
        ordering = ("changed_on", )

    def __str__(self):
        # Prefer the explicit message; otherwise derive a label from the
        # transition table.
        if self.message:
            return self.message
        else:
            return self.pretty_status

    @property
    def pretty_status(self):
        # Empty string for transitions missing from the table.
        return self.PRETTY_STATUS_LABELS.get(self.old_status,
                                             {}).get(self.new_status, "")
class SlidingPuzzle(Entertainment): piecesURLS = JSONField() # Should be list of paths width = models.IntegerField() height = models.IntegerField()
class OperationRun(RunModel):
    """A model that represents an execution behaviour/run of instance of an operation."""
    STATUSES = OperationStatuses

    operation = models.ForeignKey('db.Operation',
                                  on_delete=models.CASCADE,
                                  related_name='runs')
    pipeline_run = models.ForeignKey('db.PipelineRun',
                                     on_delete=models.CASCADE,
                                     related_name='operation_runs')
    # Non-symmetrical self-relation forming the DAG edges between runs.
    upstream_runs = models.ManyToManyField('self',
                                           blank=True,
                                           symmetrical=False,
                                           related_name='downstream_runs')
    status = models.OneToOneField('db.OperationRunStatus',
                                  related_name='+',
                                  blank=True,
                                  null=True,
                                  editable=True,
                                  on_delete=models.SET_NULL)
    celery_task_context = JSONField(
        blank=True,
        null=True,
        help_text='The kwargs required to execute the celery task.')
    celery_task_id = models.CharField(max_length=36, null=False, blank=True)

    class Meta:
        app_label = 'db'

    def set_status(self, status, message=None, **kwargs):
        """Record a new status row iff the transition is allowed.

        Extra **kwargs are accepted (and ignored) for interface
        compatibility with other RunModel subclasses.
        """
        if self.can_transition(status):
            OperationRunStatus.objects.create(operation_run=self,
                                              status=status,
                                              message=message)

    def check_concurrency(self):
        """Checks the concurrency of the operation run.

        Checks the concurrency of the operation run
        to validate if we can start a new operation run.

        Returns:
            boolean: Whether to start a new operation run or not.
        """
        if not self.operation.concurrency:  # No concurrency set
            return True

        ops_count = self.operation.runs.filter(
            status__status__in=self.STATUSES.RUNNING_STATUS).count()
        return ops_count < self.operation.concurrency

    def check_upstream_trigger(self):
        """Checks the upstream and the trigger rule.

        Returns a truthy value when the operation's trigger policy is
        satisfied by the statuses of its upstream runs.
        """
        if self.operation.trigger_policy == TriggerPolicy.ONE_DONE:
            return self.upstream_runs.filter(
                status__status__in=self.STATUSES.DONE_STATUS).exists()
        if self.operation.trigger_policy == TriggerPolicy.ONE_SUCCEEDED:
            return self.upstream_runs.filter(
                status__status=self.STATUSES.SUCCEEDED).exists()
        if self.operation.trigger_policy == TriggerPolicy.ONE_FAILED:
            return self.upstream_runs.filter(
                status__status=self.STATUSES.FAILED).exists()

        statuses = self.upstream_runs.values_list('status__status', flat=True)
        # Idiom fix: the original `not bool([True for s in ... if ...])`
        # list builds are exactly all(...) over the complement condition.
        # all() is also True for an empty upstream set, as before.
        if self.operation.trigger_policy == TriggerPolicy.ALL_DONE:
            return all(status in self.STATUSES.DONE_STATUS
                       for status in statuses)
        if self.operation.trigger_policy == TriggerPolicy.ALL_SUCCEEDED:
            return all(status == self.STATUSES.SUCCEEDED
                       for status in statuses)
        if self.operation.trigger_policy == TriggerPolicy.ALL_FAILED:
            return all(status in self.STATUSES.FAILED_STATUS
                       for status in statuses)
        # Explicit falsy return for unknown policies (was an implicit None).
        return False

    @property
    def is_upstream_done(self):
        """True when every upstream run has reached a done (terminal) status."""
        statuses = self.upstream_runs.values_list('status__status', flat=True)
        return all(status in self.STATUSES.DONE_STATUS for status in statuses)

    def schedule_start(self):
        """Schedule the task: check first if the task can start:
            1. we check that the task is still in the CREATED state.
            2. we check that the upstream dependency is met.
            3. we check that pipeline can start a new task;
              i.e. we check the concurrency of the pipeline.
            4. we check that operation can start a new instance;
              i.e. we check the concurrency of the operation.

        -> If all checks pass we schedule the task start it.

        -> 1. If the operation is not in created status, nothing to do.
        -> 2. If the upstream dependency check is not met, two use cases need to be validated:
            * The upstream dependency is not met but could be met in the future,
              because some ops are still CREATED/SCHEDULED/RUNNING/...
              in this case nothing need to be done, every time an upstream operation finishes,
              it will notify all the downstream ops including this one.
            * The upstream dependency is not met and could not be met at all.
              In this case we need to mark the task with `UPSTREAM_FAILED`.
        -> 3. If the pipeline has reached it's concurrency limit,
           we just delay schedule based on the interval/time delay defined by the user.
           The pipeline scheduler will keep checking until the task can be scheduled or stopped.
        -> 4. If the operation has reached it's concurrency limit,
           Same as above we keep trying based on an interval defined by the user.

        Returns:
            boolean: Whether to try to schedule this operation run in the future or not.
        """
        if self.last_status != self.STATUSES.CREATED:
            return False

        upstream_trigger_check = self.check_upstream_trigger()
        if not upstream_trigger_check and self.is_upstream_done:
            # This task cannot be scheduled anymore
            self.on_upstream_failed()
            return False

        if not self.pipeline_run.check_concurrency():
            return True

        if not self.check_concurrency():
            return True

        self.on_scheduled()
        self.start()
        return False

    def start(self):
        """Start the celery task of this operation."""
        # Robustness fix: celery_task_context is nullable; the original
        # item-assignment raised TypeError when it was None.
        kwargs = self.celery_task_context or {}
        # Update we the operation run id
        kwargs['operation_run_id'] = self.id
        async_result = celery_app.send_task(
            self.operation.celery_task,
            kwargs=kwargs,
            **self.operation.get_run_params())
        self.celery_task_id = async_result.id
        self.save()

    def stop(self, message=None):
        """Revoke the running celery task (if any) and mark the run stopped."""
        if self.is_running:
            task = AsyncResult(self.celery_task_id)
            task.revoke(terminate=True, signal='SIGKILL')
        self.on_stop(message=message)

    def skip(self, message=None):
        self.on_skip(message=message)

    def on_retry(self):
        self.set_status(status=self.STATUSES.RETRYING)

    def on_upstream_failed(self):
        self.set_status(status=self.STATUSES.UPSTREAM_FAILED)

    def on_failure(self, message=None):
        self.set_status(status=self.STATUSES.FAILED, message=message)
        # NOTE(review): save() here but not in on_success — preserved as-is;
        # confirm whether the asymmetry is intentional.
        self.save()

    def on_success(self, message=None):
        self.set_status(status=self.STATUSES.SUCCEEDED, message=message)