class Job(models.Model):
    """
    Database model for an 'Export'. Immutable, except in the case of HDX
    Export Regions.
    """
    id = models.AutoField(primary_key=True, editable=False)
    uid = models.UUIDField(unique=True, default=uuid.uuid4, editable=False,
                           db_index=True)
    user = models.ForeignKey(User, related_name='owner')
    name = models.CharField(max_length=100, db_index=True, blank=False)
    description = models.CharField(max_length=1000, db_index=True, default='',
                                   blank=True)
    event = models.CharField(max_length=100, db_index=True, default='',
                             blank=True)
    export_formats = ArrayField(models.CharField(max_length=10),
                                validators=[validate_export_formats],
                                blank=False)
    published = models.BooleanField(default=False, db_index=True)
    the_geom = models.GeometryField(verbose_name='Uploaded geometry',
                                    srid=4326, blank=False)
    simplified_geom = models.GeometryField(verbose_name='Simplified geometry',
                                           srid=4326, blank=True, null=True)
    objects = models.GeoManager()
    feature_selection = models.TextField(
        blank=False, validators=[validate_feature_selection])
    created_at = models.DateTimeField(default=timezone.now, editable=False)
    updated_at = models.DateTimeField(default=timezone.now, editable=False)
    mbtiles_maxzoom = models.IntegerField(null=True, blank=True)
    mbtiles_minzoom = models.IntegerField(null=True, blank=True)
    mbtiles_source = models.TextField(null=True, blank=True)

    # flags
    buffer_aoi = models.BooleanField(default=False)
    unlimited_extent = models.BooleanField(default=False)
    hidden = models.BooleanField(default=False)  # hidden from the list page
    expire_old_runs = models.BooleanField(default=True)
    pinned = models.BooleanField(default=False)
    unfiltered = models.BooleanField(default=False)

    class Meta:  # pragma: no cover
        managed = True
        db_table = 'jobs'

    @property
    def osma_link(self):
        """OSM Analytics URL centred on this job's bounding box."""
        bounds = self.the_geom.extent
        return "http://osm-analytics.org/#/show/bbox:{0},{1},{2},{3}/buildings/recency".format(
            *bounds)

    @property
    def area(self):
        """Geodesic area of the uploaded geometry."""
        return get_geodesic_area(self.the_geom)

    def save(self, *args, **kwargs):
        """Normalise geometries and persist.

        BUGFIX: ``updated_at`` is now refreshed on every save; previously it
        kept its creation-time default forever because nothing rewrote it.
        """
        self.the_geom = force2d(self.the_geom)
        self.simplified_geom = simplify_geom(self.the_geom,
                                             force_buffer=self.buffer_aoi)
        self.updated_at = timezone.now()
        super(Job, self).save(*args, **kwargs)

    def __str__(self):
        return str(self.uid)
class AHJInspection(models.Model):
    """An inspection offered/required by an AHJ.

    ``InspectionStatus`` encodes confirmation state: True = confirmed,
    None = unconfirmed (mirrors ``Contact.ContactStatus``).
    """
    InspectionID = models.AutoField(db_column='InspectionID', primary_key=True)
    AHJPK = models.ForeignKey(AHJ, models.DO_NOTHING, db_column='AHJPK')
    InspectionType = models.ForeignKey('InspectionType',
                                       on_delete=models.DO_NOTHING,
                                       db_column='InspectionType', null=True)
    AHJInspectionName = models.CharField(db_column='AHJInspectionName',
                                         max_length=255)
    AHJInspectionNotes = models.CharField(db_column='AHJInspectionNotes',
                                          max_length=255, blank=True)
    Description = models.CharField(db_column='Description', max_length=255,
                                   blank=True)
    FileFolderURL = models.CharField(db_column='FileFolderURL',
                                     max_length=255, blank=True)
    TechnicianRequired = models.BooleanField(db_column='TechnicianRequired',
                                             null=True)
    InspectionStatus = models.BooleanField(db_column='InspectionStatus',
                                           null=True)
    history = HistoricalRecords()

    def get_contacts(self):
        """Confirmed contacts attached to this inspection.

        The status test is pushed into SQL instead of loading every contact
        row and checking ``ContactStatus`` in Python.
        """
        return list(Contact.objects.filter(ParentTable='AHJInspection',
                                           ParentID=self.InspectionID,
                                           ContactStatus=True))

    def get_uncon_con(self):
        """Unconfirmed (status NULL) contacts attached to this inspection."""
        return list(Contact.objects.filter(ParentTable='AHJInspection',
                                           ParentID=self.InspectionID,
                                           ContactStatus__isnull=True))

    def create_relation_to(self, to):
        """Mark this inspection as an unconfirmed relation of ``to`` (an AHJ).

        Raises ValueError for any other target model.
        """
        if to.__class__.__name__ == 'AHJ':
            self.InspectionStatus = None
            return self
        else:
            raise ValueError(
                '\'AHJInspection\' cannot be related to \'{to_model}\''.format(
                    to_model=to.__class__.__name__))

    def get_relation_status_field(self):
        """Name of the field tracking this model's confirmation status."""
        return 'InspectionStatus'

    class Meta:
        managed = True
        db_table = 'AHJInspection'
        verbose_name = 'AHJ Inspection'
        verbose_name_plural = 'AHJ Inspections'
        unique_together = (('AHJPK', 'AHJInspectionName'), )

    SERIALIZER_EXCLUDED_FIELDS = [
        'InspectionID', 'UnconfirmedContacts', 'InspectionStatus'
    ]
class User(AbstractBaseUser):
    """Application user; authentication is by ``Email`` (``USERNAME_FIELD``)."""
    UserID = models.AutoField(db_column='UserID', primary_key=True)
    ContactID = models.ForeignKey(Contact, models.DO_NOTHING,
                                  db_column='ContactID', null=True)
    Username = models.CharField(db_column='Username', unique=True,
                                max_length=254)
    password = models.CharField(max_length=128)
    Email = models.CharField(db_column='Email', unique=True, max_length=254)
    is_staff = models.BooleanField(db_column='IsStaff', default=False)
    is_active = models.BooleanField(db_column='IsActive', default=False)
    is_superuser = models.BooleanField(db_column='IsSuperuser', default=False)
    SignUpDate = models.DateField(db_column='SignUpDate', blank=True)
    PersonalBio = models.CharField(db_column='PersonalBio', max_length=255,
                                   blank=True)
    URL = models.CharField(db_column='URL', max_length=255, blank=True,
                           null=True)
    CompanyAffiliation = models.CharField(db_column='CompanyAffiliation',
                                          max_length=255, blank=True)
    Photo = models.CharField(db_column='Photo', max_length=255, blank=True,
                             null=True)
    # Edit counters: note the attribute names differ from the db columns
    # ('Num...' prefix on the columns only).
    AcceptedEdits = models.IntegerField(db_column='NumAcceptedEdits',
                                        default=0)
    SubmittedEdits = models.IntegerField(db_column='NumSubmittedEdits',
                                         default=0)
    CommunityScore = models.IntegerField(db_column='CommunityScore', default=0)
    SecurityLevel = models.IntegerField(db_column='SecurityLevel', default=3)
    history = HistoricalRecords()

    USERNAME_FIELD = 'Email'
    SERIALIZER_EXCLUDED_FIELDS = ['APIToken', 'is_superuser', 'MaintainedAHJs']
    objects = UserManager()

    def has_perm(self, perm, obj=None):
        # Superusers implicitly hold every permission.
        return self.is_superuser

    def has_module_perms(self, core):
        # App-level access mirrors has_perm: superusers only.
        return self.is_superuser

    def get_email_field_name(self=None):
        # NOTE(review): ``self=None`` lets this be called both on an instance
        # and directly on the class without an instance — confirm callers
        # rely on the class-level form before normalising this signature.
        return "Email"

    def get_maintained_ahjs(self):
        """Primary keys of the AHJs this user actively maintains."""
        return [
            ahjpk.AHJPK.AHJPK
            for ahjpk in AHJUserMaintains.objects.filter(
                UserID=self).filter(MaintainerStatus=True)
        ]

    def is_ahj_official(self):
        """True when the user maintains at least one AHJ."""
        return len(self.get_maintained_ahjs()) > 0

    def get_API_token(self):
        """The user's API token key, or '' when none has been issued."""
        api_token = APIToken.objects.filter(user=self).first()
        if api_token is None:
            return ''
        return api_token.key

    class Meta:
        db_table = 'User'
        verbose_name = 'User'
        verbose_name_plural = 'Users'
        managed = True
class AHJ(models.Model):
    """Authority Having Jurisdiction — the central record of the schema.

    The ``get_*``/``get_uncon_*`` pairs return confirmed (status True) and
    unconfirmed (status NULL) related records. Filtering is pushed into SQL
    instead of fetching every related row and testing the flag in Python.
    """
    AHJPK = models.AutoField(db_column='AHJPK', primary_key=True)
    AHJID = models.CharField(db_column='AHJID', unique=True, max_length=36)
    AHJCode = models.CharField(db_column='AHJCode', max_length=20, blank=True)
    AHJLevelCode = models.ForeignKey('AHJLevelCode',
                                     on_delete=models.DO_NOTHING,
                                     db_column='AHJLevelCode', null=True)
    PolygonID = models.ForeignKey('Polygon', on_delete=models.DO_NOTHING,
                                  db_column='PolygonID', null=True)
    AddressID = models.ForeignKey('Address', on_delete=models.DO_NOTHING,
                                  db_column='AddressID', null=True)
    AHJName = models.CharField(db_column='AHJName', max_length=100)
    Description = models.CharField(db_column='Description', max_length=255,
                                   blank=True)
    DocumentSubmissionMethodNotes = models.CharField(
        db_column='DocumentSubmissionMethodNotes', max_length=255, blank=True)
    PermitIssueMethodNotes = models.CharField(
        db_column='PermitIssueMethodNotes', max_length=255, blank=True)
    EstimatedTurnaroundDays = models.IntegerField(
        db_column='EstimatedTurnaroundDays', null=True)
    FileFolderURL = models.CharField(db_column='FileFolderURL',
                                     max_length=255, blank=True)
    URL = models.CharField(db_column='URL', max_length=2048, blank=True)
    BuildingCode = models.ForeignKey('BuildingCode',
                                     on_delete=models.DO_NOTHING,
                                     db_column='BuildingCode', null=True)
    BuildingCodeNotes = models.CharField(db_column='BuildingCodeNotes',
                                         max_length=255, blank=True)
    ElectricCode = models.ForeignKey('ElectricCode',
                                     on_delete=models.DO_NOTHING,
                                     db_column='ElectricCode', null=True)
    ElectricCodeNotes = models.CharField(db_column='ElectricCodeNotes',
                                         max_length=255, blank=True)
    FireCode = models.ForeignKey('FireCode', on_delete=models.DO_NOTHING,
                                 db_column='FireCode', null=True)
    FireCodeNotes = models.CharField(db_column='FireCodeNotes',
                                     max_length=255, blank=True)
    ResidentialCode = models.ForeignKey('ResidentialCode',
                                        on_delete=models.DO_NOTHING,
                                        db_column='ResidentialCode',
                                        null=True)
    ResidentialCodeNotes = models.CharField(db_column='ResidentialCodeNotes',
                                            max_length=255, blank=True)
    WindCode = models.ForeignKey('WindCode', on_delete=models.DO_NOTHING,
                                 db_column='WindCode', null=True)
    WindCodeNotes = models.CharField(db_column='WindCodeNotes',
                                     max_length=255, blank=True)
    history = HistoricalRecords()

    class Meta:
        managed = True
        db_table = 'AHJ'
        verbose_name = 'AHJ'
        verbose_name_plural = 'AHJs'

    def get_contacts(self):
        """Confirmed contacts for this AHJ."""
        return list(Contact.objects.filter(ParentTable='AHJ',
                                           ParentID=self.AHJPK,
                                           ContactStatus=True))

    def get_unconfirmed(self):
        """Unconfirmed contacts for this AHJ."""
        return list(Contact.objects.filter(ParentTable='AHJ',
                                           ParentID=self.AHJPK,
                                           ContactStatus__isnull=True))

    def get_comments(self):
        """All comments on this AHJ, newest first."""
        return list(Comment.objects.filter(AHJPK=self.AHJPK).order_by('-Date'))

    def get_inspections(self):
        """Confirmed inspections for this AHJ."""
        return list(AHJInspection.objects.filter(AHJPK=self.AHJPK,
                                                 InspectionStatus=True))

    def get_unconfirmed_inspections(self):
        """Unconfirmed inspections for this AHJ."""
        return list(AHJInspection.objects.filter(
            AHJPK=self.AHJPK, InspectionStatus__isnull=True))

    def get_document_submission_methods(self):
        """Confirmed document-submission methods."""
        return list(AHJDocumentSubmissionMethodUse.objects.filter(
            AHJPK=self.AHJPK, MethodStatus=True))

    def get_uncon_dsm(self):
        """Unconfirmed document-submission methods."""
        return list(AHJDocumentSubmissionMethodUse.objects.filter(
            AHJPK=self.AHJPK, MethodStatus__isnull=True))

    def get_permit_submission_methods(self):
        """Confirmed permit-issue methods."""
        return list(AHJPermitIssueMethodUse.objects.filter(
            AHJPK=self.AHJPK, MethodStatus=True))

    def get_uncon_pim(self):
        """Unconfirmed permit-issue methods."""
        return list(AHJPermitIssueMethodUse.objects.filter(
            AHJPK=self.AHJPK, MethodStatus__isnull=True))

    def get_err(self):
        """Confirmed engineering-review requirements."""
        return list(EngineeringReviewRequirement.objects.filter(
            AHJPK=self.AHJPK, EngineeringReviewRequirementStatus=True))

    def get_uncon_err(self):
        """Unconfirmed engineering-review requirements."""
        return list(EngineeringReviewRequirement.objects.filter(
            AHJPK=self.AHJPK,
            EngineeringReviewRequirementStatus__isnull=True))

    def get_fee_structures(self):
        """Confirmed fee structures."""
        return list(FeeStructure.objects.filter(AHJPK=self.AHJPK,
                                                FeeStructureStatus=True))

    def get_uncon_fs(self):
        """Unconfirmed fee structures."""
        return list(FeeStructure.objects.filter(
            AHJPK=self.AHJPK, FeeStructureStatus__isnull=True))

    SERIALIZER_EXCLUDED_FIELDS = [
        'Polygon', 'AHJPK', 'Comments', 'UnconfirmedContacts',
        'UnconfirmedEngineeringReviewRequirements',
        'UnconfirmedDocumentSubmissionMethods',
        'UnconfirmedPermitIssueMethods', 'UnconfirmedInspections',
        'UnconfirmedFeeStructures'
    ]
class Contact(models.Model):
    """A contact record, generically attached to an AHJ or an AHJInspection
    via the (ParentTable, ParentID) pair.

    ``ContactStatus`` encodes confirmation state: True = confirmed,
    None = unconfirmed.
    """
    ParentTable = models.CharField(db_column='ParentTable', max_length=255,
                                   blank=True)
    ParentID = models.IntegerField(db_column='ParentID', null=True)
    ContactID = models.AutoField(db_column='ContactID', primary_key=True)
    AddressID = models.ForeignKey(Address, models.DO_NOTHING,
                                  db_column='AddressID', null=True)
    FirstName = models.CharField(db_column='FirstName', max_length=255,
                                 blank=True)
    MiddleName = models.CharField(db_column='MiddleName', max_length=255,
                                  blank=True)
    LastName = models.CharField(db_column='LastName', max_length=255,
                                blank=True)
    HomePhone = models.CharField(db_column='HomePhone', max_length=31,
                                 blank=True)
    MobilePhone = models.CharField(db_column='MobilePhone', max_length=31,
                                   blank=True)
    WorkPhone = models.CharField(db_column='WorkPhone', max_length=31,
                                 blank=True)
    ContactType = models.ForeignKey('ContactType',
                                    on_delete=models.DO_NOTHING,
                                    db_column='ContactType', null=True)
    ContactTimezone = models.CharField(db_column='ContactTimezone',
                                       max_length=255, blank=True)
    Description = models.CharField(db_column='Description', max_length=255,
                                   blank=True)
    Email = models.CharField(db_column='Email', max_length=50, blank=True)
    Title = models.CharField(db_column='Title', max_length=255, blank=True)
    URL = models.CharField(db_column='URL', max_length=255, blank=True)
    PreferredContactMethod = models.ForeignKey(
        'PreferredContactMethod', on_delete=models.DO_NOTHING,
        db_column='PreferredContactMethod', null=True)
    ContactStatus = models.BooleanField(db_column='ContactStatus', null=True)
    history = HistoricalRecords()

    class Meta:
        managed = True
        db_table = 'Contact'
        verbose_name = 'Contact'
        verbose_name_plural = 'Contacts'
        index_together = (('ParentTable', 'ParentID'), )

    SERIALIZER_EXCLUDED_FIELDS = ['ContactID']

    def create_relation_to(self, to):
        """Attach this contact to ``to`` as an unconfirmed relation and save.

        Only AHJ and AHJInspection may own contacts; anything else raises
        ValueError.
        """
        parent_kind = to.__class__.__name__
        if parent_kind not in ('AHJ', 'AHJInspection'):
            raise ValueError(
                '\'Contact\' cannot be related to \'{to_model}\''.format(
                    to_model=parent_kind))
        self.ParentTable = parent_kind
        self.ParentID = to.pk
        self.ContactStatus = None
        self.save()
        return self

    def get_relation_status_field(self):
        """Name of the field tracking this model's confirmation status."""
        return 'ContactStatus'
class Job(TimeStampedModelMixin):
    """
    Model for a Job.
    """
    id = models.AutoField(primary_key=True, editable=False)
    uid = models.UUIDField(unique=True, default=uuid.uuid4, editable=False,
                           db_index=True)
    user = models.ForeignKey(User, related_name='owner')
    name = models.CharField(max_length=100, db_index=True)
    description = models.CharField(max_length=1000, db_index=True)
    event = models.CharField(max_length=100, db_index=True, default='',
                             blank=True)
    export_formats = ArrayField(models.CharField(max_length=10), default=list)
    config = models.ForeignKey(ExportConfig, related_name='config', null=True)
    published = models.BooleanField(default=False,
                                    db_index=True)  # publish export
    feature_save = models.BooleanField(
        default=False, db_index=True)  # save feature selections
    feature_pub = models.BooleanField(
        default=False, db_index=True)  # publish feature selections
    the_geom = models.GeometryField(verbose_name='Extent for export',
                                    srid=4326, default='')
    objects = models.GeoManager()
    feature_selection = models.TextField(blank=True)
    buffer_aoi = models.BooleanField(default=False)

    class Meta:  # pragma: no cover
        managed = True
        db_table = 'jobs'

    def save(self, *args, **kwargs):
        super(Job, self).save(*args, **kwargs)

    def __str__(self):
        return '{0}'.format(self.name)

    @property
    def feature_selection_object(self):
        """
        a valid FeatureSelection object based off the feature_selection column.
        """
        fs = FeatureSelection(self.feature_selection)
        # assert fs.valid, 'Feature selection is invalid'
        return fs

    @property
    def tag_dict(self):
        """
        Return the unique set of Tag keys from this export with their
        associated geometry types.

        Used by Job.categorised_tags (below) to categorize tags according
        to their geometry types.
        """
        # get the unique keys from the tags for this export
        uniq_keys = list(self.tags.values('key').distinct('key'))
        tag_dict = {}  # mapping of tags to geom_types
        for entry in uniq_keys:
            key = entry['key']
            # BUGFIX: the original also assigned ``tag_dict['key'] = key``
            # here (leftover debug code), which polluted the mapping with a
            # bogus literal 'key' entry that leaked into categorised_tags.
            geom_types = list(self.tags.filter(key=key).values('geom_types'))
            geom_type_list = []
            for geom_type in geom_types:
                geom_type_list.extend(geom_type['geom_types'])
            # get unique values for geomtypes
            tag_dict[key] = list(set(geom_type_list))
        return tag_dict

    @property
    def filters(self):
        """
        Return key=value pairs for each tag in this export.

        Used in utils.overpass.filter to filter the export.
        """
        filters = []
        for tag in self.tags.all():
            kv = '{0}={1}'.format(tag.key, tag.value)
            filters.append(kv)
        return filters

    @property
    def categorised_tags(self):
        """
        Return tags mapped according to their geometry types.
        """
        points = []
        lines = []
        polygons = []
        # Evaluate the tag_dict property once; the original read it twice
        # per key, doubling the database work.
        for tag, geoms in self.tag_dict.items():
            for geom in geoms:
                if geom == 'point':
                    points.append(tag)
                if geom == 'line':
                    lines.append(tag)
                if geom == 'polygon':
                    polygons.append(tag)
        return {
            'points': sorted(points),
            'lines': sorted(lines),
            'polygons': sorted(polygons)
        }
class Manufacturer(models.Model):
    """A device manufacturer."""
    id = models.AutoField(primary_key=True)
    name = models.CharField(max_length=128)

    def __str__(self):
        # Human-readable representation for admin/shell listings.
        return self.name
class Device(models.Model):
    """A client device identified by ``device_id``, with parsed user-agent
    details and push-notification metadata."""
    device_iid = models.AutoField(primary_key=True)
    device_id = models.CharField("Id Dispositivo", max_length=128,
                                 help_text="Identificador del Dispositivo",
                                 unique=True)
    ua_string = models.CharField("User Agent", max_length=512,
                                 help_text="User Agent", null=True,
                                 blank=True)
    status = models.CharField("Estado", max_length=32,
                              help_text="Estado del Dispositivo",
                              default="ACTIVE")
    dev_brand = models.CharField("Marca", max_length=128,
                                 help_text="Marca del Dispositivo",
                                 null=True, blank=True)
    dev_family = models.CharField("Familia", max_length=128,
                                  help_text="Familia del Dispositivo",
                                  null=True, blank=True)
    dev_model = models.CharField("Modelo", max_length=128,
                                 help_text="Modelo del Dispositivo",
                                 null=True, blank=True)
    os_family = models.CharField("SO", max_length=128,
                                 help_text="Sistema Operativo",
                                 null=True, blank=True)
    os_version = models.CharField("Version SO", max_length=32,
                                  help_text="Versión del Sistema Operativo",
                                  null=True, blank=True)
    browser_family = models.CharField("Navegador", max_length=64,
                                      help_text="Navegador del User Agent",
                                      null=True, blank=True)
    browser_version = models.CharField(
        "Version Navegador", max_length=32,
        help_text="Versión del Navegador del User Agent", null=True,
        blank=True)
    # BUGFIX: auto_now and auto_now_add were swapped. ``created`` must be
    # stamped once at creation (auto_now_add); ``last_seen`` must refresh on
    # every save (auto_now). The original did the opposite, so the creation
    # date drifted on each save and the last visit never updated.
    created = models.DateTimeField(
        "Creado", help_text="Fecha de Creación del Dispositivo",
        auto_now_add=True)
    last_seen = models.DateTimeField("Última Visita",
                                     help_text="Última Visita del Dispositivo",
                                     auto_now=True)
    created_ip_address = models.CharField(
        "IP de creación", help_text="Dirección IP desde la que fue creado",
        max_length=32, null=True, blank=True)
    push_notification_token = models.CharField(
        "Token de Notificación",
        help_text="Token de Notificación para envíos tipo PUSH",
        max_length=128, null=True, blank=True)
class User(models.Model):
    """An end user identified by (user_type, user_value), with optional
    contact, location and password-hash data."""
    user_iid = models.AutoField(primary_key=True)
    user_type = models.CharField("Tipo", max_length=32,
                                 help_text="Tipo de usuario")
    user_value = models.CharField("Sujeto", max_length=128,
                                  help_text="Valor/Nombre de Usuario")
    name = models.CharField("Nombre Completo", max_length=512,
                            help_text="Nombre Completo del Usuario",
                            null=True, blank=True)
    email = models.CharField("Correo Electrónico", max_length=256,
                             help_text="Correo Electrónico del Usuario",
                             null=True, blank=True)
    phone = models.CharField("Teléfono", max_length=64,
                             help_text="Número Telefónico del Usuario",
                             null=True, blank=True)
    # BUGFIX: auto_now and auto_now_add were swapped. ``created`` must be
    # stamped once at creation (auto_now_add); ``last_seen`` must refresh on
    # every save (auto_now).
    # NOTE(review): the help_text still says "Dispositivo" — looks copied
    # from the Device model; confirm before changing the displayed text.
    created = models.DateTimeField(
        "Creado", help_text="Fecha de Creación del Dispositivo",
        auto_now_add=True)
    last_seen = models.DateTimeField("Última Visita",
                                     help_text="Última Visita del Dispositivo",
                                     auto_now=True)
    created_ip_address = models.CharField(
        "IP de creación", help_text="Dirección IP desde la que fue creado",
        max_length=32, null=True, blank=True)
    address = models.CharField(
        "Dirección",
        help_text="Dirección por defecto del Usuario",
        max_length=400,
        blank=True,
        null=True,
    )
    location = models.PointField(
        "Ubicación",
        help_text="Ubicación por defecto del Usuario",
        blank=True,
        null=True,
    )
    city = models.CharField("Ciudad", max_length=30,
                            help_text="Dirección por defecto del Usuario",
                            blank=True, null=True)
    city_code = models.CharField(
        "Código Ciudad", max_length=30,
        help_text="Código de Ciudad por Defecto del Usuario", blank=True,
        null=True)
    password_hash = models.CharField("Password", max_length=64,
                                     help_text="Contraseña del Usuario",
                                     blank=True, null=True)
    password_salt = models.CharField(
        "Password Salta", max_length=64,
        help_text="Salt de Contraseña del Usuario", blank=True, null=True)
    history = HistoricalRecords()
class DataFile(models.Model):
    """An uploaded data file plus the bookkeeping for importing its rows
    into DataSet/Station/Cast/Depth/DataValue records."""

    class Meta:
        db_table = 'd2qc_data_files'

    # Make sure files get unique filenames
    def file_store_path(self, filename):
        id = self.owner.id if self.owner else 0
        return DataFile.get_file_store_path(filename, user_id=id)

    @staticmethod
    def get_file_store_path(filename, user_id=0):
        """Return a collision-free path for ``filename`` under the user's
        data folder (``N__name`` with N incremented until unused)."""
        # clear filename of illegal characters
        # (raw string so the backslash escapes reach the regex engine)
        filename = re.sub(r'[^a-zA-Z0-9\.\-\_]', '', filename)
        path = os.path.join(settings.DATA_FOLDER, 'UID_{}'.format(user_id))
        i = 0
        name = '{}__{}'.format(i, filename)
        while os.path.isfile(os.path.join(path, name)):
            i += 1
            name = '{}__{}'.format(i, filename)
        return os.path.join(path, name)

    id = models.AutoField(primary_key=True)
    filepath = models.FileField(upload_to=file_store_path, null=True)
    name = models.CharField(max_length=255, blank=True)
    description = models.CharField(max_length=255, blank=True)
    headers = models.TextField(blank=True)
    created = models.DateTimeField(auto_now_add=True)
    updated = models.DateTimeField(auto_now=True)
    owner = models.ForeignKey(User, on_delete=models.PROTECT, blank=True,
                              null=True, editable=False)
    import_errors = models.TextField(blank=True)
    import_started = models.DateTimeField(null=True)
    import_finnished = models.DateTimeField(null=True)  # (sic) column name

    # Messages reported while importing data.
    # NOTE: kept as a class attribute for interface compatibility, but every
    # entry point now rebinds ``self._messages`` before appending — the
    # original appended to this shared class-level list, leaking messages
    # across instances and across calls (a bug).
    _messages = []

    def __str__(self):
        return self.name if self.name else str(self.filepath)

    # Delete files as object is deleted
    def delete(self):
        # Dont delete file if file has related data set(s)
        if not DataSet.objects.filter(data_file_id=self.id).exists():
            self.filepath.delete()
        super().delete()

    def read_file(self):
        """Return the file's lines, trying utf-8 first and falling back to
        iso-8859-1 for legacy encodings."""
        filearray = []
        if self.filepath:
            path = os.path.join(
                settings.BASE_DIR,
                str(self.filepath),
            )
            try:
                with open(path, encoding="utf-8") as excfile:
                    filearray = excfile.readlines()
            except UnicodeDecodeError:
                # BUGFIX: was a bare ``except:``, which also swallowed
                # IOErrors, KeyboardInterrupt, etc.
                with open(path, encoding="iso-8859-1") as excfile:
                    filearray = excfile.readlines()
        return filearray

    def _write_messages(self, append=False, save=False):
        """Flush ``self._messages`` into ``import_errors``; optionally save.

        BUGFIX: in append mode a newline now separates the existing text
        from the new messages (they used to be fused with no separator).
        """
        text = '\n'.join(self._messages)
        if append:
            if self.import_errors:
                self.import_errors += '\n' + text
            else:
                self.import_errors += text
        else:
            self.import_errors = text
        if save:
            self.save()

    def import_data(self):
        """
        Import data from this data file. Abort if data existsself.
        Import errors are appended to the import_errors field.
        return True if data was imported, else return False
        """
        if self.import_started:
            self._messages = ["File data already beeing imported"]
            self._write_messages(append=True, save=True)
            return False
        self.import_started = timezone.now()
        self.save()
        # Start from a fresh per-instance message list (BUGFIX: appending
        # to the class-level default leaked messages between instances).
        self._messages = []
        datagrid = excread.excread(str(self.filepath))
        MANDATORY_VARS = (
            'EXPOCODE',
            'EXC_DATETIME',
            'EXC_CTDDEPTH',
            'STNNBR',
            'LATITUDE',
            'LONGITUDE',
        )
        # Variables not to be treated as data variables
        IGNORE = (
            'EXPOCODE',
            'EXC_DATETIME',
            'EXC_CTDDEPTH',
            'STNNBR',
            'SECT_ID',
            'DATE',
            'TIME',
            'LATITUDE',
            'LONGITUDE',
            'BTLNBR',
            'BTLNBR_FLAG_W',
            'SAMPNO',
            'CASTNO',
            'CTDDEPTH',
            'CTDDEP',
            'HOUR',
            'MINUTE',
            'DEPTH',
            'HOUR',
            'MINUTE',
        )
        QC_SUFFIX = '_FLAG_W'
        # Current dataset/station/cast/depth being filled in.
        data_set = None
        station = None
        cast = None
        depth = None
        # Raise an exception if mandatory columns are missing
        if not all(key in datagrid.columns for key in MANDATORY_VARS):
            message = "Data file missing some mandatory column: {}".format(
                ', '.join(MANDATORY_VARS))
            self._messages.append(message)
            self._write_messages()
            self.import_finnished = timezone.now()
            self.save()
            return False
        # Import data types
        missing_vars = []
        data_type_names = {
            str(type_): type_ for type_ in DataTypeName.objects.all()
        }
        for var in datagrid.columns:
            if var in IGNORE or var.endswith(QC_SUFFIX):
                continue
            if var not in data_type_names:
                missing_vars.append(var)
        if missing_vars:
            message = """There where variables in the dataset that are not defined in the system. These cannot be handled. An administrator has to add the variables as data types for them to be treated.
Unhandled variables in the data set: {}
""".format('\n - '.join(missing_vars))
            self._messages.append(message)
        missing_depth_warning = False  # Indicate missing depth already warned
        missing_position_warning = False
        # (Hopefully sensible) defaults for authoritative temp, salin, pressure
        temp_aut = DataTypeName.objects.filter(name="CTDTMP").first()
        salin_aut = DataTypeName.objects.filter(name="CTDSAL").first()
        press_aut = DataTypeName.objects.filter(name="CTDPRS").first()
        value_list = []
        line_no = 0
        for i, expo in enumerate(datagrid['EXPOCODE']):
            line_no += 1
            if not data_set or expo != data_set.expocode:
                # Add new dataset
                data_set = DataSet(
                    expocode=expo,
                    is_reference=False,
                    data_file=self,
                    owner=self.owner,
                    temp_aut=temp_aut,
                    salin_aut=salin_aut,
                    press_aut=press_aut,
                )
                if DataSet.objects.filter(expocode=expo,
                                          owner=self.owner).exists():
                    # TODO Support files with multiple datasets, where one or
                    # more might already exist in database, but not all.
                    message = 'Dataset {} already exists for this user'.format(
                        expo)
                    self._messages = [message]
                    self._write_messages()
                    self.import_finnished = timezone.now()
                    self.save()
                    return False
                data_set.save()
                station = None
                cast = None
                depth = None
            if not station or datagrid['STNNBR'][i] != station.station_number:
                longitude = datagrid['LONGITUDE'][i]
                latitude = datagrid['LATITUDE'][i]
                if math.isnan(longitude) or math.isnan(latitude):
                    if missing_position_warning:
                        continue
                    # Warning and dont insert if position is NaN
                    message = """Latitude or longitude is nan on line {}. Station will not be added when position is missing. Subsequent missing position errors are supressed for this file.
""".format(i)
                    self._messages.append(message)
                    missing_position_warning = True
                    continue
                # Add new station
                station = Station(data_set=data_set,
                                  position=Point(longitude, latitude),
                                  station_number=int(datagrid['STNNBR'][i]))
                station.save()
                cast = None
                depth = None
            if (not cast or ('CASTNO' in datagrid
                             and datagrid['CASTNO'][i] != cast.cast)):
                # Add new cast (CASTNO column is optional; default cast 1)
                cast_ = 1
                if 'CASTNO' in datagrid:
                    cast_ = int(datagrid['CASTNO'][i])
                cast = Cast(station=station, cast=cast_)
                cast.save()
                depth = None
            if (not depth or depth.depth != datagrid['EXC_CTDDEPTH'][i]
                    or ('BTLNBR' in datagrid
                        and depth.bottle != datagrid['BTLNBR'][i])):
                if math.isnan(datagrid['EXC_CTDDEPTH'][i]):
                    if missing_depth_warning:
                        continue
                    # Warning and dont insert if depth is NaN
                    message = """Depth is nan on line {}. Data will not be added when depth is nan. Subsequent missing depth errors are supressed for this file.
""".format(i)
                    self._messages.append(message)
                    missing_depth_warning = True
                    continue
                # Add new depth
                btlnbr = datagrid.get('BTLNBR', False)
                depth = Depth(
                    cast=cast,
                    depth=float(datagrid['EXC_CTDDEPTH'][i]),
                    bottle=1 if btlnbr is False else btlnbr[i],
                    date_and_time=datagrid['EXC_DATETIME'][i],
                )
                try:
                    depth.save()
                except Exception as e:
                    m = "Line {}, Error {}".format(
                        i,
                        str(e),
                    )
                    self._messages = [m]
                    self._write_messages(append=True, save=True)
                    raise e
            elif (depth.depth == datagrid['EXC_CTDDEPTH'][i]
                  and ('CASTNO' not in datagrid
                       or datagrid['CASTNO'][i] == cast.cast)
                  and datagrid['STNNBR'][i] == station.station_number
                  and expo == data_set.expocode):
                # Implies duplicate line. Skip this line with a warning.
                # BUGFIX: guard the CASTNO lookup — the original indexed
                # datagrid['CASTNO'] unconditionally here and raised
                # KeyError for files without a CASTNO column.
                m = "Line {}, Error {}".format(i, "Duplicate, ignores line")
                self._messages = [m]
                self._write_messages(append=True, save=True)
                continue
            temp_val = salin_val = press_val = None
            for key in datagrid.columns:
                if key in IGNORE:
                    continue
                if key not in data_type_names:
                    # Variable not found in database. Already reported.
                    continue
                v = datagrid[key][i].item()
                # collect temp, press, salin values
                if key == temp_aut.name:
                    temp_val = v
                if key == salin_aut.name:
                    salin_val = v
                if key == press_aut.name:
                    press_val = v
                # Don't import missing values:
                if numpy.isnan(v) or v < -10:
                    continue
                qc_flag = None
                if (key + QC_SUFFIX in datagrid
                        and not numpy.isnan(datagrid[key + QC_SUFFIX][i])):
                    qc_flag = int(datagrid[key + QC_SUFFIX][i])
                value = DataValue(depth=depth, value=v, qc_flag=qc_flag,
                                  data_type_name=data_type_names[key])
                value_list.append(value)
            # If all are set, we can calculate sigma4
            # NOTE(review): longitude/latitude are only (re)assigned when a
            # new station starts, so subsequent rows reuse the station's
            # position — confirm every row of a station shares one position.
            if None not in (temp_val, salin_val, press_val):
                try:
                    sigma4 = gsw.density.sigma4(
                        gsw.conversions.SA_from_SP(
                            salin_val,
                            press_val,
                            longitude,
                            latitude,
                        ),
                        temp_val,
                    )
                    depth.sigma4 = sigma4
                    depth.save()
                except Exception as e:
                    m = "Line {}, Error {}".format(
                        i,
                        str(e),
                    )
                    self._messages = [m]
                    self._write_messages(append=True, save=True)
                    raise e
            # Apply sql on every 500 line, so memory is not exhausted
            if line_no % 500 == 0 and value_list:
                DataValue.objects.bulk_create(value_list)
                value_list = []
        # Save data values
        if value_list:
            DataValue.objects.bulk_create(value_list)
        self._write_messages()
        self.import_finnished = timezone.now()
        self.save()
        return True
class Migration(migrations.Migration):
    """Create RasterModel and RasterRelatedModel.

    Both models declare ``required_db_features = ['supports_raster']``, so
    they are only created on raster-capable database backends.
    """

    dependencies = [
        ("rasterapp", "0001_setup_extensions"),
    ]

    operations = [
        migrations.CreateModel(
            name="RasterModel",
            fields=[
                (
                    "id",
                    models.AutoField(
                        auto_created=True,
                        primary_key=True,
                        serialize=False,
                        verbose_name="ID",
                    ),
                ),
                (
                    "rast",
                    models.fields.RasterField(
                        blank=True,
                        null=True,
                        srid=4326,
                        verbose_name="A Verbose Raster Name",
                    ),
                ),
                (
                    # Raster stored in a projected coordinate system (3086)
                    # rather than the geographic 4326 used by ``rast``.
                    "rastprojected",
                    models.fields.RasterField(
                        null=True,
                        srid=3086,
                        verbose_name="A Projected Raster Table",
                    ),
                ),
                ("geom", models.fields.PointField(null=True, srid=4326)),
            ],
            options={
                "required_db_features": ["supports_raster"],
            },
        ),
        migrations.CreateModel(
            name="RasterRelatedModel",
            fields=[
                (
                    "id",
                    models.AutoField(
                        auto_created=True,
                        primary_key=True,
                        serialize=False,
                        verbose_name="ID",
                    ),
                ),
                (
                    "rastermodel",
                    models.ForeignKey(
                        on_delete=deletion.CASCADE,
                        to="rasterapp.rastermodel",
                    ),
                ),
            ],
            options={
                "required_db_features": ["supports_raster"],
            },
        ),
    ]
class Occurrence_test(models.Model):
    """Test/staging mapping of a GBIF occurrence record.

    Mirrors the full GBIF CSV column set (including the verbatim_*
    columns) so that raw rows can be validated, cast to the proper
    Python types and inserted. See ``Occurrence`` for the trimmed-down
    production model.
    """

    # Shared varchar length presets used by the CharFields below.
    chars = {'l1': 15, 'l2': 15, 'l3': 25, 'l4': 100, 'l5': 60, 'l6': 70, 'l7': 100}

    id = models.AutoField(primary_key=True, db_column="id_gbif")
    dataset_id = models.CharField(db_index=True, max_length=chars['l5'], blank=True, null=True)
    institution_code = models.CharField(db_index=True, max_length=chars['l1'], blank=True, null=True)
    collection_code = models.CharField(db_index=True, max_length=chars['l1'], blank=True, null=True)
    catalog_number = models.CharField(db_index=True, max_length=chars['l2'], blank=True, null=True)
    basis_of_record = models.CharField(db_index=True, max_length=chars['l2'], blank=True, null=True)
    scientific_name = models.CharField(db_index=True, max_length=chars['l7'], blank=True, null=True)
    scientific_name_author = models.CharField(db_index=True, max_length=chars['l4'], blank=True, null=True)
    taxon_id = models.IntegerField(blank=True, null=True)
    kingdom = models.CharField(db_index=True, max_length=chars['l2'], blank=True, null=True)
    phylum = models.CharField(db_index=True, max_length=chars['l3'], blank=True, null=True)
    # Leading underscores avoid clashing with the Python builtins/keywords.
    _class = models.CharField(db_index=True, max_length=chars['l3'], blank=True, null=True)
    _order = models.CharField(db_index=True, max_length=chars['l3'], blank=True, null=True)
    family = models.CharField(db_index=True, max_length=chars['l3'], blank=True, null=True)
    genus = models.CharField(db_index=True, max_length=chars['l3'], blank=True, null=True)
    specific_epithet = models.CharField(db_index=True, max_length=chars['l4'], blank=True, null=True)
    kingdom_id = models.IntegerField(db_index=True, blank=True, null=True)
    phylum_id = models.IntegerField(db_index=True, blank=True, null=True)
    class_id = models.IntegerField(db_index=True, blank=True, null=True)
    order_id = models.IntegerField(db_index=True, blank=True, null=True)
    family_id = models.IntegerField(db_index=True, blank=True, null=True)
    genus_id = models.IntegerField(db_index=True, blank=True, null=True)
    species_id = models.IntegerField(db_index=True, blank=True, null=True)
    country_code = models.CharField(db_index=True, max_length=7, blank=True, null=True)
    latitude = models.FloatField(db_index=True, blank=True, null=True)
    longitude = models.FloatField(db_index=True, blank=True, null=True)
    year = models.IntegerField(db_index=True, blank=True, null=True)
    month = models.IntegerField(db_index=True, blank=True, null=True)
    event_date = models.DateTimeField(db_index=True, blank=True, null=True)
    elevation_in_meters = models.FloatField(db_index=True, blank=True, null=True)
    depth_in_meters = models.FloatField(db_index=True, blank=True, null=True)
    verbatim_scientific_name = models.CharField(db_index=True, max_length=chars['l5'], blank=True, null=True)
    taxon_rank = models.IntegerField(db_index=True, blank=True, null=True)
    verbatim_kingdom = models.CharField(db_index=True, max_length=chars['l3'], blank=True, null=True)
    verbatim_phylum = models.CharField(db_index=True, max_length=chars['l3'], blank=True, null=True)
    verbatim_class = models.CharField(db_index=True, max_length=chars['l3'], blank=True, null=True)
    verbatim_order = models.CharField(db_index=True, max_length=chars['l3'], blank=True, null=True)
    verbatim_family = models.CharField(db_index=True, max_length=chars['l3'], blank=True, null=True)
    verbatim_genus = models.CharField(db_index=True, max_length=chars['l3'], blank=True, null=True)
    verbatim_specific_epithet = models.CharField(db_index=True, max_length=chars['l3'], blank=True, null=True)
    verbatim_infraspecific_epithet = models.CharField(db_index=True, max_length=chars['l3'], blank=True, null=True)
    verbatim_latitude = models.FloatField(db_index=True, blank=True, null=True)
    verbatim_longitude = models.FloatField(db_index=True, blank=True, null=True)
    coordinate_precision = models.FloatField(db_index=True, blank=True, null=True)
    maximum_elevation_in_meters = models.FloatField(db_index=True, blank=True, null=True)
    minimum_elevation_in_meters = models.FloatField(db_index=True, blank=True, null=True)
    elevation_precision = models.FloatField(db_index=True, blank=True, null=True)
    minimum_depth_in_meters = models.FloatField(db_index=True, blank=True, null=True)
    maximum_depth_in_meters = models.FloatField(db_index=True, blank=True, null=True)
    depth_precision = models.FloatField(db_index=True, blank=True, null=True)
    continent_ocean = models.FloatField(db_index=True, blank=True, null=True)
    state_province = models.CharField(db_index=True, max_length=chars['l5'], blank=True, null=True)
    county = models.CharField(db_index=True, max_length=chars['l5'], blank=True, null=True)
    country = models.CharField(db_index=True, max_length=chars['l5'], blank=True, null=True)
    recorded_by = models.CharField(db_index=True, max_length=chars['l5'], blank=True, null=True)
    locality = models.CharField(db_index=True, max_length=chars['l6'], blank=True, null=True)
    verbatim_year = models.IntegerField(db_index=True, blank=True, null=True)
    verbatim_month = models.IntegerField(db_index=True, blank=True, null=True)
    day = models.IntegerField(db_index=True, blank=True, null=True)
    verbatim_basis_of_record = models.CharField(db_index=True, max_length=chars['l4'], blank=True, null=True)
    identified_by = models.CharField(db_index=True, max_length=chars['l6'], blank=True, null=True)
    date_identified = models.DateTimeField(db_index=True, blank=True, null=True)
    created = models.DateTimeField(db_index=True, blank=True, null=True)
    modified = models.DateTimeField(db_index=True, blank=True, null=True)
    geom = models.PointField()
    objects = models.GeoManager()

    def __repr__(self):
        """Short string representation of the occurrence."""
        return "<GBIF/: Occurrence %s --%s />" % (self.id, self.scientific_name)

    def getfullDescription(self):
        """Return a full "<field: value>" dump of every model field.

        Uses ``"".join`` instead of ``reduce(lambda x, y: x + y, ...)``:
        it is linear instead of quadratic and needs no import on Python 3.
        """
        fields = self._meta.get_all_field_names()
        cadena = ["<GBIF/: Occurrence %s --%s />\n" % (self.id, self.scientific_name)]
        for f in fields:
            cadena.append("\t < %s: %s />\n" % (f, getattr(self, f)))
        return "".join(cadena)

    def preprocess(self):
        """This function preprocess the entry (originally from a CSV
        format) and cast it properly into the datatype specified by the
        model.

        Returns:
            String

        NOTE(review): this is still an unimplemented stub; the cast work
        is currently done in :meth:`validateNfix`.
        """
        # build the object as is, respecting the order of the fields.
        keys = self._meta.get_all_field_names()

    def validateNfix(self):
        """Validate (and fix, if possible) each field value against its
        model field type, casting strings to Float/Integer/Date.

        Returns:
            bool: True if every field validated (or was fixed), False
            otherwise. Failures are written to the log.
        """
        fields = self._meta.get_all_field_names()
        status = []
        for f in fields:
            internalType = self._meta.get_field_by_name(f)[0].get_internal_type()
            if isinstance(getattr(self, f), str) and ('Char' not in internalType):
                msg = "Non character type found in: %s but need to be: %s \n Casting feature..." % (getattr(self, f), internalType)
                logger_ins.info(msg)
                if 'Float' in internalType:
                    try:
                        setattr(self, f, float(getattr(self, f)))
                        status.append(True)
                    except (TypeError, ValueError):
                        # Empty/None values fall back to the configured
                        # null flag; non-empty garbage is left untouched
                        # (unchanged from the original behaviour).
                        if not getattr(self, f):
                            setattr(self, f, settings.NULL_DATA_FLOAT)
                            msg = "Cannot cast null data to Float type. Value changed to: %s" % settings.NULL_DATA_FLOAT
                            logger_ins.warning(msg)
                            status.append(True)
                elif 'Integer' in internalType:
                    try:
                        setattr(self, f, int(getattr(self, f)))
                        status.append(True)
                    except (TypeError, ValueError):
                        if not getattr(self, f):
                            setattr(self, f, settings.NULL_DATA_INTEGER)
                            msg = "Cannot cast null data to Integer type. Value changed to: %s" % settings.NULL_DATA_INTEGER
                            logger_ins.warning(msg)
                            status.append(True)
                elif 'Date' in internalType:
                    try:
                        datestr = getattr(self, f).replace('\n', '')
                        setattr(self, f, dateutil.parser.parse(datestr))
                        status.append(True)
                    except Exception:
                        msg = "Cannot convert string to dateformat for this record: %s" % self.getfullDescription()
                        logger_ins.error(msg)
                        status.append(False)
            else:
                msg = "Validation for %s complete" % self
                logger_ins.debug(msg)
        # all() is the idiomatic multi-dimensional "and"; unlike
        # reduce(lambda x, y: x and y, ...) it also tolerates an empty list.
        isvalid = all(status)
        return isvalid

    def insertOccurrence(self):
        """Insert the occurrence in the table if validation succeeds."""
        isvalid = self.validateNfix()
        if isvalid:
            pnt = Point(self.longitude, self.latitude, srid=4326)
            self.geom = pnt
            try:
                self.save()
            except utils.DataError:
                desc = self.getfullDescription()
                msg = "Data did not fit with in the varchars limits. \n Description: %s" % desc
                logger_ins.critical(msg)
class Occurrence(models.Model):
    """
    .. _gbif.models.occurrece:

    Base class mapping a GBIF Occurrence (and further taxonomic
    aggregates) onto the spatially-enabled (PostGIS) database. The
    ``chars`` dict holds the field string lengths used when populating
    the table from a standard GBIF CSV.

    Attributes
    ----------
    id : int
        Identification value of each occurrence. Unique to any element
        of the GBIF dataset (db column ``id_gbif``).
    dataset_id : str
        Identification of the collection (currently not used).
    institution_code : str
        Institution responsible for storing, capturing or recording the
        occurrence.
    collection_code : str
        Identification of the collection (currently not used).
    catalog_number : str
        Identification for catalog number.
    basis_of_record : str
        Unknown value.
    scientific_name : str
        Species name in the binomial nomenclature.
    kingdom, phylum, _class, _order, family, genus, specific_epithet : str
        Taxonomic names this occurrence belongs to (leading underscores
        avoid the Python builtin/keyword names).
    kingdom_id, phylum_id, class_id, order_id, family_id, genus_id, species_id : int
        Identification numbers for the belonging taxa (indexed).
    country_code : str
        String representing the country's code.
    latitude, longitude : float
        Coordinates in WGS84 (degrees).
    year, month : int
        Year / month of record.
    event_date : datetime
        Timestamp of record.
    state_province : str
        Name of state or province.
    county, country : str
        Name of county / country.
    geom : Point
        Geometric value in WKB.
    objects : models.GeoManager
        Wrapper for GeoDjango.
    """

    # Shared varchar length presets used by the CharFields below.
    chars = {'l1': 15, 'l2': 15, 'l3': 25, 'l4': 100, 'l5': 60, 'l6': 70, 'l7': 100}

    id = models.AutoField(primary_key=True, db_column="id_gbif")
    dataset_id = models.CharField(db_index=True, max_length=chars['l5'], blank=True, null=True)
    institution_code = models.CharField(db_index=True, max_length=chars['l1'], blank=True, null=True)
    collection_code = models.CharField(db_index=True, max_length=chars['l1'], blank=True, null=True)
    catalog_number = models.CharField(db_index=True, max_length=chars['l2'], blank=True, null=True)
    basis_of_record = models.CharField(db_index=True, max_length=chars['l2'], blank=True, null=True)
    scientific_name = models.CharField(db_index=True, max_length=chars['l7'], blank=True, null=True)
    kingdom = models.CharField(db_index=True, max_length=chars['l2'], blank=True, null=True)
    phylum = models.CharField(db_index=True, max_length=chars['l3'], blank=True, null=True)
    _class = models.CharField(db_index=True, max_length=chars['l3'], blank=True, null=True)
    _order = models.CharField(db_index=True, max_length=chars['l3'], blank=True, null=True)
    family = models.CharField(db_index=True, max_length=chars['l3'], blank=True, null=True)
    genus = models.CharField(db_index=True, max_length=chars['l3'], blank=True, null=True)
    specific_epithet = models.CharField(db_index=True, max_length=chars['l4'], blank=True, null=True)
    kingdom_id = models.IntegerField(db_index=True, blank=True, null=True)
    phylum_id = models.IntegerField(db_index=True, blank=True, null=True)
    class_id = models.IntegerField(db_index=True, blank=True, null=True)
    order_id = models.IntegerField(db_index=True, blank=True, null=True)
    family_id = models.IntegerField(db_index=True, blank=True, null=True)
    genus_id = models.IntegerField(db_index=True, blank=True, null=True)
    species_id = models.IntegerField(db_index=True, blank=True, null=True)
    country_code = models.CharField(db_index=True, max_length=7, blank=True, null=True)
    latitude = models.FloatField(db_index=True, blank=True, null=True)
    longitude = models.FloatField(db_index=True, blank=True, null=True)
    year = models.IntegerField(db_index=True, blank=True, null=True)
    month = models.IntegerField(db_index=True, blank=True, null=True)
    event_date = models.DateTimeField(db_index=True, blank=True, null=True)
    state_province = models.CharField(db_index=True, max_length=chars['l5'], blank=True, null=True)
    county = models.CharField(db_index=True, max_length=chars['l5'], blank=True, null=True)
    country = models.CharField(db_index=True, max_length=chars['l5'], blank=True, null=True)
    geom = models.PointField()
    objects = models.GeoManager()
    # NOTE: the remaining GBIF CSV columns (scientific_name_author,
    # taxon_id, elevation/depth measurements, the verbatim_* columns,
    # coordinate/elevation/depth precisions, continent_ocean, recorded_by,
    # locality, day, verbatim year/month, verbatim_basis_of_record,
    # identified_by, date_identified, created, modified) are mapped in
    # ``Occurrence_test`` but intentionally not mapped here.

    class Meta:
        managed = False
        # remote server table name
        db_table = settings.GBIF_DATATABLE
        # db_table = "gbif_occurrence"     # Local table name
        # db_table = "mexico_gbif_subset"

    def __unicode__(self):
        """
        .. String representation of Occurrence

        Returns
        -------
        info : string
            Name
        """
        # NOTE(review): "Location: s<" below looks like a truncated "%s"
        # placeholder (its argument, self.geom, is commented out). Kept
        # byte-identical to preserve existing output.
        return u'<GBIF Occurrence: %s scientific_name: %s>\n Kingdom: %s \n,\t Phylum: %s \n,\t \t Order: %s,\n \t \t \t Class: %s, \n \t \t \t \t Family: %s, \n \t \t \t \t \t Location: s<\GBIF Occurrence>' % (self.id, self.scientific_name, self.kingdom, self.phylum, self._order, self._class, self.family)  # ,self.geom)

    def getfullDescription(self):
        """
        .. Retrieves the total description of the fields for this registry.

        Returns
        -------
        info : string
            The information of all fields. Good for exporting raw data to CSV.
        """
        fields = self._meta.get_all_field_names()
        cadena = ["<GBIF/: Occurrence %s --%s />\n" % (self.id, self.scientific_name)]
        for f in fields:
            cadena.append("\t < %s: %s />\n" % (f, getattr(self, f)))
        # "".join is linear and works without importing functools.reduce.
        return "".join(cadena)
# Test fixture combining two concrete parent models; exercises Django's
# multi-table multiple inheritance.
class DummyMultipleInheritanceModel(DummyDefaultFieldsModel, Person):
    # Explicit AutoField PK avoids the ambiguity of inheriting a primary
    # key from two concrete parents.
    my_id = models.AutoField(primary_key=True)
    my_dummy_field = models.IntegerField()
class Aquifer(AuditModel):
    """
    An underground layer of water-bearing permeable rock, rock fractures or
    unconsolidated materials (gravel, sand, or silt), from which groundwater
    is extracted using a water well.

    This table holds ONLY the aquifers to which we have associated one or
    more wells. It is not the definitive source of all aquifers in the
    province.

    Note on db_comments: db_comment properties on model columns are
    overriden by the db_column_supplemental_comments provided below.
    db_column_supplemental_comments provides an easier way for the DA to
    add/update comments in bulk.
    """
    aquifer_id = models.AutoField(
        primary_key=True,
        verbose_name="Aquifer ID Number",
        db_comment=
        ('System generated unique sequential number assigned to each mapped aquifer. The'
         ' aquifer_id identifies which aquifer a well is in. An aquifer can have multiple'
         ' wells, while a single well can only be in one aquifer.'))
    aquifer_name = models.CharField(
        max_length=100,
        blank=True,
        null=True,
        db_comment=
        ('Name assigned for a specific aquifer. Typically derived from geographic names or names '
         'in common use, but may also be lithologic or litho-stratigraphic units, e.g. '
         'Abbotsford-Sumas, McDougall Creek Deltaic.'))
    location_description = models.CharField(
        max_length=100,
        blank=True,
        null=True,
        verbose_name='Description of Location',
        db_comment=
        ('Brief description of the geographic location of the aquifer. The description is usually '
         'referenced to a nearby major natural geographic area or community, e.g., Grand Forks.'
         ))
    material = models.ForeignKey(
        AquiferMaterial,
        db_column='aquifer_material_code',
        blank=True,
        null=True,
        on_delete=models.PROTECT,
        verbose_name="Material Reference",
        related_name='aquifers',
        db_comment=
        ('Code for valid options for the broad grouping of geological material found in the'
         ' aquifer, i.e. SG, S, G, B'))
    subtype = models.ForeignKey(
        AquiferSubtype,
        db_column='aquifer_subtype_code',
        blank=True,
        null=True,
        on_delete=models.PROTECT,
        verbose_name="Subtype Reference",
        related_name='aquifers',
        db_comment=
        ('Categorizes an aquifer based on how it was formed geologically (depositional'
         ' description). Understanding of how aquifers were formed governs important'
         ' attributes such as their productivity, vulnerability to contamination as well as'
         ' proximity and likelihood of hydraulic connection to streams. The code value is a'
         ' combination of an aquifer type represented by a number and an optional letter'
         ' representing a more specific aquifer sub-type. E.g. 1a, 2, 6a.'))
    area = models.DecimalField(
        max_digits=5,
        decimal_places=1,
        blank=True,
        null=True,
        verbose_name='Size (square km)',
        db_comment='Approximate size of the aquifer in square kilometers.')
    vulnerability = models.ForeignKey(
        AquiferVulnerabilityCode,
        # TODO: Spelling mistake below!
        db_column='aquifer_vulnerablity_code',
        blank=True,
        null=True,
        on_delete=models.PROTECT,
        verbose_name="Aquifer Vulnerabiliy",
        db_comment=
        ('Standard terms used to define an aquifer’s relative intrinsic vulnerability to'
         ' impacts from human activities on the land surface. Vulnerability is based on: the'
         ' type, thickness, and extent of geologic materials above the aquifer, depth to'
         ' water table (or to top of confined aquifer), and type of aquifer materials, i.e.,'
         ' Low, Moderate, High.'))
    productivity = models.ForeignKey(
        AquiferProductivity,
        db_column='aquifer_productivity_code',
        blank=True,
        null=True,
        on_delete=models.PROTECT,
        verbose_name="Productivity Reference",
        related_name='aquifers',
        db_comment=
        ('Valid code for the aquifer\'s productivity, which represent an aquifers ability to'
         ' transmit and yield groundwater; i.e., L, M, H'))
    demand = models.ForeignKey(
        AquiferDemand,
        db_column='aquifer_demand_code',
        blank=True,
        null=True,
        on_delete=models.PROTECT,
        verbose_name="Demand Reference",
        related_name='aquifers',
        db_comment=
        ('Describes the level of groundwater use at the time aquifer was mapped; i.e., High,'
         ' Moderate, Low.'))
    known_water_use = models.ForeignKey(
        WaterUse,
        db_column='water_use_code',
        blank=True,
        null=True,
        on_delete=models.PROTECT,
        verbose_name="Known Water Use Reference",
        related_name='aquifers',
        db_comment=
        ('Standard terms that define the type of known water use of an aquifer at the time of'
         ' mapping. It indicates the variability or diversity of uses of the aquifer water as'
         ' a supply source. I.e. Domestic, Multiple, Potential Domestic'))
    quality_concern = models.ForeignKey(
        QualityConcern,
        db_column='quality_concern_code',
        blank=True,
        null=True,
        on_delete=models.PROTECT,
        verbose_name="Quality Concern Reference",
        related_name='aquifers',
        db_comment=
        ('Standard terms used to represent the extent of documented concerns of contaminants'
         ' in the aquifer at the time of mapping. i.e. isloated, local, regional, none.'
         ))
    litho_stratographic_unit = models.CharField(
        max_length=100,
        blank=True,
        null=True,
        verbose_name='Lithographic Stratographic Unit',
        db_comment=
        ('Permeable geologic unit (where available) that comprises the aquifer. It is typically '
         'either; the era of deposition, the name of a specific formation and/or the broad '
         'material types, e.g., Paleozoic to Mesozoic Era, Cache Creek Complex, Intrusive Rock.'
         ))
    # NOTE(review): timezone.now().year is evaluated once at import time,
    # so in a long-running process the max allowed mapping_year is frozen
    # at process start. A callable limit_value would fix this but cannot
    # be serialized into migrations as a lambda — confirm before changing.
    mapping_year = models.PositiveIntegerField(
        validators=[
            MinValueValidator(1990),
            MaxValueValidator(timezone.now().year)
        ],
        blank=True,
        null=True,
        verbose_name="Date of Mapping",
        help_text="Use the following format: <YYYY>",
        db_comment='The year the aquifer was initially mapped or last updated.'
    )
    notes = models.TextField(
        max_length=2000,
        blank=True,
        null=True,
        verbose_name='Notes on Aquifer, for internal use only.',
        db_comment=
        ('Details about the mapped aquifer that the province deems important to maintain such as'
         ' local knowledge about the aquifer or decisions for changes related to attributes of'
         ' the mapped aquifer.'))
    effective_date = models.DateTimeField(
        default=timezone.now,
        null=False,
        db_comment='The date and time that the aquifer became published.')
    expiry_date = models.DateTimeField(
        default=timezone.make_aware(timezone.datetime.max,
                                    timezone.get_default_timezone()),
        null=False,
        db_comment=
        'The date and time after which the aquifer became unpublished.')
    retire_date = models.DateTimeField(
        default=timezone.make_aware(timezone.datetime.max,
                                    timezone.get_default_timezone()),
        null=False,
        db_comment=
        'The date and time after which the aquifer is considered to be retired'
    )
    geom = models.MultiPolygonField(srid=3005, null=True)
    # This version is pre-rendered in WGS 84 for display on web-maps.
    # Only used by the v1 API
    geom_simplified = models.MultiPolygonField(srid=4326, null=True)
    history = GenericRelation(Version)

    @property
    def status_retired(self):
        """True once the retire date has passed."""
        return timezone.now() > self.retire_date

    @property
    def status_draft(self):
        """True until the aquifer's effective (publish) date."""
        return timezone.now() < self.effective_date

    @property
    def status_published(self):
        """True between the effective and expiry dates."""
        now = timezone.now()
        return now >= self.effective_date and now < self.expiry_date

    @property
    def status_unpublished(self):
        """True once the expiry date has passed.

        Bug fix: the original referenced an undefined local ``now`` and
        raised NameError whenever this property was evaluated.
        """
        return timezone.now() >= self.expiry_date

    def load_shapefile(self, f):
        """
        Given a shapefile with a single feature, update spatial fields of the
        aquifer. You must still call aquifer.save() afterwards.

        Raises Aquifer.BadShapefileException for corrupt zips or zips
        containing no .shp member.
        """
        try:
            zip_ref = zipfile.ZipFile(f)
        except zipfile.BadZipFile as e:
            raise Aquifer.BadShapefileException(str(e))

        ret = zip_ref.testzip()
        if ret is not None:
            raise Aquifer.BadShapefileException("Bad zipfile, info: %s" % ret)

        the_shapefile = None
        output_dir = tempfile.mkdtemp()
        for item in zip_ref.namelist():
            # Extract every member; remember the (last) .shp path found.
            zip_ref.extract(item, output_dir)
            if item.endswith('.shp'):
                the_shapefile = os.path.join(output_dir, item)
                # break
        zip_ref.close()
        if the_shapefile is None:
            raise Aquifer.BadShapefileException(
                "Bad zipfile. No shapefile found.")

        ds = DataSource(the_shapefile)
        self.update_geom_from_feature(ds[0][0])

    def update_geom_from_feature(self, feat):
        """
        Given a spatial feature with Geometry, update spatial fields of the
        aquifer. You must still call aquifer.save() afterwards.
        """
        geom = feat.geom
        if not geom.srid:
            raise Aquifer.BadShapefileException(
                "Shapefile contains no projection information")

        # Make a GEOSGeometry object using the string representation.
        # Eliminate any 3d geometry so it fits in PostGIS' 2d geometry schema.
        wkt = wkt_w(dim=2).write(GEOSGeometry(geom.wkt, srid=geom.srid)).decode()
        geos_geom = GEOSGeometry(wkt, srid=geom.srid)
        geos_geom.transform(3005)

        # Convert plain Polygons to MultiPolygons.
        if isinstance(geos_geom, geos.MultiPolygon):
            geos_geom_out = geos_geom
        elif isinstance(geos_geom, geos.Polygon):
            geos_geom_out = MultiPolygon(geos_geom)
        else:
            raise Aquifer.BadShapefileException(
                "Bad geometry type: {}, skipping.".format(geos_geom.__class__))

        self.geom = geos_geom_out

    class Meta:
        db_table = 'aquifer'
        ordering = ['aquifer_id']
        verbose_name_plural = 'Aquifers'
        db_table_comment = (
            'A geological formation, a group of geological formations, or a part of one or more '
            'geological formations that is groundwater bearing and capable of storing, '
            'transmitting and yielding groundwater.')

    class BadShapefileException(Exception):
        pass

    def __str__(self):
        return '{} - {}'.format(self.aquifer_id, self.aquifer_name)

    db_column_supplemental_comments = {
        "aquifer_demand_code": "Describes the level of groundwater use at the time the aquifer was mapped; i.e., High, Moderate, Low.",
        "aquifer_id": "System generated sequential number assigned to each aquifer. It is widely used by groundwater staff as it is the only consistent unique identifier for a mapped aquifer. It is also commonly referred to as Aquifer Number.",
        "aquifer_material_code": "Describes the broad grouping of geological material found in the aquifer, i.e., Sand and Gravel, Sand, Gravel, Bedrock",
        "aquifer_productivity_code": "Describes the aquifer's productivity which represent an aquifers ability to transmit and yield groundwater; i.e., Low, Moderate, High",
        "aquifer_subtype_code": "Categorizes an aquifer based on how it was formed geologically (depositional description). Understanding of how aquifers were formed governs important attributes such as their productivity, vulnerability to contamination as well as proximity and likelihood of hydraulic connection to streams. The code value is a combination of an aquifer type represented by a number and an optional letter representing a more specific aquifer sub-type. There are six major aquifer types, some with multiple subtypes. E.g. aquifer sub-type code 6b is comprised of the aquifer type number (6: Crystalline bedrock aquifers) and subtype letter (b) specifically described as: Fractured crystalline (igneous intrusive or metamorphic, meta-sedimentary, meta-volcanic, volcanic) rock aquifers. Code values range from 1a to 6b.",
        "aquifer_vulnerablity_code": "Describes an aquifer’s relative intrinsic vulnerability to impacts from human activities on the land surface. Vulnerability is based on: the type, thickness, and extent of geologic materials above the aquifer, depth to water table (or to top of confined aquifer), and type of aquifer materials, i.e., Low, Moderate, High.",
        "quality_concern_code": "Extent of documented concerns of contaminants in the aquifer at the time of mapping. i.e. isloated, local, regional, none.",
        "water_use_code": "Describes the type of known water use of an aquifer at the time of mapping. It indicates the variability or diversity of uses of the aquifer water as a supply source. I.e. Domestic, Multiple, Potential Domestic",
    }
class Locality(models.Model):
    """A GDB fossil locality with both verbatim (as-recorded) coordinate
    strings and a parsed geometry point."""

    locality_number = models.AutoField(primary_key=True)  # NOT NULL
    locality_field_number = models.CharField(null=True, blank=True, max_length=50)
    name = models.CharField(null=True, blank=True, max_length=50)  # Locality Name
    date_discovered = models.DateField(null=True, blank=True)
    formation = models.CharField(null=True, blank=True, max_length=50)  # Formation
    member = models.CharField(null=True, blank=True, max_length=50)
    NALMA = models.CharField(null=True, blank=True, max_length=50)
    survey = models.CharField(null=True, blank=True, max_length=50)
    quad_sheet = models.CharField(null=True, blank=True, max_length=50)
    verbatim_latitude = models.CharField(null=True, blank=True, max_length=50)  # Latitude
    verbatim_longitude = models.CharField(null=True, blank=True, max_length=50)  # Longitude
    verbatim_utm = models.CharField(null=True, blank=True, max_length=50)  # UTM
    verbatim_gps_coordinates = models.CharField(null=True, blank=True, max_length=50)  # GPS
    verbatim_elevation = models.IntegerField(null=True, blank=True)  # Elevation
    gps_date = models.DateField(null=True, blank=True, editable=True)
    resource_area = models.CharField(null=True, blank=True, max_length=50)
    notes = models.TextField(null=True, blank=True)
    cm_locality_number = models.IntegerField(null=True, blank=True)  # CM Loc #
    region = models.CharField(null=True, blank=True, max_length=50)
    blm_district = models.CharField(null=True, blank=True, max_length=50)
    county = models.CharField(null=True, blank=True, max_length=50)
    image = models.FileField(max_length=255, blank=True, upload_to="uploads/images/gdb", null=True)
    geom = models.GeometryField(srid=4326, blank=True, null=True)
    # Fix: auto_now and auto_now_add are mutually exclusive (Django system
    # check fields.E160). auto_now alone gives "last modified" semantics.
    date_last_modified = models.DateTimeField("Date Last Modified", auto_now=True)

    def __unicode__(self):
        """
        This method returns the locality number and name if both exist, or a
        string with just the locality number if there is no name.
        """
        if self.name:
            return str(self.locality_number) + "-" + self.name
        else:
            return str(self.locality_number)

    def update_geom_from_verbatim(self):
        """Convert the verbatim DMS coordinate strings ("deg min sec H",
        e.g. "41 33 20.6 N") into a WKT point string.

        Returns the WKT string, or None when either verbatim field is
        missing or not in the expected 4-token form. (The original
        crashed with AttributeError when only latitude was set.)
        """
        if self.verbatim_latitude is None or self.verbatim_longitude is None:
            return None
        lat_string = self.verbatim_latitude.split()
        lon_string = self.verbatim_longitude.split()
        if len(lat_string) == 4 and len(lon_string) == 4:
            lat_dd = (float(lat_string[2]) / 60 + float(lat_string[1])) / 60 + float(lat_string[0])
            if lat_string[3] == "S":
                lat_dd *= -1  # southern latitudes should be negative
            lon_dd = (float(lon_string[2]) / 60 + float(lon_string[1])) / 60 + float(lon_string[0])
            if lon_string[3] == "W":
                lon_dd *= -1  # western longitudes should be negative
            return "POINT (" + str(lon_dd) + " " + str(lat_dd) + ")"
        return None

    def point_x(self):
        """Second geometry coordinate, or 0 when geom is missing/invalid."""
        try:
            return self.geom.coords[1]
        except (AttributeError, IndexError, TypeError):
            return 0

    def point_y(self):
        """First geometry coordinate, or 0 when geom is missing/invalid."""
        try:
            return self.geom.coords[0]
        except (AttributeError, IndexError, TypeError):
            return 0

    def easting(self):
        """UTM easting derived from geom, or 0 on any conversion failure."""
        try:
            utmPoint = utm.from_latlon(self.geom.coords[1], self.geom.coords[0])
            return utmPoint[0]
        except Exception:  # geom may be None or outside UTM's valid range
            return 0

    def northing(self):
        """UTM northing derived from geom, or 0 on any conversion failure."""
        try:
            utmPoint = utm.from_latlon(self.geom.coords[1], self.geom.coords[0])
            return utmPoint[1]
        except Exception:  # geom may be None or outside UTM's valid range
            return 0

    class Meta:
        verbose_name_plural = "GDB Localities"