class Post(db.Model):
    """A blog post authored by a user.

    Each post belongs to exactly one ``User`` via ``user_id``.
    """
    id = db.Column(db.Integer, primary_key=True)
    title = db.Column(db.String(100), nullable=False)
    # BUG FIX: the original passed default=datetime.now(tz=None) — the call is
    # evaluated once at import time, freezing a single timestamp as the default
    # for every row ever inserted. Pass the callable so it runs per-insert.
    date_posted = db.Column(db.DateTime(), nullable=False, default=datetime.now)
    # Same fix; onupdate refreshes the column whenever the row is updated,
    # which is the evident intent of a "date_updated" field.
    date_updated = db.Column(db.DateTime(), nullable=False,
                             default=datetime.now, onupdate=datetime.now)
    content = db.Column(db.Text, nullable=False)
    # Relationship to users (User.id).
    user_id = db.Column(db.Integer, db.ForeignKey('user.id'), nullable=False)

    def __repr__(self):
        # BUG FIX: the label previously said "User" for a Post instance.
        return f"Post('{self.title}', '{self.date_posted}')"
class Person(db.Model):
    """OMOP-CDM-style ``person`` table: one row per individual, with
    demographic concept ids and their source values."""

    __tablename__ = 'person'

    # Identity and demographic concept columns. All are nullable (default
    # None) except the primary key.
    person_id = db.Column(db.Integer(), primary_key=True)
    gender_concept_id = db.Column(db.Integer(), default=None)
    year_of_birth = db.Column(db.Integer(), default=None)
    month_of_birth = db.Column(db.Integer(), default=None)
    day_of_birth = db.Column(db.Integer(), default=None)
    birth_datetime = db.Column(db.DateTime(), default=None)
    race_concept_id = db.Column(db.Integer(), default=None)
    ethnicity_concept_id = db.Column(db.Integer(), default=None)

    # Links to other CDM tables (plain integer columns here, no FK declared).
    location_id = db.Column(db.Integer(), default=None)
    provider_id = db.Column(db.Integer(), default=None)
    care_site_id = db.Column(db.Integer(), default=None)

    # Raw source values and their mapped concept ids.
    person_source_value = db.Column(db.String(), default=None)
    gender_source_value = db.Column(db.String(), default=None)
    gender_source_concept_id = db.Column(db.Integer(), default=None)
    race_source_value = db.Column(db.String(), default=None)
    race_source_concept_id = db.Column(db.Integer(), default=None)
    ethnicity_source_value = db.Column(db.String(), default=None)
    ethnicity_source_concept_id = db.Column(db.Integer(), default=None)

    def __repr__(self):
        return f"Person('{self.person_id}', '{self.birth_datetime}', '{self.race_concept_id}', '{self.ethnicity_concept_id}')"
class VisitOccurrence(db.Model):
    """OMOP-CDM-style ``visit_occurrence`` table: one row per patient visit,
    with its time span, type, and admission/discharge details."""

    __tablename__ = 'visit_occurrence'

    visit_occurrence_id = db.Column(db.Integer(), primary_key=True)
    person_id = db.Column(db.Integer(), default=None)
    visit_concept_id = db.Column(db.Integer(), default=None)

    # Visit time span, both as plain dates and full datetimes.
    visit_start_date = db.Column(db.Date(), default=None)
    visit_start_datetime = db.Column(db.DateTime(), default=None)
    visit_end_date = db.Column(db.Date(), default=None)
    visit_end_datetime = db.Column(db.DateTime(), default=None)

    visit_type_concept_id = db.Column(db.Integer(), default=None)
    provider_id = db.Column(db.Integer(), default=None)
    care_site_id = db.Column(db.Integer(), default=None)

    # Source values / concepts for the visit itself.
    visit_source_value = db.Column(db.Integer(), default=None)
    visit_source_concept_id = db.Column(db.Integer(), default=None)

    # Admission and discharge details.
    admitting_source_concept_id = db.Column(db.Integer(), default=None)
    admitting_source_value = db.Column(db.String(), default=None)
    discharge_to_concept_id = db.Column(db.Integer(), default=None)
    discharge_to_source_value = db.Column(db.String(), default=None)

    # Link to the visit that immediately preceded this one, if any.
    preceding_visit_occurrence_id = db.Column(db.Integer(), default=None)
class Post(db.Model):
    """A user's analysis result post: stores the generated images and the
    textual results of one run, stamped with its creation time."""

    id = db.Column(db.Integer, primary_key=True)
    user_id = db.Column(db.Integer, db.ForeignKey('user.id'), nullable=False)
    post_name = db.Column(db.String(100))

    # Generated figures (stored as text, e.g. encoded image data or paths —
    # the exact encoding is decided by the caller).
    epoch_image = db.Column(db.Text)
    hypnogram_image = db.Column(db.Text)
    pie_image = db.Column(db.Text)

    # Textual result payloads.
    result1 = db.Column(db.Text)
    result2 = db.Column(db.Text)

    # Creation timestamp; the callable is evaluated per-insert.
    post_time = db.Column(db.DateTime(), index=True, default=datetime.datetime.now)
class Death(db.Model):
    """OMOP-CDM-style ``death`` table: at most one row per person, recording
    when and (optionally) why the person died."""

    __tablename__ = 'death'

    # person_id doubles as the primary key: one death record per person.
    person_id = db.Column(db.Integer(), primary_key=True)
    death_date = db.Column(db.Date(), default=None)
    death_datetime = db.Column(db.DateTime(), default=None)
    death_type_concept_id = db.Column(db.Integer(), default=None)

    # Cause of death, both as mapped concepts and raw source values.
    cause_concept_id = db.Column(db.Integer(), default=None)
    cause_source_value = db.Column(db.String(), default=None)
    cause_source_concept_id = db.Column(db.Integer(), default=None)

    def __repr__(self):
        return f"Death('{self.person_id}', '{self.death_datetime}', '{self.death_type_concept_id}')"
class Post(db.Model):
    """A blog post written by a user (``user.id`` foreign key)."""

    id = db.Column(db.Integer, primary_key=True)
    title = db.Column(db.String(100), nullable=False)
    # The callable (not a call) is passed, so the timestamp is taken per-insert.
    date_posted = db.Column(db.DateTime(), nullable=False, default=datetime.utcnow)
    content = db.Column(db.Text, nullable=False)
    # 'user' here is the table of the User class — this references User.id.
    user_id = db.Column(db.Integer, db.ForeignKey('user.id'))

    def __repr__(self):
        return f"Post('{self.title}', '{self.date_posted}')"
class ChannelModel(db.Model, BaseModel):
    """ORM model for a seismic channel.

    A channel belongs to a station and a location, has a validity interval
    [start_time, stop_time], exactly two equipments (a datalogger and a
    sensor), and owns the seismic data files recorded on it.
    """

    # The name of the table at the data base.
    __tablename__ = TableNames.T_CHANNELS

    # The table columns.
    id = db.Column(db.String(16), primary_key=True)
    station_id = db.Column(db.String(16),
                           db.ForeignKey(TableNames.T_STATIONS + ".id"),
                           nullable=False)
    location_id = db.Column(db.String(16),
                            db.ForeignKey(TableNames.T_LOCATIONS + ".id"),
                            nullable=False)
    name = db.Column(db.String(5), nullable=False)
    latitude = db.Column(db.Float, nullable=False)
    longitude = db.Column(db.Float, nullable=False)
    elevation = db.Column(db.Float, nullable=False)
    depth = db.Column(db.Float, nullable=False)
    azimuth = db.Column(db.Float, nullable=False, default=0.0)
    dip = db.Column(db.Float, nullable=False, default=0.0)
    gain = db.Column(db.String(50), nullable=False)
    sample_rate = db.Column(db.Integer, nullable=False)
    dl_no = db.Column(db.String(16), nullable=False)
    sensor_number = db.Column(db.String(16), nullable=False)
    start_time: datetime = db.Column(db.DateTime(timezone=True), nullable=False)
    stop_time: datetime = db.Column(db.DateTime(timezone=True), nullable=True)

    # Relational fields: deleting a channel cascades to its equipment links
    # and its seismic data rows.
    equipments = db.relationship(RelationShip.CHANNEL_EQUIPMENTS, backref="channel",
                                 cascade="save-update, merge, delete", lazy=True)
    seismic_data = db.relationship(RelationShip.SEISMIC_DATA, backref="channel",
                                   cascade="save-update, merge, delete", lazy=True)

    def __repr__(self):
        # BUG FIX: the labels previously read "station_id" twice and "dl_no"
        # twice, and the values (gain, sample_rate, dl_no, sensor_number) were
        # passed in an order that did not match the labels. Labels and values
        # now line up one-to-one.
        return ("ChannelModel(id={}, station_id={}, location_id={}, name={}, "
                "gain={}, sample_rate={}, dl_no={}, sensor_number={}, "
                "start_time={}, stop_time={})"
                .format(self.id, self.station_id, self.location_id, self.name,
                        self.gain, self.sample_rate, self.dl_no,
                        self.sensor_number, self.start_time, self.stop_time))

    @property
    def start_time_utc(self):
        """The channel's start time converted to UTC."""
        return DateUtils.convert_datetime_to_utc(self.start_time)

    @property
    def stop_time_utc(self):
        """The channel's stop time converted to UTC."""
        return DateUtils.convert_datetime_to_utc(self.stop_time)

    @property
    def location(self) -> LocationModel:
        """The LocationModel referenced by ``location_id``."""
        return LocationModel.find_by_id(self.location_id)

    def to_dict(self):
        """Convert Channel into a dictionary, this way we can convert it to a
        JSON response.

        :return: A clean dictionary form of this model, extended with the
            channel's equipments, its number of data files and its location code.
        """
        # Convert columns to dict, then add the relational/derived fields.
        dict_representation = super().to_dict()
        dict_representation["equipments"] = [eqs.get_equipment().to_dict()
                                             for eqs in self.equipments]
        dict_representation["number_of_files"] = self.number_of_files()
        dict_representation["location_code"] = self.location.name
        return dict_representation

    def is_within_deltatime(self, start_time: datetime, stop_time: datetime):
        """True if [start_time, stop_time] lies inside this channel's
        [start_time, stop_time] interval (inclusive on both ends)."""
        return self.start_time <= start_time and self.stop_time >= stop_time

    def is_time_overlap(self, other):
        """Check whether this channel's time interval overlaps ``other``'s.

        :param other: Another ChannelModel (comparison with itself returns False).
        :raises CreateEntityError: If this channel's start and stop time are equal.
        :return: True if the two intervals overlap.
        """
        # Ignore the same object.
        if self.id == other.id:
            return False
        if self.start_time == self.stop_time:
            raise CreateEntityError(
                "Channel can't have the same start and stop time")
        # Standard half-open interval overlap test.
        return self.start_time < other.stop_time and other.start_time < self.stop_time

    def get_station(self) -> StationModel:
        """ Get the station in which this channel belongs.

        :return: The station from this channel.
        """
        return StationModel.find_by_id(self.station_id)

    def rename_data(self, pe: ProgressEvent = None):
        """ Rename all seismic data belonging to this channel.

        The renaming is delegated to each data's ``rename_mseed``; the whole
        operation runs under a per-channel lock.

        :param pe: (Optional) A progress bar event, updated with the
            percentage of files processed.
        """
        with LockById(self.id):
            data_list = self.seismic_data
            total = len(data_list)
            # ROBUSTNESS FIX: a channel with no data would previously divide
            # by zero on the first progress update.
            if total == 0:
                return
            processed = 0.
            for sd in data_list:
                sd.rename_mseed()
                processed += 1
                if pe:
                    pe.set_progress(processed * 100. / total)

    @classmethod
    def from_dict(cls, channel_dict: dict):
        """Build a ChannelModel from a dict, parsing the string time fields
        into datetimes."""
        channel: ChannelModel = super().from_dict(channel_dict)
        # Cast string dates to date format.
        start_time_str: str = channel_dict.get("start_time")
        stop_time_str: str = channel_dict.get("stop_time")
        channel.start_time = DateUtils.convert_string_to_datetime(start_time_str)
        channel.stop_time = DateUtils.convert_string_to_datetime(stop_time_str)
        return channel

    def creation_validation(self):
        """Validate this channel against existing channels with the same
        location and name.

        :raises CreateEntityError: If the time interval overlaps an existing
            channel's interval.
        """
        channels = self.find_by(location_id=self.location_id, name=self.name,
                                get_first=False)
        if channels:
            for channel in channels:
                if self.is_time_overlap(channel):
                    rd = channel.stop_time_utc.strftime("%d-%m-%Y, %H:%M:%S")
                    st = channel.start_time_utc.strftime("%d-%m-%Y, %H:%M:%S")
                    msg = "Time overlap with channel {} started at {} and stopped at {}". \
                        format(channel.name, st, rd)
                    raise CreateEntityError(msg)

    def add_equipment(self, equipment_id: str):
        """ Add equipment to this channel.

        Important: This will not be added to the database until channel is saved.

        :param equipment_id: The equipment id.
        """
        channel_eq = ChannelEquipmentsModel(channel_id=self.id,
                                            equipment_id=equipment_id)
        self.equipments.append(channel_eq)

    def _delete_equipments(self):
        """ This will remove all equipments for this channel at the database. """
        for eq in self.equipments:
            eq.delete()

    def add_equipments(self, equipments: List[EquipmentModel]):
        """ Add equipments to this channel.

        :param equipments: A list of exactly 2 equipments to be added
            (a datalogger and a sensor).
        :raises CreateEntityError: If the list does not contain exactly 2 items.
        """
        if equipments and len(equipments) == 2:
            for eq in equipments:
                self.add_equipment(equipment_id=eq.id)
        else:
            raise CreateEntityError(
                "Channel must have 2 equipments. A datalogger and a sensor.")

    def has_data(self):
        """True if at least one seismic data row belongs to this channel."""
        return bool(self.seismic_data)

    def number_of_files(self):
        """Number of seismic data files attached to this channel."""
        data = self.seismic_data
        return len(data) if data else 0

    def make_tar_file(self):
        """ Create a .tar file containing all mseed files that belong to this
        channel.

        :return: The .tar file path.
        """
        file_paths = [sd.file_path for sd in self.seismic_data]
        return file_utils.tar_files(file_paths)

    def bash_rsync_files(self, client_destine=None):
        """ Create a .sh file to rsync all mseed files that belong to this
        channel.

        :param client_destine: The client destination to rsync the files.
        :return: The .sh file path.
        """
        files_path = [sd.file_path for sd in self.seismic_data]
        return file_utils.create_rsync_bash(ConfigSSHWhiteDwarf.USER,
                                            ConfigSSHWhiteDwarf.REMOTE_IP,
                                            ConfigSSHWhiteDwarf.PSW,
                                            files_path, client_destine)

    @classmethod
    def create_channel(cls, channel_dict: dict):
        """Create, validate and save a new channel (with a fresh 16-char id)
        from its dictionary representation, including its equipments."""
        channel: ChannelModel = cls.from_dict(channel_dict)
        channel.id = app_utils.generate_id(16)
        # Validate creation (time overlap with sibling channels).
        channel.creation_validation()
        # Add equipments relational field.
        equipments = [EquipmentModel.from_dict(eq_dict)
                      for eq_dict in channel_dict.get("equipments")]
        channel.add_equipments(equipments=equipments)
        return channel.save()

    @classmethod
    def update(cls, channel_dict: dict):
        """ Update the current channel.

        Important: You must use save() to store it in the database.

        :param channel_dict: A dictionary representation of the channel.
        :return: The updated channel or None if the channel id is not valid.
        """
        channel: ChannelModel = cls.from_dict(channel_dict)
        valid_channel: ChannelModel = ChannelModel.find_by_id(channel.id)
        if not valid_channel:
            return None
        # Validate creation to check if there is time overlap.
        channel.creation_validation()
        # Copy all attributes from channel to valid_channel (overloaded <<).
        valid_channel << channel
        # Update equipments: drop the old links, then attach the new ones.
        equipments = [EquipmentModel.from_dict(eq_dict)
                      for eq_dict in channel_dict.get("equipments")]
        valid_channel._delete_equipments()
        valid_channel.add_equipments(equipments)
        return valid_channel
class SeismicDataModel(db.Model, BaseModel):
    """ORM model for one seismic (mseed) data file.

    A data row belongs to a channel and a target (storage) folder; the actual
    file lives at ``<target_folder.path>/<relative_path>/<filename>``.
    """

    # The name of the table at the data base.
    __tablename__ = TableNames.T_SEISMIC_DATA

    # The table columns.
    id = db.Column(db.String(16), primary_key=True)
    filename = db.Column(db.String(50), nullable=False)
    relative_path = db.Column(db.String(400), nullable=False)
    target_folder_id = db.Column(db.String(16),
                                 db.ForeignKey(TableNames.T_TARGET_FOLDERS + ".id"),
                                 nullable=False)
    start_time: datetime = db.Column(db.DateTime(timezone=True), nullable=False)
    stop_time: datetime = db.Column(db.DateTime(timezone=True), nullable=False)
    channel_id = db.Column(db.String(16),
                           db.ForeignKey(TableNames.T_CHANNELS + ".id"),
                           nullable=False)
    # Link rows to the files transferred for this data.
    file_data = db.relationship(RelationShip.FILE_DATA, backref="data",
                                cascade="save-update, merge, delete", lazy=True)

    def __repr__(self):
        return "SeismicDataModel(id={},filename={},relative_path={},target_folder_id={}, start_time={}, " \
               "stop_time={}, channel_id={})".format(self.id, self.filename,
                                                     self.relative_path,
                                                     self.target_folder_id,
                                                     self.start_time,
                                                     self.stop_time,
                                                     self.channel_id)

    @staticmethod
    def validate_path(path: str):
        """ This method will check if path exists. If it doesn't, a
        :class:`FileNotFound` will be sent back as an exception response.

        :param path: A string pointing to the path.
        :raises FileNotFound: If the path does not exist on the server.
        """
        if not os.path.exists(path):
            raise FileNotFound(
                "The dir or file {} was not found at the server.".format(path))

    def is_valid_upload_file(self, upload_file: UploadMseedFiles):
        """ Check if upload file is valid to be transferred to the storage area.

        Rejects the file if another file already starts at the same time on
        this channel, if it falls outside the channel's time interval, or if
        its sample rate differs from the channel's.

        :param upload_file: The UploadMseedFile structure.
        :return: A tuple (isValid: boolean, message: str).
        """
        ch: ChannelModel = ChannelModel.find_by_id(self.channel_id)
        if not ch:
            raise EntityNotFound("Channel id {} not found".format(
                self.channel_id))
        start_time = DateUtils.convert_string_to_utc(upload_file.start_time)
        stop_time = DateUtils.convert_string_to_utc(upload_file.end_time)
        # A file starting at the exact same time on the same channel is a conflict.
        sd = SeismicDataModel.find_by(channel_id=self.channel_id,
                                      start_time=start_time)
        if sd:
            # NOTE(review): ``ch.station`` looks like a backref declared on
            # StationModel (not visible in this file) — confirm it exists;
            # ChannelModel itself only exposes get_station().
            return False, "File {} is in conflict with file {} at the channel {}-{}"\
                .format(upload_file.file_name, sd.filename, ch.station.name, ch.name)
        if not ch.is_within_deltatime(start_time, stop_time):
            return False, "File {} is not within the channel's time interval".format(
                upload_file.file_name)
        if ch.sample_rate != upload_file.sample_rate:
            return False, "File {} has not the same sample rate than channel {}-{} {}"\
                .format(upload_file.file_name, ch.station.name, ch.name,
                        DateUtils.convert_datetime_to_utc(ch.start_time))
        return True, ""

    def delete_file(self):
        """ This will delete the file at the storage area (no-op if missing). """
        if os.path.isfile(self.file_path):
            os.remove(self.file_path)

    def is_public(self):
        """True if the station owning this data exposes its data publicly."""
        channel: ChannelModel = ChannelModel.find_by_id(self.channel_id)
        return channel.get_station().public_data

    def get_containers(self):
        """ Gets the channel and station that this data belongs to.

        :return: A tuple containing (channel, station).
        """
        ch: ChannelModel = ChannelModel.find_by_id(self.channel_id)
        station = ch.get_station()
        return ch, station

    def fix_mseed_metadata(self):
        """ Check if Network, Station and Channel metadata in the mseed file is
        equal to the Network id, Station name and Channel name that this data
        was linked with. If not, it will change the metadata of these fields in
        the mseed file (rewriting it once, only if something changed).
        """
        stream = obspy.read(self.file_path)
        ch, station = self.get_containers()
        should_rewrite = False
        for tr in stream:
            stats = tr.stats
            if not stats.get(ObspyStatsKeys.NETWORK) == station.network_id:
                stats[ObspyStatsKeys.NETWORK] = station.network_id
                should_rewrite = True
            if not stats.get(ObspyStatsKeys.STATION) == station.name:
                stats[ObspyStatsKeys.STATION] = station.name
                should_rewrite = True
            if not stats.get(ObspyStatsKeys.CHANNEL) == ch.name:
                stats[ObspyStatsKeys.CHANNEL] = ch.name
                should_rewrite = True
        if should_rewrite:
            stream.write(self.file_path, format="MSEED")

    def _create_name(self):
        """ Create a name for the file based on its channel, station, network
        and start time.

        :return: The file name that this file should have.
        """
        utc_time = DateUtils.convert_datetime_to_utc(self.start_time)
        time_stamp = DateUtils.create_stamp(utc_time)
        ch, station = self.get_containers()
        return station.network_id + '.' + station.name + time_stamp + '.' + ch.name

    def rename_mseed(self):
        """Fix the file's mseed metadata, then rename the file on disk (and
        this row) to the canonical name if it differs."""
        self.fix_mseed_metadata()
        new_name = self._create_name()
        if self.filename != new_name:
            new_file_path = os.path.join(self.folder_path, new_name)
            os.rename(self.file_path, new_file_path)
            self.filename = new_name
            self.save()

    @property
    def folder_path(self):
        """ The folder location for this data.

        :raises EntityNotFound: If the target folder row does not exist.
        :return: The folder location of this data.
        """
        target_folder: TargetFolderModel = TargetFolderModel.find_by_id(
            self.target_folder_id)
        if not target_folder:
            raise EntityNotFound("Location for file {} not found".format(
                self.filename))
        folder_path = os.path.join(target_folder.path, self.relative_path)
        return folder_path

    @property
    def file_path(self):
        """Full path of the mseed file; validated to exist on the server."""
        file_path = os.path.join(self.folder_path, self.filename)
        SeismicDataModel.validate_path(file_path)
        return file_path

    @property
    def files(self) -> List[FileTransferredModel]:
        """ Gets the list of :class:`FileTransferredModel`.

        :return: A list of FileTransferredModel.
        """
        files = [fd.transferred_file for fd in self.file_data]
        return files

    def add_file_data(self, file_transferred_id: str):
        """Attach a transferred-file link row to this data (saved with it)."""
        fd = FileDataModel(data_id=self.id, file_id=file_transferred_id)
        self.file_data.append(fd)

    @classmethod
    def create_data(cls, **kwargs):
        """ This will create a new seismic data entity, with a new id.

        Important: You must use save() to store it in the database.

        :param kwargs: A dictionary containing the kwargs: id: str,
            filename: str, relative_path: str, target_folder_id: str,
            start_time: datetime, stop_time: datetime, channel_id: str.
        :return: An instance of seismic data.
        """
        data: SeismicDataModel = cls.from_dict(kwargs)
        data.id = app_utils.generate_id(16)
        # Add file data relational field. At creation, filename must be the id
        # of the transferred file table.
        transferred_file_id = data.filename
        data.add_file_data(transferred_file_id)
        return data

    def to_dict(self):
        """ Convert SeismicData into a dictionary, this way we can convert it
        to a JSON response.

        :return: A clean dictionary form of this model, extended with its
            transferred files, folder path and public flag.
        """
        # Convert columns to dict, then add the derived fields.
        dict_representation = super().to_dict()
        dict_representation["files"] = [file.to_dict() for file in self.files]
        dict_representation["folder_path"] = self.folder_path
        dict_representation["is_public"] = self.is_public()
        return dict_representation

    @classmethod
    def join_search(cls, seismic_search: SeismicDataSearch):
        """ Make a join search with Station and ChannelModel.

        :param seismic_search: A SeismicDataSearch instance used in the search.
        :return: A SearchResult instance (paginated).
        """
        search_filters = []
        # Station/Network/Channel text filters are upper-cased and trimmed.
        if seismic_search.Station:
            search_filters.append(
                StationModel.name == seismic_search.Station.upper().strip())
        if seismic_search.Network:
            search_filters.append(
                StationModel.network_id == seismic_search.Network.upper().strip())
        if seismic_search.Channel:
            search_filters.append(
                ChannelModel.name == seismic_search.Channel.upper().strip())
        if seismic_search.StartTime:
            # noinspection PyTypeChecker
            search_filters.append(
                SeismicDataModel.start_time >= seismic_search.StartTime)
        if seismic_search.StopTime:
            # noinspection PyTypeChecker
            search_filters.append(
                SeismicDataModel.stop_time <= seismic_search.StopTime)
        if seismic_search.Filename:
            search_filters.append(
                cls.filename.like('%{}%'.format(
                    seismic_search.Filename.strip())))
        # Need at least one filter, otherwise return empty result.
        if not search_filters:
            return SearchResult([], 0)
        query = cls.query.join(ChannelModel, cls.channel_id == ChannelModel.id).\
            join(StationModel, ChannelModel.station_id == StationModel.id)\
            .filter(*search_filters).order_by(cls.start_time)
        page = query.paginate(per_page=seismic_search.PerPage,
                              page=seismic_search.Page)
        entities = page.items
        total = page.total
        if entities:
            return SearchResult(entities, total)
        return SearchResult([], 0)
class User(db.Model, UserMixin):
    """ System users model. """
    id = db.Column(db.Integer, primary_key=True)

    # User Authentication information
    username = db.Column(db.String(50), nullable=False, unique=True)
    password = db.Column(db.String(255), nullable=False, default='')
    reset_password_token = db.Column(db.String(100), nullable=False, default='')

    # User Email information
    email = db.Column(db.String(255), nullable=False, unique=True)
    confirmed_at = db.Column(db.DateTime())

    # User information
    is_enabled = db.Column(db.Boolean(), nullable=False, default=False)
    first_name = db.Column(db.String(50), nullable=False, default='')
    last_name = db.Column(db.String(50), nullable=False, default='')
    group_id = db.Column(db.Integer, db.ForeignKey('group.id'))
    balance = db.Column(db.Float, default=0)
    auth_token = db.Column(db.String(120), unique=True)

    def is_active(self):
        # Flask-Login uses this to decide whether the account may log in.
        return self.is_enabled

    def __repr__(self):
        return '<User %r>' % self.username

    def update_balance(self, amount):
        """Update the user balance + amount (atomic SQL increment)."""
        # CONSISTENCY FIX: delegate to the static helper instead of
        # duplicating the raw SQL in two places.
        User.update_user_balance(self.id, amount)

    @staticmethod
    def update_user_balance(user_id, amount):
        """Update the user balance + amount (atomic SQL increment).

        SECURITY FIX: the values were previously string-interpolated into the
        SQL verbatim, which allowed SQL injection if either value came from
        user input. Coercing to numeric types first makes injection
        impossible while keeping the same statement shape.
        NOTE(review): a fully parameterized query (sqlalchemy.text with bound
        params) would be preferable if the driver setup allows it.
        """
        db.engine.execute("""UPDATE user
                             SET balance = balance+%s
                             WHERE id = %s """ % (float(amount), int(user_id)))

    def update_token(self):
        """ Use this function before saving the model in the database:
        rotates the opaque authentication token. """
        self.auth_token = uuid.uuid4().hex

    @property
    def is_authenticated(self):
        """ Returns True if the user is authenticated, i.e. they have provided
        valid credentials. (Only authenticated users will fulfill the criteria
        of login_required.)"""
        return True

    @property
    def is_anonymous(self):
        """ Returns True if this is an anonymous user. (Actual users should
        return False instead.)"""
        return False

    def get_id(self):
        """ Returns a string that uniquely identifies this user, and can be
        used to load the user from the user_loader callback.

        BUG FIX: the old body called the Python-2-only ``unicode`` builtin,
        which raises NameError (not the AttributeError it tried to catch) on
        Python 3 — and this file's f-strings show it targets Python 3.
        """
        return str(self.id)