Example 1
    def adapt_dtype_of_empty_db_columns(self):
        """
        Changes the data types of empty columns in the SQL table defined
        in given instance of a PandasSpecialEngine.

        This should only happen in case of data type mismatches.
        This means with columns for which the sqlalchemy table
        model for df and the model for the SQL table have different data types.
        """
        empty_db_columns = self.get_empty_columns()
        db_table = self.get_db_table_schema()
        # if a column has no values in the db but the frame has values
        # for it, change the column type if needed
        for col in empty_db_columns:
            # check if the column also exists in df
            if col.name not in self.df.columns:
                continue
            # check same type
            orig_type = db_table.columns[col.name].type.compile(self.engine.dialect)
            dest_type = self.table.columns[col.name].type.compile(self.engine.dialect)
            # remove character count e.g. "VARCHAR(50)" -> "VARCHAR" 
            orig_type = RE_CHARCOUNT_COL_TYPE.sub('', orig_type)
            dest_type = RE_CHARCOUNT_COL_TYPE.sub('', dest_type)
            # if same type or we want to insert TEXT instead of JSON continue
            # (JSON is not supported on some DBs so it's normal to have TEXT instead)
            if ((orig_type == dest_type) or
                ((orig_type == 'JSON') and (dest_type == 'TEXT'))):
                continue
            # grab the col/index from the df
            # so we can check if there are any values
            if col.name in self.df.index.names:
                df_col = self.df.index.get_level_values(col.name)
            else:
                df_col = self.df[col.name]
            if df_col.notna().any():
                # raise an error if the dtype must change but the engine is SQLite
                # (SQLite does not support column data type alteration)
                if self._db_type == 'sqlite':
                    raise ValueError('SQLite does not support column data type alteration!')
                with self.engine.connect() as con:
                    ctx = MigrationContext.configure(con)
                    op = Operations(ctx)
                    new_col = self.table.columns[col.name]
                    # check if postgres (in which case we have to use "using" syntax
                    # to alter columns data types)
                    if self._db_type == 'postgres':
                        escaped_col = str(new_col.compile(dialect=self.engine.dialect))
                        compiled_type = new_col.type.compile(dialect=self.engine.dialect)
                        alter_kwargs = {'postgresql_using':f'{escaped_col}::{compiled_type}'}
                    else:
                        alter_kwargs = {}
                    op.alter_column(table_name=self.table.name,
                                    column_name=new_col.name,
                                    type_=new_col.type,
                                    schema=self.schema,
                                    **alter_kwargs)
                    log(f"Changed type of column {new_col.name} "
                        f"from {col.type} to {new_col.type} "
                        f'in table {self.table.name} (schema="{self.schema}")')
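This first example already shows the core pattern repeated throughout this page: wrap a live connection in a MigrationContext, build an Operations facade on it, and issue DDL through alter_column. A minimal sketch of that pattern, assuming a hypothetical PostgreSQL database with a table t and a column c:

from sqlalchemy import Text, create_engine
from alembic.migration import MigrationContext
from alembic.operations import Operations

engine = create_engine("postgresql:///example_db")  # hypothetical URL

with engine.connect() as con:
    ctx = MigrationContext.configure(con)
    op = Operations(ctx)
    # On PostgreSQL, a USING clause tells ALTER COLUMN how to cast
    # the existing values to the new type.
    op.alter_column("t", "c", type_=Text(),
                    postgresql_using='"c"::text')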
Example 2
def test_standalone_op():
    eng, buf = capture_db()

    env = MigrationContext.configure(eng)
    op = Operations(env)

    op.alter_column("t", "c", nullable=True)
    eq_(buf, ['ALTER TABLE t ALTER COLUMN c DROP NOT NULL'])
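The capture_db() fixture is not shown here. A hedged sketch of an equivalent helper, built on Alembic's offline ("as_sql") mode so emitted DDL lands in a buffer instead of being executed (capture_sql and the default dialect are assumptions, and the buffer here is a StringIO rather than the list the test compares against):

import io
from alembic.migration import MigrationContext
from alembic.operations import Operations

def capture_sql(dialect_name="postgresql"):
    # as_sql=True makes the context render DDL into output_buffer
    # instead of executing it against a database.
    buf = io.StringIO()
    ctx = MigrationContext.configure(
        dialect_name=dialect_name,
        opts={"as_sql": True, "output_buffer": buf},
    )
    return Operations(ctx), buf

op, buf = capture_sql()
op.alter_column("t", "c", nullable=True)
print(buf.getvalue())  # ALTER TABLE t ALTER COLUMN c DROP NOT NULL;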
Example 3
    def _alter_column_name(self, base, old_name, new_name):
        # NOTE: Create alembic connection and operation object! By John Doe

        db_conn = config.ENGINE.connect()
        alembic_ctx = MigrationContext.configure(db_conn)
        alembic_op = Operations(alembic_ctx)
        doc_table = get_doc_table(base.metadata.name, config.METADATA,
                                  **base.relational_fields)
        alembic_op.alter_column(doc_table.name,
                                old_name,
                                new_column_name=new_name)
        db_conn.close()
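A hedged variant of the same method body using a context manager, so the connection is closed even if alter_column raises (the standalone signature is illustrative):

from alembic.migration import MigrationContext
from alembic.operations import Operations

def alter_column_name(engine, table_name, old_name, new_name):
    # The context manager replaces the explicit connect()/close() pair
    # and also closes the connection on error.
    with engine.connect() as db_conn:
        alembic_op = Operations(MigrationContext.configure(db_conn))
        alembic_op.alter_column(table_name, old_name,
                                new_column_name=new_name)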
Example 4
def change_column_nullable(table_oid, column_index, nullable, engine):
    table = tables.reflect_table_from_oid(table_oid, engine)
    column = table.columns[column_index]
    with engine.begin() as conn:
        ctx = MigrationContext.configure(conn)
        op = Operations(ctx)
        op.alter_column(table.name,
                        column.name,
                        nullable=nullable,
                        schema=table.schema)
    return tables.reflect_table_from_oid(table_oid,
                                         engine).columns[column_index]
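reflect_table_from_oid() is project-specific (it resolves tables by PostgreSQL OID). A hedged equivalent that reflects by table name with plain SQLAlchemy, noting that engine.begin() runs the ALTER inside a transaction that commits on success (set_nullable and the URL are illustrative):

from sqlalchemy import MetaData, Table, create_engine
from alembic.migration import MigrationContext
from alembic.operations import Operations

def set_nullable(engine, table_name, column_name, nullable, schema=None):
    # Reflect the current table definition from the database.
    table = Table(table_name, MetaData(), autoload_with=engine, schema=schema)
    with engine.begin() as conn:  # transactional: commits on success
        op = Operations(MigrationContext.configure(conn))
        op.alter_column(table.name, column_name,
                        nullable=nullable, schema=table.schema)

engine = create_engine("postgresql:///example_db")  # hypothetical URL
set_nullable(engine, "orders", "shipped_at", True)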
Example 5
def setup(context, drop):
    """ Initialize a database for openFRED data.

    Connect to the database specified in the `[openFRED]` section of oemof's
    configuration file and set the database up to hold openFRED data.
    This means that the configured schema is created if it doesn't already
    exist. The same holds for the tables necessary to store openFRED data
    inside the schema.
    """
    section = context.obj["db"]["section"]
    schema = oemof.db.config.get(section, "schema")
    engine = oemof.db.engine(section)
    inspector = inspect(engine)
    metadata = MetaData(schema=schema, bind=engine, reflect=(not drop))
    classes = mapped_classes(metadata)

    if drop == "schema":
        with engine.connect() as connection:
            connection.execute(
                "DROP SCHEMA IF EXISTS {} CASCADE".format(schema))
    elif drop == "tables":
        classes["__Base__"].metadata.drop_all(engine)
    if schema not in inspector.get_schema_names():
        engine.execute(CreateSchema(schema))

    with engine.connect() as connection:
        connection.execute("CREATE EXTENSION IF NOT EXISTS postgis;")
        connection.execute("CREATE EXTENSION IF NOT EXISTS postgis_topology;")
    classes["__Base__"].metadata.create_all(engine)

    with db_session(engine) as session:
        timespan = classes["Timespan"]
        try:
            ts = (session.query(timespan).filter_by(start=None,
                                                    stop=None).one_or_none())
        except MRF:
            click.echo("Multiple timestamps found which have no `start` "
                       "and/or `stop` values.\nAborting.")
            raise click.Abort()
        ts = ts or classes["Timespan"]()
        session.add(ts)
        session.flush()

        context = MigrationContext.configure(session.connection())
        ops = Operations(context)
        ops.alter_column(
            table_name=str(classes["Series"].__table__.name),
            column_name="timespan_id",
            server_default=str(ts.id),
            schema=schema,
        )

        constraint_name = "singular_null_timestamp_constraint"
        if not [
                c for c in timespan.__table__.constraints
                if c.name == constraint_name
        ]:
            constraint = CheckConstraint(
                "(id = {}) OR ".format(ts.id) +
                "(start IS NOT NULL AND stop IS NOT NULL)",
                name=constraint_name,
            )
            timespan.__table__.append_constraint(constraint)
            session.execute(AddConstraint(constraint))

    return classes
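As an aside, Alembic can emit the same CHECK constraint itself. A hedged alternative to appending the constraint to the Table and executing AddConstraint by hand (names taken from the function above):

# Inside the session block above, after ops = Operations(context):
ops.create_check_constraint(
    constraint_name,
    str(timespan.__table__.name),
    "(id = {}) OR (start IS NOT NULL AND stop IS NOT NULL)".format(ts.id),
    schema=schema,
)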
Example 6
def submitRunlistData():
    """ Create/edit experimental run list layout. Insert/edit experimental parameters and parameter types. This request must send its parameters as a single JSON encoded string and the `content-type`_ header must be supplied as `application/json`_

    .. _content-type: https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Content-Type
    .. _application/json: https://developer.mozilla.org/en-US/docs/Web/HTTP/Basics_of_HTTP/MIME_types

    .. :quickref: Create/Edit run list; Create or edit the experimental run list layout

    :param: isNewRunlist : (Boolean) if runlist is new
    :param: clock : (Integer) creation timestamp
    :param: runlistType : (Integer) Run list type (unused)
    :param: runlistRemarks : Object containing global runlist information. Contains the following keys:

            | ``cols`` : List of experimental parameter objects. Experimental parameter objects contain the following keys:

                    | ``name`` : (String) name of the parameter
                    | ``type`` : (Integer) type of parameter

                            | ``0`` : Regular parameter
                            | ``3`` : Run Title identifier (optional declaration)
                            | ``4`` : Option parameter

                    | ``parType`` : (Integer - optional) Type of regular parameter (only used if ``type=0``)

                            | ``0`` : Number
                            | ``1`` : Text

                    | ``parUnits`` : (Integer - optional) Physical parameter units (only used if ``type=0``)
                    | ``parOptions`` : (String - optional) JSON encoded string of parameter options (only used if ``type=0``)
                    | ``action`` : (String) Action to be performed on parameter

                            | ``add`` : add parameter
                            | ``edit`` : edit parameter
                            | ``delete`` : delete parameter

    """
    newRunList = False
    processRequest = request.get_json()
    if 'isNewRunlist' in processRequest:
        newRunList = processRequest["isNewRunlist"]
    else:
        newRunList = True
    try:
        Session = sessionmaker(bind=current_user.engineObj)
        session = Session()
        conn = current_user.engineObj.connect()
        ctx = MigrationContext.configure(conn)
        op = Operations(ctx)
        result = session.query(
            daqbrokerDatabase.runs).filter_by(clock=session.query(
                func.max(daqbrokerDatabase.runs.clock)).first()[0]).first()
        if not result:
            oldRemarks = {}
        else:
            oldRemarks = json.loads(result.runlistRemarks)
        #dbQuery=text("INSERT INTO runs VALUES(:clock,:clock,0,:linkRemarks,:runlistType,:runlistRemarks)")
        #startTableAlter="ALTER TABLE runlist "
        if newRunList:
            for i, col in enumerate(processRequest["runlistRemarks"]["cols"]):
                if int(col["type"]) not in (1, 2, 3):
                    if int(col["type"]) == 4:
                        newType = daqbrokerDatabase.Text
                    if int(col["type"]) == 0 and int(col["parType"]) == 1:
                        newType = daqbrokerDatabase.Text
                    if int(col["type"]) == 0 and int(col["parType"]) == 0:
                        newType = daqbrokerDatabase.Float
                    newCol = daqbrokerDatabase.Column(col["name"], newType)
                    op.add_column("runlist", newCol)
                processRequest["runlistRemarks"]["cols"][i][
                    "action"] = "addOld"
        else:
            for i, col in enumerate(processRequest["runlistRemarks"]["cols"]):
                extra = ''
                #print(col)
                if not int(col["type"]) == 3:
                    if i >= len(oldRemarks["cols"]):
                        column = col
                    else:
                        column = oldRemarks["cols"][i]
                    if int(col["type"]) == 4:
                        newType = daqbrokerDatabase.Text
                    if int(col["type"]) == 0 and int(col["parType"]) == 1:
                        newType = daqbrokerDatabase.Text
                    if int(col["type"]) == 0 and int(col["parType"]) == 0:
                        extra = "\"" + column["name"] + "\"::double precision"
                        newType = daqbrokerDatabase.Float
                    if col['action'] == 'add':
                        newCol = daqbrokerDatabase.Column(col["name"], newType)
                        op.add_column("runlist", newCol)
                        processRequest["runlistRemarks"]["cols"][i][
                            "action"] = "addOld"
                    elif col['action'] == 'edit':
                        if col["name"] != column["name"]:
                            op.alter_column("runlist",
                                            oldRemarks["cols"][i]["name"],
                                            new_column_name=col["name"],
                                            type_=newType,
                                            postgresql_using=extra)
                        else:
                            op.alter_column("runlist",
                                            oldRemarks["cols"][i]["name"],
                                            type_=newType,
                                            postgresql_using=extra)
                    elif col['action'] == 'delete':
                        op.drop_column("runlist", col["name"])
                    #print("done")
            daqbrokerDatabase.daqbroker_database.metadata.remove(
                daqbrokerDatabase.daqbroker_database.metadata.tables["runlist"]
            )
            daqbrokerDatabase.daqbroker_database.metadata.reflect(
                current_user.engineObj, extend_existing=True)
            processRequest["runlistRemarks"]["cols"] = [
                x for x in processRequest["runlistRemarks"]["cols"]
                if x['action'] != 'delete'
            ]
        newRuns = daqbrokerDatabase.runs(clock=processRequest["clock"],
                                         linkRemarks='',
                                         runlistRemarks=json.dumps(
                                             processRequest["runlistRemarks"]))
        session.add(newRuns)
        session.commit()
        conn.close()
    except Exception as e:
        session.rollback()
        traceback.print_exc()
        raise InvalidUsage(str(e), status_code=500)
    return jsonify('done')
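The add/edit/delete branching above distils to a small dispatch from each column's "action" onto the matching Alembic operation. A hedged sketch, reusing the daqbrokerDatabase names from this module:

def apply_column_action(op, table, col, old_name, new_type, using=None):
    # postgresql_using is only passed when a cast expression is needed.
    kwargs = {"postgresql_using": using} if using else {}
    if col["action"] == "add":
        op.add_column(table, daqbrokerDatabase.Column(col["name"], new_type))
    elif col["action"] == "edit":
        op.alter_column(table, old_name, new_column_name=col["name"],
                        type_=new_type, **kwargs)
    elif col["action"] == "delete":
        op.drop_column(table, col["name"])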
Example 7
def insertInstrument():
    """ Insert a new instrument or edit an existing instrument on a DAQBroker database. Guest users are not allowed to
    create instruments. Created instruments are

    .. :quickref: Create/Edit instrument; Creates or edits a DAQBroker instrument

    :param: Name : (String) unique instrument name
    :param: instid : (Integer) unique instrument identifier. Used to edit an existing instrument
    :param: description : (String) description of the instrument and its
    :param: email : (String) contact information for the instrument operator
    :param: Files : (Optional) JSON encoded list of instrument data source objects. Each Contains the following keys:

            | ``name`` : (String) name of the data source
            | ``metaid`` : (Integer) unique data source identifier. Only used to edit existing data sources
            | ``type`` : (Integer) type of instrument data source
            | ``node`` : (String) unique network node identifier
            | ``remarks`` : (String) JSON encoded object of extra data source information
            | ``channels`` : (Optional) JSON encoded list of data channel objects. Each contains the following keys:

                    | ``Name`` : (String) data channel name
                    | ``channelid`` : (Integer) unique channel identifier. -1 if the channel is new. Positive integer
                    if the channel already exists
                    | ``description`` : (String) data channel description
                    | ``units`` : (String) data channel physical units
                    | ``channeltype`` : (Integer) type of data channel

                            | ``0`` : Number
                            | ``1`` : Text
                            | ``2`` : Custom

                    | ``active`` : (Boolean) channel is shown on interface
                    | ``fileorder`` : (Integer) Used to order channels in a data source
                    | ``alias`` : (String) Original data channel name. Kept constant when name changes
                    | ``remarks`` : (String) JSON encoded object with extra information
                    | ``oldname`` : (String) Old channel name. Used to detect changes in the channel name
                    | ``channeltypeOld`` : (Integer) Old channel type. Used to detect changes in the channel type

    """
    processRequest = request.get_json()
    Session = sessionmaker(bind=current_user.engineObj)
    session = Session()
    conn = current_user.engineObj.connect()
    ctx = MigrationContext.configure(conn)
    op = Operations(ctx)
    try:
        if 'instid' in processRequest:
            newInst = False
            instid = processRequest['instid']
            instrument = session.query(daqbrokerDatabase.instruments).filter_by(instid=instid).first()
        else:
            newInst = True
            maxInst = session.query(func.max(daqbrokerDatabase.instruments.instid)).one_or_none()
            # print(maxInst==None)
            if maxInst[0]:
                maxInstid = maxInst[0]
            else:
                maxInstid = 0
            instid = maxInstid + 1
            instrument = daqbrokerDatabase.instruments(
                Name=processRequest['Name'],
                instid=instid,
                active=False,
                description=processRequest['description'],
                username=current_user.username,
                email=processRequest['email'],
                insttype=0,
                log=None)
        # Now I have an object called "instrument" that I can use to add sources
        # and metadatas and to those metadatas I should be able to add channels.
        for file in processRequest['files']:
            if 'metaid' in file:
                metadata = session.query(daqbrokerDatabase.instmeta).filter_by(metaid=file["metaid"]).first()
                metadata.clock = time.time() * 1000
                metadata.name = file['name']
                metadata.type = file['type']
                metadata.node = file['node']
                metadata.remarks = json.dumps(file['remarks'])
            else:
                maxMeta = session.query(func.max(daqbrokerDatabase.instmeta.metaid)).first()
                if maxMeta[0]:
                    maxMetaid = maxMeta[0]
                else:
                    maxMetaid = 0
                metaid = maxMetaid + 1
                metadata = daqbrokerDatabase.instmeta(
                    clock=time.time() * 1000,
                    name=file['name'],
                    metaid=metaid,
                    type=file["type"],
                    node=file["node"],
                    remarks=json.dumps(
                        file['remarks']),
                    sentRequest=False,
                    lastAction=0,
                    lasterrortime=0,
                    lasterror='',
                    lockSync=False)
                instrument.sources.append(metadata)
            channelid = None
            if 'channels' in file:
                channelsInsert = []
                for channel in file['channels']:
                    if int(channel['channelid']) < 0:  # New channel - have to insert
                        maxChannel = session.query(func.max(daqbrokerDatabase.channels.channelid)).first()
                        if not channelid:
                            if maxChannel[0]:
                                maxChannelid = maxChannel[0]
                            else:
                                maxChannelid = 0
                            channelid = maxChannelid + 1
                        else:
                            channelid = channelid + 1
                        if 'remarks' in channel:
                            if len(channel["remarks"].keys())>0:
                                theRemarks = json.dumps(channel["remarks"])
                            else:
                                theRemarks = json.dumps({})
                        else:
                            theRemarks = json.dumps({})
                        theChannel = daqbrokerDatabase.channels(
                            Name=channel["Name"],
                            channelid=channelid,
                            channeltype=int(
                                channel["channeltype"]),
                            valuetype=0,
                            units=channel['units'],
                            description=channel['description'],
                            active=int(
                                channel['active']) == 1,
                            remarks=theRemarks,
                            lastclock=0,
                            lastValue=None,
                            firstClock=0,
                            fileorder=channel['fileorder'],
                            alias=channel['alias'])
                        metadata.channels.append(theChannel)
                        channelsInsert.append({'name': channel["Name"], 'type': int(channel["channeltype"])})
                        if not newInst:
                            extra = ''
                            if int(channel['channeltype']) == 1:
                                newType = daqbrokerDatabase.Float
                                extra = "\"" + channel["Name"] + "\"::double precision"
                                column = daqbrokerDatabase.Column(channel["Name"], newType)
                                op.add_column(processRequest['Name'] + "_data", column)
                            elif int(channel['channeltype']) == 2:
                                newType = daqbrokerDatabase.Text
                                column = daqbrokerDatabase.Column(channel["Name"], newType)
                                op.add_column(processRequest['Name'] + "_data", column)
                            elif int(channel['channeltype']) == 3:
                                extra = "\"" + channel["Name"] + "\"::double precision"
                                newType = daqbrokerDatabase.Float
                                column = daqbrokerDatabase.Column(channel["Name"], newType)
                                op.add_column(processRequest['Name'] + "_custom", column)
                    elif not newInst:
                        theChannel = session.query(
                            daqbrokerDatabase.channels).filter_by(
                            channelid=channel['channelid']).first()
                        theChannel.Name = channel["Name"]
                        theChannel.channeltype = int(channel["channeltype"])
                        theChannel.units = channel['units']
                        theChannel.description = channel['description']
                        theChannel.active = int(channel['active']) == 1
                        theChannel.fileorder = channel['fileorder']
                        theChannel.alias = channel['alias']
                        if (not channel['channeltypeOld'] == channel['channeltype']) or (
                                not channel['oldName'] == str(channel['Name'])):
                            if not channel['oldName'] == str(channel['Name']):
                                newName = str(channel['Name'])
                                oldName = channel['oldName']
                            else:
                                oldName = str(channel['Name'])
                                newName = None
                            if not channel['channeltypeOld'] == channel['channeltype']:
                                if channel['channeltype'] == 1 or channel['channeltype'] == 3:
                                    newType = daqbrokerDatabase.Float
                                    extra = "\"" + oldName + "\"::double precision"
                                else:
                                    newType = daqbrokerDatabase.Text
                                    extra = None
                            else:
                                newType = None
                            if not channel['channeltypeOld'] == channel['channeltype'] and channel['channeltype'] == 3:
                                if not newName:
                                    theName = oldName
                                else:
                                    theName = newName
                                if not newType:
                                    theType = daqbrokerDatabase.Float
                                else:
                                    theType = newType
                                column = daqbrokerDatabase.Column(theName, theType)
                                op.drop_column(processRequest['Name'] + "_data", oldName)
                                op.add_column(processRequest['Name'] + "_custom", column)
                            elif not channel['channeltypeOld'] == channel['channeltype'] and channel['channeltypeOld'] != 3:
                                if not newName:
                                    theName = oldName
                                else:
                                    theName = newName
                                if not newType:
                                    if channel['channeltypeOld'] == 1:
                                        theType = daqbrokerDatabase.Float
                                    else:
                                        theType = daqbrokerDatabase.Text
                                else:
                                    theType = newType
                                column = daqbrokerDatabase.Column(theName, theType)
                                op.drop_column(processRequest['Name'] + "_custom", oldName)
                                op.add_column(processRequest['Name'] + "_data", column)
                            else:
                                if channel['channeltype'] == 1 or channel['channeltype'] == 2:
                                    if extra:
                                        op.alter_column(
                                            processRequest['Name'] + "_data",
                                            oldName,
                                            new_column_name=newName,
                                            type_=newType,
                                            postgresql_using=extra)
                                    else:
                                        op.alter_column(
                                            processRequest['Name'] + "_data", oldName, new_column_name=newName, type_=newType)
                                else:
                                    if extra == '':
                                        op.alter_column(
                                            processRequest['Name'] + "_custom", oldName, new_column_name=newName, type_=newType)
                                    else:
                                        op.alter_column(
                                            processRequest['Name'] + "_data",
                                            oldName,
                                            new_column_name=newName,
                                            type_=newType,
                                            postgresql_using=extra)
                    elif newInst:
                        raise InvalidUsage("Cannot issue edit channels on new instrument", status_code=401)
        if newInst:
            daqbrokerDatabase.createInstrumentTable(processRequest['Name'], channelsInsert, True)
            session.add(instrument)
            daqbrokerDatabase.daqbroker_database.metadata.create_all(current_user.engineObj)
        session.commit()
        conn.close()
        current_user.updateDB()
        return jsonify('done')
    except Exception as e:
        traceback.print_exc()
        session.rollback()
        # for statement in deleteStatements:
        #	connection.execute(statement)
        raise InvalidUsage(str(e), status_code=500)
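When a channel changes to or from the custom type, the code above moves its column between the _data and _custom tables by dropping it from one and re-creating it on the other. A hedged sketch of that step (note that the existing values are not carried across):

def move_column(op, src_table, dst_table, name, col_type):
    # Drop from the source table, then re-create (empty) on the destination.
    op.drop_column(src_table, name)
    op.add_column(dst_table, daqbrokerDatabase.Column(name, col_type))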