Example no. 1
    def add_new_columns(self):
        """
        Adds columns present in df but not in the SQL table
        for given instance of PandasSpecialEngine.

        Notes
        -----
        Sadly, it seems that we cannot create JSON columns.
        """
        # create deepcopies of the columns because we are going to unbind
        # them from the table model (otherwise alembic would think we are
        # adding a column that already exists in the database)
        cols_to_add = [deepcopy(col) for col in self.table.columns
                       if col.name not in self.get_db_columns_names()]
        # check columns are not index levels
        if any((c.name in self.df.index.names for c in cols_to_add)):
            raise ValueError(('Cannot add any column that is part of the df index!\n'
                              "You'll have to update your table primary key or change your "
                              "df index"))
        
        with self.engine.connect() as con:
            ctx = MigrationContext.configure(con)
            op = Operations(ctx)
            for col in cols_to_add:
                col.table = None  # Important! unbind the column from the table
                op.add_column(self.table.name, col, schema=self.schema)
                log(f"Added column {col} (type: {col.type}) in table {self.table.name} "
                    f'(schema="{self.schema}")')
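
The key move in this example is copying each Column and detaching it from its Table before handing it to op.add_column, so that Alembic does not treat it as already bound. A minimal standalone sketch of the same pattern (engine, my_table and existing_names are placeholder names, not part of the original code):

from copy import deepcopy

from alembic.migration import MigrationContext
from alembic.operations import Operations


def add_missing_columns(engine, my_table, existing_names):
    """Add every column of `my_table` that is not yet listed in `existing_names`."""
    with engine.begin() as con:  # engine.begin() commits the DDL on exit
        op = Operations(MigrationContext.configure(con))
        for col in my_table.columns:
            if col.name in existing_names:
                continue
            new_col = deepcopy(col)
            new_col.table = None  # unbind the copy so an ADD COLUMN is emitted
            op.add_column(my_table.name, new_col)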
Example no. 2
def upgrade(engine):
    session_maker = sessionmaker(bind=engine)
    session = session_maker()

    r = (session.query(model.ByteHubVersion).order_by(
        model.ByteHubVersion.timestamp.desc()).first())
    if not r:
        current_version = "0"
    else:
        current_version = r.version

    if parse(current_version) >= parse(version.__version__):
        # Up-to-date: nothing to do
        session.close()
        return

    if parse(current_version) < parse(version.__version__):
        print(f"Upgrading ByteHub database schema...")
        with engine.connect() as connection:
            context = MigrationContext.configure(connection)
            op = Operations(context)
            try:
                op.add_column("namespace", Column("backend", String(128)))
            except OperationalError:
                pass

    # Add version number
    obj = model.ByteHubVersion()
    obj.version = version.__version__
    session.add(obj)
    session.commit()

    session.close()
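
Rather than swallowing OperationalError when the column already exists, the same upgrade step could check the live schema first with SQLAlchemy's inspector. A sketch under that assumption, reusing the table and column names from above:

from alembic.migration import MigrationContext
from alembic.operations import Operations
from sqlalchemy import Column, String, inspect


def add_backend_column_if_missing(engine):
    # Skip the DDL entirely if the column is already present.
    existing = {c["name"] for c in inspect(engine).get_columns("namespace")}
    if "backend" in existing:
        return
    with engine.begin() as connection:
        op = Operations(MigrationContext.configure(connection))
        op.add_column("namespace", Column("backend", String(128)))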
Example no. 3
def add_columns(engine, raw_diffs, table_names):
    with engine.begin() as conn:
        ctx = get_migration_context(conn, table_names)
        op = Operations(ctx)
        columns = _get_columns_to_add(raw_diffs, table_names)

        for col in columns:
            table_name = col.table.name
            # the column has a reference to a table definition that already
            # has the column defined, so remove that and add the column
            col.table = None
            op.add_column(table_name, col)
Example no. 4
def add_columns(engine, raw_diffs, table_names):
    with engine.begin() as conn:
        ctx = get_migration_context(conn, table_names)
        op = Operations(ctx)
        columns = _get_columns_to_add(raw_diffs, table_names)

        for col in columns:
            table_name = col.table.name
            # the column has a reference to a table definition that already
            # has the column defined, so remove that and add the column
            col.table = None
            op.add_column(table_name, col)
Example no. 5
def add_columns(engine, diffs):
    with engine.begin() as conn:
        ctx = get_migration_context(conn)
        op = Operations(ctx)
        col_diffs = _filter_diffs(diffs, [DiffTypes.ADD_NULLABLE_COLUMN])
        for diff in col_diffs:
            col = diff.column
            table_name = col.table.name
            # the column has a reference to a table definition that already
            # has the column defined, so remove that and add the column
            col.table = None
            op.add_column(table_name, col)

    return col_diffs
Example no. 6
def add_columns(engine, raw_diffs, table_names):
    changes = defaultdict(list)
    with engine.begin() as conn:
        ctx = get_migration_context(conn, table_names)
        op = Operations(ctx)
        columns = _get_columns_to_add(raw_diffs, table_names)
        for col in columns:
            table_name = col.table.name
            # the column has a reference to a table definition that already
            # has the column defined, so remove that and add the column
            col.table = None
            changes[table_name].append({
                'type': DiffTypes.ADD_COLUMN,
                'item_name': col.name
            })
            op.add_column(table_name, col)

    return dict(changes)
Example no. 7
def add_columns(engine, raw_diffs, table_names):
    changes = defaultdict(list)
    with engine.begin() as conn:
        ctx = get_migration_context(conn, table_names)
        op = Operations(ctx)
        columns = _get_columns_to_add(raw_diffs, table_names)
        for col in columns:
            table_name = col.table.name
            # the column has a reference to a table definition that already
            # has the column defined, so remove that and add the column
            col.table = None
            changes[table_name].append({
                'type': DiffTypes.ADD_COLUMN,
                'value': col.name
            })
            op.add_column(table_name, col)

    return dict(changes)
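
For reference, the mapping returned by this variant has the following shape (table and column names here are purely illustrative):

{
    "form_table": [
        {'type': DiffTypes.ADD_COLUMN, 'value': "received_on"},
        {'type': DiffTypes.ADD_COLUMN, 'value': "deleted"},
    ],
    "case_table": [
        {'type': DiffTypes.ADD_COLUMN, 'value': "owner_id"},
    ],
}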
Example no. 8
def add_measurement_columns(session):
    """Inspects all SampleMeasurements finding distinct values for
    measurement_types, then creates new columns in the ExperimentMeasurement
    table using an Alembic migration context."""
    # TODO: Consider index on SampleMeasurement.measurement_types.
    ExperimentMeasurement = get_ExperimentMeasurement()
    measurement_types = session.query(
        SampleMeasurement.measurement_type).distinct().all()
    measurement_types = sorted([_ for (_, ) in measurement_types])

    cols = set([f"measurement_{mt}" for mt in measurement_types])
    new_cols = cols - set(ExperimentMeasurement.__table__.columns.keys())
    if new_cols:
        ctx = MigrationContext.configure(session.connection())
        op = Operations(ctx)
        for new_col in new_cols:
            print(f"creating new_col: {new_col}")
            op.add_column("experiment_measurements",
                          Column(new_col, DECIMAL(16, 6)))
Example no. 9
def add_measurement_columns(start: int, end: int, session):
    assert start <= end

    ExperimentMeasurement = get_ExperimentMeasurement()
    # Get all the distinct measurement types in the batch that this function
    #  call is responsible for extracting
    measurement_types = sorted([
        _
        for (_, ) in session.query(SampleMeasurement.measurement_type).filter(
            SampleMeasurement.sample_id >= start,
            SampleMeasurement.sample_id <= end).distinct().all(
            )  # Don't transmit any more than you have to.
    ])
    cols = set([f"measurement_{mt}" for mt in measurement_types])
    new_cols = cols - set(ExperimentMeasurement.__table__.columns.keys())
    if new_cols:
        ctx = MigrationContext.configure(session.connection())
        op = Operations(ctx)
        for new_col in new_cols:
            print(f"creating new_col: {new_col}")
            op.add_column("experiment_measurements",
                          Column(new_col, DECIMAL(16, 6)))
Example no. 10
def create_column(engine, table_oid, column_data):
    column_type = column_data[TYPE]
    column_nullable = column_data.get(NULLABLE, True)
    supported_types = alteration.get_supported_alter_column_types(
        engine,
        friendly_names=False,
    )
    sa_type = supported_types.get(column_type)
    if sa_type is None:
        logger.warning("Requested type not supported. falling back to VARCHAR")
        sa_type = supported_types["VARCHAR"]
    table = tables.reflect_table_from_oid(table_oid, engine)
    column = MathesarColumn(
        column_data[NAME],
        sa_type,
        nullable=column_nullable,
    )
    with engine.begin() as conn:
        ctx = MigrationContext.configure(conn)
        op = Operations(ctx)
        op.add_column(table.name, column, schema=table.schema)
    return tables.reflect_table_from_oid(table_oid,
                                         engine).columns[column_data[NAME]]
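
NAME, TYPE and NULLABLE are module-level constants whose values are not shown in this snippet; assuming they are the strings "name", "type" and "nullable", a call could look like the following (the OID and column details are invented):

column_data = {"name": "rating", "type": "NUMERIC", "nullable": True}
new_column = create_column(engine, table_oid=12345, column_data=column_data)
print(new_column.name, new_column.type)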
Example no. 11
    def _schema_init_and_update(self):
        """
        This method tries to create the database table and update the schema.

        :return: None
        """
        try:
            # Try to create the database
            metadata.create_all(self.engine)
        except OperationalError as exx:  # pragma: no cover
            log.info("{0!r}".format(exx))

        # Schema update
        conn = self.engine.connect()
        ctx = MigrationContext.configure(conn)
        op = Operations(ctx)
        try:
            # Try to add resolver column
            op.add_column(
                TABLE_NAME,
                Column('resolver',
                       String(length=column_length.get("resolver"))))
        except Exception as exx:  # pragma: no cover
            log.info("{0!r}".format(exx))
Example no. 12
def create_missing_database_entities(Model, engine):
    m = Model.metadata
    current_info = get_current_database_info(engine)

    print(current_info)

    conn = engine.connect()
    ctx = MigrationContext.configure(conn)
    op = Operations(ctx)

    print "metadata", m
    for table_name in m.tables:
        table = m.tables[table_name]
        if current_info.has_key(table_name):
            for col in table.columns:
                print "col", col
                if not col.name in current_info[table_name]:
                    print "    IN TABLE: %s CREATING COLUMN: %s"%(table_name, col.name)
                    op.add_column(table_name, mimic_column(col))
                    print "    ... done"
        else:
            args = [table_name] + map(mimic_column, list(table.columns))
            print "CREATING TABLE: " + repr(args)
            op.create_table(*args)
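
mimic_column is not included in this snippet. A hypothetical implementation, consistent with how its result is passed to add_column and create_table above, would simply rebuild an unbound copy of each column:

from sqlalchemy import Column


def mimic_column(col):
    # Hypothetical sketch: return a Column that is not attached to any Table,
    # carrying over the attributes the DDL operations need.
    return Column(
        col.name,
        col.type,
        primary_key=col.primary_key,
        nullable=col.nullable,
    )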
Example no. 13
import os

from dotenv import load_dotenv
from sqlalchemy import create_engine, orm, MetaData, Table, Column, String
from alembic.migration import MigrationContext
from alembic.operations import Operations

load_dotenv()

PG_URL = os.environ.get('PG_URL')
PG_TABLE = os.environ.get('PG_TABLE')

engine = create_engine(PG_URL)
context = MigrationContext.configure(engine.connect())
operation = Operations(context)
operation.add_column(PG_TABLE, Column('contact_phone_normalized', String(100)))

metadata = MetaData()
metadata.reflect(bind=engine)
keys = metadata.tables[PG_TABLE].columns.keys()

print(f'All columns of the table "{PG_TABLE}": {keys}')
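
The script above never closes the connection it hands to MigrationContext.configure. A slightly tidier variant of the same steps (same PG_URL and PG_TABLE environment variables) scopes the work in a transaction block:

with engine.begin() as conn:
    operation = Operations(MigrationContext.configure(conn))
    operation.add_column(PG_TABLE, Column('contact_phone_normalized', String(100)))
# the connection is released (and the DDL committed) when the block exits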
Example no. 14
from alembic.migration import MigrationContext
from alembic.operations import Operations
from sqlalchemy import Column, Float, UnicodeText
from sqlalchemy import create_engine

if __name__ == '__main__':
    engine = create_engine('postgresql://postgres@localhost:5432/tetres')
    conn = engine.connect()
    ctx = MigrationContext.configure(conn)
    op = Operations(ctx)
    print("Adding column vht...")
    for i in range(2010, 2021):
        try:
            vht = Column("vht", Float, nullable=True)
            op.add_column("tt_{}".format(i), vht)
            print("Successfully added vht column!")
        except Exception as e:
            print("Failed adding vht column! Error: {}".format(e))
        try:
            dvh = Column("dvh", Float, nullable=True)
            op.add_column("tt_{}".format(i), dvh)
            print("Successfully added dvh column!")
        except Exception as e:
            print("Failed adding dvh column! Error: {}".format(e))
        try:
            lvmt = Column("lvmt", Float, nullable=True)
            op.add_column("tt_{}".format(i), lvmt)
            print("Successfully added lvmt column!")
        except Exception as e:
            print("Failed adding lvmt column! Error: {}".format(e))
Example no. 15
    def update_member(self, id, data):
        member=self.get_member(id)
        if member is None:
            return None

        # NOTE: BaseContext's init method sets its base to the base
        # struct contained in the request, so we need to reset it here
        # to the base struct that is actually in the database - DCarv

        # NOTE: Remove base struct from cache! By John Doe
        model.BASES.bases.pop(member.name)

        # NOTE: Set old base struct as active! By John Doe
        self.set_base(member.struct)

        # NOTE: Check for base content changes! By John Doe
        old_base=json2base(member.struct)

        new_base=json2base(data['struct'])

        # NOTE: List all fields that should be deleted! By John Doe
        del_cols=[]
        for old_col_name, old_col in old_base.content.__allstructs__.items():
            if old_col_name not in new_base.content.__allsnames__:
                del_cols.append(old_col)

        # NOTE: If any field will be deleted, delete it from all documents in
        # the base! By John Doe
        if len(del_cols) > 0:
            # NOTE: Create a fake request for DocumentCustomView and
            # DocumentContext! By John Doe

            url="/%s/doc&$$={\"limit\":null}" % new_base.metadata.name
            for col in del_cols:
                params = {
                    'path': ("[{\"path\":\"%s\",\"fn\":null,\"mode\":"
                             "\"delete\",\"args\":[]}]") % ("/".join(col.path))
                }
                request=DummyRequest(path=url, params=params)
                request.method='PUT'
                request.matchdict={"base": new_base.metadata.name}
                doc_view=DocumentCustomView(
                    DocumentContextFactory(request), 
                    request
                )
                doc_view.update_collection()

        # NOTE: Check for relation field changes (to ALTER table if needed)!
        # By John Doe
        old_doc_table=get_doc_table(old_base.metadata.name, config.METADATA,
            **old_base.relational_fields)

        new_doc_table=get_doc_table(new_base.metadata.name, config.METADATA,
            **new_base.relational_fields)

        # NOTE: List relational fields that should be deleted! By John Doe
        del_cols=[]
        for old_col in old_doc_table.columns:
            if old_col.name not in new_doc_table.columns:
                del_cols.append(old_col)

        # NOTE: List relational fields that should be added! By John Doe
        new_cols=[]
        for new_col in new_doc_table.columns:
            if new_col.name not in old_doc_table.columns:
                # NOTE: Get liblightbase.lbbase.fields object! By John Doe

                field=new_base.relational_fields[new_col.name]
                custom_col=get_custom_column(field)
                new_cols.append(custom_col)

        # NOTE: Create alembic connection and operation object! By John Doe
        db_conn=config.ENGINE.connect()

        alembic_ctx=MigrationContext.configure(db_conn)
        alembic_op=Operations(alembic_ctx)

        # NOTE: Drop columns! By John Doe
        for col in del_cols:
            alembic_op.drop_column(new_doc_table.name, col.name)

        # TODO: New_col cannot be required! By John Doe

        # NOTE: Add columns! By John Doe
        for col in new_cols:
            alembic_op.add_column(new_doc_table.name, col)

        # TODO: Alter columns? By John Doe

        db_conn.close()

        # NOTE: Check for base name change! By John Doe
        if member.name != data['name']:
            old_name='lb_doc_%s' %(member.name)
            new_name='lb_doc_%s' %(data['name'])
            self.session.execute('ALTER TABLE %s RENAME TO %s' %(
                    old_name, 
                    new_name
                )
            )
            old_name='lb_file_%s' %(member.name)
            new_name='lb_file_%s' %(data['name'])
            self.session.execute('ALTER TABLE %s RENAME TO %s' %(
                    old_name, 
                    new_name
                )
            )
            old_name='lb_doc_%s_id_doc_seq' %(member.name)
            new_name='lb_doc_%s_id_doc_seq' %(data['name'])
            self.session.execute('ALTER SEQUENCE %s RENAME TO %s' %(
                    old_name, 
                    new_name
                )
            )

        # NOTE: This will add any new fields to the base struct! By John Doe
        for name in data:
            setattr(member, name, data[name])

        # NOTE: Now commits and closes session in the view instead of here
        # flush() pushes operations to DB's buffer - DCarv
        self.session.flush()

        # NOTE: For some reason the "session" object, when created with
        # "autocommit=True", does not commit on "close", so we run an explicit
        # "commit" here! Does "autocommit=True" not commit more than one
        # operation in sequence? By Questor
        # session_factory: sessionmaker(
            # class_='Session', 
            # autoflush=True, 
            # bind=Engine(postgresql://lbu:***@127.0.0.1/lb), 
            # autocommit=True, 
            # expire_on_commit=True
        # )
        # registry: <sqlalchemy.\
                # util.\
                # _collections.\
                # ThreadLocalRegistry object at 0x4143f90>
        # ! By Questor
        self.session.commit()

        model.HISTORY.create_member(**{
            'id_base': member.id_base, 
            'author': 'Author', 
            'date': datetime.datetime.now().strftime('%d/%m/%Y %H:%M:%S'), 
            'name': member.name, 
            'structure': utils.json2object(member.struct), 
            'status': 'UPDATED'
        })

        self.lbirestart()

        # NOTE: Remove base struct from cache! By John Doe
        model.BASES.bases.pop(member.name)

        return member
Example no. 16
def submitRunlistData():
    """ Create/edit experimental run list layout. Insert/edit experimental parameters and parameter types. This request must send its parameters as a single JSON encoded string and the `content-type`_ header must be supplied as `application/json`_

    .. _content-type: https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Content-Type
    .. _application/json: https://developer.mozilla.org/en-US/docs/Web/HTTP/Basics_of_HTTP/MIME_types

    .. :quickref: Create/Edit run list; Create or edit the experimental run list layout

    :param: isNewRunlist : (Boolean) if runlist is new
    :param: clock : (Integer) creation timestamp
    :param: runlistType : (Integer) Run list type (unused)
    :param: runlistRemarks : Object containing global runlist information. Contains the following keys:

            | ``cols`` : List of experimental parameter objects. Experimental parameter objects contain the following keys:

                    | ``name`` : (String) name of the parameter
                    | ``type`` : (Integer) type of parameter

                            | ``0`` : Regular parameter
                            | ``3`` : Run Title identifier (optional declaration)
                            | ``4`` : Option parameter

                    | ``parType`` : (Integer - optional) Type of regular parameter (only used if ``type=0``)

                            | ``0`` : Number
                            | ``1`` : Text

                    | ``parUnits`` : (Integer - optional) Physical parameter units (only used if ``type=1``)
                    | ``parOptions`` : (String - optional) JSON encoded string of parameter options (only used if ``type=1``)
                    | ``action`` : (String) Action to be performed on the parameter

                            | ``add`` : add parameter
                            | ``edit`` : edit parameter
                            | ``delete`` : delete parameter

    """
    newRunList = False
    processRequest = request.get_json()
    if 'isNewRunlist' in processRequest:
        newRunList = processRequest["isNewRunlist"]
    else:
        newRunList = True
    try:
        Session = sessionmaker(bind=current_user.engineObj)
        session = Session()
        conn = current_user.engineObj.connect()
        ctx = MigrationContext.configure(conn)
        op = Operations(ctx)
        result = session.query(
            daqbrokerDatabase.runs).filter_by(clock=session.query(
                func.max(daqbrokerDatabase.runs.clock)).first()[0]).first()
        if not result:
            oldRemarks = {}
        else:
            oldRemarks = json.loads(result.runlistRemarks)
        #dbQuery=text("INSERT INTO runs VALUES(:clock,:clock,0,:linkRemarks,:runlistType,:runlistRemarks)")
        #startTableAlter="ALTER TABLE runlist "
        if newRunList:
            for i, col in enumerate(processRequest["runlistRemarks"]["cols"]):
                if ((not (int(col["type"]) == 1))
                        and (not (int(col["type"]) == 2))
                        and (not (int(col["type"]) == 3))):
                    if int(col["type"]) == 4:
                        newType = daqbrokerDatabase.Text
                    if int(col["type"]) == 0 and int(col["parType"]) == 1:
                        newType = daqbrokerDatabase.Text
                    if int(col["type"]) == 0 and int(col["parType"]) == 0:
                        newType = daqbrokerDatabase.Float
                    newCol = daqbrokerDatabase.Column(col["name"], newType)
                    op.add_column("runlist", newCol)
                processRequest["runlistRemarks"]["cols"][i][
                    "action"] = "addOld"
        else:
            for i, col in enumerate(processRequest["runlistRemarks"]["cols"]):
                extra = ''
                #print(col)
                if not int(col["type"]) == 3:
                    if i >= len(oldRemarks["cols"]):
                        column = col
                    else:
                        column = oldRemarks["cols"][i]
                    if int(col["type"]) == 4:
                        newType = daqbrokerDatabase.Text
                    if int(col["type"]) == 0 and int(col["parType"]) == 1:
                        newType = daqbrokerDatabase.Text
                    if int(col["type"]) == 0 and int(col["parType"]) == 0:
                        extra = "\"" + column["name"] + "\"::double precision"
                        newType = daqbrokerDatabase.Float
                    if col['action'] == 'add':
                        newCol = daqbrokerDatabase.Column(col["name"], newType)
                        op.add_column("runlist", newCol)
                        processRequest["runlistRemarks"]["cols"][i][
                            "action"] = "addOld"
                    elif col['action'] == 'edit':
                        if col["name"] != column["name"]:
                            op.alter_column("runlist",
                                            oldRemarks["cols"][i]["name"],
                                            new_column_name=col["name"],
                                            type_=newType,
                                            postgresql_using=extra)
                        else:
                            op.alter_column("runlist",
                                            oldRemarks["cols"][i]["name"],
                                            type_=newType,
                                            postgresql_using=extra)
                    elif col['action'] == 'delete':
                        op.drop_column("runlist", col["name"])
                    #print("done")
            daqbrokerDatabase.daqbroker_database.metadata.remove(
                daqbrokerDatabase.daqbroker_database.metadata.tables["runlist"]
            )
            daqbrokerDatabase.daqbroker_database.metadata.reflect(
                current_user.engineObj, extend_existing=True)
            processRequest["runlistRemarks"]["cols"] = [
                x for x in processRequest["runlistRemarks"]["cols"]
                if x['action'] != 'delete'
            ]
        newRuns = daqbrokerDatabase.runs(clock=processRequest["clock"],
                                         linkRemarks='',
                                         runlistRemarks=json.dumps(
                                             processRequest["runlistRemarks"]))
        session.add(newRuns)
        session.commit()
        conn.close()
    except Exception as e:
        session.rollback()
        traceback.print_exc()
        raise InvalidUsage(str(e), status_code=500)
    return jsonify('done')
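
An illustrative request body for this endpoint, following the docstring above (all values are invented):

payload = {
    "isNewRunlist": True,
    "clock": 1609459200,
    "runlistType": 0,
    "runlistRemarks": {
        "cols": [
            {"name": "pressure", "type": 0, "parType": 0, "parUnits": 1, "action": "add"},
            {"name": "comment", "type": 0, "parType": 1, "action": "add"},
            {"name": "run_title", "type": 3, "action": "add"},
        ]
    },
}
# sent as JSON, e.g. requests.post(url, json=payload), so that
# request.get_json() in the view receives this structure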
Example no. 17
def insertInstrument():
    """ Insert a new instrument or edit an existing instrument on a DAQBroker database. Guest users are not allowed to
    create instruments. Created instruments are

    .. :quickref: Create/Edit instrument; Creates or edits a DAQBroker instrument

    :param: Name : (String) unique instrument name
    :param: instid : (Integer) unique instrument identifier. Used to edit an existing instrument
    :param: description : (String) description of the instrument and its
    :param: email : (String) contact information for the instrument operator
    :param: Files : (Optional) JSON encoded list of instrument data source objects. Each Contains the following keys:

            | ``name`` : (String) name of the data source
            | ``metaid`` : (Integer) unique data source identifier. Only used to edit existing data sources
            | ``type`` : (Integer) type of instrument data source
            | ``node`` : (String) unique network node identifier
            | ``remarks`` : (String) JSON encoded object of extra data source information
            | ``channels`` : (Optional) JSON encoded list of data channel objects. Each contains the following keys:

                    | ``Name`` : (String) data channel name
                    | ``channelid`` : (Integer) unique channel identifier. -1 if the channel is new. Positive integer
                    if the channel already exists
                    | ``description`` : (String) data channel description
                    | ``units`` : (String) data channel physical units
                    | ``channeltype`` : (Integer) type of data channel

                            | ``0`` : Number
                            | ``1`` : Text
                            | ``2`` : Custom

                    | ``active`` : (Boolean) channel is shown on interface
                    | ``fileorder`` : (Integer) Used to order channels in a data source
                    | ``alias`` : (String) Original data channel name. Kept constant when name changes
                    | ``remarks`` : (String) JSON encoded object with extra information
                    | ``oldname`` : (String) Old channel name. Used to detect changes in the channel name
                    | ``channeltypeOld`` : (Integer) Old channel type. Used to detect changes in the channel type

    """
    processRequest = request.get_json()
    Session = sessionmaker(bind=current_user.engineObj)
    session = Session()
    conn = current_user.engineObj.connect()
    ctx = MigrationContext.configure(conn)
    op = Operations(ctx)
    try:
        if 'instid' in processRequest:
            newInst = False
            instid = processRequest['instid']
            instrument = session.query(daqbrokerDatabase.instruments).filter_by(instid=instid).first()
        else:
            newInst = True
            maxInst = session.query(func.max(daqbrokerDatabase.instruments.instid)).one_or_none()
            # print(maxInst==None)
            if maxInst[0]:
                maxInstid = maxInst[0]
            else:
                maxInstid = 0
            instid = maxInstid + 1
            instrument = daqbrokerDatabase.instruments(
                Name=processRequest['Name'],
                instid=instid,
                active=False,
                description=processRequest['description'],
                username=current_user.username,
                email=processRequest['email'],
                insttype=0,
                log=None)
        # Now I have an object called "instrument" that I can use to add sources
        # and metadatas and to those metadatas I should be able to add channels.
        for file in processRequest['files']:
            if 'metaid' in file:
                metadata = session.query(daqbrokerDatabase.instmeta).filter_by(metaid=file["metaid"]).first()
                metadata.clock = time.time() * 1000
                metadata.name = file['name']
                metadata.type = file['type']
                metadata.node = file['node']
                metadata.remarks = json.dumps(file['remarks'])
            else:
                maxMeta = session.query(func.max(daqbrokerDatabase.instmeta.metaid)).first()
                if maxMeta[0]:
                    maxMetaid = maxMeta[0]
                else:
                    maxMetaid = 0
                metaid = maxMetaid + 1
                metadata = daqbrokerDatabase.instmeta(
                    clock=time.time() * 1000,
                    name=file['name'],
                    metaid=metaid,
                    type=file["type"],
                    node=file["node"],
                    remarks=json.dumps(
                        file['remarks']),
                    sentRequest=False,
                    lastAction=0,
                    lasterrortime=0,
                    lasterror='',
                    lockSync=False)
                instrument.sources.append(metadata)
            channelid = None
            if 'channels' in file:
                channelsInsert = []
                for channel in file['channels']:
                    if int(channel['channelid']) < 0:  # New channel - have to insert
                        maxChannel = session.query(func.max(daqbrokerDatabase.channels.channelid)).first()
                        if not channelid:
                            if maxChannel[0]:
                                maxChannelid = maxChannel[0]
                            else:
                                maxChannelid = 0
                            channelid = maxChannelid + 1
                        else:
                            channelid = channelid + 1
                        if 'remarks' in channel:
                            if len(channel["remarks"].keys())>0:
                                theRemarks = json.dumps(channel["remarks"])
                            else:
                                theRemarks = json.dumps({})
                        else:
                            theRemarks = json.dumps({})
                        theChannel = daqbrokerDatabase.channels(
                            Name=channel["Name"],
                            channelid=channelid,
                            channeltype=int(
                                channel["channeltype"]),
                            valuetype=0,
                            units=channel['units'],
                            description=channel['description'],
                            active=int(
                                channel['active']) == 1,
                            remarks=theRemarks,
                            lastclock=0,
                            lastValue=None,
                            firstClock=0,
                            fileorder=channel['fileorder'],
                            alias=channel['alias'])
                        metadata.channels.append(theChannel)
                        channelsInsert.append({'name': channel["Name"], 'type': int(channel["channeltype"])})
                        if not newInst:
                            extra = ''
                            if int(channel['channeltype']) == 1:
                                newType = daqbrokerDatabase.Float
                                extra = "\"" + channel["Name"] + "\"::double precision"
                                column = daqbrokerDatabase.Column(channel["Name"], newType)
                                op.add_column(processRequest['Name'] + "_data", column)
                            elif int(channel['channeltype']) == 2:
                                newType = daqbrokerDatabase.Text
                                column = daqbrokerDatabase.Column(channel["Name"], newType)
                                op.add_column(processRequest['Name'] + "_data", column)
                            elif int(channel['channeltype']) == 3:
                                extra = "\"" + channel["Name"] + "\"::double precision"
                                theType = daqbrokerDatabase.Float
                                column = daqbrokerDatabase.Column(channel["Name"], newType)
                                op.add_column(processRequest['Name'] + "_custom", column)
                    elif not newInst:
                        theChannel = session.query(
                            daqbrokerDatabase.channels).filter_by(
                            channelid=channel['channelid']).first()
                        theChannel.Name = channel["Name"]
                        theChannel.channeltype = int(channel["channeltype"])
                        theChannel.units = channel['units']
                        theChannel.description = channel['description']
                        theChannel.active = int(channel['active']) == 1
                        theChannel.fileorder = channel['fileorder']
                        theChannel.alias = channel['alias']
                        if (not channel['channeltypeOld'] == channel['channeltype']) or (
                                not channel['oldName'] == str(channel['Name'])):
                            if not channel['oldName'] == str(channel['Name']):
                                newName = str(channel['Name'])
                                oldName = channel['oldName']
                            else:
                                oldName = str(channel['Name'])
                                newName = None
                            if not channel['channeltypeOld'] == channel['channeltype']:
                                if channel['channeltype'] == 1 or channel['channeltype'] == 3:
                                    newType = daqbrokerDatabase.Float
                                    extra = "\"" + oldName + "\"::double precision"
                                else:
                                    newType = daqbrokerDatabase.Text
                                    extra = None
                            else:
                               newType = None
                            if not channel['channeltypeOld'] == channel['channeltype'] and channel['channeltype'] == 3:
                                if not newName:
                                    theName = oldName
                                else:
                                    theName = newName
                                if not newType:
                                    theType = daqbrokerDatabase.Float
                                else:
                                    theType = newType
                                column = daqbrokerDatabase.Column(theName, theType)
                                op.drop_column(processRequest['Name'] + "_data", oldName)
                                op.add_column(processRequest['Name'] + "_custom", column)
                            elif not channel['channeltypeOld'] == channel['channeltype'] and channel['channeltypeOld'] != 3:
                                if not newName:
                                    theName = oldName
                                else:
                                    theName = newName
                                if not newType:
                                    if channel['channeltypeOld'] == 1:
                                        theType = daqbrokerDatabase.Float
                                    else:
                                        theType = daqbrokerDatabase.Text
                                else:
                                    theType = newType
                                column = daqbrokerDatabase.Column(theName, theType)
                                op.drop_column(processRequest['Name'] + "_custom", oldName)
                                op.add_column(processRequest['Name'] + "_data", column)
                            else:
                                if channel['channeltype'] == 1 or channel['channeltype'] == 2:
                                    if extra:
                                        op.alter_column(
                                            processRequest['Name'] + "_data",
                                            oldName,
                                            new_column_name=newName,
                                            type_=newType,
                                            postgresql_using=extra)
                                    else:
                                        op.alter_column(
                                            processRequest['Name'] + "_data", oldName, new_column_name=newName, type_=newType)
                                else:
                                    if extra=='':
                                        op.alter_column(
                                            processRequest['Name'] + "_custom", oldName, new_column_name=newName, type_=newType)
                                    else:
                                        op.alter_column(
                                            processRequest['Name'] + "_data",
                                            oldName,
                                            new_column_name=newName,
                                            type_=newType,
                                            postgresql_using=extra)
                    elif newInst:
                        raise InvalidUsage("Cannot issue edit channels on new instrument", status_code=401)
        if newInst:
            daqbrokerDatabase.createInstrumentTable(processRequest['Name'], channelsInsert, True)
            session.add(instrument)
            daqbrokerDatabase.daqbroker_database.metadata.create_all(current_user.engineObj)
        session.commit()
        conn.close()
        current_user.updateDB()
        return jsonify('done')
    except Exception as e:
        traceback.print_exc()
        session.rollback()
        # for statement in deleteStatements:
        #	connection.execute(statement)
        raise InvalidUsage(str(e), status_code=500)
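
An illustrative request body for creating a new instrument with this endpoint, following the docstring (values are invented; omitting instid marks the instrument as new):

payload = {
    "Name": "spectrometer01",
    "description": "Bench-top test spectrometer",
    "email": "operator@example.org",
    "files": [
        {
            "name": "main_log",
            "type": 0,
            "node": "node-1",
            "remarks": {},
            "channels": [
                {"Name": "temperature", "channelid": -1, "description": "Cell temperature",
                 "units": "K", "channeltype": 0, "active": 1, "fileorder": 0,
                 "alias": "temperature", "remarks": {}},
            ],
        }
    ],
}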
Example no. 18
    def update_member(self, id, data):
        member = self.get_member(id)
        if member is None:
            return None

        # NOTE: BaseContext's init method sets its base to the base
        # struct contained in the request, so we need to reset it here
        # to the base struct that is actually in the database - DCarv

        # NOTE: Remove base struct from cache! By John Doe
        model.BASES.bases.pop(member.name)

        # NOTE: Set old base struct as active! By John Doe
        self.set_base(member.struct)

        # NOTE: Check for base content changes! By John Doe
        old_base = json2base(member.struct)

        new_base = json2base(data['struct'])

        # NOTE: List all fields that should be deleted! By John Doe
        del_cols = []
        for old_col_name, old_col in old_base.content.__allstructs__.items():
            if old_col_name not in new_base.content.__allsnames__:
                del_cols.append(old_col)

        # NOTE: If any field will be deleted, delete it from all documents in
        # the base! By John Doe
        if len(del_cols) > 0:
            # NOTE: Create a fake request for DocumentCustomView and
            # DocumentContext! By John Doe

            url = "/%s/doc&$$={\"limit\":null}" % new_base.metadata.name
            for col in del_cols:
                params = {
                    'path': ("[{\"path\":\"%s\",\"fn\":null,\"mode\":"
                             "\"delete\",\"args\":[]}]") % ("/".join(col.path))
                }
                request = DummyRequest(path=url, params=params)
                request.method = 'PUT'
                request.matchdict = {"base": new_base.metadata.name}
                doc_view = DocumentCustomView(DocumentContextFactory(request),
                                              request)
                doc_view.update_collection()

        # NOTE: Check for relation field changes (to ALTER table if needed)!
        # By John Doe
        old_doc_table = get_doc_table(old_base.metadata.name, config.METADATA,
                                      **old_base.relational_fields)

        new_doc_table = get_doc_table(new_base.metadata.name, config.METADATA,
                                      **new_base.relational_fields)

        # NOTE: List relational fields that should be deleted! By John Doe
        del_cols = []
        for old_col in old_doc_table.columns:
            if old_col.name not in new_doc_table.columns:
                del_cols.append(old_col)

        # NOTE: List relational fields that should be added! By John Doe
        new_cols = []
        for new_col in new_doc_table.columns:
            if new_col.name not in old_doc_table.columns:
                # NOTE: Get liblightbase.lbbase.fields object! By John Doe

                field = new_base.relational_fields[new_col.name]
                custom_col = get_custom_column(field)
                new_cols.append(custom_col)

        # NOTE: Create alembic connection and operation object! By John Doe
        db_conn = config.ENGINE.connect()

        alembic_ctx = MigrationContext.configure(db_conn)
        alembic_op = Operations(alembic_ctx)

        # NOTE: Drop columns! By John Doe
        for col in del_cols:
            alembic_op.drop_column(new_doc_table.name, col.name)

        # TODO: New_col cannot be required! By John Doe

        # NOTE: Add columns! By John Doe
        for col in new_cols:
            alembic_op.add_column(new_doc_table.name, col)

        # TODO: Alter columns? By John Doe

        db_conn.close()

        # NOTE: Check for base name change! By John Doe
        if member.name != data['name']:
            old_name = 'lb_doc_%s' % (member.name)
            new_name = 'lb_doc_%s' % (data['name'])
            self.session.execute('ALTER TABLE %s RENAME TO %s' %
                                 (old_name, new_name))
            old_name = 'lb_file_%s' % (member.name)
            new_name = 'lb_file_%s' % (data['name'])
            self.session.execute('ALTER TABLE %s RENAME TO %s' %
                                 (old_name, new_name))
            old_name = 'lb_doc_%s_id_doc_seq' % (member.name)
            new_name = 'lb_doc_%s_id_doc_seq' % (data['name'])
            self.session.execute('ALTER SEQUENCE %s RENAME TO %s' %
                                 (old_name, new_name))

        # NOTE: This will add any new fields to the base struct! By John Doe
        for name in data:
            setattr(member, name, data[name])

        # NOTE: Now commits and closes session in the view instead of here
        # flush() pushes operations to DB's buffer - DCarv
        self.session.flush()

        # NOTE: For some reason the "session" object, when created with
        # "autocommit=True", does not commit on "close", so we run an explicit
        # "commit" here! Does "autocommit=True" not commit more than one
        # operation in sequence? By Questor
        # session_factory: sessionmaker(
        # class_='Session',
        # autoflush=True,
        # bind=Engine(postgresql://lbu:***@127.0.0.1/lb),
        # autocommit=True,
        # expire_on_commit=True
        # )
        # registry: <sqlalchemy.\
        # util.\
        # _collections.\
        # ThreadLocalRegistry object at 0x4143f90>
        # ! By Questor
        self.session.commit()

        model.HISTORY.create_member(
            **{
                'id_base': member.id_base,
                'author': 'Author',
                'date': datetime.datetime.now().strftime('%d/%m/%Y %H:%M:%S'),
                'name': member.name,
                'structure': utils.json2object(member.struct),
                'status': 'UPDATED'
            })

        self.lbirestart()

        # NOTE: Remove base struct from cache! By John Doe
        model.BASES.bases.pop(member.name)

        return member
Example no. 19
from alembic.migration import MigrationContext
from alembic.operations import Operations
from sqlalchemy import Column, Float, UnicodeText, DateTime
from sqlalchemy import create_engine

if __name__ == '__main__':
    engine = create_engine('postgresql://postgres@localhost:5432/tetres')
    conn = engine.connect()
    ctx = MigrationContext.configure(conn)
    op = Operations(ctx)
    print("Adding column processed_start_date...")
    try:
        processed_start_date = Column("processed_start_date", DateTime, nullable=True)
        op.add_column("action_log", processed_start_date)
        print("Successfully added processed_start_date column!")
    except Exception as e:
        print("Failed adding processed_start_date column! Error: {}".format(e))
    print("Adding column processed_end_date...")
    try:
        processed_end_date = Column("processed_end_date", DateTime, nullable=True)
        op.add_column("action_log", processed_end_date)
        print("Successfully added processed_end_date column!")
    except Exception as e:
        print("Failed adding processed_end_date column! Error: {}".format(e))
Example no. 20
def upgrade(pyramid_env):
    admin_context = get_admin_context()
    op = Operations(admin_context)
    with admin_context.begin_transaction():
        op.add_column('WS.WS.SYS_DAV_RES',
                      sa.Column('RES_SIZE', sa.Integer))
Example no. 21
def upgrade(pyramid_env):
    admin_context = get_admin_context()
    op = Operations(admin_context)
    with admin_context.begin_transaction():
        op.add_column('WS.WS.SYS_DAV_RES', sa.Column('RES_SIZE', sa.Integer))
Example no. 22
from alembic.migration import MigrationContext
from alembic.operations import Operations
from sqlalchemy import Column, VARCHAR
from sqlalchemy import create_engine

if __name__ == '__main__':
    engine = create_engine('postgresql://postgres@localhost:5432/tetres')
    conn = engine.connect()
    ctx = MigrationContext.configure(conn)
    op = Operations(ctx)
    try:
        status = Column("status", VARCHAR(255), nullable=True)
        op.add_column("route_wise_moe_parameters", status)
        print("Successfully added status column!")
    except Exception as e:
        print("Failed adding status column! Error: {}".format(e))
    try:
        reason = Column("reason", VARCHAR(255), nullable=True)
        op.add_column("route_wise_moe_parameters", reason)
        print("Successfully added reason column!")
    except Exception as e:
        print("Failed adding reason column! Error: {}".format(e))