Example 1
def drop_column(
    engine,
    table_oid,
    column_index,
):
    column_index = int(column_index)
    table = tables.reflect_table_from_oid(table_oid, engine)
    column = table.columns[column_index]
    with engine.begin() as conn:
        ctx = MigrationContext.configure(conn)
        op = Operations(ctx)
        op.drop_column(table.name, column.name, schema=table.schema)
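A minimal usage sketch for the helper above, assuming a reachable database and a known table OID; the DSN and OID values below are placeholders, not taken from the original snippet.

# Hypothetical usage of drop_column(); the DSN and OID are placeholders.
from sqlalchemy import create_engine

engine = create_engine("postgresql://user:pass@localhost/mydb")
table_oid = 16385  # placeholder OID of the target table
drop_column(engine, table_oid, column_index=2)  # drops the third column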
Example 2
    def test_missing_column(self):
        adapter = get_indicator_adapter(self.config, raise_errors=True)
        adapter.build_table()
        with adapter.engine.begin() as connection:
            context = MigrationContext.configure(connection)
            op = Operations(context)
            op.drop_column(adapter.get_table().name, 'name')

        doc = {
            "_id": '123',
            "domain": "domain",
            "doc_type": "CommCareCase",
            "name": 'bob'
        }
        with self.assertRaises(MissingColumnWarning):
            adapter.best_effort_save(doc)
Example 3
    def test_missing_column(self):
        adapter = self._get_adapter()
        adapter.build_table()
        with adapter.engine.begin() as connection:
            context = MigrationContext.configure(connection)
            op = Operations(context)
            op.drop_column(adapter.get_table().name, 'name')

        doc = {
            "_id": '123',
            "domain": "domain",
            "doc_type": "CommCareCase",
            "name": 'bob'
        }
        with self.assertRaises(MissingColumnWarning):
            adapter.best_effort_save(doc)
Example 4
def deleteChannel():
    """ Delete an instrument's data channel along with its data. Only instrument owners and system administrators can
    delete data channels.

    .. :quickref: Delete data channel; Deletes an instrument data channel and its data

    :param: channelid: (Integer) unique instrument data channel identifier

    """
    Session = sessionmaker(bind=current_user.engineObj)
    session = Session()
    if('channelid' in request.form):
        channelid = request.form['channelid']
    elif('channelid' in request.args):
        channelid = request.args['channelid']
    else:
        raise InvalidUsage('No channel ID provided', status_code=500)
    try:
        result = session.query(daqbrokerDatabase.channels).filter_by(channelid=channelid).first()
        if current_user.type != 1:
            if result.chann.meta.username != current_user.username:
                raise InvalidUsage("You are not the instrument operator", status_code=400)
        session.delete(result)
        conn = current_user.engineObj.connect()
        ctx = MigrationContext.configure(conn)
        op = Operations(ctx)
        if result.channeltype == 1 or result.channeltype == 2:
            op.drop_column(result.chann.meta.Name + "_data", result.Name)
        else:
            op.drop_column(result.chann.meta.Name + "_custom", result.Name)
        conn.close()
        session.commit()
        return jsonify('done')
    except Exception as e:
        session.rollback()
        raise InvalidUsage('Error : ' + str(e), status_code=500)
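A hedged client-side sketch for this endpoint; the host, port, and route name are assumptions, since the snippet does not show how the view is registered.

# Hypothetical client call; base URL and route are assumptions.
import requests

resp = requests.post("http://localhost:5000/deleteChannel",
                     data={"channelid": 42})  # placeholder channel id
print(resp.json())  # 'done' on success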
Example 5
    def update_member(self, id, data):
        member=self.get_member(id)
        if member is None:
            return None

        # NOTE: BaseContext's init method sets its base to the base
        # struct contained in the request, so we need to reset it here
        # to the base struct that is actually in the database - DCarv

        # NOTE: Remove base struct from cache! By John Doe
        model.BASES.bases.pop(member.name)

        # NOTE: Set old base struct as active! By John Doe
        self.set_base(member.struct)

        # NOTE: Check for base content changes! By John Doe
        old_base=json2base(member.struct)

        new_base=json2base(data['struct'])

        # NOTE: List all fields that should be deleted! By John Doe
        del_cols=[]
        for old_col_name, old_col in old_base.content.__allstructs__.items():
            if old_col_name not in new_base.content.__allsnames__:
                del_cols.append(old_col)

        # NOTE: If any field will be deleted, delete it from all documents in
        # the base! By John Doe
        if len(del_cols) > 0:
            # NOTE: Create a fake request for DocumentCustomView and
            # DocumentContext! By John Doe

            url="/%s/doc&$$={\"limit\":null}" % new_base.metadata.name
            for col in del_cols:
                params = {
                    'path': ("[{\"path\":\"%s\",\"fn\":null,\"mode\":"
                             "\"delete\",\"args\":[]}]") % ("/".join(col.path))
                }
                request=DummyRequest(path=url, params=params)
                request.method='PUT'
                request.matchdict={"base": new_base.metadata.name}
                doc_view=DocumentCustomView(
                    DocumentContextFactory(request), 
                    request
                )
                doc_view.update_collection()

        # NOTE: Check for relation field changes (to ALTER table if needed)!
        # By John Doe
        old_doc_table=get_doc_table(old_base.metadata.name, config.METADATA,
            **old_base.relational_fields)

        new_doc_table=get_doc_table(new_base.metadata.name, config.METADATA,
            **new_base.relational_fields)

        # NOTE: List relational fields that should be deleted! By John Doe
        del_cols=[]
        for old_col in old_doc_table.columns:
            if old_col.name not in new_doc_table.columns:
                del_cols.append(old_col)

        # NOTE: List relational fields that should be added! By John Doe
        new_cols=[]
        for new_col in new_doc_table.columns:
            if new_col.name not in old_doc_table.columns:
                # NOTE: Get liblightbase.lbbase.fields object! By John Doe

                field=new_base.relational_fields[new_col.name]
                custom_col=get_custom_column(field)
                new_cols.append(custom_col)

        # NOTE: Create alembic connection and operation object! By John Doe
        db_conn=config.ENGINE.connect()

        alembic_ctx=MigrationContext.configure(db_conn)
        alembic_op=Operations(alembic_ctx)

        # NOTE: Drop columns! By John Doe
        for col in del_cols:
            alembic_op.drop_column(new_doc_table.name, col.name)

        # TODO: New_col cannot be required! By John Doe

        # NOTE: Add columns! By John Doe
        for col in new_cols:
            alembic_op.add_column(new_doc_table.name, col)

        # TODO: Alter columns? By John Doe

        db_conn.close()

        # NOTE: Check for base name change! By John Doe
        if member.name != data['name']:
            old_name='lb_doc_%s' %(member.name)
            new_name='lb_doc_%s' %(data['name'])
            self.session.execute('ALTER TABLE %s RENAME TO %s' %(
                    old_name, 
                    new_name
                )
            )
            old_name='lb_file_%s' %(member.name)
            new_name='lb_file_%s' %(data['name'])
            self.session.execute('ALTER TABLE %s RENAME TO %s' %(
                    old_name, 
                    new_name
                )
            )
            old_name='lb_doc_%s_id_doc_seq' %(member.name)
            new_name='lb_doc_%s_id_doc_seq' %(data['name'])
            self.session.execute('ALTER SEQUENCE %s RENAME TO %s' %(
                    old_name, 
                    new_name
                )
            )

        # NOTE: This will add any new fields to the base struct! By John Doe
        for name in data:
            setattr(member, name, data[name])

        # NOTE: Now commits and closes session in the view instead of here
        # flush() pushes operations to DB's buffer - DCarv
        self.session.flush()

        # NOTE: For some reason the "session" object with "autocommit=True"
        # does not commit on "close", so we run a "commit" here! Does
        # "autocommit=True" not commit more than one operation in a row?
        # By Questor
        # session_factory: sessionmaker(
            # class_='Session', 
            # autoflush=True, 
            # bind=Engine(postgresql://lbu:***@127.0.0.1/lb), 
            # autocommit=True, 
            # expire_on_commit=True
        # )
        # registry: <sqlalchemy.\
                # util.\
                # _collections.\
                # ThreadLocalRegistry object at 0x4143f90>
        # ! By Questor
        self.session.commit()

        model.HISTORY.create_member(**{
            'id_base': member.id_base, 
            'author': 'Author', 
            'date': datetime.datetime.now().strftime('%d/%m/%Y %H:%M:%S'), 
            'name': member.name, 
            'structure': utils.json2object(member.struct), 
            'status': 'UPDATED'
        })

        self.lbirestart()

        # NOTE: Remove base struct from cache! By John Doe
        model.BASES.bases.pop(member.name)

        return member
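The core pattern in update_member is running Alembic operations against a live connection, outside any migration script. A self-contained sketch of that pattern follows; the DSN, table, and column names are illustrative only.

# Minimal sketch of runtime Alembic operations on a live connection;
# the DSN, table, and column names are placeholders.
import sqlalchemy as sa
from alembic.migration import MigrationContext
from alembic.operations import Operations

engine = sa.create_engine("postgresql://user:pass@localhost/lb")
conn = engine.connect()
ctx = MigrationContext.configure(conn)
op = Operations(ctx)
op.add_column("lb_doc_example", sa.Column("new_field", sa.Text))  # add a column
op.drop_column("lb_doc_example", "old_field")                     # drop a column
conn.close()  # on SQLAlchemy 2.x a conn.commit() may be needed before closing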
Example 6
def submitRunlistData():
    """ Create/edit experimental run list layout. Insert/edit experimental parameters and parameter types. This request must send its parameters as a single JSON encoded string and the `content-type`_ header must be supplied as `application/json`_

    .. _content-type: https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Content-Type
    .. _application/json: https://developer.mozilla.org/en-US/docs/Web/HTTP/Basics_of_HTTP/MIME_types

    .. :quickref: Create/Edit run list; Create or edit the experimental run list layout

    :param: isNewRunlist : (Boolean) whether the run list is new
    :param: clock : (Integer) creation timestamp
    :param: runlistType : (Integer) Run list type (unused)
    :param: runlistRemarks : Object containing global runlist information. Contains the following keys:

            | ``cols`` : List of experimental parameter objects. Experimental parameter objects contain the following keys:

                    | ``name`` : (String) name of the parameter
                    | ``type`` : (Integer) type of parameter

                            | ``0`` : Regular parameter
                            | ``3`` : Run Title identifier (optional declaration)
                            | ``4`` : Option parameter

                    | ``parType`` : (Integer - optional) Type of regular parameter (only used if ``type=0``)

                            | ``0`` : Number
                            | ``1`` : Text

                    | ``parUnits`` : (Integer - optional) Physical parameter units (only used if ``type=0``)
                    | ``parOptions`` : (String - optional) JSON encoded string of parameter options (only used if ``type=1``)
                    | ``action`` : (String) Action to be performed on the parameter

                            | ``add`` : add parameter
                            | ``edit`` : edit parameter
                            | ``delete`` : delete parameter

    """
    newRunList = False
    processRequest = request.get_json()
    if 'isNewRunlist' in processRequest:
        newRunList = processRequest["isNewRunlist"]
    else:
        newRunList = True
    try:
        Session = sessionmaker(bind=current_user.engineObj)
        session = Session()
        conn = current_user.engineObj.connect()
        ctx = MigrationContext.configure(conn)
        op = Operations(ctx)
        result = session.query(
            daqbrokerDatabase.runs).filter_by(clock=session.query(
                func.max(daqbrokerDatabase.runs.clock)).first()[0]).first()
        if not result:
            oldRemarks = {}
        else:
            oldRemarks = json.loads(result.runlistRemarks)
        #dbQuery=text("INSERT INTO runs VALUES(:clock,:clock,0,:linkRemarks,:runlistType,:runlistRemarks)")
        #startTableAlter="ALTER TABLE runlist "
        if newRunList:
            for i, col in enumerate(processRequest["runlistRemarks"]["cols"]):
                if int(col["type"]) not in (1, 2, 3):
                    if int(col["type"]) == 4:
                        newType = daqbrokerDatabase.Text
                    if int(col["type"]) == 0 and int(col["parType"]) == 1:
                        newType = daqbrokerDatabase.Text
                    if int(col["type"]) == 0 and int(col["parType"]) == 0:
                        newType = daqbrokerDatabase.Float
                    newCol = daqbrokerDatabase.Column(col["name"], newType)
                    op.add_column("runlist", newCol)
                processRequest["runlistRemarks"]["cols"][i][
                    "action"] = "addOld"
        else:
            for i, col in enumerate(processRequest["runlistRemarks"]["cols"]):
                extra = ''
                #print(col)
                if not int(col["type"]) == 3:
                    if i >= len(oldRemarks["cols"]):
                        column = col
                    else:
                        column = oldRemarks["cols"][i]
                    if int(col["type"]) == 4:
                        newType = daqbrokerDatabase.Text
                    if int(col["type"]) == 0 and int(col["parType"]) == 1:
                        newType = daqbrokerDatabase.Text
                    if int(col["type"]) == 0 and int(col["parType"]) == 0:
                        extra = "\"" + column["name"] + "\"::double precision"
                        newType = daqbrokerDatabase.Float
                    if col['action'] == 'add':
                        newCol = daqbrokerDatabase.Column(col["name"], newType)
                        op.add_column("runlist", newCol)
                        processRequest["runlistRemarks"]["cols"][i][
                            "action"] = "addOld"
                    elif col['action'] == 'edit':
                        if col["name"] != column["name"]:
                            op.alter_column("runlist",
                                            oldRemarks["cols"][i]["name"],
                                            new_column_name=col["name"],
                                            type_=newType,
                                            postgresql_using=extra)
                        else:
                            op.alter_column("runlist",
                                            oldRemarks["cols"][i]["name"],
                                            type_=newType,
                                            postgresql_using=extra)
                    elif col['action'] == 'delete':
                        op.drop_column("runlist", col["name"])
                    #print("done")
            daqbrokerDatabase.daqbroker_database.metadata.remove(
                daqbrokerDatabase.daqbroker_database.metadata.tables["runlist"]
            )
            daqbrokerDatabase.daqbroker_database.metadata.reflect(
                current_user.engineObj, extend_existing=True)
            processRequest["runlistRemarks"]["cols"] = [
                x for x in processRequest["runlistRemarks"]["cols"]
                if x['action'] != 'delete'
            ]
        newRuns = daqbrokerDatabase.runs(clock=processRequest["clock"],
                                         linkRemarks='',
                                         runlistRemarks=json.dumps(
                                             processRequest["runlistRemarks"]))
        session.add(newRuns)
        session.commit()
        conn.close()
    except Exception as e:
        session.rollback()
        traceback.print_exc()
        raise InvalidUsage(str(e), status_code=500)
    return jsonify('done')
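An illustrative payload for submitRunlistData, assembled from the docstring above; the values and the route name are placeholders.

# Hypothetical request body for submitRunlistData; values are placeholders.
import json
import time

import requests

payload = {
    "isNewRunlist": True,
    "clock": int(time.time() * 1000),
    "runlistType": 0,
    "runlistRemarks": {
        "cols": [
            {"name": "pressure", "type": 0, "parType": 0, "action": "add"},
            {"name": "comment", "type": 0, "parType": 1, "action": "add"},
        ]
    },
}
resp = requests.post("http://localhost:5000/submitRunlistData",
                     data=json.dumps(payload),
                     headers={"Content-Type": "application/json"})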
Example 7
def resetParsing():
    """ Edit the existing parsing information of an instrument

    .. :quickref: Edit data source; Edit data source action information

    :param: instid: (integer) unique instrument identifier
    :param: metaid: (integer) unique instrument data source identifier
    :param: sourceResetTime: (integer) number of seconds to reset the parsing information
    :param: operation: (string) operation to be performed

            | ``remove`` : Remove data source and associated data
            | ``reset`` : Reset processing information
            | ``lockP`` : Toggle lock data storage methods
            | ``lockB`` : Toggle lock file backup methods

    """
    Session = sessionmaker(bind=current_user.engineObj)
    session = Session()
    conn = current_user.engineObj.connect()
    ctx = MigrationContext.configure(conn)
    op = Operations(ctx)
    if('instid' in request.form):
        instid = request.form['instid']
    elif('instid' in request.args):
        instid = request.args['instid']
    else:
        raise InvalidUsage('No instrument ID provided', status_code=500)
    if('metaid' in request.form):
        metaid = request.form['metaid']
    elif('metaid' in request.args):
        metaid = request.args['metaid']
    else:
        raise InvalidUsage('No metadata ID provided', status_code=500)
    if('operation' in request.form):
        operation = request.form['operation']
    elif('operation' in request.args):
        operation = request.args['operation']
    else:
        raise InvalidUsage('No operation provided', status_code=500)
    instrument = []
    try:
        theMeta = session.query(daqbrokerDatabase.instmeta).filter_by(metaid=metaid).first()
        if operation == "remove":
            BACKUPPATH = ''
            IMPORTPATH = ''
            ADDONPATH = ''
            context = zmq.Context()
            newPaths = checkPaths(context, BACKUPPATH, IMPORTPATH, ADDONPATH, 15000)
            paths = {"BACKUPPATH": newPaths[0], "IMPORTPATH": newPaths[1], "ADDONPATH": newPaths[2]}
            pathDel = os.path.join(paths["BACKUPPATH"], current_user.server,
                                   current_user.database[7:], theMeta.meta.Name, theMeta.name)
            try:
                shutil.rmtree(pathDel)
            except BaseException:
                traceback.print_exc()
                poop = "poop"
            session.delete(theMeta)
            for channel in theMeta.channels:
                if channel.channeltype == 3:
                    op.drop_column(theMeta.meta.Name + "_custom", channel.Name)
                else:
                    op.drop_column(theMeta.meta.Name + "_data", channel.Name)
            daqbrokerDatabase.daqbroker_database.metadata.remove(
                daqbrokerDatabase.daqbroker_database.metadata.tables[theMeta.meta.Name + "_custom"])
            daqbrokerDatabase.daqbroker_database.metadata.remove(
                daqbrokerDatabase.daqbroker_database.metadata.tables[theMeta.meta.Name + "_data"])
            daqbrokerDatabase.daqbroker_database.metadata.reflect(current_user.engineObj, extend_existing=True)
        elif operation == 'reset':
            if('sourceResetTime' in request.form):
                sourceResetTime = int(request.form['sourceResetTime'])
            elif('sourceResetTime' in request.args):
                sourceResetTime = int(request.args['sourceResetTime'])
            else:
                sourceResetTime = 1000000000000000000000
            if sourceResetTime <= 0:
                sourceResetTime = 1000000000000000000000
            #print(theMeta.metaid, sourceResetTime)
            theParsing = theMeta.parsing
            for parsing in theParsing:
                if parsing.remarks:
                    theFiles = json.loads(parsing.remarks)
                    theFiles = [x for x in theFiles if (time.time() * 1000 - float(x["lastTime"]) >= sourceResetTime)]
                    parsing.remarks = json.dumps(theFiles)
            #print(theFiles)
        elif operation == 'lockP':
            theMeta.parsing.forcelock = not theMeta.parsing.forcelock
        elif operation == 'lockB':
            theMeta.parsing.sentRequest = not theMeta.parsing.lockSync
            theMeta.parsing.lockSync = not theMeta.parsing.lockSync
        else:
            raise InvalidUsage('Wrong operation provided', status_code=500)
        session.commit()
    except Exception as e:
        session.rollback()
        traceback.print_exc()
        raise InvalidUsage('Error : ' + str(e), status_code=500)
    conn.close()
    return jsonify('done')
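A hedged call to resetParsing; the identifiers, host, and route are placeholders.

# Hypothetical resetParsing call; ids, host, and route are assumptions.
import requests

resp = requests.post("http://localhost:5000/resetParsing",
                     data={"instid": 1, "metaid": 7,  # placeholder ids
                           "operation": "reset",
                           "sourceResetTime": 3600})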
Example 8
def insertInstrument():
    """ Insert a new instrument or edit an existing instrument on a DAQBroker database. Guest users are not allowed to
    create instruments. Created instruments are

    .. :quickref: Create/Edit instrument; Creates or edits a DAQBroker instrument instrument

    :param: Name : (String) unique instrument name
    :param: instid : (Integer) unique instrument identifier. Used to edit an existing instrument
    :param: description : (String) description of the instrument
    :param: email : (String) contact information for the instrument operator
    :param: files : (Optional) JSON encoded list of instrument data source objects. Each contains the following keys:

            | ``name`` : (String) name of the data source
            | ``metaid`` : (Integer) unique data source identifier. Only used to edit existing data sources
            | ``type`` : (Integer) type of instrument data source
            | ``node`` : (String) unique network node identifier
            | ``remarks`` : (String) JSON encoded object of extra data source information
            | ``channels`` : (Optional) JSON encoded list of data channel objects. Each contains the following keys:

                    | ``Name`` : (String) data channel name
                    | ``channelid`` : (Integer) unique channel identifier. -1 if the channel is new. Positive integer
                    if the channel already exists
                    | ``description`` : (String) data channel description
                    | ``units`` : (String) data channel physical units
                    | ``channeltype`` : (Integer) type of data channel

                            | ``0`` : Number
                            | ``1`` : Text
                            | ``2`` : Custom

                    | ``active`` : (Boolean) channel is shown on interface
                    | ``fileorder`` : (Integer) Used to order channels in a data source
                    | ``alias`` : (String) Original data channel name. Kept constant when name changes
                    | ``remarks`` : (String) JSON encoded object with extra information
                    | ``oldname`` : (String) Old channel name. Used to detect changes in the channel name
                    | ``channeltypeOld`` : (Integer) Old channel type. Used to detect changes in the channel type

    """
    processRequest = request.get_json()
    Session = sessionmaker(bind=current_user.engineObj)
    session = Session()
    conn = current_user.engineObj.connect()
    ctx = MigrationContext.configure(conn)
    op = Operations(ctx)
    try:
        if 'instid' in processRequest:
            newInst = False
            instid = processRequest['instid']
            instrument = session.query(daqbrokerDatabase.instruments).filter_by(instid=instid).first()
        else:
            newInst = True
            maxInst = session.query(func.max(daqbrokerDatabase.instruments.instid)).one_or_none()
            # print(maxInst==None)
            if maxInst[0]:
                maxInstid = maxInst[0]
            else:
                maxInstid = 0
            instid = maxInstid + 1
            instrument = daqbrokerDatabase.instruments(
                Name=processRequest['Name'],
                instid=instid,
                active=False,
                description=processRequest['description'],
                username=current_user.username,
                email=processRequest['email'],
                insttype=0,
                log=None)
        # Now I have an object called "instrument" that I can use to add sources
        # and metadatas and to those metadatas I should be able to add channels.
        for file in processRequest['files']:
            if 'metaid' in file:
                metadata = session.query(daqbrokerDatabase.instmeta).filter_by(metaid=file["metaid"]).first()
                metadata.clock = time.time() * 1000
                metadata.name = file['name']
                metadata.type = file['type']
                metadata.node = file['node']
                metadata.remarks = json.dumps(file['remarks'])
            else:
                maxMeta = session.query(func.max(daqbrokerDatabase.instmeta.metaid)).first()
                if maxMeta[0]:
                    maxMetaid = maxMeta[0]
                else:
                    maxMetaid = 0
                metaid = maxMetaid + 1
                metadata = daqbrokerDatabase.instmeta(
                    clock=time.time() * 1000,
                    name=file['name'],
                    metaid=metaid,
                    type=file["type"],
                    node=file["node"],
                    remarks=json.dumps(
                        file['remarks']),
                    sentRequest=False,
                    lastAction=0,
                    lasterrortime=0,
                    lasterror='',
                    lockSync=False)
                instrument.sources.append(metadata)
            channelid = None
            if 'channels' in file:
                channelsInsert = []
                for channel in file['channels']:
                    if int(channel['channelid']) < 0:  # New channel - have to insert
                        maxChannel = session.query(func.max(daqbrokerDatabase.channels.channelid)).first()
                        if not channelid:
                            if maxChannel[0]:
                                maxChannelid = maxChannel[0]
                            else:
                                maxChannelid = 0
                            channelid = maxChannelid + 1
                        else:
                            channelid = channelid + 1
                        if 'remarks' in channel:
                            if len(channel["remarks"].keys())>0:
                                theRemarks = json.dumps(channel["remarks"])
                            else:
                                theRemarks = json.dumps({})
                        else:
                            theRemarks = json.dumps({})
                        theChannel = daqbrokerDatabase.channels(
                            Name=channel["Name"],
                            channelid=channelid,
                            channeltype=int(
                                channel["channeltype"]),
                            valuetype=0,
                            units=channel['units'],
                            description=channel['description'],
                            active=int(
                                channel['active']) == 1,
                            remarks=theRemarks,
                            lastclock=0,
                            lastValue=None,
                            firstClock=0,
                            fileorder=channel['fileorder'],
                            alias=channel['alias'])
                        metadata.channels.append(theChannel)
                        channelsInsert.append({'name': channel["Name"], 'type': int(channel["channeltype"])})
                        if not newInst:
                            extra = ''
                            if int(channel['channeltype']) == 1:
                                newType = daqbrokerDatabase.Float
                                extra = "\"" + channel["Name"] + "\"::double precision"
                                column = daqbrokerDatabase.Column(channel["Name"], newType)
                                op.add_column(processRequest['Name'] + "_data", column)
                            elif int(channel['channeltype']) == 2:
                                newType = daqbrokerDatabase.Text
                                column = daqbrokerDatabase.Column(channel["Name"], newType)
                                op.add_column(processRequest['Name'] + "_data", column)
                            elif int(channel['channeltype']) == 3:
                                extra = "\"" + channel["Name"] + "\"::double precision"
                                theType = daqbrokerDatabase.Float
                                column = daqbrokerDatabase.Column(channel["Name"], theType)
                                op.add_column(processRequest['Name'] + "_custom", column)
                    elif not newInst:
                        theChannel = session.query(
                            daqbrokerDatabase.channels).filter_by(
                            channelid=channel['channelid']).first()
                        theChannel.Name = channel["Name"]
                        theChannel.channeltype = int(channel["channeltype"])
                        theChannel.units = channel['units']
                        theChannel.description = channel['description']
                        theChannel.active = int(channel['active']) == 1
                        theChannel.fileorder = channel['fileorder']
                        theChannel.alias = channel['alias']
                        if (not channel['channeltypeOld'] == channel['channeltype']) or (
                                not channel['oldName'] == str(channel['Name'])):
                            if not channel['oldName'] == str(channel['Name']):
                                newName = str(channel['Name'])
                                oldName = channel['oldName']
                            else:
                                oldName = str(channel['Name'])
                                newName = None
                            if not channel['channeltypeOld'] == channel['channeltype']:
                                if channel['channeltype'] == 1 or channel['channeltype'] == 3:
                                    newType = daqbrokerDatabase.Float
                                    extra = "\"" + oldName + "\"::double precision"
                                else:
                                    newType = daqbrokerDatabase.Text
                                    extra = None
                            else:
                                newType = None
                            if not channel['channeltypeOld'] == channel['channeltype'] and channel['channeltype'] == 3:
                                if not newName:
                                    theName = oldName
                                else:
                                    theName = newName
                                if not newType:
                                    theType = daqbrokerDatabase.Float
                                else:
                                    theType = newType
                                column = daqbrokerDatabase.Column(theName, theType)
                                op.drop_column(processRequest['Name'] + "_data", oldName)
                                op.add_column(processRequest['Name'] + "_custom", column)
                            elif not channel['channeltypeOld'] == channel['channeltype'] and channel['channeltypeOld'] != 3:
                                if not newName:
                                    theName = oldName
                                else:
                                    theName = newName
                                if not newType:
                                    if channel['channeltypeOld'] == 1:
                                        theType = daqbrokerDatabase.Float
                                    else:
                                        theType = daqbrokerDatabase.Text
                                else:
                                    theType = newType
                                column = daqbrokerDatabase.Column(theName, theType)
                                op.drop_column(processRequest['Name'] + "_custom", oldName)
                                op.add_column(processRequest['Name'] + "_data", column)
                            else:
                                if channel['channeltype'] == 1 or channel['channeltype'] == 2:
                                    if extra:
                                        op.alter_column(
                                            processRequest['Name'] + "_data",
                                            oldName,
                                            new_column_name=newName,
                                            type_=newType,
                                            postgresql_using=extra)
                                    else:
                                        op.alter_column(
                                            processRequest['Name'] + "_data", oldName, new_column_name=newName, type_=newType)
                                else:
                                    if extra == '':
                                        op.alter_column(
                                            processRequest['Name'] + "_custom", oldName, new_column_name=newName, type_=newType)
                                    else:
                                        op.alter_column(
                                            processRequest['Name'] + "_data",
                                            oldName,
                                            new_column_name=newName,
                                            type_=newType,
                                            postgresql_using=extra)
                    elif newInst:
                        raise InvalidUsage("Cannot issue edit channels on new instrument", status_code=401)
        if newInst:
            daqbrokerDatabase.createInstrumentTable(processRequest['Name'], channelsInsert, True)
            session.add(instrument)
            daqbrokerDatabase.daqbroker_database.metadata.create_all(current_user.engineObj)
        session.commit()
        conn.close()
        current_user.updateDB()
        return jsonify('done')
    except Exception as e:
        traceback.print_exc()
        session.rollback()
        # for statement in deleteStatements:
        #	connection.execute(statement)
        raise InvalidUsage(str(e), status_code=500)
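An illustrative insertInstrument payload built from the docstring above; every value and the route are placeholders, and only keys the handler reads are included.

# Hypothetical insertInstrument payload; all values are placeholders.
import json

import requests

payload = {
    "Name": "spectrometer1",
    "description": "bench test instrument",
    "email": "operator@example.com",
    "files": [{
        "name": "source1",
        "type": 0,
        "node": "node-a",
        "remarks": {},
        "channels": [{
            "Name": "temperature",
            "channelid": -1,  # -1 marks a new channel
            "description": "sample temperature",
            "units": "K",
            "channeltype": 0,
            "active": 1,
            "fileorder": 0,
            "alias": "temperature",
            "remarks": {},
        }],
    }],
}
resp = requests.post("http://localhost:5000/insertInstrument",
                     data=json.dumps(payload),
                     headers={"Content-Type": "application/json"})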
Example 9
    def update_member(self, id, data):
        member = self.get_member(id)
        if member is None:
            return None

        # NOTE: BaseContext's init method sets its base to the base
        # struct contained in the request, so we need to reset it here
        # to the base struct that is actually in the database - DCarv

        # NOTE: Remove base struct from cache! By John Doe
        model.BASES.bases.pop(member.name)

        # NOTE: Set old base struct as active! By John Doe
        self.set_base(member.struct)

        # NOTE: Check for base content changes! By John Doe
        old_base = json2base(member.struct)

        new_base = json2base(data['struct'])

        # NOTE: List all fields that should be deleted! By John Doe
        del_cols = []
        for old_col_name, old_col in old_base.content.__allstructs__.items():
            if old_col_name not in new_base.content.__allsnames__:
                del_cols.append(old_col)

        # NOTE: If any field will be deleted, delete it from all documents in
        # the base! By John Doe
        if len(del_cols) > 0:
            # NOTE: Create a fake request for DocumentCustomView and
            # DocumentContext! By John Doe

            url = "/%s/doc&$$={\"limit\":null}" % new_base.metadata.name
            for col in del_cols:
                params = {
                    'path': ("[{\"path\":\"%s\",\"fn\":null,\"mode\":"
                             "\"delete\",\"args\":[]}]") % ("/".join(col.path))
                }
                request = DummyRequest(path=url, params=params)
                request.method = 'PUT'
                request.matchdict = {"base": new_base.metadata.name}
                doc_view = DocumentCustomView(DocumentContextFactory(request),
                                              request)
                doc_view.update_collection()

        # NOTE: Check for relation field changes (to ALTER table if needed)!
        # By John Doe
        old_doc_table = get_doc_table(old_base.metadata.name, config.METADATA,
                                      **old_base.relational_fields)

        new_doc_table = get_doc_table(new_base.metadata.name, config.METADATA,
                                      **new_base.relational_fields)

        # NOTE: List relational fields that should be deleted! By John Doe
        del_cols = []
        for old_col in old_doc_table.columns:
            if old_col.name not in new_doc_table.columns:
                del_cols.append(old_col)

        # NOTE: List relational fields that should be added! By John Doe
        new_cols = []
        for new_col in new_doc_table.columns:
            if new_col.name not in old_doc_table.columns:
                # NOTE: Get liblightbase.lbbase.fields object! By John Doe

                field = new_base.relational_fields[new_col.name]
                custom_col = get_custom_column(field)
                new_cols.append(custom_col)

        # NOTE: Create alembic connection and operation object! By John Doe
        db_conn = config.ENGINE.connect()

        alembic_ctx = MigrationContext.configure(db_conn)
        alembic_op = Operations(alembic_ctx)

        # NOTE: Drop columns! By John Doe
        for col in del_cols:
            alembic_op.drop_column(new_doc_table.name, col.name)

        # TODO: New_col cannot be required! By John Doe

        # NOTE: Add columns! By John Doe
        for col in new_cols:
            alembic_op.add_column(new_doc_table.name, col)

        # TODO: Alter columns? By John Doe

        db_conn.close()

        # NOTE: Check for base name change! By John Doe
        if member.name != data['name']:
            old_name = 'lb_doc_%s' % (member.name)
            new_name = 'lb_doc_%s' % (data['name'])
            self.session.execute('ALTER TABLE %s RENAME TO %s' %
                                 (old_name, new_name))
            old_name = 'lb_file_%s' % (member.name)
            new_name = 'lb_file_%s' % (data['name'])
            self.session.execute('ALTER TABLE %s RENAME TO %s' %
                                 (old_name, new_name))
            old_name = 'lb_doc_%s_id_doc_seq' % (member.name)
            new_name = 'lb_doc_%s_id_doc_seq' % (data['name'])
            self.session.execute('ALTER SEQUENCE %s RENAME TO %s' %
                                 (old_name, new_name))

        # NOTE: This will add any new fields to the base struct! By John Doe
        for name in data:
            setattr(member, name, data[name])

        # NOTE: Now commits and closes session in the view instead of here
        # flush() pushes operations to DB's buffer - DCarv
        self.session.flush()

        # NOTE: For some reason the "session" object with "autocommit=True"
        # does not commit on "close", so we run a "commit" here! Does
        # "autocommit=True" not commit more than one operation in a row?
        # By Questor
        # session_factory: sessionmaker(
        # class_='Session',
        # autoflush=True,
        # bind=Engine(postgresql://lbu:***@127.0.0.1/lb),
        # autocommit=True,
        # expire_on_commit=True
        # )
        # registry: <sqlalchemy.\
        # util.\
        # _collections.\
        # ThreadLocalRegistry object at 0x4143f90>
        # ! By Questor
        self.session.commit()

        model.HISTORY.create_member(
            **{
                'id_base': member.id_base,
                'author': 'Author',
                'date': datetime.datetime.now().strftime('%d/%m/%Y %H:%M:%S'),
                'name': member.name,
                'structure': utils.json2object(member.struct),
                'status': 'UPDATED'
            })

        self.lbirestart()

        # NOTE: Remove base struct from cache! By John Doe
        model.BASES.bases.pop(member.name)

        return member
Example 10
def downgrade(pyramid_env):
    admin_context = get_admin_context()
    op = Operations(admin_context)
    with admin_context.begin_transaction():
        op.drop_column('WS.WS.SYS_DAV_RES', 'RES_SIZE')
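For symmetry, the matching upgrade would re-add the column; a sketch assuming an integer byte-count column (the real column type is not shown in this excerpt).

# Hypothetical upgrade counterpart; the column type is an assumption.
import sqlalchemy as sa

def upgrade(pyramid_env):
    admin_context = get_admin_context()
    op = Operations(admin_context)
    with admin_context.begin_transaction():
        op.add_column('WS.WS.SYS_DAV_RES', sa.Column('RES_SIZE', sa.Integer))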