Beispiel #1
0
    def test_create_database(self):
        """A fresh DB gets the current schema version, and connect(create=True)
        must repair a schema that has been damaged in various ways."""
        filename = str(uuid.uuid4())
        _silentremove(filename)  # start from a guaranteed-fresh file
        dburi = "sqlite:///%s" % filename

        db = connection.connect(dburi, create=True, verbose=False)
        self.assertEqual(db_current_version(db), CURRENT_DB_VERSION)

        # Dropping the version table must make verification fail...
        db.execute("DROP TABLE dbversion")
        self.assertRaises(DBAdminError, db_verify, db)
        db.close()
        # ...and reconnecting with create=True must rebuild it.
        db = connection.connect(dburi, create=True, verbose=False)
        self.assertEqual(db_current_version(db), CURRENT_DB_VERSION)

        # An emptied dbversion table is likewise repaired on reconnect.
        db.execute("DELETE FROM dbversion")
        db.close()
        db = connection.connect(dburi, create=True, verbose=False)
        self.assertEqual(db_current_version(db), CURRENT_DB_VERSION)

        # A missing data table (rc_pfn) is detected and recreated.
        db.execute("DROP TABLE rc_pfn")
        self.assertRaises(DBAdminError, db_verify, db)
        db.close()
        db = connection.connect(dburi, create=True, verbose=False)
        self.assertEqual(db_current_version(db), CURRENT_DB_VERSION)

        # Several tables missing at once: verification fails both for the
        # current version and for an explicit older version string.
        db.execute("DROP TABLE rc_pfn")
        db.execute("DROP TABLE workflow")
        db.execute("DROP TABLE master_workflow")
        self.assertRaises(DBAdminError, db_verify, db)
        self.assertRaises(DBAdminError, db_verify, db, "4.3.0")
        db.close()
        db = connection.connect(dburi, create=True, verbose=False)
        self.assertEqual(db_current_version(db), CURRENT_DB_VERSION)
        _remove(filename)
Beispiel #2
0
def _get_connection(
    dburi=None,
    cl_properties=None,
    config_properties=None,
    submit_dir=None,
    db_type=None,
    pegasus_version=None,
    schema_check=True,
    create=False,
    force=False,
    print_version=True,
):
    """Return a database session chosen from the supplied connection sources.

    Precedence: explicit dburi > submit_dir > properties (file or
    command-line connection properties). With no source and no db_type,
    fall back to the master DB URI; returns None when db_type is given
    but no connection source was supplied.
    """
    if dburi:
        # Explicit URI wins over everything else.
        return connection.connect(
            dburi,
            pegasus_version=pegasus_version,
            schema_check=schema_check,
            create=create,
            force=force,
            db_type=db_type,
            print_version=print_version,
        )
    elif submit_dir:
        # Resolve the URI from the submit directory's braindump/properties.
        return connection.connect_by_submitdir(
            submit_dir,
            db_type,
            config_properties,
            pegasus_version=pegasus_version,
            schema_check=schema_check,
            create=create,
            force=force,
            cl_properties=cl_properties,
            print_version=print_version,
        )

    elif config_properties or _has_connection_properties(cl_properties):
        # Either a properties file or connection-related CLI properties.
        return connection.connect_by_properties(
            config_properties,
            db_type,
            cl_properties=cl_properties,
            pegasus_version=pegasus_version,
            schema_check=schema_check,
            create=create,
            force=force,
            print_version=print_version,
        )

    if not db_type:
        # No source at all: default to the user's master database.
        dburi = connection._get_master_uri()
        return connection.connect(
            dburi,
            pegasus_version=pegasus_version,
            schema_check=schema_check,
            create=create,
            force=force,
            db_type=db_type,
            print_version=print_version,
        )
    return None
Beispiel #3
0
 def test_create_database(self):
     """Creating a DB yields the current schema version; connect(create=True)
     must repair dropped or emptied schema tables.

     Fix: use assertEqual — assertEquals is a deprecated alias removed in
     Python 3.12.
     """
     filename = str(uuid.uuid4())
     _silentremove(filename)  # start from a fresh file
     dburi = "sqlite:///%s" % filename

     db = connection.connect(dburi, create=True)
     self.assertEqual(db_current_version(db), CURRENT_DB_VERSION)

     # Missing dbversion table: verification fails, reconnect repairs.
     db.execute("DROP TABLE dbversion")
     self.assertRaises(DBAdminError, db_verify, db)
     db.close()
     db = connection.connect(dburi, create=True)
     self.assertEqual(db_current_version(db), CURRENT_DB_VERSION)

     # Emptied dbversion table is repaired on reconnect as well.
     db.execute("DELETE FROM dbversion")
     db.close()
     db = connection.connect(dburi, create=True)
     self.assertEqual(db_current_version(db), CURRENT_DB_VERSION)

     # Missing data table (rc_pfn).
     db.execute("DROP TABLE rc_pfn")
     self.assertRaises(DBAdminError, db_verify, db)
     db.close()
     db = connection.connect(dburi, create=True)
     self.assertEqual(db_current_version(db), CURRENT_DB_VERSION)

     # Several tables missing at once; also check an explicit old version.
     db.execute("DROP TABLE rc_pfn")
     db.execute("DROP TABLE workflow")
     db.execute("DROP TABLE master_workflow")
     self.assertRaises(DBAdminError, db_verify, db)
     self.assertRaises(DBAdminError, db_verify, db, "4.3.0")
     db.close()
     db = connection.connect(dburi, create=True)
     self.assertEqual(db_current_version(db), CURRENT_DB_VERSION)
     _remove(filename)
Beispiel #4
0
def test_malformed_db():
    """A DB with a dropped table fails verification; reconnecting with
    create=True restores the current schema."""
    dburi = "sqlite://"  # in-memory sqlite
    db = connection.connect(dburi, create=True, verbose=False)
    assert get_version(db) == CURRENT_DB_VERSION
    db.execute("DROP TABLE rc_pfn")
    with pytest.raises(DBAdminError):
        db_verify(db, check=True)
    db.close()

    # create=True repairs the malformed schema.
    db = connection.connect(dburi, create=True, verbose=False)
    assert get_version(db) == CURRENT_DB_VERSION
    db.close()
Beispiel #5
0
def _get_connection(dburi=None, config_properties=None, submit_dir=None, db_type=None, pegasus_version=None, schema_check=True, create=False, force=False):
    """Return a database session chosen from the supplied connection sources.

    Precedence: explicit dburi > config_properties > submit_dir. With no
    source and no db_type, fall back to the master DB URI; returns None
    when db_type is given but no connection source was supplied.
    """
    if dburi:
        return connection.connect(dburi, pegasus_version=pegasus_version, schema_check=schema_check, create=create, force=force)
    elif config_properties:
        return connection.connect_by_properties(config_properties, db_type, pegasus_version=pegasus_version, schema_check=schema_check, create=create, force=force)
    elif submit_dir:
        return connection.connect_by_submitdir(submit_dir, db_type, config_properties, pegasus_version=pegasus_version, schema_check=schema_check, create=create, force=force)

    if not db_type:
        # No source at all: default to the user's master database.
        dburi = connection._get_master_uri()
        return connection.connect(dburi, pegasus_version=pegasus_version, schema_check=schema_check, create=create, force=force)
    return None
Beispiel #6
0
    def test_malformed_db(self):
        """A DB with a dropped table fails verification; reconnecting with
        create=True restores the current schema."""
        filename = str(uuid.uuid4())
        _silentremove(filename)  # start from a fresh file
        dburi = "sqlite:///%s" % filename
        db = connection.connect(dburi, create=True, verbose=False)
        self.assertEqual(db_current_version(db), CURRENT_DB_VERSION)
        db.execute("DROP TABLE rc_pfn")
        self.assertRaises(DBAdminError, db_verify, db)
        db.close()

        # create=True repairs the malformed schema.
        db = connection.connect(dburi, create=True, verbose=False)
        self.assertEqual(db_current_version(db), CURRENT_DB_VERSION)
        db.close()
        _remove(filename)
Beispiel #7
0
 def test_malformed_db(self):
     """A DB with a dropped table fails verification; reconnecting with
     create=True restores the current schema.

     Fix: use assertEqual — assertEquals is a deprecated alias removed in
     Python 3.12.
     """
     filename = str(uuid.uuid4())
     _silentremove(filename)  # start from a fresh file
     dburi = "sqlite:///%s" % filename
     db = connection.connect(dburi, create=True)
     self.assertEqual(db_current_version(db), CURRENT_DB_VERSION)
     db.execute("DROP TABLE rc_lfn")
     self.assertRaises(DBAdminError, db_verify, db)
     db.close()

     # create=True repairs the malformed schema.
     db = connection.connect(dburi, create=True)
     self.assertEqual(db_current_version(db), CURRENT_DB_VERSION)
     db.close()
     _remove(filename)
Beispiel #8
0
def test_dbs(input, tmp_path):
    """A bundled legacy DB (named by *input*) fails verification as-is and
    upgrades to the current schema when opened with create=True."""
    # Work on a throwaway copy so the checked-in fixture stays pristine.
    orig_filename = os.path.dirname(os.path.abspath(__file__)) + "/input/" + input
    filename = str(uuid.uuid4())
    shutil.copyfile(orig_filename, filename)
    dburi = "sqlite:///%s" % filename

    # Opened without schema check, verification must flag the old schema.
    db = connection.connect(dburi, create=False, schema_check=False, verbose=False)
    with pytest.raises(DBAdminError):
        db_verify(db, check=True)
    db.close()

    # create=True upgrades the DB to the current schema version.
    db = connection.connect(dburi, create=True, verbose=False)
    assert get_version(db) == CURRENT_DB_VERSION
    db.close()
Beispiel #9
0
    def detach(self, wf_uuid=None):
        """Remove any master db entries for the given root workflow.

        Fix: the Python 2 ``print`` statement is a SyntaxError under
        Python 3 — replaced with the print() function.
        """
        if self.submitdir_exists:
            # Verify that we aren't trying to detach a subworkflow
            if self.is_subworkflow():
                raise SubmitDirException(
                    "Subworkflows cannot be detached independent of the root workflow"
                )

            # Connect to master database
            mdbsession = connection.connect_by_submitdir(
                self.submitdir, connection.DBType.MASTER)
            mdb = MasterDatabase(mdbsession)

            # Check to see if it even exists
            wf = mdb.get_master_workflow(self.wf_uuid)
            if wf is None:
                print("Workflow is not in master DB")
            else:
                # Delete the workflow (this will delete the master_workflowstate entries as well)
                mdb.delete_master_workflow(self.wf_uuid)

            # Update the master db
            mdbsession.commit()
            mdbsession.close()

        else:
            # Submit dir is gone: fall back to the user's default master DB.
            home = expanduser('~')
            mdbsession = connection.connect(
                'sqlite:///%s/.pegasus/workflow.db' % home,
                db_type=connection.DBType.MASTER)
            mdb = MasterDatabase(mdbsession)

            try:
                if wf_uuid is None:
                    # Without a UUID we can only list candidates and bail out.
                    wfs = mdb.get_master_workflow_for_submitdir(self.submitdir)
                    if wfs:
                        msg = "Invalid submit dir: %s, Specify --wf-uuid <WF_UUID> to detach\n" % self.submitdir
                        msg += "\tWorkflow UUID, DAX Label, Submit Hostname, Submit Dir.\n"
                        for wf in wfs:
                            msg += '\t%s, %s, %s, %s\n' % (
                                wf.wf_uuid, wf.dax_label, wf.submit_hostname,
                                wf.submit_dir)
                        raise SubmitDirException(msg)

                    else:
                        raise SubmitDirException("Invalid submit dir: %s" %
                                                 self.submitdir)

                else:
                    # Delete
                    mdb.delete_master_workflow(wf_uuid,
                                               submit_dir=self.submitdir)

                    # Update the master db
                    mdbsession.commit()

            finally:
                mdbsession.close()
Beispiel #10
0
def delete_workflow(dburi, wf_uuid):
    "Expunge workflow from workflow database"
    # NOTE(review): this snippet appears truncated — the outer try: below has
    # no matching finally/except in view (compare the complete variant of this
    # function elsewhere in this file). It also uses the Python 2
    # `except Exc, e:` syntax, which does not parse under Python 3.

    log.info('Expunging %s from workflow database', wf_uuid)

    session = connection.connect(dburi, create=True)
    try:
        # Locate the top-level workflow row for the given UUID.
        query = session.query(Workflow).filter(Workflow.wf_uuid == wf_uuid)
        try:
            wf = query.one()
        except orm.exc.NoResultFound, e:
            log.warn('No workflow found with wf_uuid %s - aborting expunge',
                     wf_uuid)
            return

        # PM-1218 gather list of descendant workflows with wf_uuid
        query = session.query(Workflow).filter(Workflow.root_wf_id == wf.wf_id)
        try:
            desc_wfs = query.all()
            for desc_wf in desc_wfs:
                # delete the files from the rc_lfn explicitly as they are
                # not associated with workflow table
                __delete_workflow_files__(session, desc_wf.wf_uuid,
                                          desc_wf.wf_id)
        except orm.exc.NoResultFound, e:
            log.warn('No workflow found with root wf_id %s - aborting expunge',
                     wf.wf_id)
            return
Beispiel #11
0
 def __init__(self, connString=None, expand_workflow=True):
     """Open a Stampede DB session for *connString*.

     Raises StampedeDBNotFoundError when the connection cannot be made.
     Fix: the Python 2 ``except Exc, e`` syntax is a SyntaxError under
     Python 3 — replaced with ``except ... as e``.
     """
     self.log = logging.getLogger("%s.%s" % (self.__module__, self.__class__.__name__))
     try:
         self.session = connection.connect(connString)
     except (connection.ConnectionError, DBAdminError) as e:
         # Log the root cause, then surface a domain-specific error.
         self.log.exception(e)
         raise StampedeDBNotFoundError
Beispiel #12
0
    def __init__(self, dburi, batch=True, props=None, db_type=None, backup=False, flush_every=1000):
        """Open a batching DB session for *dburi*.

        Will be overridden by subclasses to take parameters specific to
        their function.

        Fixes: guard against ``props is None`` (its default) before calling
        ``props.keyset()``; removed commented-out dead code.
        """
        self.log = logging.getLogger("%s.%s" % (self.__module__, self.__class__.__name__))
        self.dburi = dburi

        # PM-898 all props passed should have pegasus prefix stripped off
        # so they are more like connect_args to be used for database
        connect_args = {}
        if props is not None:
            for key in props.keyset():
                # we don't pass url in connect args
                if key != "url":
                    connect_args[key] = props.property(key)

        # make sure timeout is an int (property values arrive as strings)
        if "timeout" in connect_args:
            connect_args["timeout"] = int(connect_args["timeout"])

        self.session = connection.connect(dburi, create=True, connect_args=connect_args, db_type=db_type, backup=backup)

        # flags and state for batching
        self._batch = batch
        self._flush_every = flush_every
        self._flush_count = 0
        self._last_flush = time.time()
Beispiel #13
0
    def __init__(self,
                 conn_string=None,
                 wf_id=None,
                 wf_uuid=None,
                 debug=False):
        """Open a Stampede DB session and initialize the workflow context.

        Raises ValueError for a missing connection string, DBAdminError for
        a read-only DB, StampedeDBNotFoundError otherwise on failure.
        """
        self._dbg = debug

        if conn_string is None:
            raise ValueError('Connection string is required')

        try:
            self.session = connection.connect(conn_string)
        except connection.ConnectionError as e:
            log.exception(e)
            message = e

            # Walk nested exceptions down to the innermost text.
            # NOTE(review): relies on the Python 2-era `.message` attribute —
            # this loop will raise AttributeError on Python 3; verify.
            while isinstance(message, Exception):
                message = message.message

            if 'attempt to write a readonly database' in message:
                raise DBAdminError(message)

            raise StampedeDBNotFoundError

        self.initialize(wf_id, wf_uuid)
Beispiel #14
0
def delete_workflow(dburi, wf_uuid):
    """Expunge a workflow (and its descendants' files) from the workflow DB.

    Fix: Logger.warn is a deprecated alias of Logger.warning — replaced.
    """

    log.info('Expunging %s from workflow database', wf_uuid)

    session = connection.connect(dburi, create=True)
    try:
        # Locate the top-level workflow row for the given UUID.
        query = session.query(Workflow).filter(Workflow.wf_uuid == wf_uuid)
        try:
            wf = query.one()
        except orm.exc.NoResultFound as e:
            log.warning('No workflow found with wf_uuid %s - aborting expunge', wf_uuid)
            return

        # PM-1218 gather list of descendant workflows with wf_uuid
        query = session.query(Workflow).filter(Workflow.root_wf_id == wf.wf_id)
        try:
            desc_wfs = query.all()
            for desc_wf in desc_wfs:
                # delete the files from the rc_lfn explicitly as they are
                # not associated with workflow table
                __delete_workflow_files__(session, desc_wf.wf_uuid, desc_wf.wf_id)
        except orm.exc.NoResultFound as e:
            log.warning('No workflow found with root wf_id %s - aborting expunge', wf.wf_id)
            return

        # Deleting the root row cascades to the workflow's own records.
        session.delete(wf)

        log.info('Flushing top-level workflow: %s', wf.wf_uuid)
        i = time.time()
        session.flush()
        session.commit()
        log.info('Flush took: %f seconds', time.time() - i)
    finally:
        session.close()
Beispiel #15
0
 def test_connection_by_uri(self):
     """Connecting by explicit sqlite URI with create=True succeeds."""
     filename = str(uuid.uuid4())
     _silentremove(filename)  # start from a fresh file
     dburi = "sqlite:///%s" % filename
     db = connection.connect(dburi, echo=False, schema_check=True, create=True)
     db.close()
     _remove(filename)
Beispiel #16
0
    def test_dbs(self):
        """Bundled legacy DBs fail verification as-is and upgrade cleanly
        when opened with create=True.

        Fixes: assertEquals (deprecated alias) -> assertEqual; the loop
        variable no longer shadows the `db` session object.
        """
        dbs = ["test-01.db", "test-02.db"]

        for db_name in dbs:
            # Work on a throwaway copy so the checked-in fixture stays pristine.
            orig_filename = os.path.dirname(os.path.abspath(__file__)) + "/input/" + db_name
            filename = str(uuid.uuid4())
            shutil.copyfile(orig_filename, filename)
            dburi = "sqlite:///%s" % filename

            # Opened without schema check, verification must flag old schema.
            db = connection.connect(dburi, create=False, schema_check=False, verbose=False)
            self.assertRaises(DBAdminError, db_verify, db)
            db.close()

            # create=True upgrades to the current schema version.
            db = connection.connect(dburi, create=True, verbose=False)
            self.assertEqual(db_current_version(db), CURRENT_DB_VERSION)
            db.close()
            _remove(filename)
Beispiel #17
0
    def test_dbs(self):
        """Bundled legacy DBs fail verification as-is and upgrade cleanly
        when opened with create=True.

        Fixes: assertEquals (deprecated alias) -> assertEqual; the loop
        variable no longer shadows the `db` session object.
        """
        dbs = ["test-01.db", "test-02.db"]

        for db_name in dbs:
            # Work on a throwaway copy so the checked-in fixture stays pristine.
            orig_filename = os.path.dirname(os.path.abspath(__file__)) + "/input/" + db_name
            filename = str(uuid.uuid4())
            shutil.copyfile(orig_filename, filename)
            dburi = "sqlite:///%s" % filename

            # Opened without schema check, verification must flag old schema.
            db = connection.connect(dburi, create=False, schema_check=False)
            self.assertRaises(DBAdminError, db_verify, db)
            db.close()

            # create=True upgrades to the current schema version.
            db = connection.connect(dburi, create=True)
            self.assertEqual(db_current_version(db), CURRENT_DB_VERSION)
            db.close()
            _remove(filename)
Beispiel #18
0
 def __init__(self, connString=None, expand_workflow=True):
     """Open a Stampede DB session for *connString*.

     Raises StampedeDBNotFoundError when the connection cannot be made.
     Fix: the Python 2 ``except Exc, e`` syntax is a SyntaxError under
     Python 3 — replaced with ``except ... as e``.
     """
     self.log = logging.getLogger(
         "%s.%s" % (self.__module__, self.__class__.__name__))
     try:
         self.session = connection.connect(connString)
     except (connection.ConnectionError, DBAdminError) as e:
         # Log the root cause, then surface a domain-specific error.
         self.log.exception(e)
         raise StampedeDBNotFoundError
Beispiel #19
0
def test_version_operations():
    """Downgrading the schema makes verification fail; reconnecting with
    create=True upgrades back to the current version."""
    dburi = "sqlite://"  # in-memory sqlite
    db = connection.connect(dburi, create=True, verbose=False)

    # Downgrade to the 4.7.0-era schema (version 8) and verify it fails.
    db_downgrade(db, pegasus_version="4.7.0", verbose=False)
    assert get_version(db) == 8
    with pytest.raises(DBAdminError):
        db_verify(db, check=True)
    # Re-attach the RCLFN table to the metadata the downgrade detached it from.
    RCLFN.__table__._set_parent(metadata)
    db.close()

    # create=True upgrades back to the current schema version.
    db = connection.connect(dburi, create=True, verbose=False)
    assert get_version(db) == CURRENT_DB_VERSION
    db.close()

    # A second independent in-memory DB can be created and closed cleanly.
    dburi2 = "sqlite://"
    db2 = connection.connect(dburi2, create=True, verbose=False)
    db2.close()
Beispiel #20
0
 def test_connection_by_uri(self):
     """Connecting by explicit sqlite URI with create=True succeeds."""
     filename = str(uuid.uuid4())
     _silentremove(filename)  # start from a fresh file
     dburi = "sqlite:///%s" % filename
     db = connection.connect(dburi,
                             echo=False,
                             schema_check=True,
                             create=True)
     db.close()
     _remove(filename)
Beispiel #21
0
 def loop_forever(self):
     """Service loop: process the ensembles once per interval, forever.

     A fresh master-DB session is opened for each pass and always closed,
     even when loop_once() raises.
     """
     while True:
         current_user = user.get_user_by_uid(os.getuid())
         session = connection.connect(current_user.get_master_db_url())
         try:
             self.loop_once(Ensembles(session))
         finally:
             session.close()
         time.sleep(self.interval)
Beispiel #22
0
 def loop_forever(self):
     """Service loop: process the ensembles once per interval, forever."""
     while True:
         u = user.get_user_by_uid(os.getuid())
         session = connection.connect(u.get_master_db_url())
         try:
             dao = Ensembles(session)
             self.loop_once(dao)
         finally:
             # Always release the session, even if loop_once() raises.
             session.close()
         time.sleep(self.interval)
Beispiel #23
0
    def test_version_operations(self):
        """Downgrading the schema makes verification fail; reconnecting with
        create=True upgrades back to the current version.

        Fix: use assertEqual — assertEquals is a deprecated alias removed in
        Python 3.12.
        """
        filename = str(uuid.uuid4())
        _silentremove(filename)  # start from a fresh file
        dburi = "sqlite:///%s" % filename
        db = connection.connect(dburi, create=True)

        # Downgrade to the 4.5.0-era schema (version 4); verification fails.
        db_downgrade(db, pegasus_version="4.5.0")
        self.assertEqual(db_current_version(db), 4)
        self.assertRaises(DBAdminError, db_verify, db)
        # Re-attach rc_lfn to the metadata the downgrade detached it from.
        rc_lfn._set_parent(metadata)
        db.close()

        # create=True upgrades back to the current schema version.
        db = connection.connect(dburi, create=True)
        self.assertEqual(db_current_version(db), CURRENT_DB_VERSION)
        db.close()
        _remove(filename)

        fn = str(uuid.uuid4())
        _silentremove(fn)
        dburi2 = "sqlite:///%s" % fn
        # NOTE(review): db2 is never closed and `fn` is never removed here —
        # the sqlite file leaks (the variant elsewhere calls _remove(fn)).
        db2 = connection.connect(dburi2, create=True)
Beispiel #24
0
    def detach(self, wf_uuid=None):
        """Remove any master db entries for the given root workflow.

        Fix: the Python 2 ``print`` statement is a SyntaxError under
        Python 3 — replaced with the print() function.
        """
        if self.submitdir_exists:
            # Verify that we aren't trying to detach a subworkflow
            if self.is_subworkflow():
                raise SubmitDirException("Subworkflows cannot be detached independent of the root workflow")

            # Connect to master database
            mdbsession = connection.connect_by_submitdir(self.submitdir, connection.DBType.MASTER)
            mdb = MasterDatabase(mdbsession)

            # Check to see if it even exists
            wf = mdb.get_master_workflow(self.wf_uuid)
            if wf is None:
                print("Workflow is not in master DB")
            else:
                # Delete the workflow (this will delete the master_workflowstate entries as well)
                mdb.delete_master_workflow(self.wf_uuid)

            # Update the master db
            mdbsession.commit()
            mdbsession.close()

        else:
            # Submit dir is gone: fall back to the user's default master DB.
            home = expanduser('~')
            mdbsession = connection.connect('sqlite:///%s/.pegasus/workflow.db' % home,
                                            db_type=connection.DBType.MASTER)
            mdb = MasterDatabase(mdbsession)

            try:
                if wf_uuid is None:
                    # Without a UUID we can only list candidates and bail out.
                    wfs = mdb.get_master_workflow_for_submitdir(self.submitdir)
                    if wfs:
                        msg = "Invalid submit dir: %s, Specify --wf-uuid <WF_UUID> to detach\n" % self.submitdir
                        msg += "\tWorkflow UUID, DAX Label, Submit Hostname, Submit Dir.\n"
                        for wf in wfs:
                            msg += '\t%s, %s, %s, %s\n' % (wf.wf_uuid, wf.dax_label, wf.submit_hostname, wf.submit_dir)
                        raise SubmitDirException(msg)

                    else:
                        raise SubmitDirException("Invalid submit dir: %s" % self.submitdir)

                else:
                    # Delete
                    mdb.delete_master_workflow(wf_uuid, submit_dir=self.submitdir)

                    # Update the master db
                    mdbsession.commit()

            finally:
                mdbsession.close()
Beispiel #25
0
    def __init__(self, dburi, batch=True, props=None, db_type=None, flush_every=1000):
        """Open a batching DB session for *dburi*.

        Will be overridden by subclasses to take
        parameters specific to their function.
        """
        self.log = logging.getLogger("%s.%s" % (self.__module__, self.__class__.__name__))
        self.dburi = dburi
        self.session = connection.connect(dburi, create=True, props=props, db_type=db_type)

        # flags and state for batching
        self._batch = batch
        self._flush_every = flush_every
        self._flush_count = 0
        self._last_flush = time.time()
Beispiel #26
0
    def test_version_operations(self):
        """Downgrading the schema makes verification fail; reconnecting with
        create=True upgrades back to the current version."""
        filename = str(uuid.uuid4())
        _silentremove(filename)  # start from a fresh file
        dburi = "sqlite:///%s" % filename
        db = connection.connect(dburi, create=True, verbose=False)

        # Downgrade to the 4.5.0-era schema (version 4); verification fails.
        db_downgrade(db, pegasus_version="4.5.0", verbose=False)
        self.assertEqual(db_current_version(db), 4)
        self.assertRaises(DBAdminError, db_verify, db)
        # Re-attach rc_lfn to the metadata the downgrade detached it from.
        rc_lfn._set_parent(metadata)
        db.close()

        # create=True upgrades back to the current schema version.
        db = connection.connect(dburi, create=True, verbose=False)
        self.assertEqual(db_current_version(db), CURRENT_DB_VERSION)
        db.close()
        _remove(filename)

        # A second independent DB can be created and removed cleanly.
        fn = str(uuid.uuid4())
        _silentremove(fn)
        dburi2 = "sqlite:///%s" % fn
        db2 = connection.connect(dburi2, create=True, verbose=False)
        _remove(fn)
Beispiel #27
0
    def test_jdbc_sqlite(self):
        """jdbc:sqlite URIs (relative and absolute forms) connect and create
        the current schema.

        Fix: use assertEqual — assertEquals is a deprecated alias removed in
        Python 3.12.
        """
        # Relative path form.
        filename = str(uuid.uuid4())
        _silentremove(filename)
        dburi = "jdbc:sqlite:%s" % filename
        db = connection.connect(dburi, create=True, verbose=False)
        self.assertEqual(db_current_version(db), CURRENT_DB_VERSION)
        db.close()
        _remove(filename)

        # Absolute path form.
        filename = "/tmp/" + str(uuid.uuid4())
        _silentremove(filename)
        dburi = "jdbc:sqlite:%s" % filename
        db = connection.connect(dburi, create=True, verbose=False)
        self.assertEqual(db_current_version(db), CURRENT_DB_VERSION)
        db.close()
        _remove(filename)

        # Absolute path with extra leading slash.
        dburi = "jdbc:sqlite:/%s" % filename
        db = connection.connect(dburi, create=True, verbose=False)
        self.assertEqual(db_current_version(db), CURRENT_DB_VERSION)
        db.close()
        _remove(filename)
Beispiel #28
0
def test_create_database():
    """In-memory DB creation and self-repair: connect(create=True) must
    rebuild the schema after tables are dropped or emptied.

    Fix: the second raises-check was ``db_verify(db, check=True), "4.3.0"``
    — a stray tuple that never passed the version string to db_verify.
    The version is now passed as an argument, matching the unittest
    variant ``assertRaises(DBAdminError, db_verify, db, "4.3.0")``.
    """
    dburi = "sqlite://"

    db = connection.connect(dburi, create=True, verbose=False)
    assert get_version(db) == CURRENT_DB_VERSION

    # Missing dbversion table: verification fails, reconnect repairs.
    db.execute("DROP TABLE dbversion")
    with pytest.raises(DBAdminError):
        db_verify(db, check=True)
    db.close()

    db = connection.connect(dburi, create=True, verbose=False)
    assert get_version(db) == CURRENT_DB_VERSION

    # Emptied dbversion table is repaired on reconnect as well.
    db.execute("DELETE FROM dbversion")
    db.close()

    db = connection.connect(dburi, create=True, verbose=False)
    assert get_version(db) == CURRENT_DB_VERSION

    # Missing data table (rc_pfn).
    db.execute("DROP TABLE rc_pfn")
    with pytest.raises(DBAdminError):
        db_verify(db, check=True)
    db.close()

    db = connection.connect(dburi, create=True, verbose=False)
    assert get_version(db) == CURRENT_DB_VERSION

    # Several tables missing at once; also check an explicit old version.
    db.execute("DROP TABLE rc_pfn")
    db.execute("DROP TABLE master_workflow")
    with pytest.raises(DBAdminError):
        db_verify(db, check=True)
    with pytest.raises(DBAdminError):
        db_verify(db, "4.3.0", check=True)
    db.close()

    db = connection.connect(dburi, create=True, verbose=False)
    assert get_version(db) == CURRENT_DB_VERSION
    db.close()
Beispiel #29
0
    def __init__(self, dburi, batch=True, props=None, db_type=None, backup=False, flush_every=1000):
        """Open a batching DB session for *dburi*.

        Will be overridden by subclasses to take
        parameters specific to their function.
        """
        self.log = logging.getLogger("%s.%s" % (self.__module__, self.__class__.__name__))
        self.dburi = dburi
        self.session = connection.connect(dburi, create=True, props=props, db_type=db_type, backup=backup)

        # flags and state for batching
        self._batch = batch
        self._flush_every = flush_every
        self._flush_count = 0
        self._last_flush = time.time()
Beispiel #30
0
    def test_jdbc_sqlite(self):
        """jdbc:sqlite URIs (relative and absolute forms) connect and create
        the current schema.

        Fix: use assertEqual — assertEquals is a deprecated alias removed in
        Python 3.12.
        """
        # Relative path form.
        filename = str(uuid.uuid4())
        _silentremove(filename)
        dburi = "jdbc:sqlite:%s" % filename
        db = connection.connect(dburi, create=True)
        self.assertEqual(db_current_version(db), CURRENT_DB_VERSION)
        db.close()
        _remove(filename)

        # Absolute path form.
        filename = "/tmp/" + str(uuid.uuid4())
        _silentremove(filename)
        dburi = "jdbc:sqlite:%s" % filename
        db = connection.connect(dburi, create=True)
        self.assertEqual(db_current_version(db), CURRENT_DB_VERSION)
        db.close()
        _remove(filename)

        # Absolute path with extra leading slash.
        dburi = "jdbc:sqlite:/%s" % filename
        db = connection.connect(dburi, create=True)
        self.assertEqual(db_current_version(db), CURRENT_DB_VERSION)
        db.close()
        _remove(filename)
Beispiel #31
0
    def test_version_operations(self):
        """A freshly created DB is at the current version and verifies.

        Fixes: assertEquals (deprecated alias) -> assertEqual; close the
        session before removing the backing sqlite file.
        """
        filename = str(uuid.uuid4())
        _silentremove(filename)  # start from a fresh file
        dburi = "sqlite:///%s" % filename
        db = connection.connect(dburi, create=True)

        # db_downgrade(db, "4.4.2")
        # self.assertEquals(db_current_version(db), 2)
        # self.assertRaises(DBAdminError, db_verify, db)
        #
        # db_downgrade(db)
        # self.assertEquals(db_current_version(db), 1)
        # self.assertRaises(DBAdminError, db_verify, db)
        # db.close()
        #
        # db = connection.connect(dburi, create=True, pegasus_version="4.4.0")
        # self.assertEquals(db_current_version(db), 2)
        # self.assertRaises(DBAdminError, db_verify, db)
        # db.close()

        db = connection.connect(dburi, create=True)
        self.assertEqual(db_current_version(db), CURRENT_DB_VERSION)
        self.assertTrue(db_verify(db))
        db.close()
        _remove(filename)
Beispiel #32
0
    def test_version_operations(self):
        """A freshly created DB is at the current version and verifies.

        Fixes: assertEquals (deprecated alias) -> assertEqual; close the
        session before removing the backing sqlite file.
        """
        filename = str(uuid.uuid4())
        _silentremove(filename)  # start from a fresh file
        dburi = "sqlite:///%s" % filename
        db = connection.connect(dburi, create=True)

        # db_downgrade(db, "4.4.2")
        # self.assertEquals(db_current_version(db), 2)
        # self.assertRaises(DBAdminError, db_verify, db)
        #
        # db_downgrade(db)
        # self.assertEquals(db_current_version(db), 1)
        # self.assertRaises(DBAdminError, db_verify, db)
        # db.close()
        #
        # db = connection.connect(dburi, create=True, pegasus_version="4.4.0")
        # self.assertEquals(db_current_version(db), 2)
        # self.assertRaises(DBAdminError, db_verify, db)
        # db.close()

        db = connection.connect(dburi, create=True)
        self.assertEqual(db_current_version(db), CURRENT_DB_VERSION)
        self.assertTrue(db_verify(db))
        db.close()
        _remove(filename)
Beispiel #33
0
    def delete(self):
        "Delete this submit dir and its entry in the master db"

        # Verify that we aren't trying to move a subworkflow
        if self.is_subworkflow():
            raise SubmitDirException(
                "Subworkflows cannot be deleted independent of the root workflow"
            )

        # Confirm that they want to delete the workflow
        while True:
            try:
                # Python 2 compatibility shim: use raw_input when it exists.
                input = raw_input
            except NameError:
                pass
            answer = (input(
                "Are you sure you want to delete this workflow? This operation cannot be undone. [y/n]: "
            ).strip().lower())
            if answer == "y":
                break
            if answer == "n":
                return

        # Connect to master database
        mdbsession = connection.connect_by_submitdir(self.submitdir,
                                                     connection.DBType.MASTER)
        mdb = MasterDatabase(mdbsession)

        # Delete all of the records from the workflow db if they are not using
        # an sqlite db that is in the submit dir.
        db_url = connection.url_by_submitdir(self.submitdir,
                                             connection.DBType.WORKFLOW)
        if self.submitdir not in db_url:
            dbsession = connection.connect(db_url)
            db = WorkflowDatabase(dbsession)
            db.delete_workflow(self.wf_uuid)
            dbsession.commit()
            dbsession.close()

        # Delete the workflow
        mdb.delete_master_workflow(self.wf_uuid)

        # Remove all the files
        shutil.rmtree(self.submitdir)

        # Update master db
        mdbsession.commit()
        mdbsession.close()
Beispiel #34
0
    def test_partial_database(self):
        """A DB containing only a subset of tables (JDBCRC, dashboard, or
        stampede alone) fails verification, and connect(create=True)
        completes it to the full current schema."""
        filename = str(uuid.uuid4())
        _silentremove(filename)  # start from a fresh file
        dburi = "sqlite:///%s" % filename
        # Only the JDBCRC (replica catalog) tables.
        db = connection.connect(
            dburi, schema_check=False, create=False, verbose=False
        )
        rc_sequences.create(db.get_bind(), checkfirst=True)
        rc_lfn.create(db.get_bind(), checkfirst=True)
        rc_pfn.create(db.get_bind(), checkfirst=True)
        rc_meta.create(db.get_bind(), checkfirst=True)
        self.assertRaises(DBAdminError, db_verify, db)
        db.close()

        db = connection.connect(dburi, create=True, verbose=False)
        self.assertEqual(db_current_version(db), CURRENT_DB_VERSION)
        _remove(filename)

        # Only the dashboard (pg_*) tables.
        db = connection.connect(
            dburi, schema_check=False, create=False, verbose=False
        )
        pg_workflow.create(db.get_bind(), checkfirst=True)
        pg_workflowstate.create(db.get_bind(), checkfirst=True)
        pg_ensemble.create(db.get_bind(), checkfirst=True)
        pg_ensemble_workflow.create(db.get_bind(), checkfirst=True)
        self.assertRaises(DBAdminError, db_verify, db)
        db.close()

        db = connection.connect(dburi, create=True, verbose=False)
        self.assertEqual(db_current_version(db), CURRENT_DB_VERSION)
        _remove(filename)

        # Only the stampede (st_*) tables.
        db = connection.connect(
            dburi, schema_check=False, create=False, verbose=False
        )
        st_workflow.create(db.get_bind(), checkfirst=True)
        st_workflowstate.create(db.get_bind(), checkfirst=True)
        st_host.create(db.get_bind(), checkfirst=True)
        st_job.create(db.get_bind(), checkfirst=True)
        st_job_edge.create(db.get_bind(), checkfirst=True)
        st_job_instance.create(db.get_bind(), checkfirst=True)
        st_jobstate.create(db.get_bind(), checkfirst=True)
        st_task.create(db.get_bind(), checkfirst=True)
        st_task_edge.create(db.get_bind(), checkfirst=True)
        st_invocation.create(db.get_bind(), checkfirst=True)
        self.assertRaises(DBAdminError, db_verify, db)
        db.close()

        db = connection.connect(dburi, create=True, verbose=False)
        self.assertEqual(db_current_version(db), CURRENT_DB_VERSION)
        _remove(filename)
Beispiel #35
0
    def test_upper_version(self):
        """
        Test whether DBs created with newer Pegasus version raises an exception.
        """
        filename = str(uuid.uuid4())
        _silentremove(filename)
        dburi = "sqlite:///%s" % filename
        # verbose=False keeps the test output quiet, consistent with the
        # other tests in this suite
        db = connection.connect(dburi, create=True, verbose=False)
        # Forge a dbversion record claiming a *future* schema version
        dbversion = DBVersion()
        dbversion.version = CURRENT_DB_VERSION + 1
        dbversion.version_number = CURRENT_DB_VERSION + 1
        dbversion.version_timestamp = datetime.datetime.now().strftime("%s")
        db.add(dbversion)
        db.commit()

        # Both version lookup and verification must refuse a newer schema
        self.assertRaises(DBAdminError, db_current_version, db)
        self.assertRaises(DBAdminError, db_verify, db)

        # Close the session before deleting the backing sqlite file so the
        # connection is not left dangling (was previously leaked)
        db.close()
        _remove(filename)
Beispiel #36
0
    def __init__(self, conn_string, debug=False):
        """Connect to the master database.

        Raises ValueError when no connection string is given, DBAdminError
        when the database is read-only, and MasterDBNotFoundError for any
        other connection failure.
        """
        self._dbg = debug

        if conn_string is None:
            raise ValueError("Connection string is required")

        try:
            self.session = connection.connect(conn_string)
        except connection.ConnectionError as e:
            log.exception(e)
            # Exception.message no longer exists on Python 3, so the old
            # ``while isinstance(message, Exception): message = message.message``
            # unwrapping loop would itself raise AttributeError; str(e) is
            # portable and yields the innermost message text.
            message = str(e)

            if "attempt to write a readonly database" in message:
                raise DBAdminError(message)

            raise MasterDBNotFoundError(e)
Beispiel #37
0
    def test_upper_version(self):
        """
        Test whether DBs created with newer Pegasus version raises an exception.
        """
        db_file = str(uuid.uuid4())
        _silentremove(db_file)
        session = connection.connect("sqlite:///%s" % db_file, create=True, verbose=False)

        # Pretend the database was created by a newer Pegasus release
        record = DBVersion()
        record.version = CURRENT_DB_VERSION + 1
        record.version_number = CURRENT_DB_VERSION + 1
        record.version_timestamp = datetime.datetime.now().strftime("%s")
        session.add(record)
        session.commit()

        # Version lookup and verification must both reject the newer schema
        self.assertRaises(DBAdminError, db_current_version, session)
        self.assertRaises(DBAdminError, db_verify, session)

        _remove(db_file)
    def __init__(self, connString=None, expand_workflow=True):
        """Open a stampede DB session.

        Raises StampedeDBNotFoundError when the connection cannot be
        established or the schema check fails.
        """
        self.log = logging.getLogger("%s.%s" % (self.__module__, self.__class__.__name__))
        try:
            self.session = connection.connect(connString)
        except (connection.ConnectionError, DBAdminError) as e:
            self.log.exception(e)
            raise StampedeDBNotFoundError

        self._expand = expand_workflow

        # Root-workflow bookkeeping, populated lazily
        self._root_wf_id = []
        self._root_wf_uuid = []
        self.all_workflows = None
        self._wfs = []

        # Query filters; all unset until configured by the caller
        self._job_filter_mode = None
        self._time_filter_mode = None
        self._host_filter = None
        self._xform_filter = {'include': None, 'exclude': None}
Beispiel #39
0
    def __init__(self, conn_string, debug=False):
        """Connect to the master database.

        Raises ValueError when no connection string is given, DBAdminError
        when the database is read-only, and MasterDBNotFoundError for any
        other connection failure.
        """
        self._dbg = debug

        if conn_string is None:
            raise ValueError('Connection string is required')

        try:
            self.session = connection.connect(conn_string)
        except connection.ConnectionError as e:
            log.exception(e)
            # Exception.message was removed in Python 3, so the old
            # ``message = message.message`` unwrapping loop would raise
            # AttributeError before reaching the checks below; str(e) is
            # portable across Python 2 and 3.
            message = str(e)

            if 'attempt to write a readonly database' in message:
                raise DBAdminError(message)

            raise MasterDBNotFoundError(e)
Beispiel #40
0
    def __init__(
        self,
        dburi,
        batch=True,
        props=None,
        db_type=None,
        backup=False,
        flush_every=1000,
    ):
        """Will be overridden by subclasses to take
        parameters specific to their function.

        :param dburi: database connection URI
        :param batch: whether events should be batched before flushing
        :param props: optional properties object supplying connect arguments
                      (may be None)
        :param db_type: type of database being connected to
        :param backup: whether the database should be backed up first
        :param flush_every: number of events between automatic flushes
        """
        self.log = logging.getLogger("{}.{}".format(self.__module__,
                                                    self.__class__.__name__))
        self.dburi = dburi

        # PM-898 all props passed should have pegasus prefix stripped off
        # so they are more like connect_args to be used for database
        connect_args = {}
        # Guard against the documented default: props=None previously
        # crashed on props.keyset() with AttributeError
        if props is not None:
            for key in props.keyset():
                # we don't pass url in connect args
                if key != "url":
                    connect_args[key] = props.property(key)

        # make sure timeout is an int
        if "timeout" in connect_args:
            connect_args["timeout"] = int(connect_args["timeout"])

        self.session = connection.connect(
            dburi,
            create=True,
            connect_args=connect_args,
            db_type=db_type,
            backup=backup,
        )

        # flags and state for batching
        self._batch = batch
        self._flush_every = flush_every
        self._flush_count = 0
        self._last_flush = time.time()
Beispiel #41
0
    def delete(self):
        "Delete this submit dir and its entry in the master db"

        # Subworkflows share the root workflow's records, so refuse to
        # delete them on their own
        if self.is_subworkflow():
            raise SubmitDirException("Subworkflows cannot be deleted independent of the root workflow")

        # Python 2 compatibility: use raw_input when it exists
        try:
            input = raw_input
        except NameError:
            pass

        # Keep prompting until the user gives a definite yes or no
        while True:
            answer = input("Are you sure you want to delete this workflow? This operation cannot be undone. [y/n]: ").strip().lower()
            if answer == "n":
                return
            if answer == "y":
                break

        # Connect to master database
        mdb_session = connection.connect_by_submitdir(self.submitdir, connection.DBType.MASTER)
        master_db = MasterDatabase(mdb_session)

        # Delete all of the records from the workflow db if they are not using
        # an sqlite db that is in the submit dir.
        wf_db_url = connection.url_by_submitdir(self.submitdir, connection.DBType.WORKFLOW)
        if self.submitdir not in wf_db_url:
            wf_session = connection.connect(wf_db_url)
            WorkflowDatabase(wf_session).delete_workflow(self.wf_uuid)
            wf_session.commit()
            wf_session.close()

        # Delete the workflow record itself
        master_db.delete_master_workflow(self.wf_uuid)

        # Remove all the files
        shutil.rmtree(self.submitdir)

        # Update master db
        mdb_session.commit()
        mdb_session.close()
Beispiel #42
0
def delete_dashboard_workflow(dburi, wf_uuid):
    "Expunge workflow from dashboard database"

    log.info('Expunging %s from dashboard database', wf_uuid)

    session = connection.connect(dburi, create=True)
    try:
        query = session.query(DashboardWorkflow).filter(DashboardWorkflow.wf_uuid == wf_uuid)
        try:
            wf = query.one()
        # ``except X as e`` is valid on both Python 2.6+ and 3; the old
        # comma form was Python-2-only (and ``e`` was never used)
        except orm.exc.NoResultFound:
            log.warn('No workflow found with wf_uuid %s - aborting expunge', wf_uuid)
            return

        session.delete(wf)

        i = time.time()
        session.flush()
        session.commit()
        log.info('Flush took: %f seconds', time.time() - i)
    finally:
        # The bare ``try:`` had no matching finally/except (a syntax error
        # as written) and the session was never released; close it on every
        # path, matching the sibling delete_workflow() implementation.
        session.close()
Beispiel #43
0
def delete_dashboard_workflow(dburi, wf_uuid):
    "Expunge workflow from dashboard database"

    log.info("Expunging %s from dashboard database", wf_uuid)

    session = connection.connect(dburi, create=True)
    try:
        query = session.query(DashboardWorkflow).filter(DashboardWorkflow.wf_uuid == wf_uuid)
        try:
            wf = query.one()
        # ``except X as e`` works on Python 2.6+ and 3; the comma form was
        # Python-2-only (and ``e`` was unused)
        except orm.exc.NoResultFound:
            log.warn("No workflow found with wf_uuid %s - aborting expunge", wf_uuid)
            return

        session.delete(wf)

        i = time.time()
        session.flush()
        session.commit()
        log.info("Flush took: %f seconds", time.time() - i)
    finally:
        # The bare ``try:`` previously had no finally/except (a syntax
        # error) and leaked the session; always release it.
        session.close()
Beispiel #44
0
def test_upper_version():
    """
    Test whether DBs created with newer Pegasus version raises an exception.
    """
    db = connection.connect("sqlite://", create=True, verbose=False)

    # Record a schema version one ahead of what this code supports
    future_version = CURRENT_DB_VERSION + 1
    stamp = datetime.datetime.now() + datetime.timedelta(seconds=3)

    dbversion = DBVersion()
    dbversion.version = future_version
    dbversion.version_number = future_version
    dbversion.version_timestamp = stamp.strftime("%s")
    db.add(dbversion)
    db.commit()

    # Both the version query and verification must refuse the newer schema
    with pytest.raises(DBAdminError):
        get_version(db)
    with pytest.raises(DBAdminError):
        db_verify(db, check=True)

    db.close()
Beispiel #45
0
 def test_partial_database(self):
     """Partially created schemas must fail db_verify; a subsequent connect
     with create=True must complete them and verify cleanly."""
     filename = str(uuid.uuid4())
     _silentremove(filename)
     dburi = "sqlite:///%s" % filename

     # Only the replica-catalog tables present
     db = connection.connect(dburi, schema_check=False, create=False)
     rc_sequences.create(db.get_bind(), checkfirst=True)
     rc_lfn.create(db.get_bind(), checkfirst=True)
     rc_attr.create(db.get_bind(), checkfirst=True)
     self.assertRaises(DBAdminError, db_verify, db)
     db.close()

     db = connection.connect(dburi, create=True)
     # assertEquals is a deprecated alias of assertEqual (removed in
     # Python 3.12); use the canonical name
     self.assertEqual(db_current_version(db), CURRENT_DB_VERSION)
     self.assertTrue(db_verify(db))
     _remove(filename)

     # Only the dashboard tables present
     db = connection.connect(dburi, schema_check=False, create=False)
     pg_workflow.create(db.get_bind(), checkfirst=True)
     pg_workflowstate.create(db.get_bind(), checkfirst=True)
     pg_ensemble.create(db.get_bind(), checkfirst=True)
     pg_ensemble_workflow.create(db.get_bind(), checkfirst=True)
     self.assertRaises(DBAdminError, db_verify, db)
     db.close()

     db = connection.connect(dburi, create=True)
     self.assertEqual(db_current_version(db), CURRENT_DB_VERSION)
     self.assertTrue(db_verify(db))
     _remove(filename)

     # Only the stampede tables present
     db = connection.connect(dburi, schema_check=False, create=False)
     st_workflow.create(db.get_bind(), checkfirst=True)
     st_workflowstate.create(db.get_bind(), checkfirst=True)
     st_host.create(db.get_bind(), checkfirst=True)
     st_job.create(db.get_bind(), checkfirst=True)
     st_job_edge.create(db.get_bind(), checkfirst=True)
     st_job_instance.create(db.get_bind(), checkfirst=True)
     st_jobstate.create(db.get_bind(), checkfirst=True)
     st_task.create(db.get_bind(), checkfirst=True)
     st_task_edge.create(db.get_bind(), checkfirst=True)
     st_invocation.create(db.get_bind(), checkfirst=True)
     st_file.create(db.get_bind(), checkfirst=True)
     self.assertRaises(DBAdminError, db_verify, db)
     db.close()

     db = connection.connect(dburi, create=True)
     self.assertEqual(db_current_version(db), CURRENT_DB_VERSION)
     self.assertTrue(db_verify(db))
     _remove(filename)
Beispiel #46
0
def delete_workflow(dburi, wf_uuid):
    "Expunge workflow from workflow database"

    log.info('Expunging %s from workflow database', wf_uuid)

    session = connection.connect(dburi, create=True)
    try:
        # Locate the top-level workflow record
        try:
            wf = (session.query(Workflow)
                  .filter(Workflow.wf_uuid == wf_uuid)
                  .one())
        except orm.exc.NoResultFound:
            log.warn('No workflow found with wf_uuid %s - aborting expunge',
                     wf_uuid)
            return

        # PM-1218 gather list of descendant workflows with wf_uuid
        try:
            descendants = (session.query(Workflow)
                           .filter(Workflow.root_wf_id == wf.wf_id)
                           .all())
            for child in descendants:
                # delete the files from the rc_lfn explicitly as they are
                # not associated with workflow table
                __delete_workflow_files__(session, child.wf_uuid,
                                          child.wf_id)
        except orm.exc.NoResultFound:
            log.warn('No workflow found with root wf_id %s - aborting expunge',
                     wf.wf_id)
            return

        session.delete(wf)

        log.info('Flushing top-level workflow: %s', wf.wf_uuid)
        start = time.time()
        session.flush()
        session.commit()
        log.info('Flush took: %f seconds', time.time() - start)
    finally:
        session.close()
Beispiel #47
0
def test_partial_database():
    """Each partially created schema must fail verification; create=True
    must then produce a complete, current database."""
    dburi = "sqlite://"

    # Three partial schemas: replica catalog only, dashboard only,
    # stampede only.  Creation order within each group is preserved.
    table_groups = (
        (RCLFN, RCPFN, RCMeta),
        (Workflow, DashboardWorkflowstate, Ensemble, EnsembleWorkflow),
        (
            Workflow,
            Workflowstate,
            Host,
            Job,
            JobEdge,
            JobInstance,
            Jobstate,
            Task,
            TaskEdge,
            Invocation,
        ),
    )

    for group in table_groups:
        # Build just this subset of tables; verification must fail
        db = connection.connect(dburi, schema_check=False, create=False, verbose=False)
        for model in group:
            model.__table__.create(db.get_bind(), checkfirst=True)
        with pytest.raises(DBAdminError):
            db_verify(db, check=True)
        db.close()

        # A fresh in-memory DB created from scratch is complete and current
        db = connection.connect(dburi, create=True, verbose=False)
        assert get_version(db) == CURRENT_DB_VERSION
        db.close()
Beispiel #48
0
    def attach(self):
        "Add a workflow to the master db"
        # NOTE(review): Python 2 only — uses print statements.

        # Verify that we aren't trying to attach a subworkflow
        if self.is_subworkflow():
            raise SubmitDirException("Subworkflows cannot be attached independent of the root workflow")

        # Connect to master database
        mdbsession = connection.connect_by_submitdir(self.submitdir, connection.DBType.MASTER)
        mdb = MasterDatabase(mdbsession)

        # Check to see if it already exists and just update it
        wf = mdb.get_master_workflow(self.wf_uuid)
        if wf is not None:
            print "Workflow is already in master db"
            old_submit_dir = wf.submit_dir
            if old_submit_dir != self.submitdir:
                print "Updating path..."
                wf.submit_dir = self.submitdir
                wf.db_url = connection.url_by_submitdir(self.submitdir, connection.DBType.WORKFLOW)
                mdbsession.commit()
            mdbsession.close()
            return

        # Connect to workflow db
        db_url = connection.url_by_submitdir(self.submitdir, connection.DBType.WORKFLOW)
        dbsession = connection.connect(db_url)
        db = WorkflowDatabase(dbsession)

        # Get workflow record
        wf = db.get_workflow(self.wf_uuid)
        if wf is None:
            print "No database record for that workflow exists"
            return

        # Update the workflow record with this submit dir's current location
        # before its fields are copied into the master record below
        wf.submit_dir = self.submitdir
        wf.db_url = db_url

        # Insert workflow record into master db
        mwf = DashboardWorkflow()
        mwf.wf_uuid = wf.wf_uuid
        mwf.dax_label = wf.dax_label
        mwf.dax_version = wf.dax_version
        mwf.dax_file = wf.dax_file
        mwf.dag_file_name = wf.dag_file_name
        mwf.timestamp = wf.timestamp
        mwf.submit_hostname = wf.submit_hostname
        mwf.submit_dir = self.submitdir
        mwf.planner_arguments = wf.planner_arguments
        mwf.user = wf.user
        mwf.grid_dn = wf.grid_dn
        mwf.planner_version = wf.planner_version
        mwf.db_url = wf.db_url
        mwf.archived = self.is_archived()
        mdbsession.add(mwf)
        mdbsession.flush() # We should have the new wf_id after this

        # Query states from workflow database
        states = db.get_workflow_states(wf.wf_id)

        # Insert states into master db, re-keyed to the new master wf_id
        for s in states:
            ms = DashboardWorkflowstate()
            ms.wf_id = mwf.wf_id
            ms.state = s.state
            ms.timestamp = s.timestamp
            ms.restart_count = s.restart_count
            ms.status = s.status
            mdbsession.add(ms)
        mdbsession.flush()

        # Commit the workflow-db updates first, then the master-db inserts
        dbsession.commit()
        dbsession.close()

        mdbsession.commit()
        mdbsession.close()
Beispiel #49
0
    def move(self, dest):
        "Move this submit directory to dest"

        dest = os.path.abspath(dest)

        # Refuse to clobber an existing file; moving *into* an existing
        # directory appends this submit dir's basename to dest
        if os.path.isfile(dest):
            raise SubmitDirException("Destination is a file: %s" % dest)

        if os.path.isdir(dest):
            if os.path.exists(os.path.join(dest, "braindump.txt")):
                raise SubmitDirException("Destination is a submit dir: %s" % dest)
            dest = os.path.join(dest, os.path.basename(self.submitdir))

        # Verify that we aren't trying to move a subworkflow
        if self.is_subworkflow():
            raise SubmitDirException("Subworkflows cannot be moved independent of the root workflow")

        # Connect to master database
        mdbsession = connection.connect_by_submitdir(self.submitdir, connection.DBType.MASTER)
        mdb = MasterDatabase(mdbsession)

        # Get the workflow record from the master db
        db_url = None
        wf = mdb.get_master_workflow(self.wf_uuid)
        if wf is None:
            # No master record: derive the workflow-db URL from the
            # submit dir itself
            db_url = connection.url_by_submitdir(self.submitdir, connection.DBType.WORKFLOW)
        else:
            # We found an mdb record, so we need to update it

            # Save the master db's pointer
            db_url = wf.db_url

            # Update the master db's db_url
            # Note that this will only update the URL if it is an sqlite file
            # located in the submitdir
            log.info("Old master db_url: %s" % wf.db_url)
            wf.db_url = db_url.replace(self.submitdir, dest)
            log.info("New master db_url: %s" % wf.db_url)

            # Change the master db's submit_dir
            log.info("Old master submit_dir: %s" % wf.submit_dir)
            wf.submit_dir = dest
            log.info("New master submit_dir: %s" % wf.submit_dir)

        # Update the ensemble record if one exists
        ew = mdb.get_ensemble_workflow(self.wf_uuid)
        if ew is not None:
            log.info("Old ensemble submit dir: %s", ew.submitdir)
            ew.submitdir = dest
            log.info("New ensemble submit dir: %s", ew.submitdir)

        # Update the workflow database if we found one: rewrite every
        # stored submit-dir path from the old location to the new one
        if db_url is not None:
            dbsession = connection.connect(db_url)
            db = WorkflowDatabase(dbsession)
            root_wf = db.get_workflow(self.wf_uuid)
            db.update_submit_dirs(root_wf.wf_id, self.submitdir, dest)
            dbsession.commit()
            dbsession.close()

        # Move all the files
        shutil.move(self.submitdir, dest)

        # Set new paths in the braindump file
        self.braindump["submit_dir"] = dest
        self.braindump["basedir"] = os.path.dirname(dest)
        utils.write_braindump(os.path.join(dest, "braindump.txt"), self.braindump)

        # Note that we do not need to update the properties file even though it
        # might contain DB URLs because it cannot contain a DB URL with the submit
        # dir in it.

        # TODO We might want to update all of the absolute paths in the condor submit files
        # if we plan on moving workflows that could be resubmitted in the future

        # TODO We might want to update the braindump files for subworkflows

        # Update master database (committed last, after files have moved)
        mdbsession.commit()
        mdbsession.close()

        # Finally, update object
        self.submitdir = dest
Beispiel #50
0
def all_workflows_db(db, update=True, pegasus_version=None, schema_check=True, force=False):
    """
    Update/Downgrade all completed workflow databases listed in master_workflow table.
    :param db: DB session object
    :param update: True to update, False to downgrade
    :param pegasus_version: version of the Pegasus software (e.g., 4.6.0)
    :param schema_check: whether a sanity check of the schema should be performed
    :param force: whether operations should be performed despite conflicts
    """
    # NOTE(review): Python 2 only (print statement, ``except X, e`` syntax);
    # this variant also never closes f_out/f_err or prints the summary that
    # the Python 3 variant of this function produces — possibly truncated.
    # log files
    file_prefix = "%s-dbadmin" % time.strftime("%Y%m%dT%H%M%S")
    f_out = open("%s.out" % file_prefix, "w")
    f_err = open("%s.err" % file_prefix, "w")

    # One row per workflow: its db_url and its most recent state
    data = (
        db.query(DashboardWorkflow.db_url, DashboardWorkflowstate.state, func.max(DashboardWorkflowstate.timestamp))
        .join(DashboardWorkflowstate)
        .group_by(DashboardWorkflow.wf_id)
        .all()
    )

    # Only terminated workflows are touched; others are logged as active
    db_urls = []
    for d in data:
        if d[1] == "WORKFLOW_TERMINATED":
            db_urls.append(d[0])
            f_err.write("[ACTIVE] %s\n" % d[0])

    counts = {
        "total": len(data),
        "running": len(data) - len(db_urls),
        "success": 0,
        "failed": 0,
        "unable_to_connect": 0,
    }
    if update:
        msg = ["updating", "Updated"]
    else:
        msg = ["downgrading", "Downgraded"]

    print ""
    print "Verifying and %s workflow databases:" % msg[0]
    i = counts["running"]
    for dburi in db_urls:
        log.debug("%s '%s'..." % (msg[0], dburi))
        i += 1
        # Simple in-place progress counter
        sys.stdout.write("\r%d/%d" % (i, counts["total"]))
        sys.stdout.flush()
        try:
            if update:
                # connect with create=True performs the schema upgrade
                con = connection.connect(
                    dburi,
                    pegasus_version=pegasus_version,
                    schema_check=schema_check,
                    create=True,
                    force=force,
                    verbose=False,
                )
            else:
                # Downgrades reflect the live schema first, then migrate down
                con = connection.connect(dburi, schema_check=schema_check, create=False, verbose=False)
                metadata.clear()
                warnings.simplefilter("ignore")
                metadata.reflect(bind=con.get_bind())
                db_downgrade(con, pegasus_version=pegasus_version, force=force, verbose=False)
            con.close()
            f_out.write("[SUCCESS] %s\n" % dburi)
            counts["success"] += 1
        except connection.ConnectionError, e:
            if "unable to open database file" in str(e):
                f_err.write("[UNABLE TO CONNECT] %s\n" % dburi)
                counts["unable_to_connect"] += 1
                log.debug(e)
            else:
                f_err.write("[ERROR] %s\n" % dburi)
                counts["failed"] += 1
                log.debug(e)
        except Exception, e:
            # Best-effort: record the failure and continue with the next DB
            f_err.write("[ERROR] %s\n" % dburi)
            counts["failed"] += 1
            log.debug(e)
Beispiel #51
0
def all_workflows_db(db,
                     update=True,
                     pegasus_version=None,
                     schema_check=True,
                     force=False):
    """
    Update/Downgrade all completed workflow databases listed in master_workflow table.
    :param db: DB session object
    :param update: True to update, False to downgrade
    :param pegasus_version: version of the Pegasus software (e.g., 4.6.0)
    :param schema_check: whether a sanity check of the schema should be performed
    :param force: whether operations should be performed despite conflicts
    """
    # log files recording the outcome per workflow database
    file_prefix = "%s-dbadmin" % time.strftime("%Y%m%dT%H%M%S")
    f_out = open("%s.out" % file_prefix, 'w')
    f_err = open("%s.err" % file_prefix, 'w')

    # One row per workflow: its db_url and its most recent state
    data = db.query(DashboardWorkflow.db_url, DashboardWorkflowstate.state,
                    func.max(DashboardWorkflowstate.timestamp)).join(
                        DashboardWorkflowstate).group_by(
                            DashboardWorkflow.wf_id).all()

    # Only terminated workflows are migrated; active ones are logged and skipped
    db_urls = []
    for d in data:
        if d[1] == "WORKFLOW_TERMINATED":
            db_urls.append(d[0])
            f_err.write("[ACTIVE] %s\n" % d[0])

    counts = {
        'total': len(data),
        'running': len(data) - len(db_urls),
        'success': 0,
        'failed': 0,
        'unable_to_connect': 0,
    }
    if update:
        msg = ['updating', 'Updated']
    else:
        msg = ['downgrading', 'Downgraded']

    print("")
    print("Verifying and %s workflow databases:" % msg[0])
    i = counts['running']
    for dburi in db_urls:
        log.debug("%s '%s'..." % (msg[0], dburi))
        i += 1
        # Simple in-place progress counter
        sys.stdout.write("\r%d/%d" % (i, counts['total']))
        sys.stdout.flush()
        try:
            if update:
                # connect with create=True performs the schema upgrade
                con = connection.connect(dburi,
                                         pegasus_version=pegasus_version,
                                         schema_check=schema_check,
                                         create=True,
                                         force=force,
                                         verbose=False)
            else:
                # Downgrades reflect the live schema first, then migrate down
                con = connection.connect(dburi,
                                         schema_check=schema_check,
                                         create=False,
                                         verbose=False)
                metadata.clear()
                warnings.simplefilter("ignore")
                metadata.reflect(bind=con.get_bind())
                db_downgrade(con,
                             pegasus_version=pegasus_version,
                             force=force,
                             verbose=False)
            con.close()
            f_out.write("[SUCCESS] %s\n" % dburi)
            counts['success'] += 1
        except connection.ConnectionError as e:
            if "unable to open database file" in str(e):
                f_err.write("[UNABLE TO CONNECT] %s\n" % dburi)
                counts['unable_to_connect'] += 1
                log.debug(e)
            else:
                f_err.write("[ERROR] %s\n" % dburi)
                counts['failed'] += 1
                log.debug(e)
        except Exception as e:
            # Best-effort: record the failure and continue with the next DB
            f_err.write("[ERROR] %s\n" % dburi)
            counts['failed'] += 1
            log.debug(e)

    f_out.close()
    f_err.close()

    print("\n\nSummary:")
    print("  Verified/%s: %s/%s" %
          (msg[1], counts['success'], counts['total']))
    print("  Failed: %s/%s" % (counts['failed'], counts['total']))
    print("  Unable to connect: %s/%s" %
          (counts['unable_to_connect'], counts['total']))
    print("  Unable to update (active workflows): %s/%s" %
          (counts['running'], counts['total']))
    print("\nLog files:")
    print("  %s.out (Succeeded operations)" % file_prefix)
    print("  %s.err (Failed operations)" % file_prefix)
Beispiel #52
0
 def __init__(self, dburi, props=None, db_type=None, **kwarg):
     """Open a session against *dburi*, creating the schema if needed."""
     self.dburi = dburi
     self.session = connection.connect(
         dburi, create=True, props=props, db_type=db_type
     )
Beispiel #53
0
 def __init__(self, dburi, **kwarg):
     """Open a session against *dburi*, creating the schema if needed.

     Extra keyword arguments are accepted for signature compatibility
     and ignored.
     """
     self.dburi = dburi
     self.session = connection.connect(dburi, create=True)
Beispiel #54
0
def connect():
    """Open a master-database session and stash it on the request-global
    ``g`` object (presumably Flask's ``g`` — confirm against caller)."""
    log.debug("Connecting to database")
    g.session = connection.connect(g.master_db_url)
Beispiel #55
0
    def attach(self):
        "Add a workflow to the master db"

        # Verify that we aren't trying to attach a subworkflow
        if self.is_subworkflow():
            raise SubmitDirException("Subworkflows cannot be attached independent of the root workflow")

        # Connect to master database
        mdbsession = connection.connect_by_submitdir(self.submitdir, connection.DBType.MASTER)
        mdb = MasterDatabase(mdbsession)

        # Check to see if it already exists and just update it
        wf = mdb.get_master_workflow(self.wf_uuid)
        if wf is not None:
            print("Workflow is already in master db")
            old_submit_dir = wf.submit_dir
            if old_submit_dir != self.submitdir:
                print("Updating path...")
                wf.submit_dir = self.submitdir
                wf.db_url = connection.url_by_submitdir(self.submitdir, connection.DBType.WORKFLOW)
                mdbsession.commit()
            mdbsession.close()
            return

        # Connect to workflow db
        db_url = connection.url_by_submitdir(self.submitdir, connection.DBType.WORKFLOW)
        dbsession = connection.connect(db_url)
        db = WorkflowDatabase(dbsession)

        # Get workflow record
        wf = db.get_workflow(self.wf_uuid)
        if wf is None:
            print("No database record for that workflow exists")
            return

        # Update the workflow record with this submit dir's current location
        # before its fields are copied into the master record below
        wf.submit_dir = self.submitdir
        wf.db_url = db_url

        # Insert workflow record into master db
        mwf = DashboardWorkflow()
        mwf.wf_uuid = wf.wf_uuid
        mwf.dax_label = wf.dax_label
        mwf.dax_version = wf.dax_version
        mwf.dax_file = wf.dax_file
        mwf.dag_file_name = wf.dag_file_name
        mwf.timestamp = wf.timestamp
        mwf.submit_hostname = wf.submit_hostname
        mwf.submit_dir = self.submitdir
        mwf.planner_arguments = wf.planner_arguments
        mwf.user = wf.user
        mwf.grid_dn = wf.grid_dn
        mwf.planner_version = wf.planner_version
        mwf.db_url = wf.db_url
        mwf.archived = self.is_archived()
        mdbsession.add(mwf)
        mdbsession.flush() # We should have the new wf_id after this

        # Query states from workflow database
        states = db.get_workflow_states(wf.wf_id)

        # Insert states into master db, re-keyed to the new master wf_id
        for s in states:
            ms = DashboardWorkflowstate()
            ms.wf_id = mwf.wf_id
            ms.state = s.state
            ms.timestamp = s.timestamp
            ms.restart_count = s.restart_count
            ms.status = s.status
            mdbsession.add(ms)
        mdbsession.flush()

        # Commit the workflow-db updates first, then the master-db inserts
        dbsession.commit()
        dbsession.close()

        mdbsession.commit()
        mdbsession.close()