Code Example #1
def cluster_named(cluster_name, pattern=None):
    global _clusters
    cluster = _clusters.get((cluster_name, pattern), None)
    if cluster is None:
        # user
        config_user = config.config_value('remote', cluster_name, 'user')
        if config_user is None:
            config_user = '******'
        # identity
        config_identity = config.config_value('remote', cluster_name,
                                              'identity')
        # hosts
        config_hosts = config.config_value('remote', cluster_name, 'hosts')
        if isinstance(config_hosts, list) or isinstance(config_hosts, tuple):
            hosts = [
                Host(addr, addr, config_user, config_identity, None)
                for addr in config_hosts
            ]
        elif isinstance(config_hosts, dict):
            hosts = []
            for name, host_spec in config_hosts.iteritems():
                addr, db_profile = _parse_host_spec(cluster_name, host_spec)
                hosts.append(
                    Host(name, addr, config_user, config_identity, db_profile))
        else:
            return None
        if config_user and hosts:
            cluster = Cluster(cluster_name, config_user, hosts, pattern)
            _clusters[(cluster_name, pattern)] = cluster
        else:
            cluster = None
    return cluster
Code Example #2
File: SpCompiler.py Project: Jahze/smtest
 def __init__( self, name ):
     self.name = name
     self.outputFile = ''
     self.baseCommand = [
         config.config_value('SPCOMP'),
         '-i' + config.config_value('SP_INCLUDE_DIR')
     ]
Code Example #3
File: cluster.py Project: geophile/osh
def cluster_named(cluster_name, pattern = None):
    global _clusters
    cluster = _clusters.get((cluster_name, pattern), None)
    if cluster is None:
        # user
        config_user = config.config_value('remote', cluster_name, 'user')
        if config_user is None:
            config_user = '******'
        # identity
        config_identity = config.config_value('remote', cluster_name, 'identity')
        # hosts
        config_hosts = config.config_value('remote', cluster_name, 'hosts')
        if isinstance(config_hosts, list) or isinstance(config_hosts, tuple):
            hosts = [Host(addr, addr, config_user, config_identity, None)
                     for addr in config_hosts]
        elif isinstance(config_hosts, dict):
            hosts = []
            for name, host_spec in config_hosts.iteritems():
                addr, db_profile = _parse_host_spec(cluster_name, host_spec)
                hosts.append(Host(name, addr, config_user, config_identity, db_profile))
        else:
            return None
        if config_user and hosts:
            cluster = Cluster(cluster_name, config_user, hosts, pattern)
            _clusters[(cluster_name, pattern)] = cluster
        else:
            cluster = None
    return cluster
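
Examples #1 and #3 above read the osh remote-cluster configuration through config.config_value('remote', cluster_name, key) and treat a missing entry as None. The stand-in below only illustrates that variadic, nested lookup; the cluster name and values are made up, and osh's real config module reads an actual configuration file.

# Illustrative stand-in for osh's config.config_value (assumed behavior:
# walk a nested dict of settings and return None for any missing key).
_CONFIG = {
    'remote': {
        'mycluster': {                      # hypothetical cluster name
            'user': 'root',
            'identity': '~/.ssh/id_rsa',
            'hosts': {'node1': '192.168.0.1', 'node2': '192.168.0.2'},
        },
    },
}

def config_value(*keys):
    node = _CONFIG
    for key in keys:
        if not isinstance(node, dict) or key not in node:
            return None
        node = node[key]
    return node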
Code Example #4
File: scheduler.py Project: mersoy/DIGITS
    def load_past_jobs(self):
        """
        Look in the jobs directory and load all valid jobs
        """
        failed = 0
        loaded_jobs = []
        for dir_name in sorted(os.listdir(config_value('jobs_dir'))):
            if os.path.isdir(os.path.join(config_value('jobs_dir'), dir_name)):
                exists = False

                # Make sure it hasn't already been loaded
                if dir_name in self.jobs:
                    exists = True
                    break

                if not exists:
                    try:
                        job = Job.load(dir_name)
                        # The server might have crashed
                        if job.status.is_running():
                            job.status = Status.ABORT
                        for task in job.tasks:
                            if task.status.is_running():
                                task.status = Status.ABORT

                        # We might have changed some attributes here or in __setstate__
                        job.save()
                        loaded_jobs.append(job)
                    except Exception as e:
                        failed += 1
                        if self.verbose:
                            if str(e):
                                print 'Caught %s while loading job "%s":' % (type(e).__name__, dir_name)
                                print '\t%s' % e
                            else:
                                print 'Caught %s while loading job "%s"' % (type(e).__name__, dir_name)

        # add DatasetJobs
        for job in loaded_jobs:
            if isinstance(job, DatasetJob):
                self.jobs[job.id()] = job

        # add ModelJobs
        for job in loaded_jobs:
            if isinstance(job, ModelJob):
                try:
                    # load the DatasetJob
                    job.load_dataset()
                    self.jobs[job.id()] = job
                except Exception as e:
                    failed += 1
                    if self.verbose:
                        if str(e):
                            print 'Caught %s while loading job "%s":' % (type(e).__name__, job.id())
                            print '\t%s' % e
                        else:
                            print 'Caught %s while loading job "%s"' % (type(e).__name__, job.id())

        if failed > 0 and self.verbose:
            print 'WARNING:', failed, 'jobs failed to load.'
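
Most of the DIGITS snippets in this collection call config_value with a single string key such as 'jobs_dir', 'gpu_list' or 'server_name'. To run one of them in isolation, a dictionary-backed stand-in like the one below is enough; the keys and values are placeholders, and the real digits.config module resolves them from the DIGITS configuration system.

# Hypothetical stand-in for DIGITS' config_value; the values are placeholders
# chosen only so the surrounding snippets can be exercised.
_SETTINGS = {
    'jobs_dir': '/tmp/digits_jobs',
    'gpu_list': '0,1',
    'server_name': 'digits-dev',
    'secret_key': 'change-me',
}

def config_value(key):
    return _SETTINGS.get(key)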
Code Example #5
File: scheduler.py Project: flx42/DIGITS
    def delete_job(self, job):
        """
        Deletes an entire job folder from disk
        Returns True if the Job was found and deleted
        """
        if isinstance(job, str) or isinstance(job, unicode):
            job_id = str(job)
        elif isinstance(job, Job):
            job_id = job.id()
        else:
            raise ValueError('called delete_job with a %s' % type(job))
        dependent_jobs = []
        # try to find the job
        for i, job in enumerate(self.jobs):
            if job.id() == job_id:
                if isinstance(job, DatasetJob):
                    # check for dependencies
                    for j in self.jobs:
                        if isinstance(j,
                                      ModelJob) and j.dataset_id == job.id():
                            logger.error(
                                'Cannot delete "%s" (%s) because "%s" (%s) depends on it.'
                                % (job.name(), job.id(), j.name(), j.id()))
                            dependent_jobs.append(j.name())
                if len(dependent_jobs) > 0:
                    error_message = 'Cannot delete "%s" because %d model%s depend%s on it: %s' % (
                        job.name(), len(dependent_jobs),
                        ('s' if len(dependent_jobs) != 1 else ''),
                        ('s' if len(dependent_jobs) == 1 else ''), ', '.join(
                            ['"%s"' % j for j in dependent_jobs]))
                    raise errors.DeleteError(error_message)
                self.jobs.pop(i)
                job.abort()
                if os.path.exists(job.dir()):
                    shutil.rmtree(job.dir())
                logger.info('Job deleted.', job_id=job_id)
                from digits.webapp import socketio
                socketio.emit(
                    'job update',
                    {
                        'update': 'deleted',
                        'job_id': job.id()
                    },
                    namespace='/jobs',
                    room='job_management',
                )
                return True

        # see if the folder exists on disk
        path = os.path.join(config_value('jobs_dir'), job_id)
        path = os.path.normpath(path)
        if os.path.dirname(path) == config_value(
                'jobs_dir') and os.path.exists(path):
            shutil.rmtree(path)
            return True

        return False
Code Example #6
    def delete_job(self, job):
        """
        Deletes an entire job folder from disk
        Returns True if the Job was found and deleted
        """
        if isinstance(job, str) or isinstance(job, unicode):
            job_id = str(job)
        elif isinstance(job, Job):
            job_id = job.id()
        else:
            raise ValueError('called delete_job with a %s' % type(job))
        dependent_jobs = []
        # try to find the job
        job = self.jobs.get(job_id, None)
        if job:
            if isinstance(job, DatasetJob):
                # check for dependencies
                for j in self.jobs.values():
                    if isinstance(j, ModelJob) and j.dataset_id == job.id():
                        logger.error('Cannot delete "%s" (%s) because "%s" (%s) depends on it.' % (job.name(), job.id(), j.name(), j.id()))
                        dependent_jobs.append(j.name())
            if len(dependent_jobs)>0:
                error_message = 'Cannot delete "%s" because %d model%s depend%s on it: %s' % (
                        job.name(),
                        len(dependent_jobs),
                        ('s' if len(dependent_jobs) != 1 else ''),
                        ('s' if len(dependent_jobs) == 1 else ''),
                        ', '.join(['"%s"' % j for j in dependent_jobs]))
                raise errors.DeleteError(error_message)
            self.jobs.pop(job_id, None)
            job.abort()
            if os.path.exists(job.dir()):
                shutil.rmtree(job.dir())
            logger.info('Job deleted.', job_id=job_id)
            from digits.webapp import socketio
            socketio.emit('job update',
                          {
                              'update': 'deleted',
                              'job_id': job.id()
                          },
                          namespace='/jobs',
                          room='job_management',
            )
            return True

        # see if the folder exists on disk
        path = os.path.join(config_value('jobs_dir'), job_id)
        path = os.path.normpath(path)
        if os.path.dirname(path) == config_value('jobs_dir') and os.path.exists(path):
            shutil.rmtree(path)
            return True

        return False
Code Example #7
    def test_inexistent_level2_config_value(self):
        """
        Tests the retrieval of an inexistent level2 configuration entry.
        """
        cfg = {
            'level1': {
                'level2': 1
            }
        }

        with self.assertRaises(Exception):
            config_value(cfg, '/level1/inexistent', accept_none=False)
Code Example #8
File: scheduler.py Project: imclab/DIGITS
    def load_past_jobs(self):
        """
        Look in the jobs directory and load all valid jobs
        """
        loaded_jobs = []
        failed_jobs = []
        for dir_name in sorted(os.listdir(config_value('jobs_dir'))):
            if os.path.isdir(os.path.join(config_value('jobs_dir'), dir_name)):
                exists = False

                # Make sure it hasn't already been loaded
                if dir_name in self.jobs:
                    exists = True
                    break

                if not exists:
                    try:
                        job = Job.load(dir_name)
                        # The server might have crashed
                        if job.status.is_running():
                            job.status = Status.ABORT
                        for task in job.tasks:
                            if task.status.is_running():
                                task.status = Status.ABORT

                        # We might have changed some attributes here or in __setstate__
                        job.save()
                        loaded_jobs.append(job)
                    except Exception as e:
                        failed_jobs.append((dir_name, e))

        # add DatasetJobs
        for job in loaded_jobs:
            if isinstance(job, DatasetJob):
                self.jobs[job.id()] = job

        # add ModelJobs
        for job in loaded_jobs:
            if isinstance(job, ModelJob):
                try:
                    # load the DatasetJob
                    job.load_dataset()
                    self.jobs[job.id()] = job
                except Exception as e:
                    failed_jobs.append((job.id(), e))

        logger.info('Loaded %d jobs.' % len(self.jobs))

        if len(failed_jobs):
            logger.warning('Failed to load %d jobs.' % len(failed_jobs))
            if self.verbose:
                for job_id, e in failed_jobs:
                    logger.debug('%s - %s: %s' % (job_id, type(e).__name__, str(e)))
Code Example #9
    def load_past_jobs(self):
        """
        Look in the jobs directory and load all valid jobs
        """
        loaded_jobs = []
        failed_jobs = []
        for dir_name in sorted(os.listdir(config_value('jobs_dir'))):
            if os.path.isdir(os.path.join(config_value('jobs_dir'), dir_name)):
                # Make sure it hasn't already been loaded
                if dir_name in self.jobs:
                    continue

                try:
                    job = Job.load(dir_name)
                    # The server might have crashed
                    if job.status.is_running():
                        job.status = Status.ABORT
                    for task in job.tasks:
                        if task.status.is_running():
                            task.status = Status.ABORT

                    # We might have changed some attributes here or in __setstate__
                    job.save()
                    loaded_jobs.append(job)
                except Exception as e:
                    failed_jobs.append((dir_name, e))

        # add DatasetJobs
        for job in loaded_jobs:
            if isinstance(job, DatasetJob):
                self.jobs[job.id()] = job

        # add ModelJobs
        for job in loaded_jobs:
            if isinstance(job, ModelJob):
                try:
                    # load the DatasetJob
                    job.load_dataset()
                    self.jobs[job.id()] = job
                except Exception as e:
                    failed_jobs.append((job.id(), e))

        logger.info('Loaded %d jobs.' % len(self.jobs))

        if len(failed_jobs):
            logger.warning('Failed to load %d jobs.' % len(failed_jobs))
            if self.verbose:
                for job_id, e in failed_jobs:
                    logger.debug('%s - %s: %s' %
                                 (job_id, type(e).__name__, str(e)))
Code Example #10
File: test_webapp.py Project: hemrampal/DIGITS
 def test_select_gpus(self):
     """model - select GPUs"""
     # test all possible combinations
     gpu_list = config_value('gpu_list').split(',')
     for i in xrange(len(gpu_list)):
         for combination in itertools.combinations(gpu_list, i+1):
             yield self.check_select_gpus, combination
Code Example #11
File: test_webapp.py Project: patrickdamery/DIGITS
 def test_select_gpus(self):
     """model - select GPUs"""
     # test all possible combinations
     gpu_list = config_value('gpu_list').split(',')
     for i in xrange(len(gpu_list)):
         for combination in itertools.combinations(gpu_list, i + 1):
             yield self.check_select_gpus, combination
Code Example #12
File: FtpFileList.py Project: Jahze/smtest
    def ConnectToFtp( self ):
        self.PrefixLog("Connecting to ftp server " + config.config_value('FTP_HOST'))
        try:
            ftp = ftplib.FTP();
            ftp.connect(
                config.config_value('FTP_HOST'), 
                int(config.config_value('FTP_PORT'))
            )
            ftp.login(
                config.config_value('FTP_USER'),
                config.config_value('FTP_PASS')
            )
        except ftplib.all_errors:
            self.PrefixLog("Connection failed")
            return None

        return ftp
Code Example #13
File: views.py Project: Cloud-CV/DIGITS
def serve_file(path):
    """
    Return a file in the jobs directory

    If you install the nginx.site file, nginx will serve files instead
    and this path will never be used
    """
    jobs_dir = config_value('jobs_dir')
    return flask.send_from_directory(jobs_dir, path)
Code Example #14
File: views.py Project: iwalkdaline/DIGITS
def serve_file(path):
    """
    Return a file in the jobs directory

    If you install the nginx.site file, nginx will serve files instead
    and this path will never be used
    """
    jobs_dir = config_value('jobs_dir')
    return flask.send_from_directory(jobs_dir, path)
Code Example #15
File: SmTest.py Project: Jahze/smtest
    def Run( self ):
        self.Rcon("sm plugins unload_all")

        for smx in self.smxs:
            self.Rcon("sm plugins load " + config_value('PLUGIN_PATH') +
                "/" + smx)

        self.Rcon("sm plugins load_lock");
        self.RunCommands()
Code Example #16
File: CopyFileList.py Project: Jahze/smtest
    def CopyFiles( self, files, deleteFiles ):
        if len(files) == 0:
            return True

        for local,remote in files.items():
            fullRemote = '/'.join([config.config_value("LOCAL_PATH"), remote])
            self.PrefixLog("  > cp " + local + " " + fullRemote)
            shutil.copyfile(local, fullRemote);

        return True
Code Example #17
    def test_default_in_level2_config_value(self):
        """
        Tests the retrieval of an inexistent level1 configuration entry upon
        passing a default value.
        """
        cfg = {}
        expected = 1
        found = config_value(cfg, '/level1/level2', expected)

        self.assertEqual(found, expected)
Code Example #18
    def test_existent_level1_config_value(self):
        """
        Tests the retrieval of an existent level1 configuration entry.
        """
        cfg = {
            'level1': 1
        }
        expected = 1
        found = config_value(cfg, '/level1')

        self.assertEqual(found, expected)
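
Examples #7, #17 and #18 exercise a path-style lookup: config_value(cfg, '/level1/level2', default, accept_none) walks the dictionary along the slash-separated path, falls back to the default when the entry is missing, and raises when nothing is found and accept_none is False. Below is a minimal sketch consistent with those tests; the actual helper in qsp-protocol-node may differ.

# Minimal sketch inferred from the tests above; not the actual
# qsp-protocol-node implementation.
def config_value(cfg, path, default=None, accept_none=True):
    value = cfg
    for key in path.strip('/').split('/'):
        try:
            value = value[key]
        except (KeyError, TypeError):
            value = None
            break
    if value is None:
        value = default
    if value is None and not accept_none:
        raise Exception("Missing configuration entry: %s" % path)
    return value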
Code Example #19
File: SmTest.py Project: Jahze/smtest
    def GetAllFiles( self ):
        files = self.files.copy()

        for smx in self.smxs:
            local = os.path.join(SmTest.SMX_TEMP_DIR, smx)
            remote = '/'.join([
                'addons/sourcemod/plugins',
                config_value('PLUGIN_PATH'),
                os.path.basename(local)
            ])

            files[local] = remote;

        return files
Code Example #20
File: FtpFileList.py Project: Jahze/smtest
    def CopyFiles( self, ftpFiles, deleteFiles ):
        if len(ftpFiles) == 0:
            return True

        ftp = self.ConnectToFtp()
        if ftp == None:
            return False

        for local,remote in ftpFiles.items():
            try:
                file = open(local, "rb")
            except IOError:
                self.PrefixLog("Couldn't open " + local)
                ftp.quit()
                return False

            # Change to the correct directory
            (folder,filename) = os.path.split(remote)
            folder = folder.replace('\\', '/');
            folder = '/'.join([config.config_value("FTP_PATH"), folder])
            if folder:
                try:
                    self.PrefixLog("FTP: CD " + folder)
                    response = ftp.cwd(folder)
                    self.PrefixLog("FTP: " + response)
                except all_errors:
                    self.PrefixLog("FTP: Couldn't CD")
                    return False

            # Save the file
            try:
                self.PrefixLog("FTP: STOR " + filename)
                response = ftp.storbinary('STOR '+filename, file)
                self.PrefixLog("FTP: " + response)
            except all_errors:
                self.PrefixLog("FTP: Couldn't STOR " + filename)
                return False

            remotePath = '/'.join([folder,filename])
            self.storedFiles.append(remotePath)
            if local in deleteFiles:
                self.deleteFiles[remotePath] = deleteFiles[local]
            else:
                self.deleteFiles[remotePath] = True
                
        ftp.quit()
        
        return True
Code Example #21
File: test_init.py Project: jalme/qsp-protocol-node
 def test_initialize(self):
     config_file_uri = resource_uri("test_config.yaml")
     config = ConfigFactory.create_from_file(
         config_file_uri, "dev", validate_contract_settings=False)
     log_streaming.initialize("account",
                              config_value(config, "/logging/streaming",
                                           {}),
                              force=True)
     self.assertEqual(get_config(), {})
     self.assertEqual(get_account(), "account")
     self.assertEqual(get_loggers(), {})
     try:
         log_streaming.initialize("account", {})
         self.fail("An exception was expected")
     except Exception:
         # expected
         pass
Code Example #22
    def path(self, filename, relative=False):
        """
        Returns a path to the given file

        Arguments:
        filename -- the requested file

        Keyword arguments:
        relative -- If False, return an absolute path to the file
                    If True, return a path relative to the jobs directory
        """
        if not filename:
            return None
        if os.path.isabs(filename):
            path = filename
        else:
            path = os.path.join(self.job_dir, filename)
        if relative:
            path = os.path.relpath(path, config_value('jobs_dir'))
        return str(path).replace("\\", "/")
Code Example #23
File: task.py Project: GeertLitjens/DIGITS
    def path(self, filename, relative=False):
        """
        Returns a path to the given file

        Arguments:
        filename -- the requested file

        Keyword arguments:
        relative -- If False, return an absolute path to the file
                    If True, return a path relative to the jobs directory
        """
        if not filename:
            return None
        if os.path.isabs(filename):
            path = filename
        else:
            path = os.path.join(self.job_dir, filename)
        if relative:
            path = os.path.relpath(path, config_value('jobs_dir'))
        return str(path).replace("\\","/")
Code Example #24
File: views.py Project: patrickdamery/DIGITS
def serve_file(path):
    """
    Return a file in the jobs directory

    If you install the nginx.site file, nginx will serve files instead
    and this path will never be used
    """
    jobs_dir = config_value('jobs_dir')
    path = os.path.normpath(os.path.join(jobs_dir, path))

    # Don't allow path manipulation
    if not os.path.commonprefix([path, jobs_dir]).startswith(jobs_dir):
        raise werkzeug.exceptions.Forbidden('Path manipulation not allowed')

    if not os.path.exists(path):
        raise werkzeug.exceptions.NotFound('File not found')
    if os.path.isdir(path):
        raise werkzeug.exceptions.Forbidden('Folder cannot be served')

    with open(path, 'r') as infile:
        response = flask.make_response(infile.read())
        response.headers["Content-Disposition"] = "attachment; filename=%s" % os.path.basename(path)
        return response
Code Example #25
File: views.py Project: CVML/DIGITS
def serve_file(path):
    """
    Return a file in the jobs directory

    If you install the nginx.site file, nginx will serve files instead
    and this path will never be used
    """
    jobs_dir = config_value('jobs_dir')
    path = os.path.normpath(os.path.join(jobs_dir, path))

    # Don't allow path manipulation
    if not os.path.commonprefix([path, jobs_dir]).startswith(jobs_dir):
        raise werkzeug.exceptions.Forbidden('Path manipulation not allowed')

    if not os.path.exists(path):
        raise werkzeug.exceptions.NotFound('File not found')
    if os.path.isdir(path):
        raise werkzeug.exceptions.Forbidden('Folder cannot be served')

    with open(path, 'r') as infile:
        response = flask.make_response(infile.read())
        response.headers["Content-Disposition"] = "attachment; filename=%s" % os.path.basename(path)
        return response
Code Example #26
File: main.py Project: Jahze/smtest
parser = optparse.OptionParser()
parser.add_option("-l", "--log", dest="log", help="Display full log on STDERR", default=False, action="store_true")
parser.add_option("-p", "--allow-players", dest="allow_players", help="Allow tests to run whilst players are on the server", default=False, action="store_true")
parser.add_option("-c", "--config", dest="config_file", help="Location of config.ini", default="config.ini", action="store", type="string")
(options, args) = parser.parse_args()

Logger.Logger.Enable(options.log)

config.read_config(options.config_file)

if not len(args):
    print("No test files supplied")
else:
    server = Server.Server(
        config.config_value('RCON_HOST'),
        config.config_value('RCON_PORT'),
        config.config_value('RCON_PASS')
    );

    if not options.allow_players and server.HasPlayers():
        print("Players are on the server:")
        print(server.Status())
        os._exit(0)

    for i in args:
        t = RunTest(i, server)

        if t == None:
            continue
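
The Jahze/smtest examples read flat, upper-case keys such as 'RCON_HOST' and 'FTP_HOST' after config.read_config(options.config_file) has loaded config.ini. The following is only a possible sketch of that pair of helpers, assuming a simple ini file whose sections are flattened into one dict; the real module may store its settings differently.

# Hedged sketch of smtest's config helpers: load config.ini into a flat dict
# and look values up by key. The section handling is an assumption.
import ConfigParser  # Python 2, matching the snippets above

_values = {}

def read_config(path):
    parser = ConfigParser.ConfigParser()
    parser.read(path)
    for section in parser.sections():
        for key, value in parser.items(section):
            _values[key.upper()] = value  # ConfigParser lower-cases option names

def config_value(key):
    return _values.get(key)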
Code Example #27
File: scheduler.py Project: flx42/DIGITS
    def load_past_jobs(self):
        """
        Look in the jobs directory and load all valid jobs
        """
        failed = 0
        loaded_jobs = []
        for dir_name in sorted(os.listdir(config_value('jobs_dir'))):
            if os.path.isdir(os.path.join(config_value('jobs_dir'), dir_name)):
                exists = False

                # Make sure it hasn't already been loaded
                for job in self.jobs:
                    if job.id() == dir_name:
                        exists = True
                        break

                if not exists:
                    try:
                        job = Job.load(dir_name)
                        # The server might have crashed
                        if job.status.is_running():
                            job.status = Status.ABORT
                        for task in job.tasks:
                            if task.status.is_running():
                                task.status = Status.ABORT

                        # We might have changed some attributes here or in __setstate__
                        job.save()
                        loaded_jobs.append(job)
                    except Exception as e:
                        failed += 1
                        if self.verbose:
                            if str(e):
                                print 'Caught %s while loading job "%s":' % (
                                    type(e).__name__, dir_name)
                                print '\t%s' % e
                            else:
                                print 'Caught %s while loading job "%s"' % (
                                    type(e).__name__, dir_name)

        # add DatasetJobs
        for job in loaded_jobs:
            if isinstance(job, DatasetJob):
                self.jobs.append(job)

        # add ModelJobs
        for job in loaded_jobs:
            if isinstance(job, ModelJob):
                try:
                    # load the DatasetJob
                    job.load_dataset()
                    self.jobs.append(job)
                except Exception as e:
                    failed += 1
                    if self.verbose:
                        if str(e):
                            print 'Caught %s while loading job "%s":' % (
                                type(e).__name__, job.id())
                            print '\t%s' % e
                        else:
                            print 'Caught %s while loading job "%s"' % (
                                type(e).__name__, job.id())

        if failed > 0 and self.verbose:
            print 'WARNING:', failed, 'jobs failed to load.'
Code Example #28
File: webapp.py Project: GarfieldEr007/DIGITS
import flask
from flask.ext.socketio import SocketIO

from digits import utils
from config import config_value
import digits.scheduler

### Create Flask, Scheduler and SocketIO objects

app = flask.Flask(__name__)
app.config["DEBUG"] = True
# Disable CSRF checking in WTForms
app.config["WTF_CSRF_ENABLED"] = False
# This is still necessary for SocketIO
app.config["SECRET_KEY"] = config_value("secret_key")
app.url_map.redirect_defaults = False
socketio = SocketIO(app)
scheduler = digits.scheduler.Scheduler(config_value("gpu_list"), True)

### Register filters and views

app.jinja_env.globals["server_name"] = config_value("server_name")
app.jinja_env.globals["server_version"] = digits.__version__
app.jinja_env.filters["print_time"] = utils.time_filters.print_time
app.jinja_env.filters["print_time_diff"] = utils.time_filters.print_time_diff
app.jinja_env.filters["print_time_since"] = utils.time_filters.print_time_since
app.jinja_env.filters["sizeof_fmt"] = utils.sizeof_fmt
app.jinja_env.filters["has_permission"] = utils.auth.has_permission
app.jinja_env.trim_blocks = True
app.jinja_env.lstrip_blocks = True
Code Example #29
File: views.py Project: dchall88/DIGITS
def home(dataset_id=None):
    """
    DIGITS home page
    Returns information about each job on the server

    Returns JSON when requested:
        {
            datasets: [{id, name, status},...],
            models: [{id, name, status},...]
        }
    """

    # Dataset Job
    if dataset_id is None:
        job_type = dataset.DatasetJob
        name = 'Dataset'
    # Model Job
    else:
        job_type = model.ModelJob
        name = 'Model'

    running_jobs = get_job_list(job_type, True, dataset_id)
    completed_jobs = get_job_list(job_type, False, dataset_id)

    if request_wants_json():
        data = {
            'version': digits.__version__,
            'jobs_dir': config_value('jobs_dir'),
            'job_type': name,
            'jobs': [j.json_dict()
                     for j in running_jobs + completed_jobs],
        }
        if config_value('server_name'):
            data['server_name'] = config_value('server_name')
        return flask.jsonify(data)
    else:
        if dataset_id is None:
            name = 'Dataset'
            dataset_name = None
            options = [
                ('New Dataset', [
                    {
                        'title': 'Images',
                        'id': 'images',
                        'url': flask.url_for('image_classification_dataset_new'),
                    },
                    {
                        'title': 'Generic',
                        'id': 'generic',
                        'url': flask.url_for('generic_image_dataset_new'),
                    },
                ])
            ]
        else:
            dataset_name, dataset_type = get_dataset_name(dataset_id)
            if dataset_type == 'Image Classification Dataset':
                options = [
                    ('New Model', [
                        {
                            'title': 'Classification',
                            'id': 'classification',
                            'url': flask.url_for('image_classification_model_new', dataset_id=dataset_id),
                        },
                    ])
                ]
            elif dataset_type == 'Generic Image Dataset':
                options = [
                    ('New Model', [
                        {
                            'title': 'Generic',
                            'id': 'generic-classification',
                            'url': flask.url_for('generic_image_model_new', dataset_id=dataset_id),
                        },
                    ])
                ]

        return flask.render_template(
            'home.html',
            name=name,
            dataset_name=dataset_name,
            dataset_id=dataset_id,
            options=options,
            running_jobs=running_jobs,
            completed_jobs=completed_jobs,
            total_gpu_count=len(scheduler.resources['gpus']),
            remaining_gpu_count=sum(r.remaining() for r in scheduler.resources['gpus']),
        )
Code Example #30
File: test_webapp.py Project: patrickdamery/DIGITS
 def test_select_gpu(self):
     """model - select GPU"""
     for index in config_value('gpu_list').split(','):
         yield self.check_select_gpu, index
Code Example #31
File: test_webapp.py Project: patrickdamery/DIGITS
class TestModelCreation(WebappBaseTest):
    """
    Model creation tests
    """
    @classmethod
    def setUpClass(cls):
        super(TestModelCreation, cls).setUpClass()
        cls.datasets = {
            image_type: cls.create_dataset(
                method='folder',
                folder_train=dataset.data_path,
                resize_width=DUMMY_IMAGE_DIM,
                resize_height=DUMMY_IMAGE_DIM,
            )
            for image_type, dataset in cls.image_type_data.iteritems()
        }

    def test_page_model_new(self):
        """new image classification model page"""
        rv = self.app.get('/models/images/classification/new')
        assert rv.status_code == 200, 'page load failed with %s' % rv.status_code
        assert 'New Image Classification Model' in rv.data, 'unexpected page format'

    def test_visualize_network(self):
        """visualize network"""
        rv = self.app.post('/models/visualize-network',
                           data={'custom_network': dummy_network})
        s = BeautifulSoup(rv.data)
        body = s.select('body')
        assert rv.status_code == 200, 'POST failed with %s\n\n%s' % (
            rv.status_code, body)
        image = s.select('img')
        assert image is not None, "didn't return an image"

    def test_alltests(self):
        for image_type in ImageType.TYPES:
            yield self.check_create_json, image_type
            yield self.check_create_delete, image_type
            yield self.check_create_wait_delete, image_type
            yield self.check_create_abort_delete, image_type
            yield self.check_snapshot_interval_2, image_type
            yield self.check_snapshot_interval_0_5, image_type

    def check_create_json(self, image_type):
        """model - create w/ json"""
        self.create_quick_model(self.datasets[image_type], json=True)

    def check_create_delete(self, image_type):
        """model - create, delete"""
        job_id = self.create_quick_model(self.datasets[image_type])
        assert self.delete_model(job_id) == 200, 'delete failed'
        assert not self.model_exists(job_id), 'model exists after delete'

    def check_create_wait_delete(self, image_type):
        """model - create, wait, delete"""
        job_id = self.create_quick_model(self.datasets[image_type])
        assert self.model_wait_completion(job_id) == 'Done', 'create failed'
        assert self.delete_model(job_id) == 200, 'delete failed'
        assert not self.model_exists(job_id), 'model exists after delete'

    def check_create_abort_delete(self, image_type):
        """model - create, abort, delete"""
        job_id = self.create_quick_model(self.datasets[image_type])
        assert self.abort_model(job_id) == 200, 'abort failed'
        assert self.delete_model(job_id) == 200, 'delete failed'
        assert not self.model_exists(job_id), 'model exists after delete'

    def check_snapshot_interval_2(self, image_type):
        """model - snapshot_interval 2"""
        job_id = self.create_quick_model(self.datasets[image_type],
                                         train_epochs=1,
                                         snapshot_interval=0.5)
        assert self.model_wait_completion(job_id) == 'Done', 'create failed'
        rv = self.app.get('/models/%s.json' % job_id)
        assert rv.status_code == 200, 'json load failed with %s' % rv.status_code
        content = json.loads(rv.data)
        assert len(content['snapshots']) > 1, 'should take >1 snapshot'

    def check_snapshot_interval_0_5(self, image_type):
        """model - snapshot_interval 0.5"""
        job_id = self.create_quick_model(self.datasets[image_type],
                                         train_epochs=4,
                                         snapshot_interval=2)
        assert self.model_wait_completion(job_id) == 'Done', 'create failed'
        rv = self.app.get('/models/%s.json' % job_id)
        assert rv.status_code == 200, 'json load failed with %s' % rv.status_code
        content = json.loads(rv.data)
        assert len(content['snapshots']) == 2, 'should take 2 snapshots'

    # for the GPU tests, only test the first dataset.

    @unittest.skipIf(not config_value('gpu_list'), 'no GPUs selected')
    @unittest.skipIf(not config_value('caffe_root')['cuda_enabled'],
                     'CUDA disabled')
    @unittest.skipIf(
        config_value('caffe_root')['multi_gpu'], 'multi-GPU enabled')
    def test_select_gpu(self):
        """model - select GPU"""
        for index in config_value('gpu_list').split(','):
            yield self.check_select_gpu, index

    def check_select_gpu(self, gpu_index):
        job_id = self.create_quick_model(self.datasets[ImageType.COLOR],
                                         select_gpu=gpu_index)
        assert self.delete_model(job_id) == 200, 'delete failed'

    @unittest.skipIf(not config_value('gpu_list'), 'no GPUs selected')
    @unittest.skipIf(not config_value('caffe_root')['cuda_enabled'],
                     'CUDA disabled')
    @unittest.skipIf(not config_value('caffe_root')['multi_gpu'],
                     'multi-GPU disabled')
    def test_select_gpus(self):
        """model - select GPUs"""
        # test all possible combinations
        gpu_list = config_value('gpu_list').split(',')
        for i in xrange(len(gpu_list)):
            for combination in itertools.combinations(gpu_list, i + 1):
                yield self.check_select_gpus, combination

    def check_select_gpus(self, gpu_list):
        job_id = self.create_quick_model(self.datasets[ImageType.COLOR],
                                         select_gpus_list=','.join(gpu_list))
        assert self.delete_model(job_id) == 200, 'delete failed'
Code Example #32
File: test_scheduler.py Project: Cloud-CV/DIGITS
 def get_scheduler(self):
     return _.Scheduler(config_value('gpu_list'))
Code Example #33
def home(dataset_id=None):
    """
    DIGITS home page
    Returns information about each job on the server

    Returns JSON when requested:
        {
            datasets: [{id, name, status},...],
            models: [{id, name, status},...]
        }
    """

    # Dataset Job
    if dataset_id is None:
        job_type = dataset.DatasetJob
        name = 'Dataset'
    # Model Job
    else:
        job_type = model.ModelJob
        name = 'Model'

    running_jobs = get_job_list(job_type, True, dataset_id)
    completed_jobs = get_job_list(job_type, False, dataset_id)

    if request_wants_json():
        data = {
            'version': digits.__version__,
            'jobs_dir': config_value('jobs_dir'),
            'job_type': name,
            'jobs': [j.json_dict() for j in running_jobs + completed_jobs],
        }
        if config_value('server_name'):
            data['server_name'] = config_value('server_name')
        return flask.jsonify(data)
    else:
        if dataset_id is None:
            name = 'Dataset'
            dataset_name = None
            options = [('New Dataset', [
                {
                    'title': 'Images',
                    'id': 'images',
                    'url': flask.url_for('image_classification_dataset_new'),
                },
                {
                    'title': 'Generic',
                    'id': 'generic',
                    'url': flask.url_for('generic_image_dataset_new'),
                },
            ])]
        else:
            dataset_name, dataset_type = get_dataset_name(dataset_id)
            if dataset_type == 'Image Classification Dataset':
                options = [('New Model', [
                    {
                        'title':
                        'Classification',
                        'id':
                        'classification',
                        'url':
                        flask.url_for('image_classification_model_new',
                                      dataset_id=dataset_id),
                    },
                ])]
            elif dataset_type == 'Generic Image Dataset':
                options = [('New Model', [
                    {
                        'title':
                        'Generic',
                        'id':
                        'generic-classification',
                        'url':
                        flask.url_for('generic_image_model_new',
                                      dataset_id=dataset_id),
                    },
                ])]

        return flask.render_template(
            'home.html',
            name=name,
            dataset_name=dataset_name,
            dataset_id=dataset_id,
            options=options,
            running_jobs=running_jobs,
            completed_jobs=completed_jobs,
            total_gpu_count=len(scheduler.resources['gpus']),
            remaining_gpu_count=sum(r.remaining()
                                    for r in scheduler.resources['gpus']),
        )
Code Example #34
File: views.py Project: iwalkdaline/DIGITS
def home():
    """
    DIGITS home page
    Returns information about each job on the server

    Returns JSON when requested:
        {
            datasets: [{id, name, status},...],
            models: [{id, name, status},...]
        }
    """
    running_datasets = get_job_list(dataset.DatasetJob, True)
    completed_datasets = get_job_list(dataset.DatasetJob, False)
    running_models = get_job_list(model.ModelJob, True)
    completed_models = get_job_list(model.ModelJob, False)

    if request_wants_json():
        data = {
            'version':
            digits.__version__,
            'jobs_dir':
            config_value('jobs_dir'),
            'datasets':
            [j.json_dict() for j in running_datasets + completed_datasets],
            'models':
            [j.json_dict() for j in running_models + completed_models],
        }
        if config_value('server_name'):
            data['server_name'] = config_value('server_name')
        return flask.jsonify(data)
    else:
        new_dataset_options = [('Images', [
            {
                'title': 'Classification',
                'id': 'image-classification',
                'url': flask.url_for('image_classification_dataset_new'),
            },
            {
                'title': 'Other',
                'id': 'image-generic',
                'url': flask.url_for('generic_image_dataset_new'),
            },
        ])]
        new_model_options = [('Images', [
            {
                'title': 'Classification',
                'id': 'image-classification',
                'url': flask.url_for('image_classification_model_new'),
            },
            {
                'title': 'Other',
                'id': 'image-generic',
                'url': flask.url_for('generic_image_model_new'),
            },
        ])]

        return flask.render_template(
            'home.html',
            new_dataset_options=new_dataset_options,
            running_datasets=running_datasets,
            completed_datasets=completed_datasets,
            new_model_options=new_model_options,
            running_models=running_models,
            completed_models=completed_models,
            total_gpu_count=len(scheduler.resources['gpus']),
            remaining_gpu_count=sum(r.remaining()
                                    for r in scheduler.resources['gpus']),
        )
Code Example #35
File: webapp.py Project: JFerguson20/DIGITS
import flask
from flask.ext.socketio import SocketIO

from digits import utils
from config import config_value
import digits.scheduler

### Create Flask, Scheduler and SocketIO objects

app = flask.Flask(__name__)
app.config['DEBUG'] = False
# Disable CSRF checking in WTForms
app.config['WTF_CSRF_ENABLED'] = False
# This is still necessary for SocketIO
app.config['SECRET_KEY'] = config_value('secret_key')
app.url_map.redirect_defaults = False
socketio = SocketIO(app)
scheduler = digits.scheduler.Scheduler(config_value('gpu_list'))

# Set up flask API documentation, if installed
try:
    from flask.ext.autodoc import Autodoc
    _doc = Autodoc(app)
    autodoc = _doc.doc # decorator
except ImportError:
    def autodoc(*args, **kwargs):
        def _doc(f):
            # noop decorator
            return f
        return _doc
Code Example #36
 def setUpClass(cls):
     cfg = load_yaml(fetch_file(resource_uri("test_config.yaml")))
     TestEvtPoolManager.db_file = config_value(cfg, '/dev/evt_db_path')
     remove(TestEvtPoolManager.db_file)
Code Example #37
File: test_scheduler.py Project: yyyreal/DIGITS
 def setUpClass(cls):
     cls.s = _.Scheduler(config_value('gpu_list'))
     assert cls.s.start(), 'failed to start'
Code Example #38
File: test_scheduler.py Project: Cloud-CV/DIGITS
 def setUpClass(cls):
     cls.s = _.Scheduler(config_value('gpu_list'))
     assert cls.s.start(), 'failed to start'
Code Example #39
File: views.py Project: Cloud-CV/DIGITS
def home():
    """
    DIGITS home page
    Returns information about each job on the server

    Returns JSON when requested:
        {
            datasets: [{id, name, status},...],
            models: [{id, name, status},...]
        }
    """
    running_datasets    = get_job_list(dataset.DatasetJob, True)
    completed_datasets  = get_job_list(dataset.DatasetJob, False)
    running_models      = get_job_list(model.ModelJob, True)
    completed_models    = get_job_list(model.ModelJob, False)

    if request_wants_json():
        data = {
                'version': digits.__version__,
                'jobs_dir': config_value('jobs_dir'),
                'datasets': [j.json_dict()
                    for j in running_datasets + completed_datasets],
                'models': [j.json_dict()
                    for j in running_models + completed_models],
                }
        if config_value('server_name'):
            data['server_name'] = config_value('server_name')
        return flask.jsonify(data)
    else:
        new_dataset_options = [
                ('Images', [
                    {
                        'title': 'Classification',
                        'id': 'image-classification',
                        'url': flask.url_for('image_classification_dataset_new'),
                        },
                    {
                        'title': 'Other',
                        'id': 'image-generic',
                        'url': flask.url_for('generic_image_dataset_new'),
                        },
                    ])
                ]
        new_model_options = [
                ('Images', [
                    {
                        'title': 'Classification',
                        'id': 'image-classification',
                        'url': flask.url_for('image_classification_model_new'),
                        },
                    {
                        'title': 'Other',
                        'id': 'image-generic',
                        'url': flask.url_for('generic_image_model_new'),
                        },
                    ])
                ]

        return flask.render_template('home.html',
                new_dataset_options = new_dataset_options,
                running_datasets    = running_datasets,
                completed_datasets  = completed_datasets,
                new_model_options   = new_model_options,
                running_models      = running_models,
                completed_models    = completed_models,
                total_gpu_count     = len(scheduler.resources['gpus']),
                remaining_gpu_count = sum(r.remaining() for r in scheduler.resources['gpus']),
                )
Code Example #40
File: test_webapp.py Project: hemrampal/DIGITS
 def test_select_gpu(self):
     """model - select GPU"""
     for index in config_value('gpu_list').split(','):
         yield self.check_select_gpu, index
Code Example #41
File: views.py Project: GeertLitjens/DIGITS
def home():
    """
    DIGITS home page
    Returns information about each job on the server

    Returns JSON when requested:
        {
            datasets: [{id, name, status},...],
            models: [{id, name, status},...]
        }
    """
    running_datasets = get_job_list(dataset.DatasetJob, True)
    completed_datasets = get_job_list(dataset.DatasetJob, False)
    running_models = get_job_list(model.ModelJob, True)
    completed_models = get_job_list(model.ModelJob, False)

    if request_wants_json():
        data = {
            "version": digits.__version__,
            "jobs_dir": config_value("jobs_dir"),
            "datasets": [j.json_dict() for j in running_datasets + completed_datasets],
            "models": [j.json_dict() for j in running_models + completed_models],
        }
        if config_value("server_name"):
            data["server_name"] = config_value("server_name")
        return flask.jsonify(data)
    else:
        new_dataset_options = [
            (
                "Images",
                [
                    {
                        "title": "Classification",
                        "id": "image-classification",
                        "url": flask.url_for("image_classification_dataset_new"),
                    },
                    {"title": "Other", "id": "image-generic", "url": flask.url_for("generic_image_dataset_new")},
                ],
            )
        ]
        new_model_options = [
            (
                "Images",
                [
                    {
                        "title": "Classification",
                        "id": "image-classification",
                        "url": flask.url_for("image_classification_model_new"),
                    },
                    {"title": "Other", "id": "image-generic", "url": flask.url_for("generic_image_model_new")},
                ],
            )
        ]

        return flask.render_template(
            "home.html",
            new_dataset_options=new_dataset_options,
            running_datasets=running_datasets,
            completed_datasets=completed_datasets,
            new_model_options=new_model_options,
            running_models=running_models,
            completed_models=completed_models,
            total_gpu_count=len(scheduler.resources["gpus"]),
            remaining_gpu_count=sum(r.remaining() for r in scheduler.resources["gpus"]),
        )
Code Example #42
File: test_scheduler.py Project: yyyreal/DIGITS
 def get_scheduler(self):
     return _.Scheduler(config_value('gpu_list'))
Code Example #43
import flask
from flask.ext.socketio import SocketIO

from digits import utils
from config import config_value
import digits.scheduler

### Create Flask, Scheduler and SocketIO objects

app = flask.Flask(__name__)
app.config['DEBUG'] = True
# Disable CSRF checking in WTForms
app.config['WTF_CSRF_ENABLED'] = False
# This is still necessary for SocketIO
app.config['SECRET_KEY'] = config_value('secret_key')
app.url_map.redirect_defaults = False
socketio = SocketIO(app)
scheduler = digits.scheduler.Scheduler(config_value('gpu_list'))

# Set up flask API documentation, if installed
try:
    from flask.ext.autodoc import Autodoc
    _doc = Autodoc(app)
    autodoc = _doc.doc  # decorator
except ImportError:

    def autodoc(*args, **kwargs):
        def _doc(f):
            # noop decorator
            return f
        return _doc
Code Example #44
 def setUpClass(cls):
     QSPTest.setUpClass()
     cfg = load_yaml(fetch_file(resource_uri("test_config.yaml")))
     TestSqlLite3Worker.db_file = config_value(cfg, '/dev/evt_db_path')
     remove(TestSqlLite3Worker.db_file)