Code Example #1
File: _balsam.py Project: xiaming9880/deephyper
    def _create_balsam_task(self, x):
        args = f"'{self.encode(x)}'"
        envs = f"KERAS_BACKEND={self.KERAS_BACKEND}:KMP_BLOCK_TIME=0"

        ranks_per_node = self.num_ranks_per_node
        threads_per_rank = self.num_threads_per_rank

        # override cli value by x's value
        if "hyperparameters" in x:
            if "ranks_per_node" in x["hyperparameters"]:
                ranks_per_node = x["hyperparameters"]["ranks_per_node"]
                threads_per_rank = self.num_threads_per_node // ranks_per_node

        resources = {
            "num_nodes": self.num_nodes_per_eval,
            "ranks_per_node": ranks_per_node,
            "threads_per_rank": threads_per_rank,
            "threads_per_core": 2,
            "node_packing_count": self.num_evals_per_node,
            "cpu_affinity": "depth",
        }

        for key in resources:
            if key in x:
                resources[key] = x[key]

        task = BalsamJob(application=self.appName,
                         args=args,
                         environ_vars=envs,
                         **resources)
        return task
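A minimal sketch of the kind of point x this method expects; the keys and values below are illustrative assumptions, not taken from the project above. A "ranks_per_node" entry under "hyperparameters" overrides the command-line default, and any top-level key matching a resources entry overrides that entry directly.

# Hypothetical hyperparameter point (illustration only):
x = {
    "hyperparameters": {"ranks_per_node": 4},  # overrides self.num_ranks_per_node
    "node_packing_count": 2,                   # copied straight into resources
}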
Code Example #2
def add_task(point):
    job = BalsamJob(
        application=app_name,
        args=shlex.quote(json.dumps(point, cls=JSONEncoder)),
        num_nodes=1,
        ranks_per_node=1,
    )
    return job
Code Example #3
def add_task(point):
    job = BalsamJob(
        application=app_name,
        data={'point': to_encodable(point)},
        num_nodes=1,
        ranks_per_node=1,
    )
    return job
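Examples #2 and #3 ship the same point two different ways: #2 serializes it into the command-line arguments, #3 stores it on the job's data field. Below is a minimal sketch of how the application side could read the point back under either convention, assuming the legacy balsam.launcher.dag module where dag.current_job is the job being executed; the helper name load_point is made up for illustration.

import json
import sys

from balsam.launcher import dag  # dag.current_job is the BalsamJob being run


def load_point():
    if len(sys.argv) > 1:
        # Example #2: the point arrives as a shell-quoted JSON argument.
        return json.loads(sys.argv[1])
    # Example #3: the point was stored on the job record itself.
    return dag.current_job.data['point']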
Code Example #4
File: dag.py Project: mgierada/balsam
def add_job(name,
            workflow,
            application,
            description='',
            args='',
            mpi_flags='',
            num_nodes=1,
            ranks_per_node=1,
            cpu_affinity='depth',
            threads_per_rank=1,
            threads_per_core=1,
            environ_vars={},
            data=None,
            save=True,
            **kwargs):
    '''Add a new job to the BalsamJob DB

    Creates a new job and saves it to the database in CREATED state.
    The job is initialized with all blank/default values for its fields; these
    must be configured by the user or provided via ``kwargs``.

    Args:
        - ``kwargs`` (*dict*): contains BalsamJob fields (keys) and their values to
          be set on BalsamJob instantiation.

    Returns:
        - ``job`` (*BalsamJob*): the newly-created BalsamJob instance

    Raises:
        - ``ValueError``: if an invalid field name is provided to *kwargs*
    '''
    job = BalsamJob()
    job.name = name
    job.workflow = workflow
    job.application = application
    job.description = description
    job.args = args
    job.mpi_flags = mpi_flags
    job.num_nodes = num_nodes
    job.ranks_per_node = ranks_per_node
    job.threads_per_rank = threads_per_rank
    job.threads_per_core = threads_per_core
    job.cpu_affinity = cpu_affinity
    job.environ_vars = environ_vars
    job.data = data if data else dict()
    job.get_application()

    for k, v in kwargs.items():
        setattr(job, k, v)

    if current_job:
        job.queued_launch = current_job.queued_launch
    if save:
        job.save()
    return job
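A hedged usage sketch of add_job; the names, application, and arguments below are made up for illustration. It assumes the module above is importable as balsam.launcher.dag (as in the upstream Balsam layout) and that an ApplicationDefinition named "simulate" was registered beforehand, since job.get_application() would fail otherwise.

from balsam.launcher import dag

# Hypothetical call: queue one 2-node MPI run of a previously registered app.
job = dag.add_job(
    name='sim-001',
    workflow='demo',
    application='simulate',   # must match an existing ApplicationDefinition name
    args='--steps 100',
    num_nodes=2,
    ranks_per_node=16,
)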
Code Example #5
def create_jobs(N):
    """If we're on a command line, create N tasks to square a number"""
    for i in range(N):
        job = BalsamJob(
            name=f"square{i}",
            workflow="demo-square",
            application="square",
        )
        job.data["x"] = i
        job.save()
    print(f"Created {N} jobs")
Code Example #6
def pre_submit(problem, run, workflow):
    """Validate command line; prepare apps"""
    from balsam.core.models import BalsamJob

    validate(problem, run, workflow)
    print("Bootstrapping apps...", end="", flush=True)
    bootstrap_apps()
    print("OK")

    job = BalsamJob(name=workflow, workflow=workflow)
    return job
Code Example #7
def new_job(name, workdir, workflow_tag):
    '''Create a new BalsamJob object *without* saving it to DB'''
    return BalsamJob(
        name=name,
        user_workdir=workdir,  # the job will run inside this directory
        workflow=workflow_tag,
        application=APPNAME,
        num_nodes=NNODES,
        ranks_per_node=RPN,
        threads_per_rank=TPR,
        cpu_affinity='depth',
    )
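Deferring the save is typically done so many jobs can be inserted in one round trip. A minimal sketch under the assumption that BalsamJob is the usual Django model from balsam.core.models, so the standard bulk_create queryset method applies; the names and directories are illustrative.

from balsam.core.models import BalsamJob

# Build many unsaved jobs, then insert them with a single query.
jobs = [new_job(f"run{i}", f"/projects/demo/run{i}", "demo-sweep") for i in range(100)]
BalsamJob.objects.bulk_create(jobs)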
Code Example #8
File: init.py Project: mgierada/balsam
def run_migrations():
    from django.core.management import call_command
    from balsam.django_config.db_index import refresh_db_index
    setup()
    print(f"DB settings:", settings.DATABASES['default'])
    call_command('makemigrations', interactive=True, verbosity=2)
    call_command('migrate', interactive=True, verbosity=2)
    refresh_db_index()
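    # Smoke test: create and delete one BalsamJob row to confirm the table was built.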
    try:
        from balsam.core.models import BalsamJob
        j = BalsamJob()
        j.save()
        j.delete()
    except:
        print("BalsamJob table not properly created")
        raise
    else:
        print("BalsamJob table created successfully")
Code Example #9
    def _create_balsam_task(self, x):
        args = f"'{self.encode(x)}'"
        envs = f"KERAS_BACKEND={self.KERAS_BACKEND}"
        # envs = ":".join(f'KERAS_BACKEND={self.KERAS_BACKEND} OMP_NUM_THREADS=62 KMP_BLOCKTIME=0 KMP_AFFINITY=\"granularity=fine,compact,1,0\"'.split())
        resources = {
            'num_nodes': 1,
            'ranks_per_node': 1,
            'threads_per_rank': 64,
            'node_packing_count': self.WORKERS_PER_NODE,
        }
        for key in resources:
            if key in x:
                resources[key] = x[key]

        task = BalsamJob(application=self.appName,
                         args=args,
                         environ_vars=envs,
                         **resources)
        return task
Code Example #10
File: _balsam.py Project: xclmj/deephyper
    def _create_balsam_task(self, x):
        args = f"'{self.encode(x)}'"
        envs = f"KERAS_BACKEND={self.KERAS_BACKEND}"
        resources = {
            "num_nodes": self.num_nodes_per_eval,
            "ranks_per_node": self.num_ranks_per_node,
            "threads_per_rank": self.num_threads_per_rank,
            "node_packing_count": self.num_evals_per_node
        }

        for key in resources:
            if key in x:
                resources[key] = x[key]

        task = BalsamJob(application=self.appName,
                         args=args,
                         environ_vars=envs,
                         **resources)
        return task
Code Example #11
File: dag.py Project: coreyjadams/balsam
def clone(job, **kwargs):
    assert isinstance(job, BalsamJob)
    new_job = BalsamJob()

    exclude_fields = '''_state objects source state tick user_workdir
    lock state_history job_id'''.split()
    fields = [f for f in job.__dict__ if f not in exclude_fields]

    for f in fields:
        new_job.__dict__[f] = job.__dict__[f]
    assert new_job.pk != job.pk

    for k, v in kwargs.items():
        try:
            field = job._meta.get_field(k)
        except:
            raise ValueError(f"Invalid field name: {k}")
        else:
            new_job.__dict__[k] = v
    return new_job
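A hedged usage sketch of clone(); the job names and field values are made up for illustration. clone() returns an unsaved copy with a fresh primary key, so the copy must be saved explicitly.

# Hypothetical use: duplicate an existing job but run it on more nodes.
template = BalsamJob.objects.get(name='sim-001')
wide = clone(template, name='sim-001-wide', num_nodes=8)
wide.save()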
Code Example #12
    RPN,
    TRIALS,
    COMMON_PARAMS,
    BENCHMARK_SCRIPTS,
)
from balsam.core.models import BalsamJob, ApplicationDefinition


RELEASE_PATH = os.environ['RELEASE_PATH']
PYTHON = os.path.join(RELEASE_PATH, 'env', 'bin', 'python')

for script_path in BENCHMARK_SCRIPTS:
    executable = ' '.join((PYTHON, script_path))
    app_name = script_path[script_path.find('osu_') + 4:-3]
    app, created = ApplicationDefinition.objects.get_or_create(
        name=app_name,
        defaults=dict(
            executable=executable,
        )
    )
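    # One job per (node count, ranks-per-node, trial) combination in the sweep.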
    for (num_nodes, rpn, trial) in product(NUM_NODES, RPN, TRIALS):
        job = BalsamJob(
            name=f"{num_nodes}nodes.{rpn}rpn.{trial}",
            workflow=f"{app_name}",
            application=app_name,
            num_nodes=num_nodes,
            ranks_per_node=rpn,
            **COMMON_PARAMS,
        )
        job.save()