Example #1
0
 def test_set_tag_when_in_job_sets_tag(self):
     # Simulate being inside a running job, then verify that set_tag
     # publishes a "job_tag" message carrying the job id and the tag pair.
     self.foundations_job.job_id = self.job_id
     set_tag(self.random_tag, self.random_tag_value)
     expected_payload = {
         "job_id": self.job_id,
         "key": self.random_tag,
         "value": self.random_tag_value,
     }
     self.message_router.push_message.assert_called_with("job_tag", expected_payload)
    def _set_tags(klass, job_name, tags):
        """Temporarily bind the global job context to *job_name* and apply tags.

        The job id is cleared again afterwards so the global context does not
        leak into subsequent calls.
        """
        from foundations_contrib.global_state import current_foundations_job
        from foundations import set_tag

        job_context = current_foundations_job()
        job_context.job_id = job_name

        # A None tags argument means "no tags"; an empty dict simply loops zero times.
        for tag_key, tag_value in (tags or {}).items():
            set_tag(tag_key, tag_value)

        job_context.job_id = None
    def _set_tags(klass, job_name, tags):
        """Temporarily point the pipeline context at *job_name* and apply tags.

        The context's file name is reset afterwards so later callers see a
        clean global state.
        """
        from foundations_contrib.global_state import current_foundations_context
        from foundations import set_tag

        context = current_foundations_context().pipeline_context()
        context.file_name = job_name

        # A None tags argument means "no tags"; an empty dict simply loops zero times.
        for tag_key, tag_value in (tags or {}).items():
            set_tag(tag_key, tag_value)

        context.file_name = None
Example #4
0
import foundations
from foundations import set_tag
from foundations_contrib.global_state import current_foundations_job

from model import *

# Tag the current job with the model architecture in use.
set_tag('model', 'cnn')


def print_words():
    """Print a deployment notice for the current job, then a greeting."""
    job_id = current_foundations_job().job_id
    print(f"Job '{job_id}' deployed")
    print('Hello World!')


# Emit the deployment notice and greeting.
print_words()

# Record the arithmetic results against the job: one as a tag, one as a metric.
addition_result = add(82, 2)
set_tag('Loss', addition_result)

subtraction_result = subtract(44, 2)
foundations.log_metric('Accuracy', subtraction_result)
Example #5
0
import foundations

# Record a metric, a tag, and a parameter against the current job, then
# print a marker line — a minimal smoke script for the foundations API.
foundations.log_metric('key', 'value')
foundations.set_tag('key', value='value')
foundations.log_param('param', 'param_value')
print('Hello World!')
Example #6
0
import os

import foundations
from foundations_contrib.global_state import current_foundations_context, message_router
from foundations_events.producers.jobs import RunJob

foundations.set_project_name('default')

# The job id is injected by the test harness through the environment.
job_id = os.environ['ACCEPTANCE_TEST_JOB_ID']
pipeline_context = current_foundations_context().pipeline_context()
pipeline_context.file_name = job_id

# Announce the job as running so the tag messages below attach to it.
RunJob(message_router, pipeline_context).push_message()

foundations.set_tag('model type', 'simple mlp')
foundations.set_tag('data set', 'out of time')
# NOTE(review): the trailing comma inside this tag key looks accidental — confirm.
foundations.set_tag('what I was doing,', 'drinking tea')

print('Hello World!')
Example #7
0
def set_tensorboard_logdir(path):
    """Schedule creation of the TensorBoard log directory at interpreter exit
    and tag the job for TensorBoard integration.

    Args:
        path: Filesystem path of the TensorBoard log directory.
    """
    import atexit
    import foundations

    # BUG FIX: the original passed _create_tensorboard_logdir(path), which
    # invokes the helper immediately and registers its RETURN VALUE with
    # atexit. atexit.register expects the callable plus its arguments, so the
    # work is deferred until exit. (Assumes _create_tensorboard_logdir is an
    # action, not a factory returning a callable — confirm against its
    # definition elsewhere in the project.)
    atexit.register(_create_tensorboard_logdir, path)
    foundations.set_tag('tf', 'tf')
Example #8
0
import foundations

# Set four tags whose keys are type names but whose values are all the empty
# string — presumably exercising how empty tag values are stored/rendered;
# verify intent against the accompanying test harness.
foundations.set_tag("Str", "")
foundations.set_tag("Int", "")
foundations.set_tag("Float", "")
foundations.set_tag("None", "")
Example #9
0
import foundations

# Log one metric and two tags against the current job (smoke script).
foundations.log_metric('hello', 20)
foundations.set_tag('this_tag', value='this_value')
foundations.set_tag('that_tag', value='that_value')
Example #10
0
from utils import parse_and_override_params

import foundations

# NOTE(review): torch, np, nn, create_dataloaders, create_model and
# print_model_params are used below but not imported in this chunk —
# presumably imported elsewhere in the file; confirm.

# Fix random seed
torch.manual_seed(0)
np.random.seed(0)
torch.backends.cudnn.deterministic = True
torch.backends.cudnn.benchmark = False

# Hyperparameters supplied by the foundations job submission.
params = foundations.load_parameters()

data_dict = parse_and_override_params(params)

# Set job tags to easily spot data in use
# NOTE(review): set_tag is called with a single positional argument here,
# while other call sites pass (key, value) — confirm the one-argument form
# is accepted by this version of the foundations API.
foundations.set_tag(
    f'{data_dict[params["train_data"]]}: {params["train_data"]}')
# foundations.set_tag(f'big {params["train_data"]}')

print('Creating datasets')
# Get dataloaders
train_dl, val_base_dl, val_augment_dl, display_dl_iter = create_dataloaders(
    params)

print('Creating loss function')
# Loss function
criterion = nn.CrossEntropyLoss()

print('Creating model')
# Create model, freeze layers and change last layer
model = create_model(bool(params['use_hidden_layer']), params['dropout'])
_ = print_model_params(model)