def test_check_log_updated(result_path):
    result = Result(path_name=result_path)
    assert _check_log_updated(result)
    assert result.log_modified_at is not None

    # NOTE: getmtime precision is coarse, so rewind 0.1s on purpose
    modified_at = result.log_modified_at - datetime.timedelta(
        milliseconds=100)
    result.log_modified_at = modified_at

    with open(os.path.join(result_path, 'log'), 'r') as f:
        logs = json.load(f)
    logs.append({
        "main/loss": 0.04882155358791351,
        "validation/main/loss": 0.09093106538057327,
        "iteration": 1800,
        "elapsed_time": 23.046298027038574,
        "epoch": 3,
        "main/accuracy": 0.9839146733283997,
        "validation/main/accuracy": 0.9726001620292664
    })
    with open(os.path.join(result_path, 'log'), 'w') as f:
        json.dump(logs, f)

    # rewriting the log file bumps its mtime, so the check fires again
    assert _check_log_updated(result)
    assert result.log_modified_at != modified_at

    # no change since the last check: nothing to update
    modified_at = result.log_modified_at
    assert not _check_log_updated(result)
    assert result.log_modified_at == modified_at

    # a missing log file never counts as updated
    os.remove(os.path.join(result_path, 'log'))
    assert not _check_log_updated(result)
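# The test above pins down the contract of _check_log_updated: the first call
# stamps result.log_modified_at and returns True, later calls return True only
# when the log file's mtime has moved forward, and a missing file returns
# False. Below is a minimal sketch consistent with that contract (assuming
# `os` and `datetime` are imported as elsewhere in this file); the actual
# chainerui implementation may differ in detail.
def _check_log_updated_sketch(result):
    log_path = os.path.join(result.path_name, 'log')
    if not os.path.isfile(log_path):
        return False
    modified_at = datetime.datetime.fromtimestamp(os.path.getmtime(log_path))
    if result.log_modified_at is None or result.log_modified_at < modified_at:
        result.log_modified_at = modified_at
        return True
    return False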
def _register_result(project_id, result_path):
    result_path = os.path.abspath(result_path)
    contain_log_file = os.path.isfile(os.path.join(result_path, 'log'))
    if not contain_log_file:
        return

    Result.create(project_id=project_id, path_name=result_path)
def _register_result(project_id, result_path):
    result_path = os.path.abspath(result_path)
    contain_log_file = os.path.isfile(os.path.join(result_path, 'log'))
    if not contain_log_file:
        return False

    # register the path only if it is not already in the DB
    result_size = DB_SESSION.query(Result).filter_by(
        path_name=result_path).count()
    if result_size == 0:
        Result.create(project_id=project_id, path_name=result_path)
def test_crawl_result_invalid_default_name_file(func_dir):
    conf_path = os.path.join(func_dir, '.chainerui_conf')
    with open(conf_path, 'w') as f:
        f.write('{"default_result_name": "default_name"')  # broken JSON

    result = Result(func_dir)
    assert result.name is None
    result2 = crawl_result(result, force=True, commit=False)
    assert result2.name is None
def _register_result(project_id, result_path):
    from chainerui import DB_SESSION
    from chainerui.models.result import Result

    result_path = os.path.abspath(result_path)
    contain_log_file = os.path.isfile(os.path.join(result_path, 'log'))
    if not contain_log_file:
        return False

    # register the path only if it is not already in the DB
    result_size = DB_SESSION.query(Result).filter_by(
        path_name=result_path
    ).count()
    if result_size == 0:
        new_result = Result(project_id=project_id, path_name=result_path)
        DB_SESSION.add(new_result)
        DB_SESSION.commit()
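# A hypothetical usage sketch (not from the source): walk a watched root and
# offer every immediate subdirectory to _register_result, which itself skips
# directories without a 'log' file and paths that are already registered.
# `watch_root` is illustrative; assumes `os` is imported as above.
def _register_results_under(project_id, watch_root):
    for name in sorted(os.listdir(watch_root)):
        path = os.path.join(watch_root, name)
        if os.path.isdir(path):
            _register_result(project_id, path)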
def post(self, project_id=None):
    project = db.session.query(Project).filter_by(id=project_id).first()
    if project is None:
        return jsonify({
            'project': None,
            'message': 'No interface defined for URL.'
        }), 404

    data = request.get_json()
    result_json = data.get('result')
    path = result_json.get('pathName', '')
    if path == '':
        return jsonify({
            'result': None,
            'message': 'Path of the result is not set.'
        }), 400

    result = db.session.query(Result).filter_by(path_name=path).first()
    if result is not None:
        return jsonify({
            'result': None,
            'message': 'Result path \'%s\' already registered.' % path
        }), 400

    name = result_json.get('name', None)
    crawlable = result_json.get('crawlable', True)
    log_modified_at = result_json.get('logModifiedAt', None)
    if log_modified_at is not None:
        log_modified_at = datetime.datetime.fromtimestamp(log_modified_at)

    result = Result.create(path_name=path, name=name, project_id=project_id,
                           log_modified_at=log_modified_at,
                           crawlable=crawlable)

    # don't return all data to reduce data size
    return jsonify({'result': {'id': result.id}})
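# Hedged client sketch for the handler above. The payload keys come straight
# from the handler; the URL, port, and use of `requests` are assumptions
# about how the endpoint is mounted, not from the source.
import requests

payload = {
    'result': {
        'pathName': '/path/to/result',  # required, must be non-empty
        'name': 'my-experiment',        # optional display name
        'crawlable': False,             # optional, defaults to True
        'logModifiedAt': 1514764800.0,  # optional, epoch seconds
    }
}
res = requests.post(
    'http://localhost:5000/api/v1/projects/1/results', json=payload)
print(res.status_code, res.json())  # 200 and {'result': {'id': ...}}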
def test_crawl_result_default_name(func_dir):
    conf_path = os.path.join(func_dir, '.chainerui_conf')
    chainerui_conf = {'dummy_key': 'default_name'}
    with open(conf_path, 'w') as f:
        json.dump(chainerui_conf, f)

    # basic behavior is covered in 'test_api.py'; this test checks only
    # the default-name logic.
    result = Result(func_dir)
    assert result.name is None
    result2 = crawl_result(result, force=True, commit=False)
    assert result2.name is None  # config has no 'default_result_name' key

    chainerui_conf['default_result_name'] = 'default_name'
    with open(conf_path, 'w') as f:
        json.dump(chainerui_conf, f)
    result3 = crawl_result(result2, force=True, commit=False)
    assert result3.name == 'default_name'

    chainerui_conf['default_result_name'] = 'updated_name'
    with open(conf_path, 'w') as f:
        json.dump(chainerui_conf, f)
    result4 = crawl_result(result3, force=True, commit=False)
    assert result4.name == 'default_name'  # a name, once set, is not updated
def test_crawl_result_reset(func_dir):
    # basic behavior is covered in 'test_api.py'; this test checks only
    # the reset logic.
    result = Result(func_dir)
    result.updated_at = datetime.datetime.now()
    result.logs = [Log({'loss': 0.5}), Log({'loss': 0.2}), Log({'loss': 0.01})]
    result.commands = [Command('take_snapshot'), Command('stop')]
    result.snapshots = [
        Snapshot('snapshot_iter_10', 10),
        Snapshot('snapshot_iter_11', 11)]

    # a forced crawl drops the stale in-memory state and rebuilds it from disk
    actual = crawl_result(result, force=True, commit=False)
    assert len(actual.logs) == 2
    assert len(actual.commands) == 1
    assert len(actual.snapshots) == 1

    open(os.path.join(func_dir, 'snapshot_iter_200'), 'w').close()
    actual2 = crawl_result(actual, force=True, commit=False)
    assert len(actual2.logs) == 2
    assert len(actual2.commands) == 1
    assert len(actual2.snapshots) == 2
def _get_dummy_result(path):
    r = Result(path_name=path)
    r.id = 0
    return r
def _get_dummy_result(self):
    r = Result(path_name=self._dir)
    r.id = 0
    return r