def put(self, id, project_id=None):
    """put."""
    result = DB_SESSION.query(Result).filter_by(id=id).first()
    if result is None:
        response = jsonify({
            'result': None,
            'message': 'No interface defined for URL.'
        })
        return response, 404

    request_json = request.get_json()
    request_result = request_json.get('result')

    name = request_result.get('name', None)
    if name is not None:
        result.name = name

    is_unregistered = request_result.get('isUnregistered', None)
    if is_unregistered is not None:
        result.is_unregistered = is_unregistered

    DB_SESSION.add(result)
    DB_SESSION.commit()

    return jsonify({'result': result.serialize})
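# A hedged usage sketch for the PUT handler above: the route URL and port are
# assumptions (the routing is not defined in this snippet), but the payload
# shape matches what the handler reads via request_json.get('result'). Both
# fields are optional; the handler only assigns attributes that are present.
import requests  # any HTTP client works; requests is assumed to be installed

resp = requests.put(
    'http://localhost:5000/api/v1/results/1',  # hypothetical route and id
    json={'result': {'name': 'renamed-run', 'isUnregistered': True}})
print(resp.status_code)  # 200 on success, 404 when no Result has that id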
@classmethod
def create(cls, result_id=None, summary=None, file_modified_at=None):
    """Initialize an instance and save it to db."""
    asset = cls(result_id, summary, file_modified_at)

    DB_SESSION.add(asset)
    DB_SESSION.commit()

    return asset
@classmethod
def create(cls, path_name=None, name=None):
    """Initialize an instance and save it to db."""
    project = cls(path_name, name)

    DB_SESSION.add(project)
    DB_SESSION.commit()

    return collect_results(project, force=True)
def delete(self, id, project_id=None):
    """delete."""
    result = DB_SESSION.query(Result).filter_by(id=id).first()
    if result is None:
        response = jsonify({
            'result': None,
            'message': 'No interface defined for URL.'
        })
        return response, 404

    DB_SESSION.delete(result)
    DB_SESSION.commit()

    return jsonify({'result': result.serialize})
@classmethod
def create(cls, path_name=None, name=None, project_id=None,
           log_modified_at=None):
    """Initialize an instance and save it to db."""
    result = cls(path_name, name, project_id, log_modified_at)

    DB_SESSION.add(result)
    DB_SESSION.commit()

    # crawl_result below queries by id, so pass the new row's id
    crawl_result(result.id, True)

    return result
def delete(self, id):
    """delete."""
    project = DB_SESSION.query(Project).filter_by(id=id).first()
    if project is None:
        response = jsonify({
            'projects': None,
            'message': 'No interface defined for URL.'
        })
        return response, 404

    DB_SESSION.delete(project)
    DB_SESSION.commit()

    return jsonify({'project': project.serialize})
def collect_results(project, force=False):
    """collect_results."""
    now = datetime.datetime.now()
    # throttle: skip re-collection if the project was scanned within the
    # last 4 seconds, unless forced
    if (now - project.updated_at).total_seconds() < 4 and (not force):
        return project

    result_paths = []
    if os.path.isdir(project.path_name):
        result_paths.extend(_list_result_paths(project.path_name))

    for result_path in result_paths:
        _register_result(project.id, result_path)

    project.updated_at = datetime.datetime.now()
    DB_SESSION.commit()

    return project
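# _list_result_paths is referenced above but not defined in this snippet.
# Below is an assumed implementation sketch, consistent with how
# _register_result filters its output: a result directory is any directory
# under the project path that contains a file literally named 'log'.
import os

def _list_result_paths_sketch(path_name):
    result_paths = []
    for root, _dirs, files in os.walk(path_name):
        if 'log' in files:
            result_paths.append(root)
    return result_paths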
def _register_result(project_id, result_path):
    # imported locally, presumably to avoid a circular import at load time
    from chainerui import DB_SESSION
    from chainerui.models.result import Result

    result_path = os.path.abspath(result_path)
    contain_log_file = os.path.isfile(os.path.join(result_path, 'log'))
    if not contain_log_file:
        return False

    result_size = DB_SESSION.query(Result).filter_by(
        path_name=result_path
    ).count()
    if result_size == 0:
        new_result = Result(project_id=project_id, path_name=result_path)
        DB_SESSION.add(new_result)
        DB_SESSION.commit()
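# Sketch of the contract _register_result enforces (the project id is a
# placeholder and a configured DB_SESSION is assumed, so this only runs
# inside the app): directories are registered only when they contain a file
# named exactly 'log', and re-registering the same path is a no-op thanks
# to the count() check above.
import os
import tempfile

result_dir = tempfile.mkdtemp()
assert _register_result(1, result_dir) is False  # no 'log' file yet
open(os.path.join(result_dir, 'log'), 'w').close()
_register_result(1, result_dir)  # inserts a Result row
_register_result(1, result_dir)  # duplicate path: no second row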
def crawl_result(result_id, force=False):
    """crawl_result."""
    current_result = DB_SESSION.query(Result).filter_by(id=result_id).first()

    now = datetime.datetime.now()
    if (not force) and (now - current_result.updated_at).total_seconds() < 4:
        return current_result

    # if the log file is not updated, there is no need to re-read its contents
    is_updated = _check_log_updated(current_result)
    crawled_result = crawl_result_path(current_result.path_name, is_updated)

    if is_updated:
        current_log_idx = len(current_result.logs)
        # a crawled log shorter than the stored one means the file was
        # rewritten; reset and re-read from the beginning
        if len(crawled_result['logs']) < current_log_idx:
            current_log_idx = 0
            current_result.logs = []
            current_result.args = None
        for log in crawled_result['logs'][current_log_idx:]:
            current_result.logs.append(Log(log))

    if current_result.args is None:
        current_result.args = Argument(json.dumps(crawled_result['args']))

    current_result.commands = []
    current_result.snapshots = []
    for cmd in crawled_result['commands']:
        current_result.commands.append(cmd.to_model())
    for snapshot in crawled_result['snapshots']:
        number_str = snapshot.split('snapshot_iter_')[1]
        if is_numberable(number_str):
            current_result.snapshots.append(Snapshot(snapshot, int(number_str)))

    current_result.updated_at = datetime.datetime.now()
    DB_SESSION.commit()

    return current_result
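# Self-contained illustration of the incremental merge performed above,
# using plain lists instead of ORM objects (an assumed simplification):
# only entries past the current index are appended, and a crawled list
# shorter than the stored one signals a rewritten log file and forces a
# reset before re-reading everything.
def merge_logs_sketch(current_logs, crawled_logs):
    start = len(current_logs)
    if len(crawled_logs) < start:  # log file shrank: start over
        current_logs = []
        start = 0
    current_logs.extend(crawled_logs[start:])
    return current_logs

assert merge_logs_sketch([1, 2], [1, 2, 3]) == [1, 2, 3]  # append new tail
assert merge_logs_sketch([1, 2, 3], [1]) == [1]           # reset on shrink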
def crawl_result(result_id, force=None):
    """crawl_result."""
    current_result = DB_SESSION.query(Result).filter_by(id=result_id).first()

    now = datetime.datetime.now()
    if force is None and (now - current_result.updated_at).total_seconds() < 4:
        return current_result

    crawled_result = crawl_result_path(current_result.path_name)

    # a crawled log shorter than the stored one means the file was
    # rewritten; reset everything and re-read from the beginning
    need_reset = len(crawled_result['logs']) < len(current_result.logs)
    if need_reset:
        current_result.logs = []
        current_result.args = None
        current_result.commands = []
        current_result.snapshots = []

    for log in crawled_result['logs'][len(current_result.logs):]:
        current_result.logs.append(Log(json.dumps(log)))
    if current_result.args is None:
        current_result.args = Argument(json.dumps(crawled_result['args']))
    for cmd in crawled_result['commands'][len(current_result.commands):]:
        current_result.commands.append(cmd.to_model())
    for snapshot in crawled_result['snapshots'][len(current_result.snapshots):]:
        number_str = snapshot.split('snapshot_iter_')[1]
        if is_numberable(number_str):
            current_result.snapshots.append(Snapshot(snapshot, int(number_str)))

    current_result.updated_at = datetime.datetime.now()
    DB_SESSION.commit()

    return current_result
def collect_images(result, assets, force=False):
    """Collect images from the meta file.

    Images are collected only when the meta file has been updated. If the
    number of images has decreased, the assets are reset and the images are
    re-collected.
    """
    path_name = result.path_name
    info_path = os.path.join(path_name, '.chainerui_images')
    start_idx = len(assets)

    if not os.path.isfile(info_path):
        return assets

    file_modified_at = datetime.datetime.fromtimestamp(
        os.path.getmtime(info_path))
    if start_idx > 0:
        if assets[-1].file_modified_at == file_modified_at:
            return assets

    with open(info_path, 'r') as f:
        info_list = json.load(f, object_pairs_hook=OrderedDict)

    # the meta file shrank: reset and re-collect all assets
    if len(info_list) < start_idx:
        start_idx = 0
        assets = []

    for base_info in info_list[start_idx:]:
        image_path = base_info.pop('images')
        asset = Asset.create(
            result_id=result.id, summary=base_info,
            file_modified_at=file_modified_at)
        for key, path in image_path.items():
            with open(os.path.join(path_name, path), 'rb') as f:
                data = f.read()
            content = Bindata(
                asset_id=asset.id, name=path, tag=key, content=data)
            asset.content_list.append(content)
        assets.append(asset)
    DB_SESSION.commit()

    return assets
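# Hedged sketch of the '.chainerui_images' meta file as collect_images reads
# it: a JSON array of objects, each carrying summary fields plus an 'images'
# mapping of tag -> image path relative to the result directory. Every field
# name except 'images' is an illustrative assumption.
import json
from collections import OrderedDict

info_list = [
    OrderedDict([
        ('epoch', 1),          # summary entries (assumed names)
        ('iteration', 100),
        ('images', OrderedDict([
            ('input', 'images/input_100.png'),    # tag -> relative path
            ('output', 'images/output_100.png'),
        ])),
    ]),
]
with open('.chainerui_images', 'w') as f:
    json.dump(info_list, f)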
def put(self, id):
    """put."""
    project = DB_SESSION.query(Project).filter_by(id=id).first()
    if project is None:
        return jsonify({
            'project': None,
            'message': 'No interface defined for URL.'
        }), 404

    request_project = request.get_json().get('project')
    project_name = request_project.get('name', None)
    if project_name is not None:
        project.name = project_name

    DB_SESSION.add(project)
    DB_SESSION.commit()

    return jsonify({'project': project.serialize})