Example #1
File: views.py (Project: arsengit/aimui)
    def get(self):
        project = Project()

        if not project.exists():
            return make_response(jsonify({}), 404)

        repo = project.repo
        metrics = set()
        params = {}

        for exp_name in repo.list_branches():
            for run_hash in repo.list_branch_commits(exp_name):
                run = repo.select_run_metrics(exp_name, run_hash)
                if run is not None:
                    run_params = copy.deepcopy(run.params)
                    if run_params is None or len(run_params) == 0:
                        continue
                    if '__METRICS__' in run_params:
                        del run_params['__METRICS__']
                    dump_dict_values(run_params, {})
                    params = deep_merge(params, run_params)
                    for m in run.metrics.keys():
                        metrics.add(m)

        dump_dict_values(params, True)

        return jsonify({
            'params': params,
            'metrics': list(metrics),
        })
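
Views like this one are plain resource methods; below is a minimal sketch of how such a `get` handler might be wired up, assuming Flask-RESTful (the resource class name and URL rule are hypothetical, not taken from the source):

    # Registration sketch (assumes Flask-RESTful; class name and route
    # are illustrative only).
    from flask import Flask
    from flask_restful import Api, Resource

    app = Flask(__name__)
    api = Api(app)

    class ProjectParamsApi(Resource):
        def get(self):
            # ... body as in Example #1 above ...
            return {'params': {}, 'metrics': []}

    api.add_resource(ProjectParamsApi, '/projects/params')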
Example #2
File: views.py (Project: jialin-wu-02/aimde)
    def get(self, insight_name):
        project = Project()
        if not project.exists():
            return make_response(jsonify({}), 404)

        commits = get_commits(insight_name)

        return jsonify(commits)
Example #3
    def get(self):
        project = Project()

        if not project.exists():
            return make_response(jsonify({}), 404)

        return jsonify({
            'branches': project.repo.list_branches(),
        })
Example #4
    def get(self):
        project = Project()

        if not project.exists():
            return make_response(jsonify({}), 404)

        return jsonify({
            'name': project.name,
            'path': project.path,
            'tf_enabled': project.tf_enabled,
            'description': project.description,
            'branches': project.repo.list_branches(),
        })
Example #5
File: views.py (Project: jialin-wu-02/aimde)
    def get(self):
        project = Project()

        if not project.exists():
            return make_response(jsonify({}), 404)

        # Get project branches list
        project_path = '/store'
        project_branches = get_project_branches(project_path)

        return jsonify({
            'name': project.name,
            'description': project.description,
            'branches': project_branches,
        })
Example #6
    def post(self, experiment, commit_hash):
        # Get project
        project = Project()
        if not project.exists():
            return make_response(jsonify({}), 404)

        if project.repo.is_archived(experiment, commit_hash):
            project.repo.unarchive(experiment, commit_hash)
            return jsonify({
                'archived': False,
            })
        else:
            project.repo.archive(experiment, commit_hash)
            return jsonify({
                'archived': True,
            })
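
This handler toggles a run's archived flag and reports the new state. A hypothetical client call (host, port, and URL pattern are assumptions, not taken from the source):

    # Illustrative client for the archive-toggle endpoint above;
    # the URL is invented for the sketch.
    import requests

    resp = requests.post('http://localhost:43800/api/v1/experiments/'
                         'my-experiment/abc123/archivation')
    print(resp.json())  # {'archived': True} or {'archived': False}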
Example #7
    def get(self):
        # Get project
        project = Project()
        if not project.exists():
            return make_response(jsonify({}), 404)

        raw_expression = request.args.get('q', '').strip()

        if 'run.archived' not in raw_expression:
            default_expression = 'run.archived is not True'
        else:
            default_expression = None

        if raw_expression:
            try:
                parser = Expression()
                parser.parse(raw_expression)
            except Diagnostic as d:
                parser_error_logs = d.logs or []
                for error_log in reversed(parser_error_logs):
                    if not isinstance(error_log, Notification):
                        continue
                    if error_log.severity != Severity.ERROR:
                        continue
                    error_location = error_log.location
                    if error_location:
                        return make_response(jsonify({
                            'type': 'parse_error',
                            'statement': raw_expression,
                            'location': error_location.col,
                        }), 403)
                return make_response(jsonify({}), 403)
            except Exception:
                return make_response(jsonify({}), 403)

        runs = project.repo.select_runs(raw_expression, default_expression)

        serialized_runs = []
        for run in runs:
            serialized_runs.append(run.to_dict())

        return jsonify({
            'runs': serialized_runs,
        })
Example #8
    def get(self):
        expression = request.args.get('q', '').strip()

        if 'run.archived' not in expression:
            default_expression = 'run.archived is not True'
        else:
            default_expression = None

        # Get project
        project = Project()
        if not project.exists():
            return make_response(jsonify({}), 404)

        runs = project.repo.select_runs(expression, default_expression)

        serialized_runs = []
        for run in runs:
            serialized_runs.append(run.to_dict())

        return jsonify({
            'runs': serialized_runs,
        })
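
Examples #7 and #8 share the same defaulting rule: archived runs are hidden unless the query itself constrains `run.archived`. A minimal sketch of that rule as a standalone helper (the function name is hypothetical):

    def default_archived_filter(expression):
        # Hide archived runs unless the query already mentions run.archived.
        if 'run.archived' not in expression:
            return 'run.archived is not True'
        return None

    assert default_archived_filter('metric == "loss"') == 'run.archived is not True'
    assert default_archived_filter('run.archived == True') is None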
Example #9
    def get(self, experiment_name, commit_id, file_path):
        project = Project()

        if not project.exists():
            return make_response(jsonify({}), 404)

        objects_dir_path = os.path.join('/store', experiment_name, commit_id,
                                        'objects')

        file_path = os.path.join(*file_path.split('+')) + '.log'
        dist_abs_path = os.path.join(objects_dir_path, file_path)

        if not os.path.isfile(dist_abs_path):
            return make_response(jsonify({}), 404)

        # Read the file at the resolved path; fall back to an empty list
        try:
            obj_data_content = read_artifact_log(dist_abs_path, 500)
            comp_content = list(map(json.loads, obj_data_content))
            return jsonify(comp_content)
        except Exception:
            return jsonify([])
Example #10
File: views.py (Project: arsengit/aimui)
    def get(self):
        project = Project()

        if not project.exists():
            return make_response(jsonify({}), 404)

        last_synced_run = db.session\
            .query(func.max(Commit.session_started_at),
                   func.max(Commit.session_closed_at))\
            .first()
        last_synced_run_time = max(last_synced_run[0] or 0,
                                   last_synced_run[1] or 0)

        modified_runs = project.get_modified_runs(last_synced_run_time)
        upgrade_runs_table(project, modified_runs)

        all_runs = db.session\
            .query(Commit.hash,
                   Commit.experiment_name,
                   Commit.session_started_at)\
            .filter(Commit.session_started_at > 0)\
            .all()

        experiments = {r.experiment_name for r in all_runs}
        activity_counter = Counter([
            datetime.fromtimestamp(r.session_started_at,
                                   pytz.timezone(request.tz)).strftime('%Y-%m-%d')
            if r.session_started_at > 0 else 0
            for r in all_runs
        ])

        return jsonify({
            'num_experiments': len(experiments),
            'num_runs': len(all_runs),
            'activity_map': dict(activity_counter),
        })
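
Example #10's activity map buckets run start times into per-day counts with collections.Counter. A standalone sketch of the same idea, assuming UTC in place of the request's timezone:

    from collections import Counter
    from datetime import datetime, timezone

    started_at = [1609459200, 1609462800, 1609545600]  # sample epoch seconds
    activity = Counter(
        datetime.fromtimestamp(ts, timezone.utc).strftime('%Y-%m-%d')
        for ts in started_at
    )
    # Counter({'2021-01-01': 2, '2021-01-02': 1})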
Example #11
    def get(self):
        search_statement = request.args.get('q', '').strip()

        # TODO: get from request
        steps_num = 50

        runs = []

        # Parse statement
        try:
            parser = Statement()
            parsed_stmt = parser.parse(search_statement)
        except Exception:
            return make_response(jsonify({}), 403)

        statement_select = parsed_stmt.node['select']
        statement_expr = parsed_stmt.node['expression']

        aim_runs, tf_logs = separate_select_statement(statement_select)

        if 'run.archived' not in search_statement:
            default_expression = 'run.archived is not True'
        else:
            default_expression = None

        # Get project
        project = Project()
        if not project.exists():
            return make_response(jsonify({}), 404)

        aim_metrics = project.repo.select_metrics(aim_runs, statement_expr,
                                                  default_expression)
        if aim_metrics:
            runs += aim_metrics

        # Get tf.summary logs
        if tf_logs:
            try:
                tf_runs = select_tf_summary_scalars(tf_logs, statement_expr)
                if tf_runs:
                    runs += tf_runs
            except Exception:
                pass

        # Get the longest trace length
        max_num_records = 0
        for run in runs:
            if is_tf_run(run):
                for metric in run['metrics']:
                    for trace in metric['traces']:
                        if trace['num_steps'] > max_num_records:
                            max_num_records = trace['num_steps']
            else:
                run.open_storage()
                for metric in run.metrics.values():
                    try:
                        metric.open_artifact()
                        for trace in metric.traces:
                            if trace.num_records > max_num_records:
                                max_num_records = trace.num_records
                    except Exception:
                        pass
                # NOTE: storage and artifacts are left open on purpose;
                # they are reused and closed in the retrieval loop below.

        # Scale all traces
        steps = scale_trace_steps(max_num_records, steps_num)

        # Retrieve records
        for run in runs:
            if is_tf_run(run):
                for metric in run['metrics']:
                    for trace in metric['traces']:
                        trace_range = range(len(trace['data']))[
                            steps.start:steps.stop:steps.step]
                        trace['data'] = [trace['data'][i] for i in trace_range]
            else:
                # run.open_storage()
                for metric in run.metrics.values():
                    try:
                        # Artifact is still open from the scan above
                        for trace in metric.traces:
                            for r in trace.read_records(steps):
                                base, metric_record = MetricRecord.deserialize(r)
                                trace.append((
                                    metric_record.value,                     # 0 => value
                                    base.step,                               # 1 => step
                                    base.epoch if base.has_epoch else None,  # 2 => epoch
                                    base.timestamp,                          # 3 => time
                                ))
                    except Exception:
                        pass
                    finally:
                        metric.close_artifact()
                run.close_storage()

        runs_list = []
        for run in runs:
            if not is_tf_run(run):
                runs_list.append(run.to_dict())
            else:
                runs_list.append(run)

        return jsonify({
            'runs': runs_list,
        })
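
scale_trace_steps above evidently maps the longest trace onto roughly steps_num evenly spaced points and returns a slice-like object (its start, stop, and step are used directly for indexing). A plausible sketch of that behavior, offered as an assumption rather than the project's actual implementation:

    def scale_trace_steps(max_num_records, steps_num):
        # Keep about steps_num evenly spaced records out of max_num_records.
        step = max(max_num_records // steps_num, 1)
        return slice(0, max_num_records, step)

    s = scale_trace_steps(1000, 50)
    assert len(range(1000)[s]) == 50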
Example #12
    def get(self, experiment_name, commit_id):
        project = Project()

        if not project.exists():
            return make_response(jsonify({}), 404)

        dir_path = os.path.join('/store', experiment_name)

        # Check if experiment exists
        if not os.path.isdir(dir_path):
            return jsonify({
                'init': True,
                'branch_init': False,
            })

        # Get commits
        commits = get_branch_commits(dir_path)

        # Get specified commit
        commit = None
        if commit_id == 'latest':
            for config in commits.values():
                if commit is None or config['date'] > commit['date']:
                    commit = config
        else:
            commit = commits.get(commit_id)

        if not commit:
            return jsonify({
                'init': True,
                'branch_init': True,
                'branch_empty': True,
            })

        if 'process' in commit:
            if not commit['process']['finish']:
                if commit['process'].get('start_date'):
                    duration = time.time() - commit['process']['start_date']
                    commit['process']['time'] = duration
                else:
                    commit['process']['time'] = None
            elif commit['process'].get('start_date') is not None \
                    and commit['process'].get('finish_date') is not None:
                commit['process']['time'] = commit['process']['finish_date'] \
                                            - commit['process']['start_date']

        objects_dir_path = os.path.join(dir_path, commit['hash'], 'objects')
        meta_file_path = os.path.join(objects_dir_path, 'meta.json')

        # Read meta file content
        try:
            with open(meta_file_path, 'r') as meta_file:
                meta_file_content = json.load(meta_file)
        except Exception:
            meta_file_content = {}

        # Get all artifacts(objects) listed in the meta file
        metric_objects = []
        model_objects = []
        dir_objects = []
        map_objects = []
        stats_objects = []

        # Limit distributions
        for obj_key, obj in meta_file_content.items():
            if obj['type'] == 'dir':
                dir_objects.append({
                    'name': obj['name'],
                    'cat': obj['cat'],
                    'data': obj['data'],
                    'data_path': obj['data_path'],
                })
            elif obj['type'] == 'models':
                model_file_path = os.path.join(objects_dir_path, 'models',
                                               '{}.aim'.format(obj['name']))
                model_file_size = os.stat(model_file_path).st_size
                model_objects.append({
                    'name': obj['name'],
                    'data': obj['data'],
                    'size': model_file_size,
                })
            elif (obj['type'] == 'metrics'
                  and obj['data_path'] != '__AIMRECORDS__') or \
                    ('map' in obj['type'] or obj['type'] == 'map'):
                # obj['type'] == 'distribution':
                # Get object's data file path
                obj_data_file_path = os.path.join(objects_dir_path,
                                                  obj['data_path'], obj_key)

                # Incompatible version
                if obj_key.endswith('.json'):
                    return make_response(jsonify({}), 501)

            if obj['type'] == 'metrics':
                steps = 75
                run = project.repo.select_run_metrics(experiment_name,
                                                      commit['hash'],
                                                      obj['name'])
                if run is not None and run.metrics.get(obj['name']) \
                        and len(run.metrics[obj['name']].traces):
                    metric = run.metrics[obj['name']]
                    run.open_storage()
                    metric.open_artifact()
                    traces = []
                    for trace in metric.traces:
                        num = trace.num_records
                        step = num // steps or 1
                        for r in trace.read_records(slice(0, num, step)):
                            base, metric_record = MetricRecord.deserialize(r)
                            trace.append((
                                base.step,  # 0 => step
                                metric_record.value,  # 1 => value
                            ))
                        traces.append(trace.to_dict())
                    metric.close_artifact()
                    run.close_storage()
                else:
                    traces = []

                metric_objects.append({
                    'name': obj['name'],
                    'mode': 'plot',
                    'traces': traces,
                })
            elif 'map' in obj['type'] or obj['type'] == 'map':
                try:
                    params_str = read_artifact_log(obj_data_file_path, 1)
                    if params_str:
                        map_objects.append({
                            'name': obj['name'],
                            'data': json.loads(params_str[0]),
                            'nested': 'nested_map' in obj['type'],
                        })
                except Exception:
                    pass

        # Return found objects
        return jsonify({
            'init': True,
            'branch_init': True,
            'branch_empty': False,
            'commit': commit,
            'commits': commits,
            'metrics': metric_objects,
            'models': model_objects,
            'dirs': dir_objects,
            'maps': map_objects,
            'stats': stats_objects,
        })
Example #13
File: views.py (Project: jialin-wu-02/aimde)
    def get(self, experiment_name, commit_id):
        project = Project()

        if not project.exists():
            return make_response(jsonify({}), 404)

        dir_path = os.path.join('/store', experiment_name)

        # Check if experiment exists
        if not os.path.isdir(dir_path):
            return jsonify({
                'init': True,
                'branch_init': False,
            })

        # Get commits
        commits = get_branch_commits(dir_path)

        # Get specified commit
        commit = None
        if commit_id == 'latest':
            for config in commits.values():
                if commit is None or config['date'] > commit['date']:
                    commit = config
        elif commit_id == 'index':
            commit = {
                'hash': 'index',
                'date': time.time(),
                'index': True,
            }
        else:
            commit = commits.get(commit_id)

        if not commit:
            return make_response(jsonify({}), 404)

        objects_dir_path = os.path.join(dir_path, commit['hash'], 'objects')
        meta_file_path = os.path.join(objects_dir_path, 'meta.json')

        # Read meta file content
        try:
            with open(meta_file_path, 'r') as meta_file:
                meta_file_content = json.load(meta_file)
        except Exception:
            meta_file_content = {}

        if commit['hash'] == 'index' and len(meta_file_content) == 0:
            return jsonify({
                'init': True,
                'branch_init': True,
                'index_empty': True,
                'commit': commit,
                'commits': commits,
            })

        # Get all artifacts(objects) listed in the meta file
        metric_objects = []
        model_objects = []
        dir_objects = []
        map_objects = []
        stats_objects = []

        records_storage = Storage(objects_dir_path, 'r')

        # Limit distributions
        for obj_key, obj in meta_file_content.items():
            if obj['type'] == 'dir':
                dir_objects.append({
                    'name': obj['name'],
                    'cat': obj['cat'],
                    'data': obj['data'],
                    'data_path': obj['data_path'],
                })
            elif obj['type'] == 'models':
                model_file_path = os.path.join(objects_dir_path, 'models',
                                               '{}.aim'.format(obj['name']))
                model_file_size = os.stat(model_file_path).st_size
                model_objects.append({
                    'name': obj['name'],
                    'data': obj['data'],
                    'size': model_file_size,
                })
            elif (obj['type'] == 'metrics' and obj['data_path'] != '__AIMRECORDS__') or \
                    obj['type'] == 'map':
                # obj['type'] == 'distribution':
                # Get object's data file path
                obj_data_file_path = os.path.join(objects_dir_path,
                                                  obj['data_path'], obj_key)

                # Incompatible version
                if obj_key.endswith('.json'):
                    return make_response(jsonify({}), 501)

            if obj['type'] == 'metrics':
                comp_content = []
                if obj['data_path'] == '__AIMRECORDS__':
                    data_format = 'aimrecords'
                    records_storage.open(obj['name'],
                                         uncommitted_bucket_visible=True)
                    # Read the last 1000 records of the metric
                    for r in records_storage.read_records(obj['name'],
                                                          slice(-1000, None)):
                        base, metric_record = Metric.deserialize(r)
                        comp_content.append(metric_record.value)
                    records_storage.close(obj['name'])
                else:
                    data_format = 'json_log'
                    obj_data_content = read_artifact_log(obj_data_file_path,
                                                         1000)
                    comp_content = list(map(float, obj_data_content))
                metric_objects.append({
                    'name': obj['name'],
                    'mode': 'plot',
                    'data': comp_content,
                    'format': data_format,
                })
            elif obj['type'] == 'map':
                try:
                    params_str = read_artifact_log(obj_data_file_path, 1)
                    if params_str:
                        map_objects.append({
                            'name': obj['name'],
                            'data': json.loads(params_str[0]),
                        })
                except Exception:
                    pass

        records_storage.close()

        # Return found objects
        return jsonify({
            'init': True,
            'branch_init': True,
            'commit': commit,
            'commits': commits,
            'metrics': metric_objects,
            'models': model_objects,
            'dirs': dir_objects,
            'maps': map_objects,
            'stats': stats_objects,
        })
Example #14
    def get(self):
        try:
            steps_num = int(request.args.get('p', '').strip())
        except ValueError:
            steps_num = 50

        # Get project
        project = Project()
        if not project.exists():
            return make_response(jsonify({}), 404)

        search_statement = request.args.get('q', '').strip()

        # Parse statement
        try:
            parser = Statement()
            parsed_stmt = parser.parse(search_statement)
        except Diagnostic as d:
            parser_error_logs = d.logs or []
            for error_log in reversed(parser_error_logs):
                if not isinstance(error_log, Notification):
                    continue
                if error_log.severity != Severity.ERROR:
                    continue
                error_location = error_log.location
                if error_location:
                    return make_response(jsonify({
                        'type': 'parse_error',
                        'statement': search_statement,
                        'location': error_location.col,
                    }), 403)
            return make_response(jsonify({}), 403)
        except Exception:
            return make_response(jsonify({}), 403)

        statement_select = parsed_stmt.node['select']
        statement_expr = parsed_stmt.node['expression']

        aim_select, tf_logs = separate_select_statement(statement_select)

        if 'run.archived' not in search_statement:
            default_expression = 'run.archived is not True'
        else:
            default_expression = None

        aim_select_result = project.repo.select(aim_select,
                                                statement_expr,
                                                default_expression)

        aim_selected_runs = aim_select_result.runs
        aim_selected_params = aim_select_result.get_selected_params()
        aim_selected_metrics = aim_select_result.get_selected_metrics_context()

        aim_selected_runs.sort(key=lambda r: r.config.get('date'), reverse=True)

        response = {
            'runs': [],
            'params': [],
            'agg_metrics': {},
            'meta': {
                'tf_selected': False,
                'params_selected': False,
                'metrics_selected': False,
            },
        }

        retrieve_traces = False
        retrieve_agg_metrics = False

        if aim_selected_params:
            response['meta']['params_selected'] = True
            response['params'] = aim_selected_params
            if aim_selected_metrics:
                response['meta']['metrics_selected'] = True
                response['agg_metrics'] = aim_selected_metrics
                retrieve_agg_metrics = True
        elif aim_selected_metrics:
            response['meta']['metrics_selected'] = True
            retrieve_traces = True

        runs = []

        if aim_selected_runs:
            runs += aim_selected_runs
        if tf_logs:
            if not retrieve_traces:
                # TODO: aggregate tf logs and return aggregated values
                response['meta']['tf_selected'] = True
            else:
                try:
                    tf_runs = select_tf_summary_scalars(tf_logs, statement_expr)
                    if tf_runs:
                        runs += tf_runs
                except Exception:
                    pass
                else:
                    response['meta']['tf_selected'] = True

        if retrieve_traces:
            # Get the longest trace length
            max_num_records = 0
            for run in runs:
                if is_tf_run(run):
                    for metric in run['metrics']:
                        for trace in metric['traces']:
                            if trace['num_steps'] > max_num_records:
                                max_num_records = trace['num_steps']
                else:
                    run.open_storage()
                    for metric in run.metrics.values():
                        try:
                            metric.open_artifact()
                            for trace in metric.traces:
                                if trace.num_records > max_num_records:
                                    max_num_records = trace.num_records
                        except Exception:
                            pass
                    # NOTE: storage and artifacts are left open on purpose;
                    # they are reused and closed in the retrieval loop below.

            # Scale all traces
            steps = scale_trace_steps(max_num_records, steps_num)

            # Retrieve records
            for run in runs:
                if is_tf_run(run):
                    for metric in run['metrics']:
                        for trace in metric['traces']:
                            trace_range = range(len(trace['data']))[
                                steps.start:steps.stop:steps.step]
                            trace['data'] = [trace['data'][i]
                                             for i in trace_range]
                else:
                    # run.open_storage()
                    for metric in run.metrics.values():
                        try:
                            # Artifact is still open from the scan above
                            for trace in metric.traces:
                                for r in trace.read_records(steps):
                                    base, metric_record = MetricRecord.deserialize(r)
                                    trace.append((
                                        metric_record.value,                     # 0 => value
                                        base.step,                               # 1 => step
                                        base.epoch if base.has_epoch else None,  # 2 => epoch
                                        base.timestamp,                          # 3 => time
                                    ))
                        except Exception:
                            pass
                        finally:
                            metric.close_artifact()
                    run.close_storage()

        if retrieve_agg_metrics:
            # TODO: Retrieve and return aggregated metrics
            pass

        runs_list = []
        for run in runs:
            if not is_tf_run(run):
                runs_list.append(run.to_dict(include_only_selected_agg_metrics=True))
            else:
                runs_list.append(run)

        response['runs'] = runs_list

        return response
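
Note that, unlike the earlier examples, this handler returns a plain dict rather than a jsonify(...) response; Flask 1.1+ converts dict return values to JSON automatically, so the two endings below are equivalent on recent Flask versions:

    from flask import Flask, jsonify

    app = Flask(__name__)

    @app.route('/as-dict')
    def as_dict():
        return {'runs': []}  # auto-converted to JSON on Flask >= 1.1

    @app.route('/as-jsonify')
    def as_jsonify():
        return jsonify({'runs': []})  # explicit, works on all versions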