Example #1
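A Flask-style endpoint that closes an annotation project: it collects the stored annotation results for each document status, writes them back onto the document, closes the status, and finally deactivates the project; a missing or already inactive project yields 304 Not Modified.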
def api_project_close(project_id):
    project = AnnotationProject.query.filter_by(id=project_id).first()
    if not project or project.is_active is False:
        # Nothing to change for a missing or already inactive project
        return '', http.HTTPStatus.NOT_MODIFIED
    else:
        for doc_status in project.doc_statuses:
            results = AnnotationResult.query.filter_by(status_id=doc_status.id).all()
            results_json = {}
            for result in results:
                results_json[result.id] = json.loads(result.result_json)
            if len(results_json) != 0:
                Document.add_results(doc_status.doc_id, results_json)
                DocStatus.close(doc_status.id)
        AnnotationProject.deactivate(project_id)
        return '', http.HTTPStatus.OK
Example #2
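A Django REST Framework test that posts a run_command request against an RDS path instead of an EC2 one; the use case must not be invoked and the API should answer 400.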
    def test_run_command_not_ec2(self, use_case: mock.Mock):
        client = APIClient()
        user_model = UserModel.objects.get(email="test_email")
        client.force_authenticate(user=user_model)

        # Get the ID of Company1 (the tenant)
        tenant_id = TenantModel.objects.get(tenant_name="test_tenant_users_in_tenant_1").id

        # Get the ID of the AWS environment
        aws_id = AwsEnvironmentModel.objects.get(aws_account_id="test_aws1").id

        run_command = use_case.return_value.run_command
        run_command.return_value = Command(
            Document("document_name", [Parameter(key="param", value="value")]),
            Ec2("ap-northeast-1", "i-123456789012")
        )

        # Execute the code under test
        response = client.post(
            path=self.api_path.format(tenant_id, aws_id).replace("ec2", "rds") + "run_command/",
            data=dict(
                name="document_name",
                parameters=[dict(key="param", value="value")]
            ),
            format='json')

        run_command.assert_not_called()
        self.assertEqual(response.status_code, 400)
Example #3
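Inserts one dataset split named "ALG_FACT" plus the split index: for every document id in sanity_data it loads the document JSON and its candidate summaries, skips summaries without a '|||' marker or ones rejected by example_filter, and stores each remaining document/summary pair together with its sanity-check statement.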
def one_split(db, idx, sanity_data):
    # Insert dataset
    dataset = Dataset(name="ALG_FACT"+str(idx))
    db.session.add(dataset)
    db.session.commit()

    # dataset_path is expected to be defined at module level
    summaries_path = os.path.join(dataset_path, 'summaries')
    documents_path = os.path.join(dataset_path, 'documents')
    for doc_id in sanity_data:
        file_name = doc_id + ".data"
        file_path = os.path.join(documents_path, file_name)
        summ_path = os.path.join(summaries_path, file_name)
        with open(summ_path, 'r') as infile:
            summ_json = json.load(infile)
        with open(file_path, 'r') as infile:
            json_result = json.load(infile)
            did = json_result['doc_id']
            for i, item in enumerate(summ_json):
                # Skip summaries whose name lacks the '|||' marker
                if item['name'].find("|||") == -1:
                    continue
                # Skip summaries rejected by the external example_filter check
                if example_filter(item['text']):
                    continue
                document = Document(
                    dataset_id=dataset.id,
                    doc_id=json_result['doc_id'],
                    doc_json=json.dumps(json_result),
                    summary=json.dumps(item),
                    sanity_statement=sanity_data[did]["sanity_statement"],
                    sanity_answer=sanity_data[did]["sanity_answer"]
                )
                db.session.add(document)
                db.session.commit()
Example #4
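Populates the database from a BBC corpus on disk: it reuses the existing 'BBC' dataset row, inserts one Document per JSON file under documents/, then creates a SummaryGroup for every reference or system folder under summaries/ and links each summary text to its document.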
def init_database(db):
    # user = User(email='admin@localhost', password='******')
    # db.session.add(user)
    # db.session.commit()
    # dataset_path = '/home/acp16hh/Projects/Research/Experiments/Exp_Elly_Human_Evaluation/src/Mock_Dataset_2/BBC_Sample'
    dataset_path = '/home/acp16hh/Projects/Research/Experiments/Exp_Elly_Human_Evaluation/src/Mock_Dataset_2/BBC'
    dataset_name = os.path.split(dataset_path)[1]

    summaries_path = os.path.join(dataset_path, 'summaries')
    documents_path = os.path.join(dataset_path, 'documents')

    # Existing dataset
    dataset = db.session.query(Dataset).filter_by(name='BBC').first()
    # Insert dataset
    # dataset = Dataset(name=dataset_name)
    # db.session.add(dataset)
    # db.session.commit()

    # Insert documents
    for file in os.listdir(documents_path):
        file_path = os.path.join(documents_path, file)
        with open(file_path, 'r') as infile:
            json_result = json.load(infile)
            document = Document(
                dataset_id=dataset.id,
                doc_id=json_result['doc_id'],
                doc_json=json.dumps(json_result)
            )
            db.session.add(document)
            db.session.commit()

    # Insert Summaries
    for folder in os.listdir(summaries_path):
        if 'topic' not in folder:
            continue
        if folder.startswith('ref'):
            summary_group = SummaryGroup(name='%s_ref_%s' % (dataset_name, folder[4:]),
                                         dataset_id=dataset.id, is_ref=True)
        elif folder.startswith('system'):
            summary_group = SummaryGroup(name='%s_system_%s' % (dataset_name, folder[7:]),
                                         dataset_id=dataset.id, is_ref=False)
        else:
            break
        db.session.add(summary_group)
        db.session.commit()
        ref_path = os.path.join(summaries_path, folder)
        for file in os.listdir(ref_path):
            with open(os.path.join(ref_path, file), 'r') as infile:
                text = ' '.join(infile.readlines()).strip()
                document = db.session.query(Document).filter_by(doc_id=os.path.splitext(file)[0]).first()
                summary = Summary(
                    doc_id=document.id,
                    text=text,
                    summary_group_id=summary_group.id
                )
                db.session.add(summary)
                db.session.commit()
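Example #5
A second init_database variant: it creates a fresh 'BBC_test' dataset instead of reusing an existing one, stores a placeholder summary string on every document, and loads the reference and system summary groups in the same way as the previous example.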
def init_database(db):
    # user = User(email='admin@localhost', password='******')
    # db.session.add(user)
    # db.session.commit()
    dataset_path = '../backend/BBC'
    dataset_name = os.path.split(dataset_path)[1]

    summaries_path = os.path.join(dataset_path, 'summaries')
    documents_path = os.path.join(dataset_path, 'documents')

    # Existing dataset
    #dataset = db.session.query(Dataset).filter_by(name='BBC').first()
    # Insert dataset
    dataset = Dataset(name="BBC_test")
    db.session.add(dataset)
    db.session.commit()

    # Insert documents
    for file in os.listdir(documents_path):
        file_path = os.path.join(documents_path, file)
        with open(file_path, 'r') as infile:
            json_result = json.load(infile)
            document = Document(dataset_id=dataset.id,
                                doc_id=json_result['doc_id'],
                                doc_json=json.dumps(json_result),
                                summary="aaaaaaa")
            db.session.add(document)
            db.session.commit()

    # Insert Summaries
    for folder in os.listdir(summaries_path):
        if folder.startswith('ref'):
            summary_group = SummaryGroup(name='%s_ref_%s' %
                                         (dataset_name, folder[4:]),
                                         dataset_id=dataset.id,
                                         is_ref=True)
        elif folder.startswith('system'):
            summary_group = SummaryGroup(name='%s_system_%s' %
                                         (dataset_name, folder[7:]),
                                         dataset_id=dataset.id,
                                         is_ref=False)
        else:
            break
        db.session.add(summary_group)
        db.session.commit()
        ref_path = os.path.join(summaries_path, folder)
        for file in os.listdir(ref_path):
            with open(os.path.join(ref_path, file), 'r') as infile:
                text = ' '.join(infile.readlines()).strip()
                document = db.session.query(Document).filter_by(
                    doc_id=os.path.splitext(file)[0]).first()
                summary = Summary(doc_id=document.id,
                                  text=text,
                                  summary_group_id=summary_group.id)
                db.session.add(summary)
                db.session.commit()
Example #6
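A thin wrapper that appears to call the AWS SSM DescribeDocument API through self.client and maps the response into the application's Document and Parameter objects.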
    def describe_document(self, document_name):
        response = self.client.describe_document(Name=document_name)

        document_dict = response["Document"]

        return Document(name=document_dict["Name"],
                        parameters=[
                            Parameter(key=param["Name"],
                                      description=param["Description"])
                            for param in document_dict["Parameters"]
                        ])
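Example #7
A test that requests a document description with invalid tenant and AWS environment IDs (-1 for both); describe_document must not be called and the API should answer 404.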
    def test_get_document_no_aws_env(self, use_case: mock.Mock):
        client = APIClient()
        user_model = UserModel.objects.get(email="test_email")
        client.force_authenticate(user=user_model)

        describe_document = use_case.return_value.describe_document
        describe_document.return_value = Document(name="test", parameters=[])

        # Execute the code under test
        response = client.get(path=self.api_path.format(-1, -1, "test/"),
                              format='json')

        describe_document.assert_not_called()
        self.assertEqual(response.status_code, 404)
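Example #8
Another init_database variant, this time for a 'BBC_pair' dataset: it reads sanity-check statements from sanity_id/sanity.txt and stores one Document row per (document, summary) pair, attaching the matching sanity statement and expected answer.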
def init_database(db):
    # user = User(email='admin@localhost', password='******')
    # db.session.add(user)
    # db.session.commit()
    dataset_path = '../backend/BBC_pair'
    dataset_name = os.path.split(dataset_path)[1]

    summaries_path = os.path.join(dataset_path, 'summaries')
    documents_path = os.path.join(dataset_path, 'documents')
    sanity_path = os.path.join(dataset_path, 'sanity_id/sanity.txt')

    # Existing dataset
    #dataset = db.session.query(Dataset).filter_by(name='BBC').first()
    # Insert dataset
    dataset = Dataset(name="BBC")
    db.session.add(dataset)
    db.session.commit()

    # Map doc_id -> sanity-check statement and expected answer
    sanity_data = {}
    with open(sanity_path) as infile:
        for line in infile:
            flist = line.strip().split("\t")
            sanity_data[flist[0]] = {
                "sanity_answer": bool(int(flist[2])),
                "sanity_statement": flist[1]
            }

    # Insert documents
    for file in os.listdir(documents_path):
        file_path = os.path.join(documents_path, file)
        summ_path = os.path.join(summaries_path, file)
        with open(summ_path, 'r') as infile:
            summ_json = json.load(infile)
        with open(file_path, 'r') as infile:
            json_result = json.load(infile)
            did = json_result['doc_id']
            for i, item in enumerate(summ_json):
                document = Document(
                    dataset_id=dataset.id,
                    doc_id=json_result['doc_id'],
                    doc_json=json.dumps(json_result),
                    summary=json.dumps(item),
                    sanity_statement=sanity_data[did]["sanity_statement"],
                    sanity_answer=sanity_data[did]["sanity_answer"])
                db.session.add(document)
                db.session.commit()
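Example #9
The positive counterpart of the previous test: with valid tenant and AWS environment IDs, describe_document is called exactly once and the response status is 200.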
    def test_get_document(self, use_case: mock.Mock):
        client = APIClient()
        user_model = UserModel.objects.get(email="test_email")
        client.force_authenticate(user=user_model)

        # Get the ID of Company1 (the tenant)
        tenant_id = TenantModel.objects.get(
            tenant_name="test_tenant_users_in_tenant_1").id
        # Get the ID of the AWS environment
        aws_id = AwsEnvironmentModel.objects.get(aws_account_id="test_aws1").id

        describe_document = use_case.return_value.describe_document
        describe_document.return_value = Document(name="test", parameters=[])

        # Execute the code under test
        response = client.get(
            path=self.api_path.format(tenant_id, aws_id, "test/"),
            format='json')

        describe_document.assert_called_once()
        self.assertEqual(response.status_code, 200)
Example #10
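A small helper that converts a raw list of document dictionaries (each carrying a "Name" key, presumably from an SSM ListDocuments response) into Document objects.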
def _build_documents(documents: list):
    return [Document(name=doc_dict["Name"]) for doc_dict in documents]
Example #11
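A Flask view that looks up a document by id and returns it as JSON.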
def api_document_get(doc_id):
    if request.method == 'GET':
        doc_dict = Document.get_dict(doc_id)
        if doc_dict:
            return jsonify(doc_dict), http.HTTPStatus.OK
        return '', http.HTTPStatus.NOT_FOUND