def mock_create_labbooks(fixture_working_dir):
    """Fixture: create a labbook with one inserted code file and a GraphQL
    test client, yield (labbook, client, schema), then clean up.

    Args:
        fixture_working_dir: tuple of (config file path, working dir, ...)

    Yields:
        tuple: (LabBook instance, graphene test Client, graphene Schema)
    """
    # Create a labbook in the temporary directory
    config_file = fixture_working_dir[0]
    lb = LabBook(fixture_working_dir[0])
    lb.new(owner={"username": UT_USERNAME},
           name=UT_LBNAME,
           description="Cats labbook 1")

    # Create a file in the dir; the handle must be closed (end of `with`)
    # before insert_file copies it into the labbook's code section
    with open(os.path.join(fixture_working_dir[1], 'unittest-examplefile'),
              'w') as sf:
        sf.write("test data")
    FileOperations.insert_file(lb, 'code', sf.name)

    assert os.path.isfile(
        os.path.join(lb.root_dir, 'code', 'unittest-examplefile'))

    # Create test client with the labbook schema; find_default_config is
    # patched so Configuration() reads the fixture's config file
    schema = graphene.Schema(query=LabbookQuery, mutation=LabbookMutations)
    with patch.object(Configuration, 'find_default_config',
                      lambda self: config_file):
        app = Flask("lmsrvlabbook")
        app.config["LABMGR_CONFIG"] = Configuration()
        app.config["LABMGR_ID_MGR"] = get_identity_manager(Configuration())
        with app.app_context():
            flask.g.user_obj = app.config["LABMGR_ID_MGR"].get_user_profile()
            client = Client(
                schema,
                middleware=[LabBookLoaderMiddleware(), error_middleware],
                context_value=ContextMock())
            yield lb, client, schema
    # BUG FIX: the original passed the whole fixture tuple to shutil.rmtree,
    # which raises TypeError at teardown (ignore_errors only suppresses
    # OSError). Remove the temporary working directory itself instead.
    shutil.rmtree(fixture_working_dir[1], ignore_errors=True)
コード例 #2
0
    def test_detail_record_node(self, fixture_working_dir, snapshot, fixture_test_file):
        """Fetch an activity detail record via the relay node interface and
        verify it matches the same record from the activityRecords listing."""
        labbook = LabBook(fixture_working_dir[0])
        labbook.new(owner={"username": "******"}, name="labbook1", description="my test description")
        FileOperations.insert_file(labbook, "code", fixture_test_file)

        # First list activity records together with their nested detail objects
        list_query = """
        {
          labbook(name: "labbook1", owner: "default") {               
            activityRecords {
                edges{
                    node{
                        id
                        commit
                        linkedCommit
                        message
                        type
                        show
                        importance
                        tags
                        detailObjects{
                            id
                            key
                            type
                            data
                            show
                            importance
                            tags
                        }
                        }                        
                    }    
            }
          }
        }
        """
        listing = fixture_working_dir[2].execute(list_query)
        # The first detail object of the first activity record is the target
        expected_detail = listing['data']['labbook']['activityRecords']['edges'][0]['node']['detailObjects'][0]

        # Then re-fetch exactly that detail object directly by its node ID
        node_query = """
            {{
                node(id: "{}") {{
                    ... on ActivityDetailObject {{
                            id
                            key
                            type
                            data
                            show
                            importance
                            tags   
                    }}
                }}
            }}
            """.format(expected_detail['id'])
        direct = fixture_working_dir[2].execute(node_query)
        assert direct['data']['node'] == expected_detail
コード例 #3
0
def mock_create_labbooks(fixture_working_dir):
    """Fixture: build a labbook named 'labbook1' containing one code file,
    then yield the underlying working-directory fixture tuple unchanged."""
    # Create a labbook in the temporary directory
    labbook = LabBook(fixture_working_dir[0])
    labbook.new(owner={"username": "******"}, name="labbook1", description="Cats labbook 1")

    # Write a small file into the temp dir, then copy it into the code section
    src_path = os.path.join(fixture_working_dir[1], 'sillyfile')
    with open(src_path, 'w') as handle:
        handle.write("1234567")
        handle.seek(0)
    FileOperations.insert_file(labbook, 'code', src_path)

    assert os.path.isfile(os.path.join(labbook.root_dir, 'code', 'sillyfile'))
    # name of the config file, temporary working directory, the schema
    yield fixture_working_dir
コード例 #4
0
 def resolve_size_bytes(self, info):
     """Return the labbook's on-disk size in bytes, as a string.

     NOTE! This must be a string, as graphene can't quite handle big integers.
     """
     loader_key = f"{get_logged_in_username()}&{self.owner}&{self.name}"
     promise = info.context.labbook_loader.load(loader_key)
     return promise.then(
         lambda labbook: str(FileOperations.content_size(labbook=labbook)))
コード例 #5
0
 def mutate_and_get_payload(cls,
                            root,
                            info,
                            owner,
                            labbook_name,
                            transaction_id,
                            cancel=False,
                            rollback=False,
                            client_mutation_id=None):
     """Complete (or cancel / roll back) a batch upload transaction on the
     labbook identified by owner and labbook_name."""
     username = get_logged_in_username()
     # Locate the labbook on disk via the configured git working directory
     root_dir = Configuration().config['git']['working_directory']
     labbook_dir = os.path.join(root_dir, username, owner, 'labbooks',
                                labbook_name)
     lb = LabBook(author=get_logged_in_author())
     lb.from_directory(labbook_dir)
     FileOperations.complete_batch(lb, transaction_id, cancel=cancel,
                                   rollback=rollback)
     return CompleteBatchUploadTransaction(success=True)
    def test_reflect_deleted_files_on_merge_in(self, mock_create_labbooks):
        """A file deleted on a feature branch should disappear from the
        workspace branch after merging that branch in."""
        lb, client = mock_create_labbooks[0], mock_create_labbooks[1]
        with open('/tmp/s1.txt', 'w') as s1:
            s1.write('original-file\ndata')
        FileOperations.insert_file(lb, section='code', src_file=s1.name)
        bm = BranchManager(lb, username=UT_USERNAME)

        # Delete the file on the new branch; repo must stay clean afterwards
        nb = bm.create_branch('new-branch')
        assert os.path.exists(os.path.join(lb.root_dir, 'code', 's1.txt'))
        lb.delete_file('code', 's1.txt')
        assert lb.is_repo_clean
        assert not os.path.exists(os.path.join(lb.root_dir, 'code', 's1.txt'))

        # Back on the workspace branch the file still exists
        bm.workon_branch(bm.workspace_branch)
        assert os.path.exists(os.path.join(lb.root_dir, 'code', 's1.txt'))

        merge_q = f"""
        mutation x {{
            mergeFromBranch(input: {{
                owner: "{UT_USERNAME}",
                labbookName: "{UT_LBNAME}",
                otherBranchName: "{nb}",
                force: false            
            }}) {{
                labbook{{
                    name
                    description
                    availableBranchNames
                    activeBranchName
                }}
            }}
        }}
        """
        r = client.execute(merge_q)
        assert 'errors' not in r
        # BUG FIX: this comparison was previously a bare expression statement
        # (missing `assert`), so it never actually checked anything.
        assert r['data']['mergeFromBranch']['labbook'][
            'activeBranchName'] == 'gm.workspace-default'
        assert not os.path.exists(os.path.join(lb.root_dir, 'code', 's1.txt'))
コード例 #7
0
    def mutate_and_process_upload(cls,
                                  info,
                                  owner,
                                  labbook_name,
                                  section,
                                  file_path,
                                  chunk_upload_params,
                                  transaction_id,
                                  client_mutation_id=None):
        """Move the fully-uploaded chunked file into the labbook section and
        return an edge describing the newly added file.

        Raises:
            ValueError: if no upload was staged on the class.
        """
        if not cls.upload_file_path:
            logger.error('No file uploaded')
            raise ValueError('No file uploaded')

        try:
            username = get_logged_in_username()
            working_dir = Configuration().config['git']['working_directory']
            labbook_dir = os.path.join(working_dir, username, owner,
                                       'labbooks', labbook_name)
            lb = LabBook(author=get_logged_in_author())
            lb.from_directory(labbook_dir)
            # Keep the client-supplied directory but use the actual uploaded
            # filename for the destination
            dst = os.path.join(os.path.dirname(file_path), cls.filename)
            fops = FileOperations.put_file(labbook=lb,
                                           section=section,
                                           src_file=cls.upload_file_path,
                                           dst_path=dst,
                                           txid=transaction_id)
        finally:
            # Always remove the staged temp upload, even if put_file failed
            try:
                logger.debug(f"Removing temp file {cls.upload_file_path}")
                os.remove(cls.upload_file_path)
            except FileNotFoundError:
                pass

        # Data used to populate the returned edge
        create_data = {
            'owner': owner,
            'name': labbook_name,
            'section': section,
            'key': fops['key'],
            '_file_info': fops
        }

        # TODO: Fix cursor implementation..
        # this currently doesn't make sense when adding edges
        cursor = base64.b64encode(f"{0}".encode('utf-8'))
        edge = LabbookFileConnection.Edge(node=LabbookFile(**create_data),
                                          cursor=cursor)
        return AddLabbookFile(new_labbook_file_edge=edge)
    def test_conflicted_merge_from_force_success(self, mock_create_labbooks,
                                                 snapshot):
        """A conflicting merge should succeed when force is true."""
        lb, client = mock_create_labbooks[0], mock_create_labbooks[1]
        with open('/tmp/s1.txt', 'w') as s1:
            s1.write('original-file\ndata')
        FileOperations.insert_file(lb, section='code', src_file=s1.name)
        bm = BranchManager(lb, username=UT_USERNAME)

        # Write conflicting content to the same file on the new branch...
        nb = bm.create_branch('new-branch')
        with open('/tmp/s1.txt', 'w') as s1:
            s1.write('branch-conflict-data')
        FileOperations.insert_file(lb, section='code', src_file=s1.name)

        # ...and different conflicting content on the workspace branch
        bm.workon_branch(bm.workspace_branch)
        with open('/tmp/s1.txt', 'w') as s1:
            s1.write('mainline-conflict-data')
        FileOperations.insert_file(lb, section='code', src_file=s1.name)

        merge_q = f"""
        mutation x {{
            mergeFromBranch(input: {{
                owner: "{UT_USERNAME}",
                labbookName: "{UT_LBNAME}",
                otherBranchName: "{nb}",
                force: true            
            }}) {{                         
                labbook{{
                    name
                    description
                    availableBranchNames
                    activeBranchName
                }}
            }}
        }}
        """
        r = client.execute(merge_q)
        assert 'errors' not in r
        snapshot.assert_match(r)
        # BUG FIX: this comparison was previously a bare expression statement
        # (missing `assert`), so it never actually checked anything.
        assert r['data']['mergeFromBranch']['labbook'][
            'activeBranchName'] == 'gm.workspace-default'
コード例 #9
0
 def resolve_is_untracked(self, info):
     """Resolve whether this labbook section is configured as untracked."""
     loader_key = f"{get_logged_in_username()}&{self.owner}&{self.name}"
     promise = info.context.labbook_loader.load(loader_key)
     return promise.then(
         lambda labbook: FileOperations.is_set_untracked(
             labbook=labbook, section=str(self.section)))
コード例 #10
0
    def mutate_and_get_payload(cls,
                               root,
                               info,
                               name,
                               description,
                               repository,
                               component_id,
                               revision,
                               is_untracked=False,
                               client_mutation_id=None):
        """Create a new labbook for the logged-in user, record the creation
        activity, attach the requested base component, and return it.

        Raises:
            ValueError: if untracked flags or repo state are inconsistent
                after configuring untracked input/output sections.
        """
        username = get_logged_in_username()

        # Create a new empty LabBook
        labbook = LabBook(author=get_logged_in_author())
        # TODO: Set owner/namespace properly once supported fully
        labbook.new(owner={"username": username},
                    username=username,
                    name=name,
                    description=description,
                    bypass_lfs=is_untracked)

        if is_untracked:
            # Mark data sections untracked, then verify both flags stuck and
            # the repo remains clean
            FileOperations.set_untracked(labbook, 'input')
            FileOperations.set_untracked(labbook, 'output')
            input_ok = FileOperations.is_set_untracked(labbook, 'input')
            output_ok = FileOperations.is_set_untracked(labbook, 'output')
            if not (input_ok and output_ok):
                raise ValueError(
                    f'{str(labbook)} untracking for input/output in malformed state'
                )
            if not labbook.is_repo_clean:
                raise ValueError(
                    f'{str(labbook)} should have clean Git state after setting for untracked'
                )

        # Record a "created" activity entry with one plain-text detail
        store = ActivityStore(labbook)

        detail = ActivityDetailRecord(ActivityDetailType.LABBOOK,
                                      show=False,
                                      importance=0)
        detail.add_value('text/plain', f"Created new LabBook: {username}/{name}")

        record = ActivityRecord(ActivityType.LABBOOK,
                                message=f"Created new LabBook: {username}/{name}",
                                show=True,
                                importance=255,
                                linked_commit=labbook.git.commit_hash)
        record.add_detail_object(detail)

        store.create_activity_record(record)

        # Attach the requested base component
        component_mgr = ComponentManager(labbook)
        component_mgr.add_component("base", repository, component_id, revision)

        # Prime the dataloader with the labbook just created so follow-up
        # queries hit the cache
        loader = LabBookLoader()
        loader.prime(f"{username}&{username}&{labbook.name}", labbook)

        # Return a graphene instance of the newly created LabBook
        return CreateLabbook(labbook=Labbook(owner=username, name=labbook.name))
コード例 #11
0
    def test_get_recent_activity(self, fixture_working_dir, snapshot,
                                 fixture_test_file):
        """Test paging through activity records via the overview resolver."""
        lb = LabBook(fixture_working_dir[0],
                     author=GitAuthor(name="tester", email="*****@*****.**"))
        lb.new(owner={"username": "******"},
               name="labbook11",
               description="my test description")
        FileOperations.insert_file(lb, "code", fixture_test_file)

        # fake activity
        store = ActivityStore(lb)
        adr1 = ActivityDetailRecord(ActivityDetailType.CODE)
        adr1.show = False
        adr1.importance = 100
        adr1.add_value("text/plain", "first")

        ar = ActivityRecord(ActivityType.CODE,
                            show=False,
                            message="ran some code",
                            importance=50,
                            linked_commit="asdf")

        ar.add_detail_object(adr1)

        # Create several identical activity records
        for _ in range(4):
            store.create_activity_record(ar)

        def _write_test_file():
            # BUG FIX: the original used open(...).write(...) without closing
            # the handle, leaking file descriptors; use a context manager.
            with open('/tmp/test_file.txt', 'w') as f:
                f.write("xxx" * 50)

        _write_test_file()
        FileOperations.insert_file(lb, "input", '/tmp/test_file.txt')
        lb.makedir("input/test")
        _write_test_file()
        FileOperations.insert_file(lb, "input", '/tmp/test_file.txt', "test")
        lb.makedir("input/test2")
        _write_test_file()
        FileOperations.insert_file(lb, "input", '/tmp/test_file.txt', "test2")
        for _ in range(5):
            store.create_activity_record(ar)
        _write_test_file()
        FileOperations.insert_file(lb, "output", '/tmp/test_file.txt')

        # Get all records at once with no pagination args and verify cursors look OK directly
        query = """
                    {
                      labbook(owner: "default", name: "labbook11") {
                        overview {
                          recentActivity {
                            message
                            type
                            show
                            importance
                            tags
                          }
                        }
                      }
                    }
                    """
        snapshot.assert_match(fixture_working_dir[2].execute(query))
コード例 #12
0
    def test_import_labbook(self, fixture_working_dir):
        """Test batch uploading, but not full import"""
        class DummyContext(object):
            # Minimal stand-in for the request context: exposes only the
            # uploaded chunk under the key the mutation reads
            def __init__(self, file_handle):
                self.labbook_loader = None
                self.files = {'uploadChunk': file_handle}

        client = Client(fixture_working_dir[3], middleware=[LabBookLoaderMiddleware()])

        # Build a labbook containing one largeish binary file
        lb = LabBook(fixture_working_dir[0])
        lb.new(owner={"username": "******"}, name="test-export", description="Tester")

        with open(os.path.join(fixture_working_dir[1], 'testfile.bin'), 'wb') as testfile:
            testfile.write(os.urandom(9000000))
        FileOperations.insert_file(lb, 'input', testfile.name)

        # Export the labbook to a zip archive
        zip_file = export_labbook_as_zip(lb.root_dir, tempfile.gettempdir())
        lb_dir = lb.root_dir

        # Chunking parameters for the simulated upload
        chunk_size = 4194304
        file_info = os.stat(zip_file)
        file_size = int(file_info.st_size / 1000)
        total_chunks = int(math.ceil(file_info.st_size / chunk_size))

        with open(zip_file, 'rb') as tf:
            for chunk_index in range(total_chunks):
                # Wrap this chunk in an in-memory file for FileStorage
                chunk = io.BytesIO(tf.read(chunk_size))
                chunk.seek(0)
                file = FileStorage(chunk)

                query = f"""
                            mutation myMutation{{
                              importLabbook(input:{{
                                chunkUploadParams:{{
                                  uploadId: "jfdjfdjdisdjwdoijwlkfjd",
                                  chunkSize: {chunk_size},
                                  totalChunks: {total_chunks},
                                  chunkIndex: {chunk_index},
                                  fileSizeKb: {file_size},
                                  filename: "{os.path.basename(zip_file)}"
                                }}
                              }}) {{
                                importJobKey
                                buildImageJobKey
                              }}
                            }}
                            """
                result = client.execute(query, context_value=DummyContext(file))
                assert "errors" not in result
                if chunk_index == total_chunks - 1:
                    # Final chunk should enqueue both background jobs
                    job_keys = result['data']['importLabbook']
                    assert type(job_keys['importJobKey']) == str
                    assert type(job_keys['buildImageJobKey']) == str
                    assert "rq:job:" in job_keys['importJobKey']
                    assert "rq:job:" in job_keys['buildImageJobKey']

                chunk.close()