Example #1
    def test_delete_dir(self, mock_create_labbooks):

        lb = LabBook(mock_create_labbooks[0])
        lb.from_name('default', 'default', 'labbook1')
        lb.makedir('code/subdir')
        lb.git.add_all('code/')
        lb.git.commit("blah")

        dir_path = os.path.join(lb.root_dir, 'code', 'subdir')
        assert os.path.exists(dir_path) is True

        # Note: deletion should work both with and without a trailing slash on the path.
        query = """
        mutation deleteLabbookFile {
          deleteLabbookFile(
            input: {
              owner: "default",
              labbookName: "labbook1",
              section: "code",
              filePath: "subdir/",
              isDirectory: true
            }) {
              success
            }
        }
        """
        res = mock_create_labbooks[2].execute(query)
        assert res['data']['deleteLabbookFile']['success'] is True

        assert os.path.exists(dir_path) is False
        assert os.path.exists(os.path.join(lb.root_dir, 'code')) is True
    def test_sync_1(self, remote_bare_repo, mock_create_labbooks_no_lfs,
                    mock_config_file):

        # Set up the responses mock for this test
        responses.add(responses.GET,
                      'https://usersrv.gigantum.io/key',
                      json={'key': 'afaketoken'},
                      status=200)

        test_user_lb = LabBook(mock_create_labbooks_no_lfs[0])
        test_user_lb.from_name('default', 'default', 'labbook1')
        test_user_wf = GitWorkflow(test_user_lb)
        test_user_wf.publish('default')

        # Mock the request context so a fake authorization header is present
        builder = EnvironBuilder(path='/labbook',
                                 method='POST',
                                 headers={'Authorization': 'Bearer AJDFHASD'})
        env = builder.get_environ()
        req = Request(environ=env)

        remote_url = test_user_lb.root_dir
        assert remote_url

        sally_lb = LabBook(mock_config_file[0])
        sally_lb.from_remote(remote_url,
                             username="******",
                             owner="default",
                             labbook_name="labbook1")
        sally_wf = GitWorkflow(sally_lb)
        assert sally_lb.active_branch == "gm.workspace-sally"
        sally_lb.makedir(relative_path='code/sally-dir',
                         create_activity_record=True)
        sally_wf.sync('sally')

        sync_query = """
        mutation x {
            syncLabbook(input: {
                labbookName: "labbook1",
                owner: "default"
            }) {
                updateCount
                updatedLabbook {
                    isRepoClean
                }
            }
        }
        """
        r = mock_create_labbooks_no_lfs[2].execute(sync_query,
                                                   context_value=req)

        assert 'errors' not in r
        assert r['data']['syncLabbook']['updateCount'] == 1
        assert r['data']['syncLabbook']['updatedLabbook']['isRepoClean'] is True
        assert test_user_lb.active_branch == 'gm.workspace-default'
    def mutate_and_get_payload(cls,
                               root,
                               info,
                               owner,
                               labbook_name,
                               section,
                               directory,
                               client_mutation_id=None):
        username = get_logged_in_username()

        working_directory = Configuration().config['git']['working_directory']
        inferred_lb_directory = os.path.join(working_directory, username,
                                             owner, 'labbooks', labbook_name)
        lb = LabBook(author=get_logged_in_author())
        lb.from_directory(inferred_lb_directory)
        lb.makedir(os.path.join(section, directory),
                   create_activity_record=True)
        logger.info(f"Made new directory in `{directory}`")

        # Prime the dataloader with the labbook that was already loaded
        dataloader = LabBookLoader()
        dataloader.prime(f"{owner}&{labbook_name}&{lb.name}", lb)

        # Create data to populate edge
        file_info = lb.get_file_info(section, directory)
        create_data = {
            'owner': owner,
            'name': labbook_name,
            'section': section,
            'key': file_info['key'],
            '_file_info': file_info
        }

        # TODO: Fix cursor implementation, this currently doesn't make sense
        cursor = base64.b64encode(f"{0}".encode('utf-8'))

        return MakeLabbookDirectory(
            new_labbook_file_edge=LabbookFileConnection.Edge(
                node=LabbookFile(**create_data), cursor=cursor))
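The resolver above is server-side only; as a rough sketch, a client could invoke it with a query shaped like the other mutations in these examples. The makeLabbookDirectory field name and the newLabbookFileEdge selection are assumptions inferred from graphene's camelCase naming of MakeLabbookDirectory and its new_labbook_file_edge output, not confirmed by the source.

query = """
mutation makeDir {
  makeLabbookDirectory(input: {
    owner: "default",
    labbookName: "labbook1",
    section: "code",
    directory: "newdir"
  }) {
    newLabbookFileEdge {
      node {
        key
        isDir
      }
    }
  }
}
"""
# Hypothetical usage against the same test client as the examples above:
# res = mock_create_labbooks[2].execute(query)
# assert res['data']['makeLabbookDirectory']['newLabbookFileEdge']['node']['isDir'] is True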
Example #4
    def test_add_file(self, mock_create_labbooks):
        """Test adding a new file to a labbook"""
        class DummyContext(object):
            def __init__(self, file_handle):
                self.labbook_loader = None
                self.files = {'uploadChunk': file_handle}

        client = Client(mock_create_labbooks[3],
                        middleware=[LabBookLoaderMiddleware()])

        # Create file to upload
        test_file = os.path.join(tempfile.gettempdir(), "myValidFile.dat")
        est_size = 9000000
        try:
            os.remove(test_file)
        except OSError:
            pass
        with open(test_file, 'wb') as tf:
            tf.write(os.urandom(est_size))

        new_file_size = os.path.getsize(tf.name)
        # Get upload params
        chunk_size = 4194000
        file_info = os.stat(test_file)
        file_size = int(file_info.st_size / 1000)
        total_chunks = int(math.ceil(file_info.st_size / chunk_size))

        target_file = os.path.join(mock_create_labbooks[1], 'default',
                                   'default', 'labbooks', 'labbook1', 'code',
                                   'newdir', "myValidFile.dat")
        lb = LabBook(mock_create_labbooks[0])
        lb.from_directory(
            os.path.join(mock_create_labbooks[1], 'default', 'default',
                         'labbooks', 'labbook1'))
        lb.makedir('code/newdir', create_activity_record=True)

        txid = "000-unitest-transaction"
        with open(test_file, 'rb') as tf:
            # The file should not exist yet
            assert os.path.exists(target_file) is False
            for chunk_index in range(total_chunks):
                # Upload a chunk
                chunk = io.BytesIO()
                chunk.write(tf.read(chunk_size))
                chunk.seek(0)
                file = FileStorage(chunk)

                query = f"""
                            mutation addLabbookFile{{
                              addLabbookFile(input:{{owner:"default",
                                                      labbookName: "labbook1",
                                                      section: "code",
                                                      filePath: "newdir/myValidFile.dat",
                                                      transactionId: "{txid}",
                                chunkUploadParams:{{
                                  uploadId: "fdsfdsfdsfdfs",
                                  chunkSize: {chunk_size},
                                  totalChunks: {total_chunks},
                                  chunkIndex: {chunk_index},
                                  fileSizeKb: {file_size},
                                  filename: "{os.path.basename(test_file)}"
                                }}
                              }}) {{
                                      newLabbookFileEdge {{
                                        node{{
                                          id
                                          key
                                          isDir
                                          size
                                        }}
                                      }}
                                    }}
                            }}
                            """
                r = client.execute(query, context_value=DummyContext(file))
        assert 'errors' not in r
        # These fields are only populated once the last chunk has been uploaded; they are None otherwise.
        assert r['data']['addLabbookFile']['newLabbookFileEdge']['node'][
            'isDir'] is False
        assert r['data']['addLabbookFile']['newLabbookFileEdge']['node'][
            'key'] == 'newdir/myValidFile.dat'
        assert r['data']['addLabbookFile']['newLabbookFileEdge']['node'][
            'size'] == f"{new_file_size}"
        # When done uploading, file should exist in the labbook
        assert os.path.exists(target_file)
        assert os.path.isfile(target_file)

        complete_query = f"""
        mutation completeQuery {{
            completeBatchUploadTransaction(input: {{
                owner: "default",
                labbookName: "labbook1",
                transactionId: "{txid}"
            }}) {{
                success
            }}
        }}
        """
        r = client.execute(complete_query, context_value=DummyContext(file))
        assert 'errors' not in r
        assert lb.is_repo_clean
        assert 'Uploaded new file' in lb.git.log()[0]['message']
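The upload-parameter arithmetic above (chunkSize, totalChunks, fileSizeKb) also appears in the next example; a small, hypothetical helper that mirrors it, shown here only as a sketch:

import math
import os

def chunk_upload_params(path, chunk_size=4194000):
    """Hypothetical helper mirroring the chunkUploadParams arithmetic used in these tests."""
    size = os.stat(path).st_size
    return {
        'chunkSize': chunk_size,                            # bytes per uploaded chunk
        'totalChunks': int(math.ceil(size / chunk_size)),   # number of chunks the client will send
        'fileSizeKb': int(size / 1000),                     # whole-file size reported to the mutation, in KB
        'filename': os.path.basename(path),
    }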
Example #5
    def test_add_file_fail_due_to_git_ignore(self, mock_create_labbooks):
        """Test adding a new file to a labbook"""
        class DummyContext(object):
            def __init__(self, file_handle):
                self.labbook_loader = None
                self.files = {'uploadChunk': file_handle}

        client = Client(mock_create_labbooks[3],
                        middleware=[LabBookLoaderMiddleware()])

        new_file_size = 9000000
        # Create file to upload
        test_file = os.path.join(tempfile.gettempdir(), ".DS_Store")
        with open(test_file, 'wb') as tf:
            tf.write(os.urandom(new_file_size))

        # Get upload params
        chunk_size = 4194000
        file_info = os.stat(test_file)
        file_size = int(file_info.st_size / 1000)
        total_chunks = int(math.ceil(file_info.st_size / chunk_size))

        target_file = os.path.join(mock_create_labbooks[1], 'default',
                                   'default', 'labbooks', 'labbook1', 'code',
                                   'newdir', '.DS_Store')
        try:
            os.remove(target_file)
        except OSError:
            pass
        lb = LabBook(mock_create_labbooks[0])
        lb.from_directory(
            os.path.join(mock_create_labbooks[1], 'default', 'default',
                         'labbooks', 'labbook1'))
        lb.makedir('code/newdir', create_activity_record=True)

        with open(test_file, 'rb') as tf:
            # The file should not exist yet
            assert os.path.exists(target_file) is False

            for chunk_index in range(total_chunks):
                # Upload a chunk
                chunk = io.BytesIO()
                chunk.write(tf.read(chunk_size))
                chunk.seek(0)
                file = FileStorage(chunk)

                query = f"""
                            mutation addLabbookFile{{
                              addLabbookFile(input:{{owner:"default",
                                                      labbookName: "labbook1",
                                                      section: "code",
                                                      filePath: "newdir/.DS_Store",
                                                      transactionId: "111-unittest-tx",
                                chunkUploadParams:{{
                                  uploadId: "jfdjfdjdisdjwdoijwlkfjd",
                                  chunkSize: {chunk_size},
                                  totalChunks: {total_chunks},
                                  chunkIndex: {chunk_index},
                                  fileSizeKb: {file_size},
                                  filename: "{os.path.basename(test_file)}"
                                }}
                              }}) {{
                                      newLabbookFileEdge {{
                                        node{{
                                          id
                                          key
                                          isDir
                                          size
                                        }}
                                      }}
                                    }}
                            }}
                            """
                r = client.execute(query, context_value=DummyContext(file))

            # This must be outside of the chunk upload loop
            pprint.pprint(r)
            assert 'errors' in r
            assert len(r['errors']) == 1
            assert 'matches ignored pattern' in r['errors'][0]['message']

        # When done uploading, file should exist in the labbook
        assert os.path.isfile(target_file) is False
        assert os.path.exists(target_file) is False
    def test_get_recent_activity(self, fixture_working_dir, snapshot,
                                 fixture_test_file):
        """Test paging through activity records"""
        lb = LabBook(fixture_working_dir[0],
                     author=GitAuthor(name="tester", email="*****@*****.**"))
        lb.new(owner={"username": "******"},
               name="labbook11",
               description="my test description")
        FileOperations.insert_file(lb, "code", fixture_test_file)

        # fake activity
        store = ActivityStore(lb)
        adr1 = ActivityDetailRecord(ActivityDetailType.CODE)
        adr1.show = False
        adr1.importance = 100
        adr1.add_value("text/plain", "first")

        ar = ActivityRecord(ActivityType.CODE,
                            show=False,
                            message="ran some code",
                            importance=50,
                            linked_commit="asdf")

        ar.add_detail_object(adr1)

        # Create Activity Record
        store.create_activity_record(ar)
        store.create_activity_record(ar)
        store.create_activity_record(ar)
        store.create_activity_record(ar)
        open('/tmp/test_file.txt', 'w').write("xxx" * 50)
        FileOperations.insert_file(lb, "input", '/tmp/test_file.txt')
        lb.makedir("input/test")
        open('/tmp/test_file.txt', 'w').write("xxx" * 50)
        FileOperations.insert_file(lb, "input", '/tmp/test_file.txt', "test")
        lb.makedir("input/test2")
        open('/tmp/test_file.txt', 'w').write("xxx" * 50)
        FileOperations.insert_file(lb, "input", '/tmp/test_file.txt', "test2")
        store.create_activity_record(ar)
        store.create_activity_record(ar)
        store.create_activity_record(ar)
        store.create_activity_record(ar)
        store.create_activity_record(ar)
        open('/tmp/test_file.txt', 'w').write("xxx" * 50)
        FileOperations.insert_file(lb, "output", '/tmp/test_file.txt')

        # Fetch the overview's recent activity in a single query (no pagination args)
        query = """
                    {
                      labbook(owner: "default", name: "labbook11") {
                        overview {
                          recentActivity {
                            message
                            type
                            show
                            importance
                            tags
                          }
                        }
                      }
                    }
                    """
        snapshot.assert_match(fixture_working_dir[2].execute(query))