def testUpload(self):
  """Uploads two files and verifies the returned result metadata."""
  self.SetGetRepoSuccess()
  created = self.CreateFiles(['dir/file2', 'file1'])

  result = upload.UploadManager().Upload('branch1', self.tmpdir)

  # The push helper must receive exactly the files we created.
  self.push_mock.assert_called_with('branch1', self.tmpdir, created)
  self.assertEqual('branch1', result['branch'])
  self.assertEqual(2, result['files_written'])
  self.assertEqual(0, result['files_skipped'])
  expected_size = sum(
      len(file_utils.ReadFileContents(path)) for path in created)
  self.assertEqual(expected_size, result['size_written'])

  # Verify the generated source context points at the capture repo.
  repo_context = result['source_contexts'][0]['context']['cloudRepo']
  expected_repo_id = {
      'projectId': 'test_project',
      'repoName': 'google-source-captures'
  }
  self.assertEqual(expected_repo_id, repo_context['repoId']['projectRepoId'])
  expected_alias = {'kind': 'MOVABLE', 'name': 'branch1'}
  self.assertEqual(expected_alias, repo_context['aliasContext'])
def Run(self, args):
  """Run the upload command.

  Validates the target directory, uploads its contents to the given
  branch, optionally writes the best source context to
  source-context.json in args.source_context_directory, and logs a
  summary of the upload.

  Args:
    args: argparse.Namespace with `directory`, `branch`, and
      `source_context_directory` attributes.

  Returns:
    A single-element list containing the upload result dict (branch,
    files_written, files_skipped, size_written, source_contexts, and —
    when a context directory was given — context_file and best_context).

  Raises:
    exceptions.InvalidArgumentException: if args.directory is not a
      directory.
  """
  if not os.path.isdir(args.directory):
    raise exceptions.InvalidArgumentException(
        'directory', args.directory + ' is not a directory.')
  mgr = upload.UploadManager()
  result = mgr.Upload(args.branch, args.directory)
  output_dir = args.source_context_directory
  if output_dir:
    files.MakeDir(output_dir)
    output_dir = os.path.realpath(output_dir)
    extended_contexts = result['source_contexts']
    result['context_file'] = os.path.join(output_dir, 'source-context.json')
    # Compute the best context once and reuse it (the original called
    # BestSourceContext twice with identical arguments).
    best_context = context_util.BestSourceContext(extended_contexts)
    result['best_context'] = best_context
    files.WriteFileContents(result['context_file'], json.dumps(best_context))
  log.status.write('Wrote {0} file(s), {1} bytes.\n'.format(
      result['files_written'], result['size_written']))
  files_skipped = result['files_skipped']
  if files_skipped:
    log.status.write('Skipped {0} file(s) due to size limitations.\n'.format(
        files_skipped))
  return [result]
def testUploadWithGitFiles(self):
  """Files under .git/ are excluded from the pushed file list."""
  self.SetGetRepoSuccess()
  created = self.CreateFiles(['file1', '.git/file2'])

  upload.UploadManager().Upload('branch1', self.tmpdir)

  # Only the first created file (outside .git) should be pushed.
  self.push_mock.assert_called_with('branch1', self.tmpdir, created[:1])
def testUploadWithGcloudIgnore(self):
  """A .gcloudignore file filters matching paths out of the upload."""
  self.SetGetRepoSuccess()
  created = self.CreateFiles(
      ['.gcloudignore', 'file1', 'ignoredfile', 'ignoreddir/file'])
  ignore_file = created[0]
  with open(ignore_file, 'w') as handle:
    handle.write('ignored*\n')

  upload.UploadManager().Upload('branch1', self.tmpdir)

  # Only the ignore file itself and file1 survive the ignore pattern.
  expected = [created[0], created[1]]
  self.push_mock.assert_called_with('branch1', self.tmpdir, expected)
def testUploadWithGeneratedName(self):
  """With no branch given, a timestamp+uuid branch name is generated."""
  self.SetGetRepoSuccess()
  # Pin the clock and the uuid so the generated name is deterministic.
  self.StartObjectPatch(upload, '_GetNow', return_value=datetime(1970, 1, 1))
  fixed_uuid = uuid.UUID('12345678123456781234567812345678')
  self.StartObjectPatch(upload, '_GetUuid', return_value=fixed_uuid)
  created = self.CreateFiles(['dir/file2', 'file1'])

  upload.UploadManager().Upload(None, self.tmpdir)

  expected_branch = '1970/01/01-00.00.00.12345678123456781234567812345678'
  self.push_mock.assert_called_with(expected_branch, self.tmpdir, created)
def testUploadWithTooLargeFiles(self):
  """Files exceeding SIZE_THRESHOLD are skipped and counted."""
  self.SetGetRepoSuccess()
  saved_threshold = upload.UploadManager.SIZE_THRESHOLD
  try:
    # Sanity-check the default threshold before overriding it.
    self.assertEqual(256 * 2**10, saved_threshold)
    created = self.CreateFiles(['file1', 'filethatistoolarge'])
    # Shrink the threshold to just below the second file's size.
    big_size = len(file_utils.ReadFileContents(created[1]))
    upload.UploadManager.SIZE_THRESHOLD = big_size - 1

    result = upload.UploadManager().Upload('branch1', self.tmpdir)

    self.push_mock.assert_called_with('branch1', self.tmpdir, created[:1])
    self.assertEqual(1, result['files_skipped'])
  finally:
    # Restore the class-level threshold for subsequent tests.
    upload.UploadManager.SIZE_THRESHOLD = saved_threshold
def testUploadWithNoRepo(self):
  """A missing capture repo raises an error suggesting how to create it."""
  self.SetGetRepoNotFound()
  with self.assertRaises(upload.RepoNotFoundError) as ctx:
    upload.UploadManager().Upload('branch1', self.tmpdir)
  message = six.text_type(ctx.exception)
  self.assertIn('gcloud source repos create', message)