def multipart_master_merge(self, s3_commit_key, resource_type, download_key, output_dir, source_dir, start, template_file):
    """
    Download the previously templated files of a multi-part project, restore
    the templater index saved by earlier passes, and re-run the templater to
    merge every part into output_dir.

    :param string s3_commit_key: commit key used to fetch the saved index.json
    :param string resource_type: resource type passed to init_template
    :param string download_key: S3 prefix the previously deployed parts live under
    :param string output_dir: directory the merged output is written to
    :param string source_dir: scratch directory the parts are downloaded into
    :param float start: deploy start time (time.time()) used for elapsed logging
    :param string template_file: local path of the downloaded page template
    :return tuple: (source_dir, success) - source_dir is re-pointed at the
        subfolder the downloaded files actually landed in
    """
    prefix = download_key + '/'
    App.door43_s3_handler().download_dir(prefix, source_dir)  # get previous templated files
    source_dir = os.path.join(source_dir, download_key)
    files = sorted(glob(os.path.join(source_dir, '*.*')))
    for f in files:
        App.logger.debug("Downloaded: " + f)
    fname = os.path.join(source_dir, 'index.html')
    if os.path.isfile(fname):
        os.remove(fname)  # remove index if already exists
    elapsed_seconds = int(time.time() - start)
    App.logger.debug("deploy download completed in " + str(elapsed_seconds) + " seconds")
    templater = init_template(resource_type, source_dir, output_dir, template_file)
    # restore index from previous passes
    index_json = self.get_templater_index(s3_commit_key, 'index.json')
    templater.titles = index_json['titles']
    templater.chapters = index_json['chapters']
    templater.book_codes = index_json['book_codes']
    templater.already_converted = templater.files  # do not reconvert files
    # merge the source files with the template
    try:
        self.run_templater(templater)
        success = True
    except Exception as e:
        # include the exception detail so the failure cause is not lost
        # (previously `e` was caught but never logged)
        App.logger.error("Error multi-part applying template {0} to resource type {1}: {2}".format(
            template_file, resource_type, str(e)))
        self.close()
        success = False
    return source_dir, success
def test_obs_deploy_revision_to_door43(self):
    """Deploying a mocked OBS revision succeeds and publishes the expected keys."""
    self.mock_s3_obs_project()
    build_log_key = '{0}/build_log.json'.format(self.project_key)
    deployed = self.deployer.deploy_revision_to_door43(build_log_key)
    self.assertTrue(deployed)
    self.assertTrue(App.door43_s3_handler().key_exists(build_log_key))
    chapter_key = '{0}/50.html'.format(self.project_key)
    self.assertTrue(App.door43_s3_handler().key_exists(chapter_key))
def multipart_master_merge(self, s3_commit_key, resource_type, download_key, output_dir, source_dir, start, template_file):
    """
    Download previously templated part files, restore the templater index
    saved by earlier passes, and re-run the templater to merge all parts.

    :return tuple: (source_dir, success) - source_dir is re-pointed at the
        subfolder the downloaded files actually landed in
    """
    prefix = download_key + '/'
    App.door43_s3_handler().download_dir(prefix, source_dir)  # get previous templated files
    source_dir = os.path.join(source_dir, download_key)
    files = sorted(glob(os.path.join(source_dir, '*.*')))
    for f in files:
        App.logger.debug("Downloaded: " + f)
    fname = os.path.join(source_dir, 'index.html')
    if os.path.isfile(fname):
        os.remove(fname)  # remove index if already exists
    elapsed_seconds = int(time.time() - start)
    App.logger.debug("deploy download completed in " + str(elapsed_seconds) + " seconds")
    templater = init_template(resource_type, source_dir, output_dir, template_file)
    # restore index from previous passes
    index_json = self.get_templater_index(s3_commit_key, 'index.json')
    templater.titles = index_json['titles']
    templater.chapters = index_json['chapters']
    templater.book_codes = index_json['book_codes']
    templater.already_converted = templater.files  # do not reconvert files
    # merge the source files with the template
    try:
        self.run_templater(templater)
        success = True
    except Exception as e:
        # NOTE(review): the caught exception `e` is never logged here, so the
        # actual failure cause is lost - consider adding it to the message.
        App.logger.error(
            "Error multi-part applying template {0} to resource type {1}".
            format(template_file, resource_type))
        self.close()
        success = False
    return source_dir, success
def mock_s3_tn_project(self, part):
    """
    Seed the mock CDN bucket with one part of a converted tN project.

    Uploads the part's build_log.json (with 'part' injected), index.json,
    a 'finished' marker, and 01-GEN.html under the part's key, plus the
    shared page template to the door43 bucket.
    :param string part: part number to record in the build log and keys
    """
    zip_file = os.path.join(self.resources_dir, 'converted_projects',
                            'en_tn_converted.zip')
    out_dir = os.path.join(self.temp_dir, 'en_tn_converted')
    unzip(zip_file, out_dir)
    src_dir = os.path.join(out_dir, 'en_tn_converted')
    self.project_files = [
        f for f in os.listdir(src_dir)
        if os.path.isfile(os.path.join(src_dir, f))
    ]
    self.project_key = 'u/door43/en_tn/12345678'
    # mark which part of the multi-part project this build log represents
    build_log = file_utils.load_json_object(
        os.path.join(src_dir, 'build_log.json'))
    build_log['part'] = part
    file_utils.write_file(os.path.join(src_dir, 'build_log.json'), build_log)
    App.cdn_s3_handler().upload_file(
        os.path.join(src_dir, 'build_log.json'),
        '{0}/{1}/build_log.json'.format(self.project_key, part))
    App.cdn_s3_handler().upload_file(
        os.path.join(src_dir, 'index.json'),
        '{0}/{1}/index.json'.format(self.project_key, part))
    # the 'finished' marker is what signals the deployer this part is ready
    App.cdn_s3_handler().upload_file(
        os.path.join(src_dir, 'build_log.json'),
        '{0}/{1}/finished'.format(self.project_key, part))
    App.cdn_s3_handler().upload_file(
        os.path.join(src_dir, '01-GEN.html'),
        '{0}/{1}/01-GEN.html'.format(self.project_key, part))
    # NOTE(review): project.json is uploaded under 'en_tq' although this mocks
    # an 'en_tn' project - looks like a copy-paste from the tq mock; confirm
    # whether any test depends on this path before changing it.
    App.cdn_s3_handler().upload_file(os.path.join(src_dir, 'project.json'),
                                     'u/door43/en_tq/project.json')
    App.door43_s3_handler().upload_file(
        os.path.join(self.resources_dir, 'templates', 'project-page.html'),
        'templates/project-page.html')
def mock_s3_bible_project(self, test_file_name, project_key, multi_part=False):
    """
    Seed the mock CDN bucket from a converted bible project zip.

    Every extracted file is uploaded to the CDN bucket under project_key.
    When multi_part is True, each HTML file additionally gets a
    'right-sidebar' div appended to its body and is uploaded to the door43
    bucket as well, simulating parts that were already templated.
    :param string test_file_name: zip name under resources/converted_projects
    :param string project_key: bucket prefix to upload under
    :param bool multi_part: also push modified HTML to the door43 bucket
    """
    converted_proj_dir = os.path.join(self.resources_dir, 'converted_projects')
    test_file_base = test_file_name.split('.zip')[0]
    zip_file = os.path.join(converted_proj_dir, test_file_name)
    out_dir = os.path.join(self.temp_dir, test_file_base)
    unzip(zip_file, out_dir)
    project_dir = os.path.join(out_dir, test_file_base) + os.path.sep
    self.project_files = file_utils.get_files(out_dir)
    self.project_key = project_key
    for filename in self.project_files:
        sub_path = filename.split(project_dir)[1].replace(os.path.sep, '/')  # Make sure it is a bucket path
        App.cdn_s3_handler().upload_file(filename, '{0}/{1}'.format(project_key, sub_path))
        if multi_part:  # copy files from cdn to door43
            base_name = os.path.basename(filename)
            if '.html' in base_name:
                with codecs.open(filename, 'r', 'utf-8-sig') as f:
                    soup = BeautifulSoup(f, 'html.parser')
                # add nav tag
                new_tag = soup.new_tag('div', id='right-sidebar')
                soup.body.append(new_tag)
                # NOTE(review): unicode() is Python-2-only; this module appears
                # to target Python 2 (str(soup) is the py3 equivalent).
                html = unicode(soup)
                file_utils.write_file(filename, html.encode('ascii', 'xmlcharrefreplace'))
                App.door43_s3_handler().upload_file(filename, '{0}/{1}'.format(project_key, base_name))
    # u, user, repo = project_key
    App.door43_s3_handler().upload_file(
        os.path.join(self.resources_dir, 'templates', 'project-page.html'),
        'templates/project-page.html')
def test_tq_deploy_revision_to_door43(self):
    """Deploying a mocked tQ revision publishes every per-book HTML file plus the manifest."""
    # given
    self.mock_s3_tq_project()
    build_log_key = '{0}/build_log.json'.format(self.project_key)
    # when
    deployed = self.deployer.deploy_revision_to_door43(build_log_key)
    # then
    self.assertTrue(deployed)
    self.assertTrue(App.door43_s3_handler().key_exists(build_log_key))
    files_to_verify = ['manifest.yaml'] + [
        '{0}-{1}.html'.format(BOOK_NUMBERS[book], book.upper())
        for book in BOOK_NUMBERS
    ]
    for file_name in files_to_verify:
        key = '{0}/{1}'.format(self.project_key, file_name)
        self.assertTrue(App.door43_s3_handler().key_exists(key),
                        "Key not found: {0}".format(key))
    # project.json lives one level up, beside the commit folder
    parent_key = '/'.join(self.project_key.split('/')[:-1])
    project_json_key = '{0}/{1}'.format(parent_key, 'project.json')
    self.assertTrue(App.door43_s3_handler().key_exists(project_json_key),
                    "Key not found: {0}".format(project_json_key))
def test_obs_deploy_revision_to_door43(self):
    """Deploying a mocked OBS revision succeeds and publishes build_log.json and 50.html."""
    self.mock_s3_obs_project()
    build_log_key = '{0}/build_log.json'.format(self.project_key)
    ret = self.deployer.deploy_revision_to_door43(build_log_key)
    self.assertTrue(ret)
    self.assertTrue(App.door43_s3_handler().key_exists(build_log_key))
    # spot-check one of the deployed chapter pages
    self.assertTrue(App.door43_s3_handler().key_exists(
        '{0}/50.html'.format(self.project_key)))
def test_tw_deploy_revision_to_door43(self):
    """Deploying a mocked tW revision publishes the category pages, logs, and manifest."""
    self.mock_s3_tw_project()
    build_log_key = '{0}/build_log.json'.format(self.project_key)
    deployed = self.deployer.deploy_revision_to_door43(build_log_key)
    self.assertTrue(deployed)
    self.assertTrue(App.door43_s3_handler().key_exists(build_log_key))
    expected_files = ('index.html', 'kt.html', 'names.html', 'other.html',
                      'build_log.json', 'manifest.yaml')
    for file_name in expected_files:
        key = '{0}/{1}'.format(self.project_key, file_name)
        self.assertTrue(App.door43_s3_handler().key_exists(key),
                        "Key not found: {0}".format(key))
    # project.json lives one level up, beside the commit folder
    parent_key = '/'.join(self.project_key.split('/')[:-1])
    project_json_key = '{0}/{1}'.format(parent_key, 'project.json')
    self.assertTrue(App.door43_s3_handler().key_exists(project_json_key),
                    "Key not found: {0}".format(project_json_key))
def mock_s3_obs_project(self):
    """Seed the mock CDN bucket with a complete converted OBS project and the page template."""
    archive = os.path.join(self.resources_dir, 'converted_projects', 'en-obs-complete.zip')
    extract_dir = os.path.join(self.temp_dir, 'en-obs-complete')
    unzip(archive, extract_dir)
    project_dir = os.path.join(extract_dir, 'door43', 'en-obs', '12345678')
    self.project_files = [name for name in os.listdir(project_dir)
                          if os.path.isfile(os.path.join(project_dir, name))]
    self.project_key = 'u/door43/en-obs/12345678'
    for name in self.project_files:
        App.cdn_s3_handler().upload_file(os.path.join(project_dir, name),
                                         '{0}/{1}'.format(self.project_key, name))
    App.cdn_s3_handler().upload_file(
        os.path.join(extract_dir, 'door43', 'en-obs', 'project.json'),
        'u/door43/en-obs/project.json')
    App.door43_s3_handler().upload_file(
        os.path.join(self.resources_dir, 'templates', 'project-page.html'),
        'templates/project-page.html')
def setUp(self):
    """Runs before each test."""
    App(prefix='{0}-'.format(self._testMethodName))
    App.cdn_s3_handler().create_bucket()
    App.door43_s3_handler().create_bucket()
    self.temp_dir = tempfile.mkdtemp(prefix="test_project_deployer")
    self.deployer = ProjectDeployer()
    # Seed the language cache with a small fixed set keyed by language code.
    language_entries = [
        {'gw': False, 'ld': 'ltr', 'ang': 'Afar', 'lc': 'aa',
         'ln': 'Afaraf', 'lr': 'Africa', 'pk': 6},
        {'gw': True, 'ld': 'ltr', 'ang': 'English', 'lc': 'en',
         'ln': 'English', 'lr': 'Europe', 'pk': 1747},
        {'gw': True, 'ld': 'ltr', 'ang': 'Spanish', 'lc': 'es',
         'ln': 'espa\xf1ol', 'lr': 'Europe', 'pk': 1776},
        {'gw': True, 'ld': 'ltr', 'ang': 'French', 'lc': 'fr',
         'ln': 'fran\xe7ais, langue fran\xe7aise', 'lr': 'Europe', 'pk': 1868},
    ]
    TdLanguage.language_list = {entry['lc']: TdLanguage(entry)
                                for entry in language_entries}
def test_tn_deploy_revision_to_door43(self):
    """Deploying one part of a tN project publishes its files to door43."""
    # given
    part = '1'
    self.mock_s3_tn_project(part)
    build_log_key = '{0}/{1}/build_log.json'.format(self.project_key, part)
    # when
    deployed = self.deployer.deploy_revision_to_door43(build_log_key)
    # then
    self.assertTrue(deployed)
    self.assertTrue(App.door43_s3_handler().key_exists(
        '{0}/build_log.json'.format(self.project_key)))
    for expected_file in ('01-GEN.html', 'index.json'):
        expected_key = '{0}/{1}'.format(self.project_key, expected_file)
        self.assertTrue(App.door43_s3_handler().key_exists(expected_key),
                        "Key not found: {0}".format(expected_key))
def setUp(self):
    """Runs before each test."""
    # Fresh App context namespaced by test name, plus empty mock S3 buckets.
    App(prefix='{0}-'.format(self._testMethodName))
    App.cdn_s3_handler().create_bucket()
    App.door43_s3_handler().create_bucket()
    self.temp_dir = tempfile.mkdtemp(prefix="test_project_deployer")
    self.deployer = ProjectDeployer()
    # Pre-populate the language cache with a fixed set of entries.
    TdLanguage.language_list = {
        'aa': TdLanguage({'gw': False, 'ld': 'ltr', 'ang': 'Afar', 'lc': 'aa',
                          'ln': 'Afaraf', 'lr': 'Africa', 'pk': 6}),
        'en': TdLanguage({'gw': True, 'ld': 'ltr', 'ang': 'English', 'lc': 'en',
                          'ln': 'English', 'lr': 'Europe', 'pk': 1747}),
        'es': TdLanguage({'gw': True, 'ld': 'ltr', 'ang': 'Spanish', 'lc': 'es',
                          'ln': 'espa\xf1ol', 'lr': 'Europe', 'pk': 1776}),
        'fr': TdLanguage({'gw': True, 'ld': 'ltr', 'ang': 'French', 'lc': 'fr',
                          'ln': 'fran\xe7ais, langue fran\xe7aise', 'lr': 'Europe', 'pk': 1868})
    }
def validate_conversion(self, user, repo, success, build_log_json, commit_id, commit_sha, commit_path, expected_output_names, job, chapter_count=-1, file_ext=""):
    """
    End-to-end validation of a conversion job.

    Checks, in order: the preconvert zip, the converted files on the CDN,
    the required build-log fields, the deployed files on door43, that the
    converted and deployed build logs match, and the manifest DB record.
    """
    self.assertTrue(len(build_log_json) > 0)
    self.assertIsNotNone(job)
    self.temp_dir = tempfile.mkdtemp(prefix='testing_')
    if not (type(expected_output_names) is list):
        expected_output_names = [expected_output_names]  # put string in list
    # check pre-convert files
    self.download_and_check_zip_file(self.preconvert_handler, expected_output_names,
                                     self.preprocessor_output_extension,
                                     self.get_preconvert_s3_key(commit_sha),
                                     "preconvert", success, chapter_count, file_ext)
    # check converted files
    destination_key = self.get_destination_s3_key(commit_sha, repo, user)
    converted_build_log = self.check_destination_files(self.cdn_handler, expected_output_names,
                                                       "html", destination_key, chapter_count)
    # check required fields
    App.logger.debug(converted_build_log)
    saved_build_json = json.loads(converted_build_log)
    self.assertTrue('commit_id' in saved_build_json)
    self.assertTrue('repo_owner' in saved_build_json)
    self.assertTrue('repo_name' in saved_build_json)
    self.assertTrue('created_at' in saved_build_json)
    self.assertTrue('source' in saved_build_json)
    self.assertTrue('errors' in saved_build_json)
    self.assertTrue('warnings' in saved_build_json)
    self.assertTrue('message' in saved_build_json)
    self.assertTrue('status' in saved_build_json)
    self.assertEqual(len(commit_id), COMMIT_LENGTH)
    self.assertIsNotNone(commit_sha)
    self.assertIsNotNone(commit_path)
    # job/build-log errors are surfaced as warnings, not hard failures
    if len(job.errors) > 0:
        self.warn("WARNING: Found job errors: " + str(job.errors))
    if len(build_log_json['errors']) > 0:
        self.warn("WARNING: Found build_log errors: " + str(build_log_json['errors']))
    # check deployed files and compare against the converted build log
    door43_handler = App.door43_s3_handler()
    deployed_build_log = self.check_deployed_files(door43_handler, expected_output_names, "html",
                                                   destination_key, chapter_count)
    self.compare_build_logs(converted_build_log, deployed_build_log, destination_key)
    if len(self.warnings):
        App.logger.debug("\n#######\nHave warnings:\n#######\n" + '\n'.join(self.warnings))
    self.assertTrue(success)
    # Test that repo is in manifest table
    tx_manifest = TxManifest.get(repo_name=repo, user_name=user)
    # Giving TxManifest above just the composite keys will cause it to load all the data from the App.
    self.assertIsNotNone(tx_manifest)
    self.assertEqual(tx_manifest.repo_name, repo)
    self.assertEqual(tx_manifest.user_name, user)
def test_tw_deploy_revision_to_door43(self):
    """Deploying a mocked tW revision publishes the category pages, logs, and manifest."""
    self.mock_s3_tw_project()
    build_log_key = '{0}/build_log.json'.format(self.project_key)
    ret = self.deployer.deploy_revision_to_door43(build_log_key)
    self.assertTrue(ret)
    self.assertTrue(App.door43_s3_handler().key_exists(build_log_key))
    for file_name in [
            'index.html', 'kt.html', 'names.html', 'other.html',
            'build_log.json', 'manifest.yaml'
    ]:
        key = '{0}/{1}'.format(self.project_key, file_name)
        self.assertTrue(App.door43_s3_handler().key_exists(key),
                        "Key not found: {0}".format(key))
    # project.json lives one level up, beside the commit folder
    parent_key = '/'.join(self.project_key.split('/')[:-1])
    for file_name in ['project.json']:
        key = '{0}/{1}'.format(parent_key, file_name)
        self.assertTrue(App.door43_s3_handler().key_exists(key),
                        "Key not found: {0}".format(key))
def test_tn_deploy_revision_to_door43(self):
    """Deploying one part of a tN project publishes its files to door43."""
    # given
    part = '1'
    self.mock_s3_tn_project(part)
    build_log_key = '{0}/{1}/build_log.json'.format(self.project_key, part)
    # when
    ret = self.deployer.deploy_revision_to_door43(build_log_key)
    # then
    self.assertTrue(ret)
    self.assertTrue(App.door43_s3_handler().key_exists(
        '{0}/build_log.json'.format(self.project_key)))
    files_to_verify = ['01-GEN.html', 'index.json']
    for file_name in files_to_verify:
        key = '{0}/{1}'.format(self.project_key, file_name)
        self.assertTrue(App.door43_s3_handler().key_exists(key),
                        "Key not found: {0}".format(key))
def mock_s3_tw_project(self):
    """Seed the mock CDN bucket with a converted tW project and upload the page template."""
    archive = os.path.join(self.resources_dir, 'converted_projects', 'en_tw_converted.zip')
    extract_dir = os.path.join(self.temp_dir, 'en_tw_converted')
    unzip(archive, extract_dir)
    self.src_dir = src_dir = os.path.join(extract_dir, 'en_tw_converted')
    self.project_files = [name for name in os.listdir(src_dir)
                          if os.path.isfile(os.path.join(src_dir, name))]
    self.project_key = 'u/door43/en_tw/12345678'
    for name in self.project_files:
        App.cdn_s3_handler().upload_file(os.path.join(src_dir, name),
                                         '{0}/{1}'.format(self.project_key, name))
    App.cdn_s3_handler().upload_file(os.path.join(src_dir, 'project.json'),
                                     'u/door43/en_tw/project.json')
    App.door43_s3_handler().upload_file(
        os.path.join(self.resources_dir, 'templates', 'project-page.html'),
        'templates/project-page.html')
def mock_s3_bible_project(self, test_file_name, project_key, multi_part=False):
    """Upload an extracted bible project zip to the mock CDN bucket.

    With multi_part=True, each HTML file also gets a 'right-sidebar' div
    appended to its body and is uploaded to the door43 bucket as well.
    """
    zip_file = os.path.join(
        os.path.join(self.resources_dir, 'converted_projects'), test_file_name)
    base_no_ext = test_file_name.split('.zip')[0]
    extract_dir = os.path.join(self.temp_dir, base_no_ext)
    unzip(zip_file, extract_dir)
    project_dir = os.path.join(extract_dir, base_no_ext) + os.path.sep
    self.project_files = file_utils.get_files(extract_dir)
    self.project_key = project_key
    for file_path in self.project_files:
        # Make sure it is a bucket path
        bucket_path = file_path.split(project_dir)[1].replace(os.path.sep, '/')
        App.cdn_s3_handler().upload_file(file_path,
                                         '{0}/{1}'.format(project_key, bucket_path))
        if not multi_part:
            continue
        # copy files from cdn to door43
        html_name = os.path.basename(file_path)
        if '.html' not in html_name:
            continue
        with codecs.open(file_path, 'r', 'utf-8-sig') as handle:
            soup = BeautifulSoup(handle, 'html.parser')
        # add nav tag
        soup.body.append(soup.new_tag('div', id='right-sidebar'))
        file_utils.write_file(file_path,
                              unicode(soup).encode('ascii', 'xmlcharrefreplace'))
        App.door43_s3_handler().upload_file(file_path,
                                            '{0}/{1}'.format(project_key, html_name))
    # u, user, repo = project_key
    App.door43_s3_handler().upload_file(
        os.path.join(self.resources_dir, 'templates', 'project-page.html'),
        'templates/project-page.html')
def mock_s3_tn_project(self, part):
    """Seed the mock CDN bucket with one part of a converted tN project."""
    archive = os.path.join(self.resources_dir, 'converted_projects', 'en_tn_converted.zip')
    extract_dir = os.path.join(self.temp_dir, 'en_tn_converted')
    unzip(archive, extract_dir)
    src_dir = os.path.join(extract_dir, 'en_tn_converted')
    self.project_files = [name for name in os.listdir(src_dir)
                          if os.path.isfile(os.path.join(src_dir, name))]
    self.project_key = 'u/door43/en_tn/12345678'
    # record which part this build log belongs to
    log_path = os.path.join(src_dir, 'build_log.json')
    build_log = file_utils.load_json_object(log_path)
    build_log['part'] = part
    file_utils.write_file(log_path, build_log)
    part_prefix = '{0}/{1}'.format(self.project_key, part)
    App.cdn_s3_handler().upload_file(log_path, part_prefix + '/build_log.json')
    App.cdn_s3_handler().upload_file(os.path.join(src_dir, 'index.json'),
                                     part_prefix + '/index.json')
    # the 'finished' marker signals that this part is ready to deploy
    App.cdn_s3_handler().upload_file(log_path, part_prefix + '/finished')
    App.cdn_s3_handler().upload_file(os.path.join(src_dir, '01-GEN.html'),
                                     part_prefix + '/01-GEN.html')
    App.cdn_s3_handler().upload_file(os.path.join(src_dir, 'project.json'),
                                     'u/door43/en_tq/project.json')
    App.door43_s3_handler().upload_file(
        os.path.join(self.resources_dir, 'templates', 'project-page.html'),
        'templates/project-page.html')
def deploy_revision_to_door43(self, build_log_key):
    """
    Deploys a single revision of a project to door43.org
    :param string build_log_key: CDN key of the build_log.json that triggered this deploy
    :return bool: True when the deploy ran to completion, False on early exit
    """
    build_log = None
    try:
        build_log = App.cdn_s3_handler().get_json(build_log_key, catch_exception=False)
    except Exception as e:
        App.logger.debug("Deploying error could not access {0}: {1}".format(build_log_key, str(e)))
    # a usable build log must at least identify the commit and the repo
    if not build_log or 'commit_id' not in build_log or 'repo_owner' not in build_log \
            or 'repo_name' not in build_log:
        App.logger.debug("Exiting, Invalid build log at {0}: {1}".format(build_log_key, build_log))
        return False
    start = time.time()
    App.logger.debug("Deploying, build log: " + json.dumps(build_log)[:256])
    user = build_log['repo_owner']
    repo_name = build_log['repo_name']
    commit_id = build_log['commit_id'][:10]
    s3_commit_key = 'u/{0}/{1}/{2}'.format(user, repo_name, commit_id)
    s3_repo_key = 'u/{0}/{1}'.format(user, repo_name)
    download_key = s3_commit_key
    do_part_template_only = False
    do_multipart_merge = False
    if 'multiple' in build_log:
        # final merge pass of a multi-part project: only proceed once every
        # part is deployed and the merge has not already been claimed
        do_multipart_merge = build_log['multiple']
        App.logger.debug("Found multi-part merge: {0}".format(download_key))
        prefix = download_key + '/'
        undeployed = self.get_undeployed_parts(prefix)
        if len(undeployed) > 0:
            App.logger.debug("Exiting, Parts not yet deployed: {0}".format(undeployed))
            return False
        key_deployed_ = download_key + '/final_deployed'
        if App.cdn_s3_handler().key_exists(key_deployed_):
            App.logger.debug("Exiting, Already merged parts: {0}".format(download_key))
            return False
        self.write_data_to_file(self.temp_dir, key_deployed_, 'final_deployed', ' ')  # flag that deploy has begun
        App.logger.debug("Continuing with merge: {0}".format(download_key))
    elif 'part' in build_log:
        # templating a single part of a multi-part project
        part = build_log['part']
        download_key += '/' + part
        do_part_template_only = True
        App.logger.debug("Found partial: {0}".format(download_key))
        if not App.cdn_s3_handler().key_exists(download_key + '/finished'):
            App.logger.debug("Exiting, Not ready to process partial")
            return False
    source_dir = tempfile.mkdtemp(prefix='source_', dir=self.temp_dir)
    output_dir = tempfile.mkdtemp(prefix='output_', dir=self.temp_dir)
    template_dir = tempfile.mkdtemp(prefix='template_', dir=self.temp_dir)
    resource_type = build_log['resource_type']
    template_key = 'templates/project-page.html'
    template_file = os.path.join(template_dir, 'project-page.html')
    App.logger.debug("Downloading {0} to {1}...".format(template_key, template_file))
    App.door43_s3_handler().download_file(template_key, template_file)
    if not do_multipart_merge:
        source_dir, success = self.template_converted_files(build_log, download_key, output_dir,
                                                            repo_name, resource_type, s3_commit_key,
                                                            source_dir, start, template_file)
        if not success:
            return False
    else:
        source_dir, success = self.multipart_master_merge(s3_commit_key, resource_type, download_key,
                                                          output_dir, source_dir, start, template_file)
        if not success:
            return False
    #######################
    #
    #  Now do the deploy
    #
    #######################
    if not do_part_template_only or do_multipart_merge:
        # Copy first HTML file to index.html if index.html doesn't exist
        html_files = sorted(glob(os.path.join(output_dir, '*.html')))
        index_file = os.path.join(output_dir, 'index.html')
        if len(html_files) > 0 and not os.path.isfile(index_file):
            copyfile(os.path.join(output_dir, html_files[0]), index_file)
    # Copy all other files over that don't already exist in output_dir, like css files
    for filename in sorted(glob(os.path.join(source_dir, '*'))):
        output_file = os.path.join(output_dir, os.path.basename(filename))
        if not os.path.exists(output_file) and not os.path.isdir(filename):
            copyfile(filename, output_file)
        if do_part_template_only:  # move files to common area
            basename = os.path.basename(filename)
            if basename not in ['finished', 'build_log.json', 'index.html', 'merged.json', 'lint_log.json']:
                App.logger.debug("Moving {0} to common area".format(basename))
                App.cdn_s3_handler().upload_file(filename, s3_commit_key + '/' + basename, cache_time=0)
                App.cdn_s3_handler().delete_file(download_key + '/' + basename)
    # save master build_log.json
    file_utils.write_file(os.path.join(output_dir, 'build_log.json'), build_log)
    App.logger.debug("Final build_log.json:\n" + json.dumps(build_log)[:256])
    # Upload all files to the door43.org bucket
    for root, dirs, files in os.walk(output_dir):
        for f in sorted(files):
            path = os.path.join(root, f)
            if os.path.isdir(path):
                continue
            key = s3_commit_key + path.replace(output_dir, '').replace(os.path.sep, '/')
            App.logger.debug("Uploading {0} to {1}".format(path, key))
            App.door43_s3_handler().upload_file(path, key, cache_time=0)
    if not do_part_template_only:
        # Now we place json files and redirect index.html for the whole repo to this index.html file
        try:
            App.door43_s3_handler().copy(from_key='{0}/project.json'.format(s3_repo_key),
                                         from_bucket=App.cdn_bucket)
            App.door43_s3_handler().copy(from_key='{0}/manifest.json'.format(s3_commit_key),
                                         to_key='{0}/manifest.json'.format(s3_repo_key))
            App.door43_s3_handler().redirect(s3_repo_key, '/' + s3_commit_key)
            App.door43_s3_handler().redirect(s3_repo_key + '/index.html', '/' + s3_commit_key)
            self.write_data_to_file(output_dir, s3_commit_key, 'deployed', ' ')  # flag that deploy has finished
        except Exception as e:
            # best effort: the commit itself is already deployed, but don't
            # silently hide the failure (was a bare `except: pass`)
            App.logger.debug("Error updating repo redirects/manifest for {0}: {1}".format(s3_repo_key, str(e)))
    else:  # if processing part of multi-part merge
        self.write_data_to_file(output_dir, download_key, 'deployed', ' ')  # flag that deploy has finished
        if App.cdn_s3_handler().key_exists(s3_commit_key + '/final_build_log.json'):
            App.logger.debug("final build detected")
            App.logger.debug("conversions all finished, trigger final merge")
            App.cdn_s3_handler().copy(from_key=s3_commit_key + '/final_build_log.json',
                                      to_key=s3_commit_key + '/build_log.json')
    elapsed_seconds = int(time.time() - start)
    App.logger.debug("deploy type partial={0}, multi_merge={1}".format(do_part_template_only, do_multipart_merge))
    App.logger.debug("deploy completed in {0} seconds".format(elapsed_seconds))
    self.close()
    return True
def validate_bible_results(self, ret, build_log_key, expect_success, output_key):
    """Assert the deploy result matches expectations and, on success, that output_key exists."""
    self.assertEqual(ret, expect_success)
    if not expect_success:
        return
    if output_key:
        self.assertTrue(App.door43_s3_handler().key_exists(output_key))
def deploy_revision_to_door43(self, build_log_key):
    """
    Deploys a single revision of a project to door43.org
    :param string build_log_key: CDN key of the build_log.json that triggered this deploy
    :return bool: True when the deploy ran to completion, False on early exit
    """
    build_log = None
    try:
        build_log = App.cdn_s3_handler().get_json(build_log_key, catch_exception=False)
    except Exception as e:
        App.logger.debug(
            "Deploying error could not access {0}: {1}".format(
                build_log_key, str(e)))
        pass
    # a usable build log must at least identify the commit and the repo
    if not build_log or 'commit_id' not in build_log or 'repo_owner' not in build_log \
            or 'repo_name' not in build_log:
        App.logger.debug("Exiting, Invalid build log at {0}: {1}".format(
            build_log_key, build_log))
        return False
    start = time.time()
    App.logger.debug("Deploying, build log: " + json.dumps(build_log)[:256])
    user = build_log['repo_owner']
    repo_name = build_log['repo_name']
    commit_id = build_log['commit_id'][:10]
    s3_commit_key = 'u/{0}/{1}/{2}'.format(user, repo_name, commit_id)
    s3_repo_key = 'u/{0}/{1}'.format(user, repo_name)
    download_key = s3_commit_key
    do_part_template_only = False
    do_multipart_merge = False
    if 'multiple' in build_log:
        # final merge pass of a multi-part project: only proceed once every
        # part is deployed and the merge has not already been claimed
        do_multipart_merge = build_log['multiple']
        App.logger.debug(
            "Found multi-part merge: {0}".format(download_key))
        prefix = download_key + '/'
        undeployed = self.get_undeployed_parts(prefix)
        if len(undeployed) > 0:
            App.logger.debug(
                "Exiting, Parts not yet deployed: {0}".format(undeployed))
            return False
        key_deployed_ = download_key + '/final_deployed'
        if App.cdn_s3_handler().key_exists(key_deployed_):
            App.logger.debug(
                "Exiting, Already merged parts: {0}".format(download_key))
            return False
        self.write_data_to_file(self.temp_dir, key_deployed_, 'final_deployed', ' ')  # flag that deploy has begun
        App.logger.debug("Continuing with merge: {0}".format(download_key))
    elif 'part' in build_log:
        # templating a single part of a multi-part project
        part = build_log['part']
        download_key += '/' + part
        do_part_template_only = True
        App.logger.debug("Found partial: {0}".format(download_key))
        if not App.cdn_s3_handler().key_exists(download_key + '/finished'):
            App.logger.debug("Exiting, Not ready to process partial")
            return False
    source_dir = tempfile.mkdtemp(prefix='source_', dir=self.temp_dir)
    output_dir = tempfile.mkdtemp(prefix='output_', dir=self.temp_dir)
    template_dir = tempfile.mkdtemp(prefix='template_', dir=self.temp_dir)
    resource_type = build_log['resource_type']
    template_key = 'templates/project-page.html'
    template_file = os.path.join(template_dir, 'project-page.html')
    App.logger.debug("Downloading {0} to {1}...".format(
        App.door43_bucket + "/" + template_key, template_file))
    App.door43_s3_handler().download_file(template_key, template_file)
    if not do_multipart_merge:
        source_dir, success = self.template_converted_files(
            build_log, download_key, output_dir, repo_name, resource_type,
            s3_commit_key, source_dir, start, template_file)
        if not success:
            return False
    else:
        source_dir, success = self.multipart_master_merge(
            s3_commit_key, resource_type, download_key, output_dir,
            source_dir, start, template_file)
        if not success:
            return False
    #######################
    #
    #  Now do the deploy
    #
    #######################
    if not do_part_template_only or do_multipart_merge:
        # Copy first HTML file to index.html if index.html doesn't exist
        html_files = sorted(glob(os.path.join(output_dir, '*.html')))
        index_file = os.path.join(output_dir, 'index.html')
        if len(html_files) > 0 and not os.path.isfile(index_file):
            copyfile(os.path.join(output_dir, html_files[0]), index_file)
    # Copy all other files over that don't already exist in output_dir, like css files
    for filename in sorted(glob(os.path.join(source_dir, '*'))):
        output_file = os.path.join(output_dir, os.path.basename(filename))
        if not os.path.exists(output_file) and not os.path.isdir(filename):
            copyfile(filename, output_file)
        if do_part_template_only:  # move files to common area
            basename = os.path.basename(filename)
            if basename not in [
                    'finished', 'build_log.json', 'index.html', 'merged.json',
                    'lint_log.json'
            ]:
                App.logger.debug(
                    "Moving {0} to common area".format(basename))
                App.cdn_s3_handler().upload_file(filename,
                                                 s3_commit_key + '/' + basename,
                                                 cache_time=0)
                App.cdn_s3_handler().delete_file(download_key + '/' + basename)
    # save master build_log.json
    file_utils.write_file(os.path.join(output_dir, 'build_log.json'), build_log)
    App.logger.debug("Final build_log.json:\n" + json.dumps(build_log)[:256])
    # Upload all files to the door43.org bucket
    for root, dirs, files in os.walk(output_dir):
        for f in sorted(files):
            path = os.path.join(root, f)
            if os.path.isdir(path):
                continue
            key = s3_commit_key + path.replace(output_dir, '').replace(
                os.path.sep, '/')
            App.logger.debug("Uploading {0} to {1}".format(path, key))
            App.door43_s3_handler().upload_file(path, key, cache_time=0)
    if not do_part_template_only:
        # Now we place json files and redirect index.html for the whole repo to this index.html file
        try:
            App.door43_s3_handler().copy(
                from_key='{0}/project.json'.format(s3_repo_key),
                from_bucket=App.cdn_bucket)
            App.door43_s3_handler().copy(
                from_key='{0}/manifest.json'.format(s3_commit_key),
                to_key='{0}/manifest.json'.format(s3_repo_key))
            App.door43_s3_handler().redirect(s3_repo_key, '/' + s3_commit_key)
            App.door43_s3_handler().redirect(s3_repo_key + '/index.html',
                                             '/' + s3_commit_key)
            self.write_data_to_file(output_dir, s3_commit_key, 'deployed', ' ')  # flag that deploy has finished
        except:
            # NOTE(review): bare except silently swallows every failure here,
            # including SystemExit/KeyboardInterrupt - consider narrowing to
            # `except Exception` and logging the error.
            pass
    else:  # if processing part of multi-part merge
        self.write_data_to_file(output_dir, download_key, 'deployed', ' ')  # flag that deploy has finished
        if App.cdn_s3_handler().key_exists(s3_commit_key + '/final_build_log.json'):
            App.logger.debug("final build detected")
            App.logger.debug(
                "conversions all finished, trigger final merge")
            App.cdn_s3_handler().copy(
                from_key=s3_commit_key + '/final_build_log.json',
                to_key=s3_commit_key + '/build_log.json')
    elapsed_seconds = int(time.time() - start)
    App.logger.debug("deploy type partial={0}, multi_merge={1}".format(
        do_part_template_only, do_multipart_merge))
    App.logger.debug(
        "deploy completed in {0} seconds".format(elapsed_seconds))
    self.close()
    return True
def validate_conversion(self, user, repo, success, build_log_json, commit_id, commit_sha, commit_path,
                        expected_output_names, job, chapter_count=-1, file_ext=""):
    """End-to-end check of a conversion: preconvert zip, converted files on the
    CDN, required build-log fields, deployed files on door43, and the manifest
    DB record."""
    self.assertTrue(len(build_log_json) > 0)
    self.assertIsNotNone(job)
    self.temp_dir = tempfile.mkdtemp(prefix='testing_')
    if not (type(expected_output_names) is list):
        expected_output_names = [expected_output_names]  # put string in list
    # check pre-convert files
    self.download_and_check_zip_file(self.preconvert_handler, expected_output_names,
                                     self.preprocessor_output_extension,
                                     self.get_preconvert_s3_key(commit_sha),
                                     "preconvert", success, chapter_count, file_ext)
    # check converted files
    destination_key = self.get_destination_s3_key(commit_sha, repo, user)
    converted_build_log = self.check_destination_files(self.cdn_handler, expected_output_names,
                                                       "html", destination_key, chapter_count)
    # check required fields
    App.logger.debug(converted_build_log)
    saved_build_json = json.loads(converted_build_log)
    for required_field in ('commit_id', 'repo_owner', 'repo_name', 'created_at', 'source',
                           'errors', 'warnings', 'message', 'status'):
        self.assertTrue(required_field in saved_build_json)
    self.assertEqual(len(commit_id), COMMIT_LENGTH)
    self.assertIsNotNone(commit_sha)
    self.assertIsNotNone(commit_path)
    # job/build-log errors are surfaced as warnings, not hard failures
    if len(job.errors) > 0:
        self.warn("WARNING: Found job errors: " + str(job.errors))
    if len(build_log_json['errors']) > 0:
        self.warn("WARNING: Found build_log errors: " + str(build_log_json['errors']))
    # check deployed files and compare against the converted build log
    door43_handler = App.door43_s3_handler()
    deployed_build_log = self.check_deployed_files(door43_handler, expected_output_names, "html",
                                                   destination_key, chapter_count)
    self.compare_build_logs(converted_build_log, deployed_build_log, destination_key)
    if len(self.warnings):
        App.logger.debug("\n#######\nHave warnings:\n#######\n" + '\n'.join(self.warnings))
    self.assertTrue(success)
    # Test that repo is in manifest table
    tx_manifest = TxManifest.get(repo_name=repo, user_name=user)
    # Giving TxManifest above just the composite keys will cause it to load all the data from the App.
    self.assertIsNotNone(tx_manifest)
    self.assertEqual(tx_manifest.repo_name, repo)
    self.assertEqual(tx_manifest.user_name, user)