def setUp(self):
    """Runs before each test: fresh in-memory App, mocked S3 handlers, temp dirs and default job data."""
    App(prefix='{0}-'.format(self._testMethodName), db_connection_string='sqlite:///:memory:')
    App.cdn_s3_handler().create_bucket()
    App.pre_convert_s3_handler().create_bucket()
    # Replace S3 operations with local mocks so tests never touch the network.
    App.cdn_s3_handler().upload_file = self.mock_cdn_upload_file
    App.cdn_s3_handler().get_json = self.mock_cdn_get_json
    App.pre_convert_s3_handler().upload_file = self.mock_s3_upload_file
    try:
        os.makedirs(ClientWebhookTest.base_temp_dir)
    except OSError:
        # Directory already exists. The previous bare `except:` also swallowed
        # KeyboardInterrupt/SystemExit; narrowed to the filesystem error only.
        pass
    self.temp_dir = tempfile.mkdtemp(dir=self.base_temp_dir, prefix='webhookTest_')
    self.job_converter_count = 0
    self.job_linter_count = 0
    self.uploaded_files = []
    # Baseline job record used by the tests; individual tests override fields as needed.
    self.job_data = {
        'job_id': '123456890',
        'status': 'started',
        'success': False,
        'resource_type': 'obs',
        'input_format': 'md',
        'output_format': 'html',
        'convert_module': 'module1',
        'created_at': datetime.utcnow(),
        'errors': []
    }
    self.register_modules()
def upload_archive(self):
    """Publish the converted zip: copy locally when cdn_file targets an existing directory, otherwise upload to the CDN."""
    destination = self.cdn_file
    if destination and os.path.isdir(os.path.dirname(destination)):
        # Local destination (e.g. test run) - a plain file copy is enough.
        copy(self.output_zip_file, destination)
        return
    handler = App.cdn_s3_handler()
    if handler:
        handler.upload_file(self.output_zip_file, destination, cache_time=0)
def setUp(self):
    """Runs before each test: fresh in-memory App, mocked CDN handler, temp dirs and callback fixtures."""
    App(prefix='{0}-'.format(self._testMethodName), db_connection_string='sqlite:///:memory:')
    App.cdn_s3_handler().create_bucket()
    # Replace S3 operations with local mocks so tests never touch the network.
    App.cdn_s3_handler().upload_file = self.mock_cdn_upload_file
    App.cdn_s3_handler().get_json = self.mock_cdn_get_json
    App.cdn_s3_handler().key_exists = self.mock_cdn_key_exists
    try:
        os.makedirs(self.base_temp_dir)
    except OSError:
        # Directory already exists. The previous bare `except:` also swallowed
        # KeyboardInterrupt/SystemExit; narrowed to the filesystem error only.
        pass
    self.temp_dir = tempfile.mkdtemp(dir=self.base_temp_dir, prefix='callbackTest_')
    self.transferred_files = []
    self.raiseDownloadException = False
    self.source_folder = None
    self.results_key = 'u/results'
    # Default payload fed to the linter callback under test.
    self.lint_callback_data = {
        'identifier': 'dummy_id',
        's3_results_key': self.results_key,
        'success': True,
        'info': [],
        'warnings': [],
        'errors': []
    }
    # Expected outcomes; individual tests adjust these before asserting.
    self.expected_error_count = 0
    self.expected_warning_count = 0
    self.expected_log_count = 0
    self.expected_status = "success"
    self.expected_success = True
    self.expected_all_parts_completed = True
    self.expected_multipart = False
def update_project_json(self, commit_id, job, repo_name, repo_owner):
    """
    Refresh u/<owner>/<repo>/project.json on the CDN with this commit's job status.

    :param string commit_id:
    :param TxJob job:
    :param string repo_name:
    :param string repo_owner:
    :return:
    """
    project_json_key = 'u/{0}/{1}/project.json'.format(repo_owner, repo_name)
    project_json = App.cdn_s3_handler().get_json(project_json_key)
    project_json['user'] = repo_owner
    project_json['repo'] = repo_name
    project_json['repo_url'] = 'https://git.door43.org/{0}/{1}'.format(repo_owner, repo_name)
    commit = {
        'id': commit_id,
        'created_at': job.created_at,
        'status': job.status,
        'success': job.success,
        'started_at': None,
        'ended_at': None,
    }
    # Drop any previous record of this commit, then append the fresh one.
    previous_commits = project_json.get('commits', [])
    project_json['commits'] = [c for c in previous_commits if c['id'] != commit_id]
    project_json['commits'].append(commit)
    project_file = os.path.join(self.base_temp_dir, 'project.json')
    write_file(project_file, project_json)
    App.cdn_s3_handler().upload_file(project_file, project_json_key)
def upload_converted_files(s3_commit_key, unzip_dir):
    """Walk unzip_dir and upload every file to the CDN under s3_commit_key, preserving relative paths."""
    for current_dir, _subdirs, file_names in os.walk(unzip_dir):
        for file_name in sorted(file_names):
            file_path = os.path.join(current_dir, file_name)
            # Key = commit prefix + path relative to the unzip root.
            upload_key = s3_commit_key + file_path.replace(unzip_dir, '')
            App.logger.debug('Uploading {0} to {1}'.format(file_name, upload_key))
            App.cdn_s3_handler().upload_file(file_path, upload_key, cache_time=0)
def setUp(self):
    """Runs before each test: fresh App, a CDN bucket, a temp dir, a printer, and the mocked OBS project."""
    App(prefix='{0}-'.format(self._testMethodName), db_connection_string='sqlite:///:memory:')
    App.cdn_s3_handler().create_bucket()
    # Work area and object under test.
    self.temp_dir = tempfile.mkdtemp(prefix="test_project_printer")
    self.printer = ProjectPrinter()
    self.mock_s3_obs_project()
def mock_s3_bible_project(self, test_file_name, project_key, multi_part=False):
    """
    Unzip a converted bible-project fixture and upload its files to the mocked CDN bucket
    (and, when multi_part, mirror HTML files to the mocked door43 bucket with a sidebar div added).

    :param string test_file_name: zip fixture name under resources/converted_projects
    :param string project_key: bucket key prefix for the uploaded files
    :param bool multi_part: also push HTML files (plus the page template) to the door43 bucket
    """
    converted_proj_dir = os.path.join(self.resources_dir, 'converted_projects')
    test_file_base = test_file_name.split('.zip')[0]
    zip_file = os.path.join(converted_proj_dir, test_file_name)
    out_dir = os.path.join(self.temp_dir, test_file_base)
    unzip(zip_file, out_dir)
    project_dir = os.path.join(out_dir, test_file_base) + os.path.sep
    self.project_files = file_utils.get_files(out_dir)
    self.project_key = project_key
    for filename in self.project_files:
        # Make sure it is a bucket path
        sub_path = filename.split(project_dir)[1].replace(os.path.sep, '/')
        App.cdn_s3_handler().upload_file(filename, '{0}/{1}'.format(project_key, sub_path))
        if multi_part:  # copy files from cdn to door43
            base_name = os.path.basename(filename)
            if '.html' in base_name:
                with codecs.open(filename, 'r', 'utf-8-sig') as f:
                    soup = BeautifulSoup(f, 'html.parser')
                # add nav tag
                new_tag = soup.new_tag('div', id='right-sidebar')
                soup.body.append(new_tag)
                html = unicode(soup)  # NOTE: Python 2 builtin - this module predates py3
                file_utils.write_file(filename, html.encode('ascii', 'xmlcharrefreplace'))
            App.door43_s3_handler().upload_file(filename, '{0}/{1}'.format(project_key, base_name))
    # u, user, repo = project_key
    App.door43_s3_handler().upload_file(os.path.join(self.resources_dir, 'templates', 'project-page.html'),
                                        'templates/project-page.html')
def update_project_file(build_log, output_dir):
    """
    Merge this build's commit status into u/<owner>/<repo>/project.json on the CDN.

    :param dict build_log: must contain commit_id, repo_owner, repo_name plus status fields
    :param string output_dir: local directory to write the temporary project.json into
    :return dict: the updated project.json contents
    """
    commit_id = build_log['commit_id']
    user_name = build_log['repo_owner']
    repo_name = build_log['repo_name']
    project_json_key = 'u/{0}/{1}/project.json'.format(user_name, repo_name)
    project_json = App.cdn_s3_handler().get_json(project_json_key)
    project_json['user'] = user_name
    project_json['repo'] = repo_name
    project_json['repo_url'] = 'https://{0}/{1}/{2}'.format(App.gogs_url, user_name, repo_name)
    commit = {
        'id': commit_id,
        'created_at': build_log['created_at'],
        'status': build_log['status'],
        'success': build_log['success'],
        # .get() yields None when the build log has no timing info, matching the old default.
        'started_at': build_log.get('started_at'),
        'ended_at': build_log.get('ended_at'),
    }
    # Replace any earlier record of this commit with the fresh one.
    existing = project_json.get('commits', [])
    project_json['commits'] = [c for c in existing if c['id'] != commit_id]
    project_json['commits'].append(commit)
    project_file = os.path.join(output_dir, 'project.json')
    write_file(project_file, project_json)
    App.cdn_s3_handler().upload_file(project_file, project_json_key, cache_time=0)
    return project_json
def mock_s3_tn_project(self, part):
    """
    Unzip the en_tn fixture, stamp its build_log with the given part number, and upload
    the part's files (build log, index, finished marker, sample page) to the mocked CDN.
    """
    zip_file = os.path.join(self.resources_dir, 'converted_projects', 'en_tn_converted.zip')
    out_dir = os.path.join(self.temp_dir, 'en_tn_converted')
    unzip(zip_file, out_dir)
    src_dir = os.path.join(out_dir, 'en_tn_converted')
    self.project_files = [f for f in os.listdir(src_dir) if os.path.isfile(os.path.join(src_dir, f))]
    self.project_key = 'u/door43/en_tn/12345678'
    build_log = file_utils.load_json_object(os.path.join(src_dir, 'build_log.json'))
    build_log['part'] = part
    file_utils.write_file(os.path.join(src_dir, 'build_log.json'), build_log)
    App.cdn_s3_handler().upload_file(os.path.join(src_dir, 'build_log.json'),
                                     '{0}/{1}/build_log.json'.format(self.project_key, part))
    App.cdn_s3_handler().upload_file(os.path.join(src_dir, 'index.json'),
                                     '{0}/{1}/index.json'.format(self.project_key, part))
    # The 'finished' marker only needs to exist; the build log is a convenient placeholder body.
    App.cdn_s3_handler().upload_file(os.path.join(src_dir, 'build_log.json'),
                                     '{0}/{1}/finished'.format(self.project_key, part))
    App.cdn_s3_handler().upload_file(os.path.join(src_dir, '01-GEN.html'),
                                     '{0}/{1}/01-GEN.html'.format(self.project_key, part))
    # NOTE(review): key says 'en_tq' although this fixture is 'en_tn' - possibly deliberate,
    # possibly a copy-paste slip; confirm before changing (the duplicate elsewhere matches).
    App.cdn_s3_handler().upload_file(os.path.join(src_dir, 'project.json'), 'u/door43/en_tq/project.json')
    App.door43_s3_handler().upload_file(os.path.join(self.resources_dir, 'templates', 'project-page.html'),
                                        'templates/project-page.html')
def set_deployed_flags(self, project_key, part_count, skip=-1):
    """Create a placeholder 'deployed' flag on the CDN for every part except the one indexed by skip."""
    flag_file = tempfile.mktemp(prefix="temp", suffix="deployed")
    file_utils.write_file(flag_file, ' ')
    for part in range(part_count):
        if part == skip:
            continue
        App.cdn_s3_handler().upload_file(flag_file,
                                         '{0}/{1}/deployed'.format(project_key, part),
                                         cache_time=0)
    os.remove(flag_file)
def test_redeploy_all_projects(self):
    """redeploy_all_projects should report success when build logs exist for multiple projects."""
    self.mock_s3_obs_project()
    cdn = App.cdn_s3_handler()
    cdn.put_contents('u/user1/project1/revision1/build_log.json', '{}')
    cdn.put_contents('u/user2/project2/revision2/build_log.json', '{}')
    self.assertTrue(self.deployer.redeploy_all_projects('test-door43_deployer'))
def template_converted_files(self, build_log, download_key, output_dir, repo_name, resource_type,
                             s3_commit_key, source_dir, start, template_file):
    """
    Download the converted files for download_key, apply the project template to them,
    and update the templater index on success.

    :param dict build_log: build log for the revision (errors/message are used for the placeholder page)
    :param string download_key: CDN key prefix to download from
    :param string output_dir: directory the templated output is written to
    :param string repo_name:
    :param string resource_type: drives which templater init_template selects
    :param string s3_commit_key: CDN key prefix of the commit (for index.json)
    :param string source_dir: local directory to download into
    :param float start: epoch time the deploy started (for elapsed logging)
    :param string template_file: template applied to the source files
    :return tuple: (source_dir, success)
    """
    App.cdn_s3_handler().download_dir(download_key + '/', source_dir)
    # download_dir recreates the key hierarchy locally; descend into it.
    source_dir = os.path.join(source_dir, download_key.replace('/', os.path.sep))
    elapsed_seconds = int(time.time() - start)
    App.logger.debug("deploy download completed in " + str(elapsed_seconds) + " seconds")
    html_files = sorted(glob(os.path.join(source_dir, '*.html')))
    if len(html_files) < 1:
        # Nothing was converted yet - synthesize a placeholder index.html so the
        # templater still has something to render.
        content = ''
        if len(build_log['errors']) > 0:
            content += """
                <div style="text-align:center;margin-bottom:20px">
                    <i class="fa fa-times-circle-o" style="font-size: 250px;font-weight: 300;color: red"></i>
                    <br/>
                    <h2>Critical!</h2>
                    <h3>Here is what went wrong with this build:</h3>
                </div>
            """
            content += '<div><ul><li>' + '</li><li>'.join(build_log['errors']) + '</li></ul></div>'
        else:
            content += '<h1 class="conversion-requested">{0}</h1>'.format(build_log['message'])
            content += '<p><i>No content is available to show for {0} yet.</i></p>'.format(repo_name)
        html = """
            <html lang="en">
                <head>
                    <title>{0}</title>
                </head>
                <body>
                    <div id="content">{1}</div>
                </body>
            </html>""".format(repo_name, content)
        repo_index_file = os.path.join(source_dir, 'index.html')
        write_file(repo_index_file, html)
    # merge the source files with the template
    templater = init_template(resource_type, source_dir, output_dir, template_file)
    try:
        self.run_templater(templater)
        success = True
    except Exception as e:
        App.logger.error("Error applying template {0} to resource type {1}:".format(template_file, resource_type))
        App.logger.error(e.message)  # NOTE: e.message is Python 2 only
        App.logger.error('{0}: {1}'.format(str(e), traceback.format_exc()))
        self.close()
        success = False
    if success:
        # update index of templated files
        index_json_fname = 'index.json'
        index_json = self.get_templater_index(s3_commit_key, index_json_fname)
        App.logger.debug("initial 'index.json': " + json.dumps(index_json)[:256])
        self.update_index_key(index_json, templater, 'titles')
        self.update_index_key(index_json, templater, 'chapters')
        self.update_index_key(index_json, templater, 'book_codes')
        App.logger.debug("final 'index.json': " + json.dumps(index_json)[:256])
        self.write_data_to_file(output_dir, s3_commit_key, index_json_fname, index_json)
    return source_dir, success
def mock_s3_obs_project(self):
    """Unzip the en-obs fixture and upload its files plus project.json to the mocked CDN bucket."""
    zip_path = os.path.join(self.resources_dir, 'converted_projects', 'en-obs-complete.zip')
    out_dir = os.path.join(self.temp_dir, 'en-obs-complete')
    unzip(zip_path, out_dir)
    project_dir = os.path.join(out_dir, 'door43', 'en-obs', '12345678')
    self.project_key = 'door43/en-obs/12345678'
    self.project_files = [name for name in os.listdir(project_dir)
                          if os.path.isfile(os.path.join(project_dir, name))]
    for name in self.project_files:
        App.cdn_s3_handler().upload_file(os.path.join(project_dir, name),
                                         'u/{0}/{1}'.format(self.project_key, name))
    App.cdn_s3_handler().upload_file(os.path.join(out_dir, 'door43', 'en-obs', 'project.json'),
                                     'u/door43/en-obs/project.json')
def test_print_obs(self):
    """print_project should generate print_all.html once and reuse it on a second call."""
    printed_key = 'u/{0}/print_all.html'.format(self.project_key)
    self.printer.print_project(self.project_key)
    self.assertTrue(App.cdn_s3_handler().key_exists(printed_key))
    html = App.cdn_s3_handler().get_file_contents(printed_key)
    soup = BeautifulSoup(html, 'html.parser')
    self.assertEqual(len(soup.div), 69)
    self.assertEqual(soup.html['lang'], 'en')
    self.assertEqual(soup.html['dir'], 'ltr')
    # Second run should find the cached file rather than regenerating it.
    self.printer.print_project(self.project_key)
    self.assertTrue(App.cdn_s3_handler().key_exists(printed_key))
def upload_build_log_to_s3(self, build_log, s3_commit_key, part=''):
    """
    Write build_log to a temp file and upload it as <s3_commit_key>/<part>build_log.json.

    :param dict build_log:
    :param string s3_commit_key:
    :param string part: optional part prefix, e.g. '0/'
    :return:
    """
    log_path = os.path.join(self.base_temp_dir, 'build_log.json')
    write_file(log_path, build_log)
    upload_key = '{0}/{1}build_log.json'.format(s3_commit_key, part)
    App.logger.debug('Saving build log to ' + upload_key)
    App.cdn_s3_handler().upload_file(log_path, upload_key, cache_time=0)
def get_undeployed_parts(self, prefix):
    """Return the part numbers under prefix that have a build_log.json but no 'deployed' flag yet."""
    unfinished = []
    for obj in App.cdn_s3_handler().get_objects(prefix=prefix, suffix='/build_log.json'):
        pieces = obj.key.split(prefix)
        if len(pieces) != 2:
            continue
        sub_pieces = pieces[1].split('/')
        if len(sub_pieces) <= 1:
            continue
        part_num = sub_pieces[0]
        if not App.cdn_s3_handler().key_exists(prefix + part_num + '/deployed'):
            App.logger.debug("Part {0} unfinished".format(part_num))
            unfinished.append(part_num)
    return unfinished
def test_print_obs(self):
    """Printing should create print_all.html; a repeat call should find the existing file."""
    handler = App.cdn_s3_handler()
    key = 'u/{0}/print_all.html'.format(self.project_key)
    self.printer.print_project(self.project_key)
    self.assertTrue(handler.key_exists(key))
    soup = BeautifulSoup(handler.get_file_contents(key), 'html.parser')
    self.assertEqual(len(soup.div), 69)
    self.assertEqual(soup.html['lang'], 'en')
    self.assertEqual(soup.html['dir'], 'ltr')
    # Run again, shouldn't have to generate
    self.printer.print_project(self.project_key)
    self.assertTrue(handler.key_exists(key))
def mock_s3_tw_project(self):
    """Unzip the en_tw fixture and upload its files, project.json, and the page template to the mocked buckets."""
    zip_path = os.path.join(self.resources_dir, 'converted_projects', 'en_tw_converted.zip')
    out_dir = os.path.join(self.temp_dir, 'en_tw_converted')
    unzip(zip_path, out_dir)
    self.src_dir = src_dir = os.path.join(out_dir, 'en_tw_converted')
    self.project_key = 'u/door43/en_tw/12345678'
    self.project_files = [name for name in os.listdir(src_dir)
                          if os.path.isfile(os.path.join(src_dir, name))]
    for name in self.project_files:
        App.cdn_s3_handler().upload_file(os.path.join(src_dir, name),
                                         '{0}/{1}'.format(self.project_key, name))
    App.cdn_s3_handler().upload_file(os.path.join(src_dir, 'project.json'), 'u/door43/en_tw/project.json')
    App.door43_s3_handler().upload_file(os.path.join(self.resources_dir, 'templates', 'project-page.html'),
                                        'templates/project-page.html')
def setUp(self):
    """Runs before each test: fresh App, both buckets, a deployer, and a canned language list."""
    App(prefix='{0}-'.format(self._testMethodName))
    App.cdn_s3_handler().create_bucket()
    App.door43_s3_handler().create_bucket()
    self.temp_dir = tempfile.mkdtemp(prefix="test_project_deployer")
    self.deployer = ProjectDeployer()
    # Stub out the language catalog so templating never hits the network.
    language_entries = [
        {'gw': False, 'ld': 'ltr', 'ang': 'Afar', 'lc': 'aa', 'ln': 'Afaraf', 'lr': 'Africa', 'pk': 6},
        {'gw': True, 'ld': 'ltr', 'ang': 'English', 'lc': 'en', 'ln': 'English', 'lr': 'Europe', 'pk': 1747},
        {'gw': True, 'ld': 'ltr', 'ang': 'Spanish', 'lc': 'es', 'ln': 'espa\xf1ol', 'lr': 'Europe', 'pk': 1776},
        {'gw': True, 'ld': 'ltr', 'ang': 'French', 'lc': 'fr', 'ln': 'fran\xe7ais, langue fran\xe7aise',
         'lr': 'Europe', 'pk': 1868},
    ]
    TdLanguage.language_list = {entry['lc']: TdLanguage(entry) for entry in language_entries}
def get_undeployed_parts(self, prefix):
    """Collect the part numbers under prefix whose build_log.json exists but whose 'deployed' flag does not."""
    handler = App.cdn_s3_handler()
    pending = []
    for log_obj in handler.get_objects(prefix=prefix, suffix='/build_log.json'):
        after_prefix = log_obj.key.split(prefix)
        if len(after_prefix) == 2:
            segments = after_prefix[1].split('/')
            if len(segments) > 1:
                part_num = segments[0]
                # A part is done only once its 'deployed' marker has been written.
                if not handler.key_exists(prefix + part_num + '/deployed'):
                    App.logger.debug("Part {0} unfinished".format(part_num))
                    pending.append(part_num)
    return pending
def get_templater_index(s3_commit_key, index_json_fname):
    """
    Fetch the templater index JSON from the CDN, returning an empty skeleton when absent.

    :param string s3_commit_key: CDN key prefix of the commit
    :param string index_json_fname: index file name (e.g. 'index.json')
    :return dict: index with 'titles', 'chapters' and 'book_codes' keys guaranteed present
    """
    index_json = App.cdn_s3_handler().get_json(s3_commit_key + '/' + index_json_fname)
    if not index_json:
        # get_json can yield an empty dict or None when the key is missing; the old code
        # assigned keys into the falsy result, which raises TypeError on None. Build a
        # fresh skeleton instead - identical result for {}, robust for None.
        index_json = {'titles': {}, 'chapters': {}, 'book_codes': {}}
    return index_json
def compare_build_logs(self, converted_build_log, deployed_build_log, destination_key):
    """
    Warn (without failing the test) about any top-level key that differs between the
    converted and deployed build logs. Re-fetches the converted log from the CDN first
    in case the in-memory copy is stale.
    """
    # Keys expected to match between the two build logs.
    keys = [
        "callback", "cdn_bucket", "cdn_file", "commit_id", "commit_message",
        "commit_url", "committed_by", "compare_url", "convert_module",
        "created_at", "errors", "identifier", "input_format", "job_id", "log",
        "output", "output_format", "repo_name", "repo_owner", "resource_type",
        "source", "status", "success", "user", "warnings"
    ]
    if converted_build_log != deployed_build_log:
        # NOTE(review): os.path.join on an S3 key would use '\\' on Windows - presumably
        # this only ever runs on POSIX; confirm.
        converted_build_log = App.cdn_s3_handler().get_file_contents(
            os.path.join(destination_key, "build_log.json"))  # make sure we have the latest
        if converted_build_log != deployed_build_log:
            deployed_build_log_ = json.loads(deployed_build_log)
            converted_build_log_ = json.loads(converted_build_log)
            for key in keys:
                if key not in converted_build_log_:
                    self.warn(
                        "Key {0} missing in converted_build_log".format(key))
                    continue
                if key not in deployed_build_log_:
                    self.warn(
                        "Key {0} missing in deployed_build_log".format(key))
                    continue
                converted_value = converted_build_log_[key]
                deployed_value = deployed_build_log_[key]
                if converted_value != deployed_value:
                    self.warn(
                        "miscompare build of logs in key {0}: '{1}' - '{2}'".
                        format(key, converted_value, deployed_value))
def merge_build_status_for_file(build_log, s3_results_key, file_name, linter_file=False):
    """Fetch <s3_results_key>/<file_name> from the CDN and merge it into build_log; None when absent."""
    results_key = "{0}/{1}".format(s3_results_key, file_name)
    file_results = App.cdn_s3_handler().get_json(results_key)
    if not file_results:
        return None
    return ClientLinterCallback.merge_results_logs(build_log, file_results, linter_file)
def is_convert_finished(s3_results_key):
    """
    Check whether the conversion has written its 'finished' marker to the CDN.

    :param string s3_results_key: CDN key prefix of the conversion results
    :return bool: True when the marker exists; False when it doesn't or the check fails
    """
    key = "{0}/{1}".format(s3_results_key, 'finished')
    try:
        return App.cdn_s3_handler().key_exists(key)
    except Exception:
        # Deliberate best-effort: any S3/App failure is treated as "not finished yet".
        # (Removed the unused `as e` binding from the original.)
        return False
def setUp(self):
    """Runs before each test: fresh App, both buckets, a deployer, and a stubbed language catalog."""
    App(prefix='{0}-'.format(self._testMethodName))
    App.cdn_s3_handler().create_bucket()
    App.door43_s3_handler().create_bucket()
    self.temp_dir = tempfile.mkdtemp(prefix="test_project_deployer")
    self.deployer = ProjectDeployer()
    # Build the stub catalog keyed by language code so templating never hits the network.
    TdLanguage.language_list = {}
    for entry in ({'gw': False, 'ld': 'ltr', 'ang': 'Afar', 'lc': 'aa', 'ln': 'Afaraf',
                   'lr': 'Africa', 'pk': 6},
                  {'gw': True, 'ld': 'ltr', 'ang': 'English', 'lc': 'en', 'ln': 'English',
                   'lr': 'Europe', 'pk': 1747},
                  {'gw': True, 'ld': 'ltr', 'ang': 'Spanish', 'lc': 'es', 'ln': 'espa\xf1ol',
                   'lr': 'Europe', 'pk': 1776},
                  {'gw': True, 'ld': 'ltr', 'ang': 'French', 'lc': 'fr',
                   'ln': 'fran\xe7ais, langue fran\xe7aise', 'lr': 'Europe', 'pk': 1868}):
        TdLanguage.language_list[entry['lc']] = TdLanguage(entry)
def mock_s3_obs_project(self):
    """Unzip the en-obs fixture and push its files plus project.json into the mocked CDN bucket."""
    fixture_zip = os.path.join(self.resources_dir, 'converted_projects', 'en-obs-complete.zip')
    extract_dir = os.path.join(self.temp_dir, 'en-obs-complete')
    unzip(fixture_zip, extract_dir)
    project_dir = os.path.join(extract_dir, 'door43', 'en-obs', '12345678')
    self.project_key = 'door43/en-obs/12345678'
    self.project_files = []
    handler = App.cdn_s3_handler()
    for entry in os.listdir(project_dir):
        if not os.path.isfile(os.path.join(project_dir, entry)):
            continue
        self.project_files.append(entry)
        handler.upload_file(os.path.join(project_dir, entry),
                            'u/{0}/{1}'.format(self.project_key, entry))
    handler.upload_file(os.path.join(extract_dir, 'door43', 'en-obs', 'project.json'),
                        'u/door43/en-obs/project.json')
def print_project(self, project_id):
    """
    Assemble all of a project's HTML pages into a single print_all.html on the CDN.

    :param string project_id: '<user>/<repo>/<commit>' key of the project to print
    :return string: '<bucket_name>/<print_all_key>' for the generated (or cached) page
    :raises Exception: when project_id is malformed or the project files are not on the CDN
    """
    # NOTE(review): overriding App.cdn_bucket from the CDNBUCKET env var here looks like a
    # debugging leftover - the otherwise-identical implementation elsewhere omits it; confirm.
    App.cdn_bucket = os.environ.get('CDNBUCKET')
    self.project_id = project_id
    if len(project_id.split('/')) != 3:
        raise Exception('Project not found.')
    user_name, repo_name, commit_id = project_id.split('/')
    source_path = 'u/{0}'.format(project_id)
    print_all_key = '{0}/print_all.html'.format(source_path)
    print_all_file = tempfile.mktemp(prefix='print_all_')
    # Already generated by a previous run - reuse the cached page.
    if App.cdn_s3_handler().key_exists(print_all_key):
        return App.cdn_s3_handler().bucket_name + '/' + print_all_key
    files_dir = tempfile.mkdtemp(prefix='files_')
    App.cdn_s3_handler().download_dir(source_path, files_dir)
    project_dir = os.path.join(files_dir, source_path.replace('/', os.path.sep))
    if not os.path.isdir(project_dir):
        raise Exception('Project not found.')
    rc = RC(project_dir, repo_name)
    with codecs.open(print_all_file, 'w', 'utf-8-sig') as print_all:
        print_all.write("""<html lang="{0}" dir="{1}">
    <head>
        <meta charset="UTF-8"/>
        <title>{2}: {3}</title>
        <style type="text/css">
            body > div {{
                page-break-after: always;
            }}
        </style>
    </head>
    <body onLoad="window.print()">
        <h1>{2}: {3}</h1>
""".format(rc.resource.language.identifier, rc.resource.language.direction,
           rc.resource.language.title, rc.resource.title))
        for fname in sorted(glob(os.path.join(project_dir, '*.html')), key=self.front_to_back):
            with codecs.open(fname, 'r') as f:
                soup = BeautifulSoup(f, 'html.parser')
            # get the body of the raw html file
            content = soup.div
            if not content:
                content = BeautifulSoup(
                    '<div>No content</div>', 'html.parser').find('div').extract()
            content['id'] = os.path.basename(fname)
            print_all.write(unicode(content))  # NOTE: Python 2 builtin
        print_all.write("""
    </body>
</html>
""")
    App.cdn_s3_handler().upload_file(print_all_file, print_all_key, cache_time=0, content_type='text/html')
    return App.cdn_s3_handler().bucket_name + '/' + print_all_key
def validate_build_log(self, expected_status, expected_success, final=True):
    """
    Download the (final_)build_log.json from the CDN and assert its success/status fields.

    :param string expected_status:
    :param bool expected_success:
    :param bool final: check final_build_log.json when True, build_log.json otherwise
    """
    build_log_path = 'build_log.json' if not final else 'final_build_log.json'
    key = "{0}/{1}".format(self.lint_callback_data['s3_results_key'], build_log_path)
    build_log = App.cdn_s3_handler().get_json(key)
    # assertEquals is a deprecated alias removed in Python 3.12; use assertEqual.
    self.assertEqual(build_log['success'], expected_success)
    self.assertEqual(build_log['status'], expected_status)
def mock_s3_tw_project(self):
    """Unzip the en_tw fixture into temp space and seed the mocked CDN and door43 buckets with its files."""
    archive = os.path.join(self.resources_dir, 'converted_projects', 'en_tw_converted.zip')
    extract_dir = os.path.join(self.temp_dir, 'en_tw_converted')
    unzip(archive, extract_dir)
    self.src_dir = src_dir = os.path.join(extract_dir, 'en_tw_converted')
    self.project_key = 'u/door43/en_tw/12345678'
    handler = App.cdn_s3_handler()
    self.project_files = []
    for entry in os.listdir(src_dir):
        if os.path.isfile(os.path.join(src_dir, entry)):
            self.project_files.append(entry)
            handler.upload_file(os.path.join(src_dir, entry),
                                '{0}/{1}'.format(self.project_key, entry))
    handler.upload_file(os.path.join(src_dir, 'project.json'), 'u/door43/en_tw/project.json')
    App.door43_s3_handler().upload_file(os.path.join(self.resources_dir, 'templates', 'project-page.html'),
                                        'templates/project-page.html')
def setUp(self):
    """Runs before each test: in-memory App, mocked CDN handler, seeded tables, and temp work dirs."""
    App(prefix='{0}-'.format(self._testMethodName), db_connection_string='sqlite:///:memory:')
    App.cdn_s3_handler().create_bucket()
    # Replace S3 operations with local mocks so tests never touch the network.
    App.cdn_s3_handler().upload_file = self.mock_cdn_upload_file
    App.cdn_s3_handler().get_json = self.mock_cdn_get_json
    App.cdn_s3_handler().key_exists = self.mock_cdn_key_exists
    self.init_items()
    self.populate_table()
    try:
        os.makedirs(self.base_temp_dir)
    except OSError:
        # Directory already exists. The previous bare `except:` also swallowed
        # KeyboardInterrupt/SystemExit; narrowed to the filesystem error only.
        pass
    self.temp_dir = tempfile.mkdtemp(dir=self.base_temp_dir, prefix='callbackTest_')
    self.transferred_files = []
    self.raiseDownloadException = False
    self.s3_results_key = 'u/results'
    self.source_folder = tempfile.mkdtemp(dir=self.temp_dir, prefix='sources_')
def mock_s3_tn_project(self, part):
    """
    Unzip the en_tn fixture, stamp its build_log with the given part number, and upload
    the part's files (build log, index, finished marker, sample page) to the mocked CDN.
    """
    zip_file = os.path.join(self.resources_dir, 'converted_projects', 'en_tn_converted.zip')
    out_dir = os.path.join(self.temp_dir, 'en_tn_converted')
    unzip(zip_file, out_dir)
    src_dir = os.path.join(out_dir, 'en_tn_converted')
    self.project_files = [f for f in os.listdir(src_dir) if os.path.isfile(os.path.join(src_dir, f))]
    self.project_key = 'u/door43/en_tn/12345678'
    build_log = file_utils.load_json_object(os.path.join(src_dir, 'build_log.json'))
    build_log['part'] = part
    file_utils.write_file(os.path.join(src_dir, 'build_log.json'), build_log)
    App.cdn_s3_handler().upload_file(os.path.join(src_dir, 'build_log.json'),
                                     '{0}/{1}/build_log.json'.format(self.project_key, part))
    App.cdn_s3_handler().upload_file(os.path.join(src_dir, 'index.json'),
                                     '{0}/{1}/index.json'.format(self.project_key, part))
    # The 'finished' marker only needs to exist; the build log is a convenient placeholder body.
    App.cdn_s3_handler().upload_file(os.path.join(src_dir, 'build_log.json'),
                                     '{0}/{1}/finished'.format(self.project_key, part))
    App.cdn_s3_handler().upload_file(os.path.join(src_dir, '01-GEN.html'),
                                     '{0}/{1}/01-GEN.html'.format(self.project_key, part))
    # NOTE(review): key says 'en_tq' although this fixture is 'en_tn' - possibly deliberate,
    # possibly a copy-paste slip; confirm before changing (the duplicate elsewhere matches).
    App.cdn_s3_handler().upload_file(os.path.join(src_dir, 'project.json'), 'u/door43/en_tq/project.json')
    App.door43_s3_handler().upload_file(os.path.join(self.resources_dir, 'templates', 'project-page.html'),
                                        'templates/project-page.html')
def redeploy_all_projects(deploy_function, ignoretime=False):
    """Invoke the deploy lambda for every build_log.json under u/ older than 24h (all of them when ignoretime)."""
    cutoff = datetime.utcnow() - timedelta(hours=24)
    for log_obj in App.cdn_s3_handler().get_objects(prefix='u/', suffix='build_log.json'):
        modified = log_obj.last_modified.replace(tzinfo=None)
        # Skip logs touched within the last day unless a full redeploy was requested.
        if not ignoretime and cutoff <= modified:
            continue
        App.lambda_handler().invoke(function_name=deploy_function,
                                    payload={'prefix': App.prefix,
                                             'build_log_key': log_obj.key})
    return True
def mock_s3_bible_project(self, test_file_name, project_key, multi_part=False):
    """
    Unzip a converted bible-project fixture and upload its files to the mocked CDN bucket
    (and, when multi_part, mirror HTML files to the mocked door43 bucket with a sidebar div added).

    :param string test_file_name: zip fixture name under resources/converted_projects
    :param string project_key: bucket key prefix for the uploaded files
    :param bool multi_part: also push HTML files (plus the page template) to the door43 bucket
    """
    converted_proj_dir = os.path.join(self.resources_dir, 'converted_projects')
    test_file_base = test_file_name.split('.zip')[0]
    zip_file = os.path.join(converted_proj_dir, test_file_name)
    out_dir = os.path.join(self.temp_dir, test_file_base)
    unzip(zip_file, out_dir)
    project_dir = os.path.join(out_dir, test_file_base) + os.path.sep
    self.project_files = file_utils.get_files(out_dir)
    self.project_key = project_key
    for filename in self.project_files:
        sub_path = filename.split(project_dir)[1].replace(
            os.path.sep, '/')  # Make sure it is a bucket path
        App.cdn_s3_handler().upload_file(
            filename, '{0}/{1}'.format(project_key, sub_path))
        if multi_part:  # copy files from cdn to door43
            base_name = os.path.basename(filename)
            if '.html' in base_name:
                with codecs.open(filename, 'r', 'utf-8-sig') as f:
                    soup = BeautifulSoup(f, 'html.parser')
                # add nav tag
                new_tag = soup.new_tag('div', id='right-sidebar')
                soup.body.append(new_tag)
                html = unicode(soup)  # NOTE: Python 2 builtin - this module predates py3
                file_utils.write_file(
                    filename, html.encode('ascii', 'xmlcharrefreplace'))
            App.door43_s3_handler().upload_file(
                filename, '{0}/{1}'.format(project_key, base_name))
    # u, user, repo = project_key
    App.door43_s3_handler().upload_file(
        os.path.join(self.resources_dir, 'templates', 'project-page.html'),
        'templates/project-page.html')
def print_project(self, project_id):
    """
    Assemble all of a project's HTML pages into a single print_all.html on the CDN.

    :param string project_id: '<user>/<repo>/<commit>' key of the project to print
    :return string: '<bucket_name>/<print_all_key>' for the generated (or cached) page
    :raises Exception: when project_id is malformed or the project files are not on the CDN
    """
    self.project_id = project_id
    if len(project_id.split('/')) != 3:
        raise Exception('Project not found.')
    user_name, repo_name, commit_id = project_id.split('/')
    source_path = 'u/{0}'.format(project_id)
    print_all_key = '{0}/print_all.html'.format(source_path)
    print_all_file = tempfile.mktemp(prefix='print_all_')
    # Already generated by a previous run - reuse the cached page.
    if App.cdn_s3_handler().key_exists(print_all_key):
        return App.cdn_s3_handler().bucket_name + '/' + print_all_key
    files_dir = tempfile.mkdtemp(prefix='files_')
    App.cdn_s3_handler().download_dir(source_path, files_dir)
    project_dir = os.path.join(files_dir, source_path.replace('/', os.path.sep))
    if not os.path.isdir(project_dir):
        raise Exception('Project not found.')
    rc = RC(project_dir, repo_name)
    with codecs.open(print_all_file, 'w', 'utf-8-sig') as print_all:
        print_all.write("""<html lang="{0}" dir="{1}">
    <head>
        <meta charset="UTF-8"/>
        <title>{2}: {3}</title>
        <style type="text/css">
            body > div {{
                page-break-after: always;
            }}
        </style>
    </head>
    <body onLoad="window.print()">
        <h1>{2}: {3}</h1>
""".format(rc.resource.language.identifier, rc.resource.language.direction,
           rc.resource.language.title, rc.resource.title))
        for fname in sorted(glob(os.path.join(project_dir, '*.html')), key=self.front_to_back):
            with codecs.open(fname, 'r') as f:
                soup = BeautifulSoup(f, 'html.parser')
            # get the body of the raw html file
            content = soup.div
            if not content:
                content = BeautifulSoup('<div>No content</div>', 'html.parser').find('div').extract()
            content['id'] = os.path.basename(fname)
            print_all.write(unicode(content))  # NOTE: Python 2 builtin
        print_all.write("""
    </body>
</html>
""")
    App.cdn_s3_handler().upload_file(print_all_file, print_all_key, cache_time=0, content_type='text/html')
    return App.cdn_s3_handler().bucket_name + '/' + print_all_key
def do_conversion_for_repo(self, base_url, user, repo):
    """
    Fetch the latest commit for user/repo, clean previous artifacts, and run the conversion job.

    :param string base_url:
    :param string user:
    :param string repo:
    :return tuple: (build_log_json, commit_id, commit_path, commit_sha, success, job)
    """
    start = time.time()
    build_log_json = None
    job = None
    success = False
    self.cdn_handler = App.cdn_s3_handler()
    # TODO: change this to use gogs API when finished
    commit_id, commit_path, commit_sha = self.fetch_commit_data_for_repo(base_url, repo, user)
    commit_len = len(commit_id)
    # Only run the job for a well-formed commit id; otherwise return the initial defaults.
    if commit_len == COMMIT_LENGTH:
        self.delete_preconvert_zip_file(commit_sha)
        self.delete_tx_output_zip_file(commit_id)
        self.empty_destination_folder(commit_sha, repo, user)
        build_log_json, success, job = self.do_conversion_job(base_url, commit_id, commit_path, commit_sha, repo, user)
    App.logger.debug("\nConversion completed in " + str(elapsed_time(start)) + " seconds\n")
    return build_log_json, commit_id, commit_path, commit_sha, success, job
def redeploy_all_projects(deploy_function):
    """
    Asynchronously re-invoke the deploy lambda for every build_log.json under u/
    that was not modified within the last 24 hours.

    :param string deploy_function: name of the lambda function to invoke
    :return bool: always True
    """
    one_day_ago = datetime.utcnow() - timedelta(hours=24)
    for obj in App.cdn_s3_handler().get_objects(prefix='u/', suffix='build_log.json'):
        last_modified = obj.last_modified.replace(tzinfo=None)
        if one_day_ago <= last_modified:
            # Modified within the last day - assume it was already deployed recently.
            continue
        App.lambda_handler().invoke(
            FunctionName=deploy_function,
            InvocationType='Event',  # fire-and-forget async invocation
            LogType='Tail',
            Payload=json.dumps({
                'prefix': App.prefix,
                'build_log_key': obj.key
            })
        )
    # Removed the `i` counter from the original - it was incremented but never read.
    return True
def redeploy_all_projects(deploy_function):
    """
    Asynchronously re-invoke the deploy lambda for each build_log.json under u/
    older than 24 hours.

    :param string deploy_function: name of the lambda function to invoke
    :return bool: always True
    """
    # Note: the unused `i` counter from the original was removed.
    one_day_ago = datetime.utcnow() - timedelta(hours=24)
    for obj in App.cdn_s3_handler().get_objects(prefix='u/', suffix='build_log.json'):
        last_modified = obj.last_modified.replace(tzinfo=None)
        if one_day_ago <= last_modified:
            # Recently modified - skip; it was presumably deployed already.
            continue
        App.lambda_handler().invoke(FunctionName=deploy_function,
                                    InvocationType='Event',  # async fire-and-forget
                                    LogType='Tail',
                                    Payload=json.dumps({
                                        'prefix': App.prefix,
                                        'build_log_key': obj.key
                                    }))
    return True
def do_conversion_for_repo(self, base_url, user, repo):
    """
    Fetch the latest commit for user/repo, clean previous artifacts, and run the conversion job.

    :param string base_url:
    :param string user:
    :param string repo:
    :return tuple: (build_log_json, commit_id, commit_path, commit_sha, success, job)
    """
    start = time.time()
    build_log_json = None
    job = None
    success = False
    self.cdn_handler = App.cdn_s3_handler()
    # TODO: change this to use gogs API when finished
    commit_id, commit_path, commit_sha = self.fetch_commit_data_for_repo(
        base_url, repo, user)
    commit_len = len(commit_id)
    # Only run the job for a well-formed commit id; otherwise return the initial defaults.
    if commit_len == COMMIT_LENGTH:
        self.delete_preconvert_zip_file(commit_sha)
        self.delete_tx_output_zip_file(commit_id)
        self.empty_destination_folder(commit_sha, repo, user)
        build_log_json, success, job = self.do_conversion_job(
            base_url, commit_id, commit_path, commit_sha, repo, user)
    App.logger.debug("\nConversion completed in " + str(elapsed_time(start)) + " seconds\n")
    return build_log_json, commit_id, commit_path, commit_sha, success, job
def compare_build_logs(self, converted_build_log, deployed_build_log, destination_key):
    """
    Warn (without failing the test) about any top-level key that differs between the
    converted and deployed build logs. Re-fetches the converted log from the CDN first
    in case the in-memory copy is stale.
    """
    # Keys expected to match between the two build logs.
    keys = ["callback", "cdn_bucket", "cdn_file", "commit_id", "commit_message", "commit_url", "committed_by",
            "compare_url", "convert_module", "created_at", "errors", "identifier", "input_format", "job_id",
            "log", "output", "output_format", "repo_name", "repo_owner", "resource_type", "source", "status",
            "success", "user", "warnings"]
    if converted_build_log != deployed_build_log:
        # NOTE(review): os.path.join on an S3 key would use '\\' on Windows - presumably
        # this only ever runs on POSIX; confirm.
        converted_build_log = App.cdn_s3_handler().get_file_contents(
            os.path.join(destination_key, "build_log.json"))  # make sure we have the latest
        if converted_build_log != deployed_build_log:
            deployed_build_log_ = json.loads(deployed_build_log)
            converted_build_log_ = json.loads(converted_build_log)
            for key in keys:
                if key not in converted_build_log_:
                    self.warn("Key {0} missing in converted_build_log".format(key))
                    continue
                if key not in deployed_build_log_:
                    self.warn("Key {0} missing in deployed_build_log".format(key))
                    continue
                converted_value = converted_build_log_[key]
                deployed_value = deployed_build_log_[key]
                if converted_value != deployed_value:
                    self.warn("miscompare build of logs in key {0}: '{1}' - '{2}'".format(key, converted_value,
                                                                                          deployed_value))
def clear_commit_directory_in_cdn(self, s3_commit_key):
    """Delete every object under s3_commit_key in the CDN bucket (this project revision's directory)."""
    handler = App.cdn_s3_handler()
    for obj in handler.get_objects(prefix=s3_commit_key):
        App.logger.debug('Removing file: ' + obj.key)
        handler.delete_file(obj.key)
def write_data_to_file(self, output_dir, s3_commit_key, fname, data):
    """
    Write data to <output_dir>/<fname> and upload it to the CDN as <s3_commit_key>/<fname>.

    :param string output_dir: local directory to write into
    :param string s3_commit_key: CDN key prefix for the upload
    :param string fname: file name (also used as the key suffix)
    :param data: content passed through to write_file
    """
    out_file = os.path.join(output_dir, fname)
    write_file(out_file, data)
    key = s3_commit_key + '/' + fname
    # Fixed the log message: the original had a stray "': " left over from an earlier format string.
    App.logger.debug("Writing {0} to {1}".format(fname, key))
    App.cdn_s3_handler().upload_file(out_file, key, cache_time=0)
def deploy_revision_to_door43(self, build_log_key):
    """
    Deploys a single revision of a project to door43.org

    Reads the build log at build_log_key, templates the converted files (or merges
    multi-part results), and uploads the rendered output to the door43 bucket.

    :param string build_log_key: CDN key of the build_log.json describing the revision
    :return bool: True when a deploy completed; False when skipped (bad log, parts
                  not ready, already merged) or when templating failed
    """
    build_log = None
    try:
        build_log = App.cdn_s3_handler().get_json(build_log_key, catch_exception=False)
    except Exception as e:
        App.logger.debug("Deploying error could not access {0}: {1}".format(build_log_key, str(e)))
        pass
    # A usable build log must identify commit, owner and repo.
    if not build_log or 'commit_id' not in build_log or 'repo_owner' not in build_log \
            or 'repo_name' not in build_log:
        App.logger.debug("Exiting, Invalid build log at {0}: {1}".format(build_log_key, build_log))
        return False
    start = time.time()
    App.logger.debug("Deploying, build log: " + json.dumps(build_log)[:256])
    user = build_log['repo_owner']
    repo_name = build_log['repo_name']
    # Only the first 10 chars of the commit id are used in CDN/door43 keys.
    commit_id = build_log['commit_id'][:10]
    s3_commit_key = 'u/{0}/{1}/{2}'.format(user, repo_name, commit_id)
    s3_repo_key = 'u/{0}/{1}'.format(user, repo_name)
    download_key = s3_commit_key
    do_part_template_only = False
    do_multipart_merge = False
    if 'multiple' in build_log:
        # Final merge of a multi-part conversion: only proceed once every part is
        # deployed and no final merge has already been flagged.
        do_multipart_merge = build_log['multiple']
        App.logger.debug("Found multi-part merge: {0}".format(download_key))
        prefix = download_key + '/'
        undeployed = self.get_undeployed_parts(prefix)
        if len(undeployed) > 0:
            App.logger.debug("Exiting, Parts not yet deployed: {0}".format(undeployed))
            return False
        key_deployed_ = download_key + '/final_deployed'
        if App.cdn_s3_handler().key_exists(key_deployed_):
            App.logger.debug("Exiting, Already merged parts: {0}".format(download_key))
            return False
        self.write_data_to_file(self.temp_dir, key_deployed_, 'final_deployed', ' ')  # flag that deploy has begun
        App.logger.debug("Continuing with merge: {0}".format(download_key))
    elif 'part' in build_log:
        # Single part of a multi-part conversion: template it in isolation, but only
        # after its 'finished' marker exists.
        part = build_log['part']
        download_key += '/' + part
        do_part_template_only = True
        App.logger.debug("Found partial: {0}".format(download_key))
        if not App.cdn_s3_handler().key_exists(download_key + '/finished'):
            App.logger.debug("Exiting, Not ready to process partial")
            return False
    # Working directories live under self.temp_dir so test/deploy cleanup can remove them.
    source_dir = tempfile.mkdtemp(prefix='source_', dir=self.temp_dir)
    output_dir = tempfile.mkdtemp(prefix='output_', dir=self.temp_dir)
    template_dir = tempfile.mkdtemp(prefix='template_', dir=self.temp_dir)
    resource_type = build_log['resource_type']
    template_key = 'templates/project-page.html'
    template_file = os.path.join(template_dir, 'project-page.html')
    App.logger.debug("Downloading {0} to {1}...".format(App.door43_bucket + "/" + template_key, template_file))
    App.door43_s3_handler().download_file(template_key, template_file)
    if not do_multipart_merge:
        # Normal (or single-part) path: template the converted files.
        source_dir, success = self.template_converted_files(build_log, download_key, output_dir,
                                                            repo_name, resource_type, s3_commit_key,
                                                            source_dir, start, template_file)
        if not success:
            return False
    else:
        # Final-merge path: combine all previously templated parts.
        source_dir, success = self.multipart_master_merge(s3_commit_key, resource_type, download_key,
                                                          output_dir, source_dir, start, template_file)
        if not success:
            return False
    #######################
    #
    # Now do the deploy
    #
    #######################
    if not do_part_template_only or do_multipart_merge:
        # Copy first HTML file to index.html if index.html doesn't exist
        html_files = sorted(glob(os.path.join(output_dir, '*.html')))
        index_file = os.path.join(output_dir, 'index.html')
        if len(html_files) > 0 and not os.path.isfile(index_file):
            copyfile(os.path.join(output_dir, html_files[0]), index_file)
    # Copy all other files over that don't already exist in output_dir, like css files
    for filename in sorted(glob(os.path.join(source_dir, '*'))):
        output_file = os.path.join(output_dir, os.path.basename(filename))
        if not os.path.exists(output_file) and not os.path.isdir(filename):
            copyfile(filename, output_file)
        if do_part_template_only:
            # move files to common area (skip per-part bookkeeping files)
            basename = os.path.basename(filename)
            if basename not in ['finished', 'build_log.json', 'index.html', 'merged.json', 'lint_log.json']:
                App.logger.debug("Moving {0} to common area".format(basename))
                App.cdn_s3_handler().upload_file(filename, s3_commit_key + '/' + basename, cache_time=0)
                App.cdn_s3_handler().delete_file(download_key + '/' + basename)
    # save master build_log.json
    file_utils.write_file(os.path.join(output_dir, 'build_log.json'), build_log)
    App.logger.debug("Final build_log.json:\n" + json.dumps(build_log)[:256])
    # Upload all files to the door43.org bucket
    for root, dirs, files in os.walk(output_dir):
        for f in sorted(files):
            path = os.path.join(root, f)
            if os.path.isdir(path):
                continue
            key = s3_commit_key + path.replace(output_dir, '').replace(os.path.sep, '/')
            App.logger.debug("Uploading {0} to {1}".format(path, key))
            App.door43_s3_handler().upload_file(path, key, cache_time=0)
    if not do_part_template_only:
        # Now we place json files and redirect index.html for the whole repo to this index.html file
        # NOTE(review): bare except deliberately makes this redirect/copy step best-effort,
        # but it also hides real failures — consider narrowing and logging.
        try:
            App.door43_s3_handler().copy(from_key='{0}/project.json'.format(s3_repo_key),
                                         from_bucket=App.cdn_bucket)
            App.door43_s3_handler().copy(from_key='{0}/manifest.json'.format(s3_commit_key),
                                         to_key='{0}/manifest.json'.format(s3_repo_key))
            App.door43_s3_handler().redirect(s3_repo_key, '/' + s3_commit_key)
            App.door43_s3_handler().redirect(s3_repo_key + '/index.html', '/' + s3_commit_key)
            self.write_data_to_file(output_dir, s3_commit_key, 'deployed', ' ')  # flag that deploy has finished
        except:
            pass
    else:  # if processing part of multi-part merge
        self.write_data_to_file(output_dir, download_key, 'deployed', ' ')  # flag that deploy has finished
        if App.cdn_s3_handler().key_exists(s3_commit_key + '/final_build_log.json'):
            App.logger.debug("final build detected")
            App.logger.debug("conversions all finished, trigger final merge")
            # Copying final_build_log.json over build_log.json re-triggers deploy as a merge.
            App.cdn_s3_handler().copy(from_key=s3_commit_key + '/final_build_log.json',
                                      to_key=s3_commit_key + '/build_log.json')
    elapsed_seconds = int(time.time() - start)
    App.logger.debug("deploy type partial={0}, multi_merge={1}".format(do_part_template_only,
                                                                       do_multipart_merge))
    App.logger.debug("deploy completed in {0} seconds".format(elapsed_seconds))
    self.close()
    return True
def template_converted_files(self, build_log, download_key, output_dir, repo_name, resource_type,
                             s3_commit_key, source_dir, start, template_file):
    """
    Download converted files from the CDN and run them through the door43 templater.

    If no HTML was produced, a placeholder index.html (showing build errors or a
    "no content yet" notice) is generated before templating.

    :param dict build_log: build log of the conversion (reads 'errors' and, on the
        no-errors path, 'message')
    :param string download_key: CDN key prefix of the converted files
    :param string output_dir: local directory the templater writes into
    :param string repo_name: repo name, used in the placeholder page
    :param string resource_type: resource type passed to init_template
    :param string s3_commit_key: S3 key prefix used for the templater index
    :param string source_dir: local directory the converted files are downloaded into
    :param float start: epoch seconds when deploy started (for elapsed-time logging)
    :param string template_file: local path of the downloaded page template
    :return tuple: (source_dir, success) — source_dir is re-pointed into the
        downloaded subfolder; success is False when templating raised
    """
    App.cdn_s3_handler().download_dir(download_key + '/', source_dir)
    # download_dir recreates the key hierarchy locally, so descend into it.
    source_dir = os.path.join(source_dir, download_key.replace('/', os.path.sep))
    elapsed_seconds = int(time.time() - start)
    App.logger.debug("deploy download completed in " + str(elapsed_seconds) + " seconds")
    html_files = sorted(glob(os.path.join(source_dir, '*.html')))
    if len(html_files) < 1:
        # No converted pages: synthesize a placeholder index.html so the site
        # still shows something for this revision.
        content = ''
        if len(build_log['errors']) > 0:
            content += """
                <div style="text-align:center;margin-bottom:20px">
                    <i class="fa fa-times-circle-o" style="font-size: 250px;font-weight: 300;color: red"></i>
                    <br/>
                    <h2>Critical!</h2>
                    <h3>Here is what went wrong with this build:</h3>
                </div>
            """
            content += '<div><ul><li>' + '</li><li>'.join(build_log['errors']) + '</li></ul></div>'
        else:
            content += '<h1 class="conversion-requested">{0}</h1>'.format(build_log['message'])
            content += '<p><i>No content is available to show for {0} yet.</i></p>'.format(repo_name)
        html = """
            <html lang="en">
                <head>
                    <title>{0}</title>
                </head>
                <body>
                    <div id="content">{1}</div>
                </body>
            </html>""".format(repo_name, content)
        repo_index_file = os.path.join(source_dir, 'index.html')
        write_file(repo_index_file, html)
    # merge the source files with the template
    templater = init_template(resource_type, source_dir, output_dir, template_file)
    try:
        self.run_templater(templater)
        success = True
    except Exception as e:
        App.logger.error("Error applying template {0} to resource type {1}:".format(template_file,
                                                                                    resource_type))
        # NOTE(review): e.message is a Python 2 idiom; it does not exist on Python 3
        # exceptions — confirm this file still targets Python 2 or switch to str(e).
        App.logger.error(e.message)
        App.logger.error('{0}: {1}'.format(str(e), traceback.format_exc()))
        self.close()
        success = False
    if success:
        # update index of templated files
        index_json_fname = 'index.json'
        index_json = self.get_templater_index(s3_commit_key, index_json_fname)
        App.logger.debug("initial 'index.json': " + json.dumps(index_json)[:256])
        self.update_index_key(index_json, templater, 'titles')
        self.update_index_key(index_json, templater, 'chapters')
        self.update_index_key(index_json, templater, 'book_codes')
        App.logger.debug("final 'index.json': " + json.dumps(index_json)[:256])
        self.write_data_to_file(output_dir, s3_commit_key, index_json_fname, index_json)
    return source_dir, success
def get_results(s3_results_key, file_name):
    """
    Fetch and return the JSON document stored at <s3_results_key>/<file_name> in the CDN bucket.

    :param string s3_results_key: S3 key prefix of the results folder
    :param string file_name: name of the JSON file to fetch
    :return: parsed JSON contents of the file
    """
    return App.cdn_s3_handler().get_json("{0}/{1}".format(s3_results_key, file_name))
def upload_build_log(build_log, file_name, output_dir, s3_results_key, cache_time=0):
    """
    Serialize the build log to a local file and upload it to the CDN results folder.

    :param build_log: build log data (whatever write_file accepts)
    :param string file_name: name of the log file, locally and in S3
    :param string output_dir: local directory to write the file into
    :param string s3_results_key: S3 key prefix the log is uploaded under
    :param int cache_time: S3 cache time for the upload (default 0 = no caching)
    """
    local_path = os.path.join(output_dir, file_name)
    write_file(local_path, build_log)
    destination = '{0}/{1}'.format(s3_results_key, file_name)
    App.logger.debug('Saving build log to ' + destination)
    App.cdn_s3_handler().upload_file(local_path, destination, cache_time=cache_time)
def test_s3_handler(self):
    """The App should expose a usable CDN S3 handler."""
    handler = App.cdn_s3_handler()
    self.assertIsNotNone(handler)
def mock_s3_upload_file(self, project_file, s3_key, cache_time=0):
    """
    Test double for the S3 handler's upload_file: records the upload through
    self.upload_file instead of touching real S3. cache_time is accepted to match
    the real signature but is ignored.
    """
    cdn_bucket_name = App.cdn_s3_handler().bucket.name
    return self.upload_file(cdn_bucket_name, project_file, s3_key)