def init_table(self, view_count):
    try:
        App.language_stats_db_handler().table.delete()
    except:
        pass

    App.language_stats_db_handler().resource.create_table(
        TableName=App.language_stats_table_name,
        KeySchema=[
            {
                'AttributeName': 'lang_code',
                'KeyType': 'HASH'
            }
        ],
        AttributeDefinitions=[
            {
                'AttributeName': 'lang_code',
                'AttributeType': 'S'
            }
        ],
        ProvisionedThroughput={
            'ReadCapacityUnits': 5,
            'WriteCapacityUnits': 5
        },
    )
    lang_stats_data = {
        'lang_code': ViewCountTest.LANG_CODE.lower(),
        'last_updated': datetime.datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%SZ"),
        'manifest': '{}',
        'views': view_count
    }
    lang_stats = LanguageStats(lang_stats_data).insert()
    App.logger.debug("new language: " + lang_stats.lang_code)
def multipart_master_merge(self, s3_commit_key, resource_type, download_key, output_dir, source_dir, start,
                           template_file):
    prefix = download_key + '/'
    App.door43_s3_handler().download_dir(prefix, source_dir)  # get previous templated files
    source_dir = os.path.join(source_dir, download_key)
    files = sorted(glob(os.path.join(source_dir, '*.*')))
    for f in files:
        App.logger.debug("Downloaded: " + f)
    fname = os.path.join(source_dir, 'index.html')
    if os.path.isfile(fname):
        os.remove(fname)  # remove index if already exists
    elapsed_seconds = int(time.time() - start)
    App.logger.debug("deploy download completed in " + str(elapsed_seconds) + " seconds")
    templater = init_template(resource_type, source_dir, output_dir, template_file)

    # restore index from previous passes
    index_json = self.get_templater_index(s3_commit_key, 'index.json')
    templater.titles = index_json['titles']
    templater.chapters = index_json['chapters']
    templater.book_codes = index_json['book_codes']
    templater.already_converted = templater.files  # do not reconvert files

    # merge the source files with the template
    try:
        self.run_templater(templater)
        success = True
    except Exception as e:
        App.logger.error("Error multi-part applying template {0} to resource type {1}".format(template_file,
                                                                                              resource_type))
        self.close()
        success = False

    return source_dir, success
def test_tq_deploy_revision_to_door43(self):
    # given
    self.mock_s3_tq_project()
    build_log_key = '{0}/build_log.json'.format(self.project_key)

    # when
    ret = self.deployer.deploy_revision_to_door43(build_log_key)

    # then
    self.assertTrue(ret)
    self.assertTrue(App.door43_s3_handler().key_exists(build_log_key))
    files_to_verify = ['manifest.yaml']
    for book in BOOK_NUMBERS:
        html_file = '{0}-{1}.html'.format(BOOK_NUMBERS[book], book.upper())
        files_to_verify.append(html_file)
    for file_name in files_to_verify:
        key = '{0}/{1}'.format(self.project_key, file_name)
        self.assertTrue(App.door43_s3_handler().key_exists(key), "Key not found: {0}".format(key))
    parent_key = '/'.join(self.project_key.split('/')[:-1])
    for file_name in ['project.json']:
        key = '{0}/{1}'.format(parent_key, file_name)
        self.assertTrue(App.door43_s3_handler().key_exists(key), "Key not found: {0}".format(key))
def _handle(self, event, context):
    """
    :param dict event:
    :param context:
    """
    deployer = ProjectDeployer()
    prefix = ''
    try:
        if 'prefix' in event:
            prefix = event['prefix']
        if 'Records' in event:
            # If we got 'Records' that means a template change was uploaded to S3 and we got the trigger
            for record in event['Records']:
                # See if it is a notification from an S3 bucket
                if 's3' in record:
                    bucket_name = record['s3']['bucket']['name']
                    if '-' in bucket_name:
                        prefix = bucket_name.split('-')[0] + '-'
                    App(prefix=prefix)
                    key = record['s3']['object']['key']
                    deployer.deploy_revision_to_door43(key)
        elif 'build_log_key' in event:
            App(prefix=prefix)
            deployer.deploy_revision_to_door43(event['build_log_key'])
        else:
            App(prefix=prefix)
            # this is triggered manually through AWS Lambda console to update all projects
            deploy_function = '{0}tx_door43_deploy'.format(App.prefix)
            deployer.redeploy_all_projects(deploy_function)
    except Exception as e:
        App.logger.debug("Project Deployer Error: " + str(e))
    deployer.close()
def handle(self, event, context):
    """
    :param dict event:
    :param context:
    :return dict:
    """
    if 'vars' in event:
        App(**event['vars'])
    App.logger.debug("EVENT:")
    App.logger.debug(json.dumps(self.mask_event(event)))
    self.data = {}
    if 'data' in event and isinstance(event['data'], dict):
        self.data = event['data']
    if 'body-json' in event and isinstance(event['body-json'], dict):
        self.data.update(event['body-json'])
    try:
        return self._handle(event, context)
    except Exception as e:
        App.logger.error(e.message)
        App.logger.error('{0}: {1}'.format(str(e), traceback.format_exc()))
        raise EnvironmentError('Bad Request: {}'.format(e.message))
    finally:
        App.db_close()
def test_obs_deploy_revision_to_door43(self):
    self.mock_s3_obs_project()
    build_log_key = '{0}/build_log.json'.format(self.project_key)
    ret = self.deployer.deploy_revision_to_door43(build_log_key)
    self.assertTrue(ret)
    self.assertTrue(App.door43_s3_handler().key_exists(build_log_key))
    self.assertTrue(App.door43_s3_handler().key_exists('{0}/50.html'.format(self.project_key)))
def setUp(self):
    """Runs before each test."""
    App(prefix='{0}-'.format(self._testMethodName), db_connection_string='sqlite:///:memory:')
    App.cdn_s3_handler().create_bucket()
    self.temp_dir = tempfile.mkdtemp(prefix="test_project_printer")
    self.printer = ProjectPrinter()
    self.mock_s3_obs_project()
def delete_preconvert_zip_file(self, commit_sha):
    self.preconvert_handler = App.pre_convert_s3_handler()
    preconvert_key = self.get_preconvert_s3_key(commit_sha)
    if App.pre_convert_s3_handler().key_exists(preconvert_key):
        App.logger.debug("deleting preconvert file: " + preconvert_key)
        App.pre_convert_s3_handler().delete_file(preconvert_key, catch_exception=True)
def upload_archive(self):
    if self.cdn_file and os.path.isdir(os.path.dirname(self.cdn_file)):
        copy(self.output_zip_file, self.cdn_file)
    elif App.cdn_s3_handler():
        App.cdn_s3_handler().upload_file(self.output_zip_file, self.cdn_file, cache_time=0)
def update_project_json(self, commit_id, job, repo_name, repo_owner):
    """
    :param string commit_id:
    :param TxJob job:
    :param string repo_name:
    :param string repo_owner:
    :return:
    """
    project_json_key = 'u/{0}/{1}/project.json'.format(repo_owner, repo_name)
    project_json = App.cdn_s3_handler().get_json(project_json_key)
    project_json['user'] = repo_owner
    project_json['repo'] = repo_name
    project_json['repo_url'] = 'https://git.door43.org/{0}/{1}'.format(repo_owner, repo_name)
    commit = {
        'id': commit_id,
        'created_at': job.created_at,
        'status': job.status,
        'success': job.success,
        'started_at': None,
        'ended_at': None
    }
    if 'commits' not in project_json:
        project_json['commits'] = []
    commits = []
    for c in project_json['commits']:
        if c['id'] != commit_id:
            commits.append(c)
    commits.append(commit)
    project_json['commits'] = commits
    project_file = os.path.join(self.base_temp_dir, 'project.json')
    write_file(project_file, project_json)
    App.cdn_s3_handler().upload_file(project_file, project_json_key)
def update_project_file(build_log, output_dir):
    commit_id = build_log['commit_id']
    user_name = build_log['repo_owner']
    repo_name = build_log['repo_name']
    project_json_key = 'u/{0}/{1}/project.json'.format(user_name, repo_name)
    project_json = App.cdn_s3_handler().get_json(project_json_key)
    project_json['user'] = user_name
    project_json['repo'] = repo_name
    project_json['repo_url'] = 'https://{0}/{1}/{2}'.format(App.gogs_url, user_name, repo_name)
    commit = {
        'id': commit_id,
        'created_at': build_log['created_at'],
        'status': build_log['status'],
        'success': build_log['success'],
        'started_at': None,
        'ended_at': None
    }
    if 'started_at' in build_log:
        commit['started_at'] = build_log['started_at']
    if 'ended_at' in build_log:
        commit['ended_at'] = build_log['ended_at']
    if 'commits' not in project_json:
        project_json['commits'] = []
    commits = []
    for c in project_json['commits']:
        if c['id'] != commit_id:
            commits.append(c)
    commits.append(commit)
    project_json['commits'] = commits
    project_file = os.path.join(output_dir, 'project.json')
    write_file(project_file, project_json)
    App.cdn_s3_handler().upload_file(project_file, project_json_key, cache_time=0)
    return project_json
def upload_converted_files(s3_commit_key, unzip_dir):
    for root, dirs, files in os.walk(unzip_dir):
        for f in sorted(files):
            path = os.path.join(root, f)
            key = s3_commit_key + path.replace(unzip_dir, '')
            App.logger.debug('Uploading {0} to {1}'.format(f, key))
            App.cdn_s3_handler().upload_file(path, key, cache_time=0)
def setUp(self):
    """Runs before each test."""
    App(prefix='{0}-'.format(self._testMethodName), db_connection_string='sqlite:///:memory:')
    App.cdn_s3_handler().create_bucket()
    App.cdn_s3_handler().upload_file = self.mock_cdn_upload_file
    App.cdn_s3_handler().get_json = self.mock_cdn_get_json
    App.cdn_s3_handler().key_exists = self.mock_cdn_key_exists

    try:
        os.makedirs(self.base_temp_dir)
    except:
        pass

    self.temp_dir = tempfile.mkdtemp(dir=self.base_temp_dir, prefix='callbackTest_')
    self.transferred_files = []
    self.raiseDownloadException = False
    self.source_folder = None

    self.results_key = 'u/results'
    self.lint_callback_data = {
        'identifier': 'dummy_id',
        's3_results_key': self.results_key,
        'success': True,
        'info': [],
        'warnings': [],
        'errors': []
    }
    self.expected_error_count = 0
    self.expected_warning_count = 0
    self.expected_log_count = 0
    self.expected_status = "success"
    self.expected_success = True
    self.expected_all_parts_completed = True
    self.expected_multipart = False
def test_reset_app(self):
    default_name = App.name
    App(name='test-name')
    App()
    self.assertEqual(App.name, default_name)
    App.name = 'test-name-2'
    App(name='test-name-2', reset=False)
    self.assertNotEqual(App.name, default_name)
def set_deployed_flags(self, project_key, part_count, skip=-1):
    tempf = tempfile.mktemp(prefix="temp", suffix="deployed")
    file_utils.write_file(tempf, ' ')
    for i in range(0, part_count):
        if i != skip:
            key = '{0}/{1}/deployed'.format(project_key, i)
            App.cdn_s3_handler().upload_file(tempf, key, cache_time=0)
    os.remove(tempf)
def test_redeploy_all_projects(self):
    self.mock_s3_obs_project()
    App.cdn_s3_handler().put_contents('u/user1/project1/revision1/build_log.json', '{}')
    App.cdn_s3_handler().put_contents('u/user2/project2/revision2/build_log.json', '{}')
    self.assertTrue(self.deployer.redeploy_all_projects('test-door43_deployer'))
def test_db(self):
    App(db_connection_string='sqlite:///:memory:')
    App.db_create_tables([User.__table__])
    user = User(name='ed', fullname='Edward Scissorhands', password='12345')
    user.insert()
    user_from_db = User.get(name='ed')
    self.assertIsNotNone(user_from_db)
    self.assertEqual(user_from_db.password, '12345')
def process_callback(self):
    if not self.identifier:
        error = 'No identifier found'
        App.logger.error(error)
        raise Exception(error)

    if not self.s3_results_key:
        error = 'No s3_results_key found for identifier = {0}'.format(self.identifier)
        App.logger.error(error)
        raise Exception(error)

    id_parts = self.identifier.split('/')
    self.multipart = len(id_parts) > 3
    if self.multipart:
        part_count, part_id, book = id_parts[1:4]
        App.logger.debug('Multiple project, part {0} of {1}, linted book {2}'.format(part_id, part_count, book))
        s3__master_results_key = '/'.join(self.s3_results_key.split('/')[:-1])
    else:
        App.logger.debug('Single project')
        s3__master_results_key = self.s3_results_key

    build_log = {
        'identifier': self.identifier,
        'success': self.success,
        'multipart_project': self.multipart,
        'log': self.log,
        'warnings': self.warnings,
        'errors': self.errors,
        's3_commit_key': self.s3_results_key
    }

    if not self.success:
        msg = "Linter failed for identifier: " + self.identifier
        build_log['warnings'].append(msg)
        App.logger.error(msg)
    else:
        App.logger.debug("Linter {0} {1} warnings:\n{2}".format(self.identifier, len(self.warnings),
                                                                '\n'.join(self.warnings[:5])))

    has_warnings = len(build_log['warnings']) > 0
    if has_warnings:
        msg = "Linter {0} has Warnings!".format(self.identifier)
        build_log['log'].append(msg)
    else:
        msg = "Linter {0} completed with no warnings".format(self.identifier)
        build_log['log'].append(msg)

    ClientLinterCallback.upload_build_log(build_log, 'lint_log.json', self.temp_dir, self.s3_results_key)

    results = ClientLinterCallback.deploy_if_conversion_finished(s3__master_results_key, self.identifier)
    if results:
        self.all_parts_completed = True
        build_log = results

    remove_tree(self.temp_dir)  # cleanup
    App.db_close()
    return build_log
def test_construction_connection_string(self):
    """
    Test the construction of the connection string with multiple attributes
    """
    App(db_protocol='protocol', db_user='******', db_pass='******', db_end_point='my.endpoint.url', db_port='9999',
        db_name='db', db_connection_string_params='charset=utf8', auto_setup_db=False)
    expected = "protocol://*****:*****@my.endpoint.url:9999/db?charset=utf8"
    connection_str = App.construct_connection_string()
    self.assertEqual(connection_str, expected)
def template_converted_files(self, build_log, download_key, output_dir, repo_name, resource_type, s3_commit_key,
                             source_dir, start, template_file):
    App.cdn_s3_handler().download_dir(download_key + '/', source_dir)
    source_dir = os.path.join(source_dir, download_key.replace('/', os.path.sep))
    elapsed_seconds = int(time.time() - start)
    App.logger.debug("deploy download completed in " + str(elapsed_seconds) + " seconds")
    html_files = sorted(glob(os.path.join(source_dir, '*.html')))
    if len(html_files) < 1:
        content = ''
        if len(build_log['errors']) > 0:
            content += """
                <div style="text-align:center;margin-bottom:20px">
                    <i class="fa fa-times-circle-o" style="font-size: 250px;font-weight: 300;color: red"></i>
                    <br/>
                    <h2>Critical!</h2>
                    <h3>Here is what went wrong with this build:</h3>
                </div>
            """
            content += '<div><ul><li>' + '</li><li>'.join(build_log['errors']) + '</li></ul></div>'
        else:
            content += '<h1 class="conversion-requested">{0}</h1>'.format(build_log['message'])
            content += '<p><i>No content is available to show for {0} yet.</i></p>'.format(repo_name)
        html = """
            <html lang="en">
                <head>
                    <title>{0}</title>
                </head>
                <body>
                    <div id="content">{1}</div>
                </body>
            </html>""".format(repo_name, content)
        repo_index_file = os.path.join(source_dir, 'index.html')
        write_file(repo_index_file, html)

    # merge the source files with the template
    templater = init_template(resource_type, source_dir, output_dir, template_file)
    try:
        self.run_templater(templater)
        success = True
    except Exception as e:
        App.logger.error("Error applying template {0} to resource type {1}:".format(template_file, resource_type))
        App.logger.error(e.message)
        App.logger.error('{0}: {1}'.format(str(e), traceback.format_exc()))
        self.close()
        success = False

    if success:
        # update index of templated files
        index_json_fname = 'index.json'
        index_json = self.get_templater_index(s3_commit_key, index_json_fname)
        App.logger.debug("initial 'index.json': " + json.dumps(index_json)[:256])
        self.update_index_key(index_json, templater, 'titles')
        self.update_index_key(index_json, templater, 'chapters')
        self.update_index_key(index_json, templater, 'book_codes')
        App.logger.debug("final 'index.json': " + json.dumps(index_json)[:256])
        self.write_data_to_file(output_dir, s3_commit_key, index_json_fname, index_json)
    return source_dir, success
def test_setup_db_with_connection_string_parts(self):
    App(db_protocol='sqlite', db_user=None, db_pass=None, db_end_point=None, db_port=None, db_name=':memory:',
        db_connection_string_params=None)
    App.db_create_tables([User.__table__])
    user = User(name='ed', fullname='Edward Scissorhands', password='12345')
    user.insert()
    user_from_db = User.get(name='ed')
    self.assertIsNotNone(user_from_db)
    self.assertEqual(user_from_db.password, '12345')
    App.db_close()
def mock_s3_obs_project(self):
    zip_file = os.path.join(self.resources_dir, 'converted_projects', 'en-obs-complete.zip')
    out_dir = os.path.join(self.temp_dir, 'en-obs-complete')
    unzip(zip_file, out_dir)
    project_dir = os.path.join(out_dir, 'door43', 'en-obs', '12345678')
    self.project_files = [f for f in os.listdir(project_dir) if os.path.isfile(os.path.join(project_dir, f))]
    self.project_key = 'door43/en-obs/12345678'
    for filename in self.project_files:
        App.cdn_s3_handler().upload_file(os.path.join(project_dir, filename),
                                         'u/{0}/{1}'.format(self.project_key, filename))
    App.cdn_s3_handler().upload_file(os.path.join(out_dir, 'door43', 'en-obs', 'project.json'),
                                     'u/door43/en-obs/project.json')
def upload_zip_file(self, commit_id, zip_filepath):
    file_key = 'preconvert/{0}.zip'.format(commit_id)
    App.logger.debug('Uploading {0} to {1}/{2}...'.format(zip_filepath, App.pre_convert_bucket, file_key))
    try:
        App.pre_convert_s3_handler().upload_file(zip_filepath, file_key, cache_time=0)
    except Exception as e:
        App.logger.error('Failed to upload zipped repo up to server')
        App.logger.exception(e)
    finally:
        App.logger.debug('finished.')
    return file_key
def get(cls, *args, **kwargs):
    """
    :param args:
    :param kwargs:
    :return TxModel:
    """
    if args:
        kwargs[inspect(cls).primary_key[0].name] = args[0]
    item = cls.query(**kwargs).first()
    App.db().close()
    return item
def test_print_obs(self):
    self.printer.print_project(self.project_key)
    self.assertTrue(App.cdn_s3_handler().key_exists('u/{0}/print_all.html'.format(self.project_key)))
    html = App.cdn_s3_handler().get_file_contents('u/{0}/print_all.html'.format(self.project_key))
    soup = BeautifulSoup(html, 'html.parser')
    self.assertEqual(len(soup.div), 69)
    self.assertEqual(soup.html['lang'], 'en')
    self.assertEqual(soup.html['dir'], 'ltr')
    # Run again, shouldn't have to generate
    self.printer.print_project(self.project_key)
    self.assertTrue(App.cdn_s3_handler().key_exists('u/{0}/print_all.html'.format(self.project_key)))
def upload_build_log_to_s3(self, build_log, s3_commit_key, part=''):
    """
    :param dict build_log:
    :param string s3_commit_key:
    :param string part:
    :return:
    """
    build_log_file = os.path.join(self.base_temp_dir, 'build_log.json')
    write_file(build_log_file, build_log)
    upload_key = '{0}/{1}build_log.json'.format(s3_commit_key, part)
    App.logger.debug('Saving build log to ' + upload_key)
    App.cdn_s3_handler().upload_file(build_log_file, upload_key, cache_time=0)
def init_table(self):
    try:
        handler = App.language_stats_db_handler()
        handler.table.delete()
    except Exception as e:
        pass

    App.language_stats_db_handler().resource.create_table(
        TableName=App.language_stats_table_name,
        KeySchema=[
            {
                'AttributeName': 'lang_code',
                'KeyType': 'HASH'
            },
        ],
        AttributeDefinitions=[
            {
                'AttributeName': 'lang_code',
                'AttributeType': 'S'
            },
        ],
        ProvisionedThroughput={
            'ReadCapacityUnits': 5,
            'WriteCapacityUnits': 5
        },
        GlobalSecondaryIndexes=[
            {
                'IndexName': 'search_type-views-index',
                'KeySchema': [
                    {
                        'AttributeName': 'search_type',
                        'KeyType': 'HASH'
                    },
                ],
                'Projection': {
                    'ProjectionType': 'ALL'
                },
                'ProvisionedThroughput': {
                    'ReadCapacityUnits': 123,
                    'WriteCapacityUnits': 123
                }
            },
        ],
    )
    lang_stats_data = {
        'lang_code': '?lc=en',
        'last_updated': datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%SZ"),
        'search_type': 'Y',
        'views': SearchCountTest.INITIAL_VIEW_COUNT
    }
    lang_stats = LanguageStats(lang_stats_data).insert()
def _handle(self, event, context):
    """
    :param dict event:
    :param context:
    """
    deployer = ProjectDeployer()
    prefix = ''
    deploy_bucket = os.environ.get('DEPLOYBUCKET')
    cdn_bucket = os.environ.get('CDNBUCKET')
    deploy_function = os.environ.get('LAMBDA_FUNCTION_NAME')
    try:
        if 'prefix' in event:
            prefix = event['prefix']
        if 'Records' in event:
            # If we got 'Records' that means a template change was uploaded to S3 and we got the trigger
            for record in event['Records']:
                # See if it is a notification from an S3 bucket
                if 's3' in record:
                    bucket_name = record['s3']['bucket']['name']
                    if '-' in bucket_name:
                        prefix = bucket_name.split('-')[0] + '-'
                    App(prefix=prefix)
                    App.aws_region_name = record['awsRegion']
                    if cdn_bucket is not None:
                        App.cdn_bucket = cdn_bucket
                    if deploy_bucket is not None:
                        App.door43_bucket = deploy_bucket
                    if bucket_name == deploy_bucket:
                        deployer.redeploy_all_projects(deploy_function, True)
                    else:
                        key = record['s3']['object']['key']
                        deployer.deploy_revision_to_door43(key)
        elif 'build_log_key' in event:
            App(prefix=prefix)
            if deploy_bucket is not None:
                App.door43_bucket = deploy_bucket
            if cdn_bucket is not None:
                App.cdn_bucket = cdn_bucket
            deployer.deploy_revision_to_door43(event['build_log_key'])
        else:
            App(prefix=prefix)
            if deploy_bucket is not None:
                App.door43_bucket = deploy_bucket
            if cdn_bucket is not None:
                App.cdn_bucket = cdn_bucket
            # this is triggered manually through AWS Lambda console to update all projects
            deployer.redeploy_all_projects(deploy_function)
    except Exception:
        App.logger.exception("Project Deployer Error")
    deployer.close()
def test_prefix_vars(self):
    App(prefix='')
    self.assertEqual(App.cdn_bucket, 'cdn.door43.org')
    self.assertEqual(App.api_url, 'https://api.door43.org')
    App(prefix='test-')
    self.assertEqual(App.cdn_bucket, 'test-cdn.door43.org')
    self.assertEqual(App.api_url, 'https://test-api.door43.org')
    App(prefix='test2-')
    self.assertEqual(App.cdn_bucket, 'test2-cdn.door43.org')
    self.assertEqual(App.api_url, 'https://test2-api.door43.org')
    App(prefix='')
    self.assertEqual(App.cdn_bucket, 'cdn.door43.org')
    self.assertEqual(App.api_url, 'https://api.door43.org')
def test_tw_deploy_revision_to_door43(self):
    self.mock_s3_tw_project()
    build_log_key = '{0}/build_log.json'.format(self.project_key)
    ret = self.deployer.deploy_revision_to_door43(build_log_key)
    self.assertTrue(ret)
    self.assertTrue(App.door43_s3_handler().key_exists(build_log_key))
    for file_name in ['index.html', 'kt.html', 'names.html', 'other.html', 'build_log.json', 'manifest.yaml']:
        key = '{0}/{1}'.format(self.project_key, file_name)
        self.assertTrue(App.door43_s3_handler().key_exists(key), "Key not found: {0}".format(key))
    parent_key = '/'.join(self.project_key.split('/')[:-1])
    for file_name in ['project.json']:
        key = '{0}/{1}'.format(parent_key, file_name)
        self.assertTrue(App.door43_s3_handler().key_exists(key), "Key not found: {0}".format(key))
def get_undeployed_parts(self, prefix):
    unfinished = []
    for o in App.cdn_s3_handler().get_objects(prefix=prefix, suffix='/build_log.json'):
        parts = o.key.split(prefix)
        if len(parts) == 2:
            parts = parts[1].split('/')
            if len(parts) > 1:
                part_num = parts[0]
                deployed_key = prefix + part_num + '/deployed'
                if not App.cdn_s3_handler().key_exists(deployed_key):
                    App.logger.debug("Part {0} unfinished".format(part_num))
                    unfinished.append(part_num)
    return unfinished
def redeploy_all_projects(deploy_function, ignoretime=False):
    one_day_ago = datetime.utcnow() - timedelta(hours=24)
    for obj in App.cdn_s3_handler().get_objects(prefix='u/', suffix='build_log.json'):
        last_modified = obj.last_modified.replace(tzinfo=None)
        if one_day_ago <= last_modified and not ignoretime:
            continue
        App.lambda_handler().invoke(function_name=deploy_function,
                                    payload={
                                        'prefix': App.prefix,
                                        'build_log_key': obj.key
                                    })
    return True
def setUp(self):
    """Runs before each test."""
    App(prefix='{0}-'.format(self._testMethodName), db_connection_string='sqlite:///:memory:')
    App.cdn_s3_handler().create_bucket()
    App.pre_convert_s3_handler().create_bucket()
    App.cdn_s3_handler().upload_file = self.mock_cdn_upload_file
    App.cdn_s3_handler().get_json = self.mock_cdn_get_json
    App.pre_convert_s3_handler().upload_file = self.mock_s3_upload_file

    try:
        os.makedirs(ClientWebhookTest.base_temp_dir)
    except:
        pass

    self.temp_dir = tempfile.mkdtemp(dir=self.base_temp_dir, prefix='webhookTest_')
    self.job_converter_count = 0
    self.job_linter_count = 0
    self.uploaded_files = []

    self.job_data = {
        'job_id': '123456890',
        'status': 'started',
        'success': False,
        'resource_type': 'obs',
        'input_format': 'md',
        'output_format': 'html',
        'convert_module': 'module1',
        'created_at': datetime.utcnow(),
        'errors': []
    }
    self.register_modules()
def setUp(self):
    """Runs before each test."""
    App(prefix='{0}-'.format(self._testMethodName))
    App.cdn_s3_handler().create_bucket()
    App.door43_s3_handler().create_bucket()
    self.temp_dir = tempfile.mkdtemp(prefix="test_project_deployer")
    self.deployer = ProjectDeployer()
    TdLanguage.language_list = {
        'aa': TdLanguage({'gw': False, 'ld': 'ltr', 'ang': 'Afar', 'lc': 'aa', 'ln': 'Afaraf',
                          'lr': 'Africa', 'pk': 6}),
        'en': TdLanguage({'gw': True, 'ld': 'ltr', 'ang': 'English', 'lc': 'en', 'ln': 'English',
                          'lr': 'Europe', 'pk': 1747}),
        'es': TdLanguage({'gw': True, 'ld': 'ltr', 'ang': 'Spanish', 'lc': 'es', 'ln': 'espa\xf1ol',
                          'lr': 'Europe', 'pk': 1776}),
        'fr': TdLanguage({'gw': True, 'ld': 'ltr', 'ang': 'French', 'lc': 'fr',
                          'ln': 'fran\xe7ais, langue fran\xe7aise', 'lr': 'Europe', 'pk': 1868})
    }
def setUp(self):
    """Runs before each test."""
    App(prefix='{0}-'.format(self._testMethodName), db_connection_string='sqlite:///:memory:')
    self.tx_manager = TxManager()
    self.searches = None
    self.language_views = None

    try:
        App.language_stats_db_handler().table.delete()
    except:
        pass

    App.language_stats_db_handler().resource.create_table(
        TableName=App.language_stats_table_name,
        KeySchema=[
            {
                'AttributeName': 'lang_code',
                'KeyType': 'HASH'
            },
        ],
        AttributeDefinitions=[
            {
                'AttributeName': 'lang_code',
                'AttributeType': 'S'
            },
        ],
        ProvisionedThroughput={
            'ReadCapacityUnits': 5,
            'WriteCapacityUnits': 5
        },
        GlobalSecondaryIndexes=[
            {
                'IndexName': 'search_type-views-index',
                'KeySchema': [
                    {
                        'AttributeName': 'search_type',
                        'KeyType': 'HASH'
                    },
                ],
                'Projection': {
                    'ProjectionType': 'ALL'
                },
                'ProvisionedThroughput': {
                    'ReadCapacityUnits': 123,
                    'WriteCapacityUnits': 123
                }
            },
        ],
    )
def check_download(self, commit_id):
    if not commit_id:
        commit_id = ''
    App.logger.debug("Start: check for download: " + commit_id)
    response = {  # default to error
        'ErrorMessage': DownloadMetrics.ACCESS_FAILED_ERROR + commit_id
    }
    if not commit_id:
        App.logger.warning("Invalid commit: " + commit_id)
        return response

    key = 'preconvert/{0}.zip'.format(commit_id)
    try:
        download_exists = App.pre_convert_s3_handler().key_exists(key)
    except Exception as e:
        App.logger.error("Access failure for '" + key + "': " + str(e))
        return response

    del response['ErrorMessage']
    App.logger.debug("Download exists for '" + key + "': " + str(download_exists))
    response['download_exists'] = download_exists
    return response
def get_templater_index(s3_commit_key, index_json_fname):
    index_json = App.cdn_s3_handler().get_json(s3_commit_key + '/' + index_json_fname)
    if not index_json:
        index_json['titles'] = {}
        index_json['chapters'] = {}
        index_json['book_codes'] = {}
    return index_json
def merge_build_status_for_file(build_log, s3_results_key, file_name, linter_file=False):
    key = "{0}/{1}".format(s3_results_key, file_name)
    file_results = App.cdn_s3_handler().get_json(key)
    if file_results:
        build_log = ClientLinterCallback.merge_results_logs(build_log, file_results, linter_file)
        return build_log
    return None
def is_convert_finished(s3_results_key):
    key = "{0}/{1}".format(s3_results_key, 'finished')
    try:
        convert_finished = App.cdn_s3_handler().key_exists(key)
    except Exception as e:
        convert_finished = False
    return convert_finished
def test_update_manifest(self):
    repo_name = self.items['francis/fr_ulb']['repo_name']
    user_name = self.items['francis/fr_ulb']['user_name']
    tx_manifest = TxManifest.get(repo_name=repo_name, user_name=user_name)
    # Update by setting fields and calling update()
    tx_manifest.resource_id = 'udb'
    tx_manifest.title = 'Unlocked Dynamic Bible'
    tx_manifest.update()
    manifest_from_db = TxManifest.get(repo_name=repo_name, user_name=user_name)
    self.assertEqual(manifest_from_db.title, tx_manifest.title)
    # Update by giving a dict to update()
    tx_manifest.views = 5
    tx_manifest.update()
    manifest_from_db = TxManifest.get(repo_name=repo_name, user_name=user_name)
    self.assertEqual(manifest_from_db.views, 5)
    App.db_close()
def test_tn_deploy_revision_to_door43(self):
    # given
    part = '1'
    self.mock_s3_tn_project(part)
    build_log_key = '{0}/{1}/build_log.json'.format(self.project_key, part)

    # when
    ret = self.deployer.deploy_revision_to_door43(build_log_key)

    # then
    self.assertTrue(ret)
    self.assertTrue(App.door43_s3_handler().key_exists('{0}/build_log.json'.format(self.project_key)))
    files_to_verify = ['01-GEN.html', 'index.json']
    for file_name in files_to_verify:
        key = '{0}/{1}'.format(self.project_key, file_name)
        self.assertTrue(App.door43_s3_handler().key_exists(key), "Key not found: {0}".format(key))