def write_data_to_file_and_upload_to_CDN(self, output_dir:str, s3_commit_key:str,
                                                 fname:str, data:Union[str, Dict[str,Any]]) -> None:
     out_file = os.path.join(output_dir, fname)
     write_file(out_file, data)
     key = s3_commit_key + '/' + fname
     AppSettings.logger.debug(f"Uploading '{fname}' to {AppSettings.cdn_bucket_name} {key} …")
     AppSettings.cdn_s3_handler().upload_file(out_file, key, cache_time=0)
Example #2
 def test_reset_app(self):
     default_name = AppSettings.name
     AppSettings(name='test-name')
     AppSettings()
     self.assertEqual(AppSettings.name, default_name)
     AppSettings.name = 'test-name-2'
     AppSettings(name='test-name-2', reset=False)
     self.assertNotEqual(AppSettings.name, default_name)
 def set_deployed_flags(self, project_key, part_count, skip=-1):
     tempf = tempfile.mktemp(prefix="temp", suffix="deployed")
     file_utils.write_file(tempf, ' ')
     for i in range(0, part_count):
         if i != skip:
             key = '{0}/{1}/deployed'.format(project_key, i)
             AppSettings.cdn_s3_handler().upload_file(tempf,
                                                      key,
                                                      cache_time=0)
     os.remove(tempf)
Example #4
 def test_db(self):
     AppSettings(db_connection_string='sqlite:///:memory:')
     AppSettings.db_create_tables([User.__table__])
     user = User(name='ed',
                 fullname='Edward Scissorhands',
                 password='12345')
     user.insert()
     user_from_db = User.get(name='ed')
     self.assertIsNotNone(user_from_db)
     self.assertEqual(user_from_db.password, '12345')
 def get(cls, *args, **kwargs):
     """
     :param args:
     :param kwargs:
     :return TxModel:
     """
     if args:
         kwargs[inspect(cls).primary_key[0].name] = args[0]
     item = cls.query(**kwargs).first()
     AppSettings.db().close()
     return item
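A brief usage sketch, assuming the User model and in-memory database from the test_db example above (and, for the positional form, that the model's primary key is name; both names are assumptions for illustration):

user = User.get('ed')            # positional: mapped onto the primary key
same_user = User.get(name='ed')  # keyword: passed straight to query()
assert user.fullname == same_user.fullname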
Example #6
 def test_prefix_vars(self):
     AppSettings(prefix='')
     self.assertEqual(AppSettings.cdn_bucket_name, 'cdn.door43.org')
     # self.assertEqual(AppSettings.api_url, 'https://api.door43.org')
     AppSettings(prefix='test-')
     self.assertEqual(AppSettings.cdn_bucket_name, 'test-cdn.door43.org')
     # self.assertEqual(AppSettings.api_url, 'https://test-api.door43.org')
     AppSettings(prefix='test2-')
     self.assertEqual(AppSettings.cdn_bucket_name, 'test2-cdn.door43.org')
     # self.assertEqual(AppSettings.api_url, 'https://test2-api.door43.org')
     AppSettings(prefix='')
     self.assertEqual(AppSettings.cdn_bucket_name, 'cdn.door43.org')
Example #7
    def upload_converted_files_to_CDN(s3_commit_key:str, unzip_dir:str) -> None:
        """
        Uploads the converted (but not templated) files to the cdn.door43.org bucket

        NOTE: These are used from there by the Print button/function.
        """
        AppSettings.logger.info(f"Uploading converted files from {unzip_dir} to {prefix}CDN {s3_commit_key} …")
        for root, _dirs, files in os.walk(unzip_dir):
            for filename in sorted(files):
                filepath = os.path.join(root, filename)
                key = s3_commit_key + filepath.replace(unzip_dir, '')
                AppSettings.logger.debug(f"Uploading {filename} to {prefix}CDN {key} …")
                AppSettings.cdn_s3_handler().upload_file(filepath, key, cache_time=0)
Example #8
 def upload_archive(self) -> None:
     """
     Uploads self.output_zip_file
     """
     #AppSettings.logger.debug("converter.upload_archive()")
     if self.cdn_file_key and os.path.isdir(
             os.path.dirname(self.cdn_file_key)):
         #AppSettings.logger.debug("converter.upload_archive() doing copy")
         copy(self.output_zip_file, self.cdn_file_key)
     elif AppSettings.cdn_s3_handler():
         #AppSettings.logger.debug("converter.upload_archive() using S3 handler")
         AppSettings.cdn_s3_handler().upload_file(self.output_zip_file,
                                                  self.cdn_file_key,
                                                  cache_time=0)
 def setUp(self):
     """Runs before each test."""
     AppSettings(prefix=f'{self._testMethodName}-')
     AppSettings.cdn_s3_handler().create_bucket()
     AppSettings.door43_s3_handler().create_bucket()
     self.temp_dir = tempfile.mkdtemp(prefix='Door43_test_project_deployer')
     self.deployer = ProjectDeployer(self.temp_dir)
     TdLanguage.language_list = {
         'aa':
         TdLanguage({
             'gw': False,
             'ld': 'ltr',
             'ang': 'Afar',
             'lc': 'aa',
             'ln': 'Afaraf',
             'lr': 'Africa',
             'pk': 6
         }),
         'en':
         TdLanguage({
             'gw': True,
             'ld': 'ltr',
             'ang': 'English',
             'lc': 'en',
             'ln': 'English',
             'lr': 'Europe',
             'pk': 1747
         }),
         'es':
         TdLanguage({
             'gw': True,
             'ld': 'ltr',
             'ang': 'Spanish',
             'lc': 'es',
             'ln': 'español',
             'lr': 'Europe',
             'pk': 1776
         }),
         'fr':
         TdLanguage({
             'gw': True,
             'ld': 'ltr',
             'ang': 'French',
             'lc': 'fr',
             'ln': 'français, langue française',
             'lr': 'Europe',
             'pk': 1868
         })
     }
Example #10
 def test_construction_connection_string(self):
     """
     Test the construction of the connection string with multiple attributes
     """
     AppSettings(db_protocol='protocol',
                 db_user='user',  # placeholder credentials (the originals were masked)
                 db_pass='pass',
                 db_end_point='my.endpoint.url',
                 db_port='9999',
                 db_name='db',
                 db_connection_string_params='charset=utf8',
                 auto_setup_db=False)
     expected = "protocol://*****:*****@my.endpoint.url:9999/db?charset=utf8"
     connection_str = AppSettings.construct_connection_string()
     self.assertEqual(connection_str, expected)
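For reference, the expected value implies a simple format; a minimal sketch of assembling such a string (an illustration of the expected shape, not the library's actual implementation):

def construct_connection_string_sketch(protocol, user, password,
                                       end_point, port, name, params=None):
    # Builds 'protocol://user:pass@host:port/db[?params]' as in the test above.
    conn = f"{protocol}://{user}:{password}@{end_point}:{port}/{name}"
    return f"{conn}?{params}" if params else conn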
 def get_templater_index(s3_commit_key:str, index_json_fname:str) -> Dict[str,Any]:
     index_json = AppSettings.cdn_s3_handler().get_json(s3_commit_key + '/' + index_json_fname)
     if not index_json:  # get_json() may have returned None or an empty dict
         index_json = {}
         index_json['titles'] = {}
         index_json['chapters'] = {}
         index_json['book_codes'] = {}
     return index_json
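A hedged usage sketch; the commit key and index file name below are illustrative, matching the shapes used in the mock tests elsewhere on this page:

# Hypothetical call; returns a dict with at least the three keys above.
index = get_templater_index('u/door43/en-obs/12345678', 'index.json')
print(sorted(index.keys()))  # includes 'book_codes', 'chapters', 'titles'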
 def validate_bible_results(self, ret, build_log_key, expect_success,
                            output_key):
     self.assertEqual(ret, expect_success)
     if expect_success:
         if output_key:
             self.assertTrue(
                 AppSettings.door43_s3_handler().key_exists(output_key))
Example #13
 def test_update_manifest(self):
     repo_name = self.items['francis/fr_ulb']['repo_name']
     user_name = self.items['francis/fr_ulb']['user_name']
     tx_manifest = TxManifest.get(repo_name=repo_name, user_name=user_name)
     # Update by setting fields and calling update()
     tx_manifest.resource_id = 'udb'
     tx_manifest.title = 'Unlocked Dynamic Bible'
     tx_manifest.update()
     manifest_from_db = TxManifest.get(repo_name=repo_name, user_name=user_name)
     self.assertEqual(manifest_from_db.title, tx_manifest.title)
     # Update another field and call update() again
     tx_manifest.views = 5
     tx_manifest.update()
     manifest_from_db = TxManifest.get(repo_name=repo_name, user_name=user_name)
     self.assertEqual(manifest_from_db.views, 5)
     AppSettings.db_close()
Example #14
 def test_setup_db_with_connection_string_parts(self):
     AppSettings(db_protocol='sqlite',
                 db_user=None,
                 db_pass=None,
                 db_end_point=None,
                 db_port=None,
                 db_name=':memory:',
                 db_connection_string_params=None)
     AppSettings.db_create_tables([User.__table__])
     user = User(name='ed',
                 fullname='Edward Scissorhands',
                 password='12345')
     user.insert()
     user_from_db = User.get(name='ed')
     self.assertIsNotNone(user_from_db)
     self.assertEqual(user_from_db.password, '12345')
     AppSettings.db_close()
def get_jobID_from_commit_buildLog(project_folder_key:str, ix:int, commit_id:str) -> Optional[str]:
    """
    Look for build_log.json in the Door43 bucket
        and extract the job_id from it.

    NOTE: It seems like old builds also put build_log.json in the CDN bucket
            but the new ones don't seem to have that.

    Return None if anything fails.
    """
    file_key = f'{project_folder_key}{commit_id}/build_log.json'
    try:
        file_content = AppSettings.door43_s3_handler() \
                    .resource.Object(bucket_name=AppSettings.door43_bucket_name, key=file_key) \
                    .get()['Body'].read().decode('utf-8')
        json_content = json.loads(file_content)
        return json_content['job_id']
    except Exception as e:
        AppSettings.logger.critical(f"get_jobID_from_commit_buildLog threw an exception while getting {prefix}D43 {ix:,} '{file_key}': {e}")
        return None
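A usage sketch, assuming the key layout the function builds (a project folder key ending in '/', followed by the commit id; the example values are hypothetical):

# Resolves to 'u/door43/en-obs/12345678/build_log.json' in the Door43 bucket.
job_id = get_jobID_from_commit_buildLog('u/door43/en-obs/', 0, '12345678')
if job_id is None:
    print('build_log.json missing, unreadable, or lacking a job_id')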
 def mock_s3_obs_project(self):
     zip_file = os.path.join(self.resources_dir, 'converted_projects',
                             'en-obs-complete.zip')
     out_dir = os.path.join(self.temp_dir, 'en-obs-complete')
     unzip(zip_file, out_dir)
     project_dir = os.path.join(out_dir, 'door43', 'en-obs', '12345678')
     self.project_files = [
         f for f in os.listdir(project_dir)
         if os.path.isfile(os.path.join(project_dir, f))
     ]
     self.project_key = 'u/door43/en-obs/12345678'
     for filename in self.project_files:
         AppSettings.cdn_s3_handler().upload_file(
             os.path.join(project_dir, filename),
             '{0}/{1}'.format(self.project_key, filename))
     AppSettings.cdn_s3_handler().upload_file(
         os.path.join(out_dir, 'door43', 'en-obs', 'project.json'),
         'u/door43/en-obs/project.json')
     AppSettings.door43_s3_handler().upload_file(
         os.path.join(self.resources_dir, 'templates', 'project-page.html'),
         'templates/project-page.html')
    def mock_s3_bible_project(self,
                              test_file_name,
                              project_key,
                              multi_part=False):
        converted_proj_dir = os.path.join(self.resources_dir,
                                          'converted_projects')
        test_file_base = test_file_name.split('.zip')[0]
        zip_file = os.path.join(converted_proj_dir, test_file_name)
        out_dir = os.path.join(self.temp_dir, test_file_base)
        unzip(zip_file, out_dir)
        project_dir = os.path.join(out_dir, test_file_base) + os.path.sep
        self.project_files = file_utils.get_files(out_dir)
        self.project_key = project_key
        for filename in self.project_files:
            sub_path = filename.split(project_dir)[1].replace(
                os.path.sep, '/')  # Make sure it is a bucket path
            AppSettings.cdn_s3_handler().upload_file(
                filename, '{0}/{1}'.format(project_key, sub_path))

            if multi_part:  # copy files from cdn to door43
                base_name = os.path.basename(filename)
                if '.html' in base_name:
                    with open(filename, 'r') as f:
                        soup = BeautifulSoup(f, 'html.parser')

                    # add nav tag
                    new_tag = soup.new_tag('div', id='right-sidebar')
                    soup.body.append(new_tag)
                    html = str(soup)
                    file_utils.write_file(
                        filename, html.encode('ascii', 'xmlcharrefreplace'))

                AppSettings.door43_s3_handler().upload_file(
                    filename, '{0}/{1}'.format(project_key, base_name))

        # u, user, repo = project_key
        AppSettings.door43_s3_handler().upload_file(
            os.path.join(self.resources_dir, 'templates', 'project-page.html'),
            'templates/project-page.html')
def job(queued_json_payload:Dict[str,Any]) -> None:
    """
    This function is called by the rq package to process a job in the queue(s).
        (Don't rename this function.)

    The job is removed from the queue before the job is started,
        but if the job throws an exception or times out (timeout specified in enqueue process)
            then the job gets added to the 'failed' queue.
    """
    AppSettings.logger.info("Door43-Job-Handler received a callback" + (" (in debug mode)" if debug_mode_flag else ""))
    start_time = time.time()
    stats_client.incr(f'{callback_stats_prefix}.jobs.attempted')

    current_job = get_current_job()
    #print(f"Current job: {current_job}") # Mostly just displays the job number and payload
    #print("id",current_job.id) # Displays job number
    #print("origin",current_job.origin) # Displays queue name
    #print("meta",current_job.meta) # Empty dict

    # AppSettings.logger.info(f"Updating queue statistics…")
    our_queue = Queue(callback_queue_name, connection=current_job.connection)
    len_our_queue = len(our_queue) # Should normally sit at zero here
    # AppSettings.logger.debug(f"Queue '{callback_queue_name}' length={len_our_queue}")
    stats_client.gauge(f'"{door43_stats_prefix}.enqueue-job.callback.queue.length.current', len_our_queue)
    AppSettings.logger.info(f"Updated stats for '{door43_stats_prefix}.enqueue-job.callback.queue.length.current' to {len_our_queue}")

    #print(f"Got a job from {current_job.origin} queue: {queued_json_payload}")
    #print(f"\nGot job {current_job.id} from {current_job.origin} queue")
    #queue_prefix = 'dev-' if current_job.origin.startswith('dev-') else ''
    #assert queue_prefix == prefix
    try:
        job_descriptive_name, door43_webhook_received_at = \
                process_callback_job(prefix, queued_json_payload, current_job.connection)
    except Exception as e:
        # Catch most exceptions here so we can log them to CloudWatch
        prefixed_name = f"{prefix}Door43_Callback"
        AppSettings.logger.critical(f"{prefixed_name} threw an exception while processing: {queued_json_payload}")
        AppSettings.logger.critical(f"{e}: {traceback.format_exc()}")
        AppSettings.close_logger() # Ensure queued logs are uploaded to AWS CloudWatch
        # Now attempt to log it to an additional, separate FAILED log
        import logging
        from boto3 import Session
        from watchtower import CloudWatchLogHandler
        logger2 = logging.getLogger(prefixed_name)
        test_mode_flag = os.getenv('TEST_MODE', '')
        travis_flag = os.getenv('TRAVIS_BRANCH', '')
        log_group_name = f"FAILED_{'' if test_mode_flag or travis_flag else prefix}tX" \
                         f"{'_DEBUG' if debug_mode_flag else ''}" \
                         f"{'_TEST' if test_mode_flag else ''}" \
                         f"{'_TravisCI' if travis_flag else ''}"
        aws_access_key_id = os.environ['AWS_ACCESS_KEY_ID']
        boto3_session = Session(aws_access_key_id=aws_access_key_id,
                            aws_secret_access_key=os.environ['AWS_SECRET_ACCESS_KEY'],
                            region_name='us-west-2')
        failure_watchtower_log_handler = CloudWatchLogHandler(boto3_session=boto3_session,
                                                    use_queues=False,
                                                    log_group=log_group_name,
                                                    stream_name=prefixed_name)
        logger2.addHandler(failure_watchtower_log_handler)
        logger2.setLevel(logging.DEBUG)
        logger2.info(f"Logging to AWS CloudWatch group '{log_group_name}' using key '…{aws_access_key_id[-2:]}'.")
        logger2.critical(f"{prefixed_name} threw an exception while processing: {queued_json_payload}")
        logger2.critical(f"{e}: {traceback.format_exc()}")
        failure_watchtower_log_handler.close()
        # NOTE: following line removed as stats recording used too much disk space
        # stats_client.gauge(user_projects_invoked_string, 1) # Mark as 'failed'
        stats_client.gauge(project_types_invoked_string, 1) # Mark as 'failed'
        raise e # We raise the exception again so it goes into the failed queue

    elapsed_milliseconds = round((time.time() - start_time) * 1000)
    stats_client.timing(f'{callback_stats_prefix}.job.duration', elapsed_milliseconds)
    if elapsed_milliseconds < 2000:
        AppSettings.logger.info(f"{prefix}Door43 callback handling for {job_descriptive_name} completed in {elapsed_milliseconds:,} milliseconds.")
    else:
        AppSettings.logger.info(f"{prefix}Door43 callback handling for {job_descriptive_name} completed in {round(time.time() - start_time)} seconds.")

    # Calculate total elapsed time for the job
    total_elapsed_time = datetime.utcnow() - \
                         datetime.strptime(door43_webhook_received_at,
                                           '%Y-%m-%dT%H:%M:%SZ')
    AppSettings.logger.info(f"{prefix}Door43 total job for {job_descriptive_name} completed in {round(total_elapsed_time.total_seconds())} seconds.")
    stats_client.timing(f'{job_handler_stats_prefix}.total.job.duration', round(total_elapsed_time.total_seconds() * 1000))

    # NOTE: following line removed as stats recording used too much disk space
    # stats_client.gauge(user_projects_invoked_string, 0) # Mark as 'succeeded'
    stats_client.gauge(project_types_invoked_string, 0) # Mark as 'succeeded'
    stats_client.incr(f'{callback_stats_prefix}.jobs.succeeded')
    AppSettings.close_logger() # Ensure queued logs are uploaded to AWS CloudWatch
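For context, rq invokes this job function when a worker takes a payload off the queue; a minimal producer-side sketch (the queue name, dotted module path, and payload keys are assumptions; only the one-dict-argument contract comes from the docstring above):

from redis import Redis
from rq import Queue

# Hypothetical enqueue; 'webhook.job' stands in for wherever job() lives.
queue = Queue('door43_job_handler_callback', connection=Redis())
queue.enqueue('webhook.job', {'job_id': '1234', 'success': True},
              job_timeout=600)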
Example #19
 def test_init(self):
     gogs_url = 'https://my.gogs.org'
     AppSettings(gogs_url=gogs_url)
     self.assertEqual(AppSettings.gogs_url, gogs_url)
 def insert(self):
     AppSettings.db().add(self)
     AppSettings.db().commit()
     AppSettings.db().close()
Example #21
def job(queued_json_payload: Dict[str, Any]) -> None:
    """
    This function is called by the rq package to process a job in the queue(s).
        (Don't rename this function.)

    The job is removed from the queue before the job is started,
        but if the job throws an exception or times out (timeout specified in enqueue process)
            then the job gets added to the 'failed' queue.
    """
    AppSettings.logger.debug("tX JobHandler received a job" +
                             (" (in debug mode)" if debug_mode_flag else ""))
    start_time = time()
    stats_client.incr('jobs.HTML.attempted')

    AppSettings.logger.info(f"Clearing /tmp folder…")
    empty_folder(
        '/tmp/',
        only_prefix='tX_')  # Stops failed jobs from accumulating in /tmp

    # AppSettings.logger.info(f"Updating queue statistics…")
    our_queue = Queue(webhook_queue_name,
                      connection=get_current_job().connection)
    len_our_queue = len(our_queue)  # Should normally sit at zero here
    # AppSettings.logger.debug(f"Queue '{webhook_queue_name}' length={len_our_queue}")
    stats_client.gauge(f'{webhook_queue_name}.queue.length.current',
                       len_our_queue)
    AppSettings.logger.info(
        f"Updated stats for '{tx_stats_prefix}.enqueue-job.queue.length.current' to {len_our_queue}"
    )

    try:
        job_descriptive_name = process_tx_job(prefix, queued_json_payload)
    except Exception as e:
        # Catch most exceptions here so we can log them to CloudWatch
        prefixed_name = f"{prefix}tX_HTML_Job_Handler"
        AppSettings.logger.critical(
            f"{prefixed_name} threw an exception while processing: {queued_json_payload}"
        )
        AppSettings.logger.critical(f"{e}: {traceback.format_exc()}")
        AppSettings.close_logger()  # Ensure queued logs are uploaded to AWS CloudWatch
        # Now attempt to log it to an additional, separate FAILED log
        logger2 = logging.getLogger(prefixed_name)
        test_mode_flag = os.getenv('TEST_MODE', '')
        travis_flag = os.getenv('TRAVIS_BRANCH', '')
        log_group_name = f"FAILED_{'' if test_mode_flag or travis_flag else prefix}tX" \
                         f"{'_DEBUG' if debug_mode_flag else ''}" \
                         f"{'_TEST' if test_mode_flag else ''}" \
                         f"{'_TravisCI' if travis_flag else ''}"
        aws_access_key_id = os.environ['AWS_ACCESS_KEY_ID']
        aws_secret_access_key = os.environ['AWS_SECRET_ACCESS_KEY']
        boto3_client = boto3.client(
            "logs",
            aws_access_key_id=aws_access_key_id,
            aws_secret_access_key=aws_secret_access_key,
            region_name='us-west-2')
        failure_watchtower_log_handler = watchtower.CloudWatchLogHandler(
            boto3_client=boto3_client,
            use_queues=False,
            log_group_name=log_group_name,
            stream_name=prefixed_name)
        logger2.addHandler(failure_watchtower_log_handler)
        logger2.setLevel(logging.DEBUG)
        logger2.info(
            f"Logging to AWS CloudWatch group '{log_group_name}' using key '…{aws_access_key_id[-2:]}'."
        )
        logger2.critical(
            f"{prefixed_name} threw an exception while processing: {queued_json_payload}"
        )
        logger2.critical(f"{e}: {traceback.format_exc()}")
        failure_watchtower_log_handler.close()
        raise e  # We raise the exception again so it goes into the failed queue

    elapsed_milliseconds = round((time() - start_time) * 1000)
    stats_client.timing(f'job.HTML.duration', elapsed_milliseconds)
    if elapsed_milliseconds < 2000:
        AppSettings.logger.info(
            f"{prefix}tX job handling for {job_descriptive_name} completed in {elapsed_milliseconds:,} milliseconds."
        )
    else:
        AppSettings.logger.info(
            f"{prefix}tX job handling for {job_descriptive_name} completed in {round(time() - start_time)} seconds."
        )

    stats_client.incr('jobs.HTML.completed')
    AppSettings.close_logger()  # Ensure queued logs are uploaded to AWS CloudWatch
 def update(self):
     AppSettings.db().merge(self)
     AppSettings.db().commit()
     AppSettings.db().close()
 def delete(self):
     AppSettings.db().delete(self)
     AppSettings.db().commit()
     AppSettings.db().close()
 def query(cls, **kwargs):
     items = AppSettings.db().query(cls).filter_by(**kwargs)
     return items
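Taken together, insert(), update(), delete(), and query() form a small CRUD surface over the shared session; a usage sketch against the in-memory database from the test_db example above:

# Assumes the sqlite setup and User model from test_db above.
AppSettings(db_connection_string='sqlite:///:memory:')
AppSettings.db_create_tables([User.__table__])
user = User(name='ed', fullname='Edward Scissorhands', password='12345')
user.insert()                          # add + commit + close
user.fullname = 'Ed'
user.update()                          # merge + commit + close
print(User.query(name='ed').count())   # filter_by(name='ed') -> 1
user.delete()                          # delete + commit + close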
Example #25
    def do_post_processing(self) -> Dict[str, Any]:
        AppSettings.logger.debug(f"ClientLinterCallback.do_post_processing()…")
        if not self.identifier:
            error = 'No identifier found'
            AppSettings.logger.error(error)
            raise Exception(error)

        # if not self.s3_results_key:
        #     error = f"No s3_results_key found for identifier = {self.identifier}"
        #     AppSettings.logger.error(error)
        #     raise Exception(error)

        id_parts = self.identifier.split('/')
        self.multipart = len(id_parts) > 3
        if self.multipart:
            raise Exception("Unsupported")
            # NOTE: Disabled 4Mar2019 coz unused
            # part_count, part_id, book = id_parts[1:4]
            # AppSettings.logger.debug('Multiple project, part {0} of {1}, linted book {2}'.
            #                  format(part_id, part_count, book))
            # s3__master_results_key = '/'.join(self.s3_results_key.split('/')[:-1])
        else:
            AppSettings.logger.debug('Single project')
            # NOTE: Disabled 4Mar2019 coz unused
            # s3__master_results_key = self.s3_results_key

        build_log = {
            'identifier': self.identifier,
            'success': self.success,
            'multipart_project': self.multipart,
            'log': self.log,
            'warnings': self.warnings,
            'errors': self.errors,
            # 's3_commit_key': self.s3_results_key
        }

        if not self.success:
            msg = "Linter failed for identifier: " + self.identifier
            build_log['warnings'].append(msg)
            AppSettings.logger.error(msg)
        else:
            AppSettings.logger.debug(
                f"Linter {self.identifier} had success with"
                f" {len(self.warnings)} warnings: {', '.join(self.warnings[:5])} …"
            )

        has_warnings = len(build_log['warnings']) > 0
        if has_warnings:
            msg = f"Linter {self.identifier} has Warnings!"
            build_log['log'].append(msg)
        else:
            msg = f"Linter {self.identifier} completed with no warnings"
            build_log['log'].append(msg)

        # NOTE: Do we need this -- disabled 25Feb2019
        # ClientLinterCallback.upload_build_log(build_log, 'lint_log.json', self.temp_dir, self.s3_results_key)

        # NOTE: Do we need this -- disabled 4Mar2019 since linting is always done first
        # results = ClientLinterCallback.deploy_if_conversion_finished(s3__master_results_key, self.identifier)
        # if results:
        #     self.all_parts_completed = True
        #     build_log = results

        # if prefix and debug_mode_flag:
        #     AppSettings.logger.debug(f"Temp folder '{self.temp_dir}' has been left on disk for debugging!")
        # else:
        #     remove_tree(self.temp_dir)  # cleanup
        AppSettings.db_close()
        return build_log

    # end of do_post_processing()


# end of ClientLinterCallback class
 def mock_s3_tn_project(self, part):
     zip_file = os.path.join(self.resources_dir, 'converted_projects',
                             'en_tn_converted.zip')
     out_dir = os.path.join(self.temp_dir, 'en_tn_converted')
     unzip(zip_file, out_dir)
     src_dir = os.path.join(out_dir, 'en_tn_converted')
     self.project_files = [
         f for f in os.listdir(src_dir)
         if os.path.isfile(os.path.join(src_dir, f))
     ]
     self.project_key = 'u/door43/en_tn/12345678'
     build_log = file_utils.load_json_object(
         os.path.join(src_dir, 'build_log.json'))
     build_log['part'] = part
     file_utils.write_file(os.path.join(src_dir, 'build_log.json'),
                           build_log)
     AppSettings.cdn_s3_handler().upload_file(
         os.path.join(src_dir, 'build_log.json'),
         '{0}/{1}/build_log.json'.format(self.project_key, part))
     AppSettings.cdn_s3_handler().upload_file(
         os.path.join(src_dir, 'index.json'),
         '{0}/{1}/index.json'.format(self.project_key, part))
     AppSettings.cdn_s3_handler().upload_file(
         os.path.join(src_dir, 'build_log.json'),
         '{0}/{1}/finished'.format(self.project_key, part))
     AppSettings.cdn_s3_handler().upload_file(
         os.path.join(src_dir, '01-GEN.html'),
         '{0}/{1}/01-GEN.html'.format(self.project_key, part))
     AppSettings.cdn_s3_handler().upload_file(
         os.path.join(src_dir, 'project.json'),
         'u/door43/en_tn/project.json')
     AppSettings.door43_s3_handler().upload_file(
         os.path.join(self.resources_dir, 'templates', 'project-page.html'),
         'templates/project-page.html')
Example #27
 def test_s3_handler(self):
     self.assertIsNotNone(AppSettings.cdn_s3_handler())
Example #28
        'TSV_Translation_Notes',
        'tn',
        'other',
    ), 'html'),
    ('usfm2html', Usfm2HtmlConverter, ('usfm', ), (
        'Bible',
        'Aligned_Bible',
        'Greek_New_Testament',
        'Hebrew_Old_Testament',
        'bible',
        'reg',
        'other',
    ), 'html'),
)

AppSettings(prefix=prefix)
if prefix not in ('', 'dev-'):
    AppSettings.logger.critical(
        f"Unexpected prefix: '{prefix}', expected '' or 'dev-'")
tx_stats_prefix = f"tx.{'dev' if prefix else 'prod'}"
job_handler_stats_prefix = f"{tx_stats_prefix}.tx-job-handler"

# Get the Graphite URL from the environment, otherwise use a local test instance
graphite_url = os.getenv('GRAPHITE_HOSTNAME', 'localhost')
stats_client = StatsClient(host=graphite_url,
                           port=8125,
                           prefix=job_handler_stats_prefix)


def get_linter_module(glm_job: Dict[str, Any]) -> Tuple[Optional[str], Any]:
    """
Example #29
 def setUp(self):
     """Runs before each test."""
     AppSettings(prefix='{0}-'.format(self._testMethodName))
     self.temp_dir = tempfile.mkdtemp(prefix='tX_test_Md2HtmlConverter')
     self.out_dir = ''
     self.out_zip_file = ''
    def deploy_revision_to_door43(self, build_log:Dict[str,Any]) -> bool:
        """
        Deploys a single revision of a project to door43.org

        Templates the converted files
            then uploads them and the build log to the S3 bucket
            and creates a 'deployed' file there too.

        :param dict build_log:
        :return bool:
        """
        start = time.time()
        AppSettings.logger.debug(f"Deploying, build log: {json.dumps(build_log)[:256]} …")
        assert 'multiple' not in build_log
        assert 'part' not in build_log

        user = build_log['repo_owner_username'] # was 'repo_owner'
        repo_name = build_log['repo_name']
        commit_id = build_log['commit_id'] # Hashes should already be reduced to 10 characters

        s3_commit_key = f'u/{user}/{repo_name}/{commit_id}'
        s3_repo_key = f'u/{user}/{repo_name}'

        source_dir = tempfile.mkdtemp(prefix='source_', dir=self.temp_dir)
        template_dir = tempfile.mkdtemp(prefix='template_', dir=self.temp_dir)
        output_dir = tempfile.mkdtemp(prefix='output_', dir=self.temp_dir)


        # Do the templating first
        resource_type = build_log['resource_type']
        template_key = 'templates/project-page.html'
        template_file = os.path.join(template_dir, 'project-page.html')
        AppSettings.logger.info(f"Downloading project page template from {AppSettings.door43_bucket_name} '{template_key}' to {template_file} …")
        AppSettings.door43_s3_handler().download_file(template_key, template_file)
        source_dir, success = self.template_converted_files(build_log, output_dir, repo_name,
                                            resource_type, s3_commit_key, source_dir, start,
                                            template_file)
        build_log['warnings'].extend(self.error_messages)
        if not success:
            AppSettings.logger.critical("Templating failed; returning False")
            return False


        #######################
        #
        #  Now do the deploy
        #
        #######################

        # Copy all other files over that don't already exist in output_dir, like css files
        #   Copying from source_dir to output_dir (both are folders inside main temp folder)
        for filename in sorted(glob(os.path.join(source_dir, '*'))):
            output_file = os.path.join(output_dir, os.path.basename(filename))
            if not os.path.exists(output_file) and not os.path.isdir(filename):
                copyfile(filename, output_file)

        # Save master build_log.json
        build_log['ended_at'] = datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%SZ')
        file_utils.write_file(os.path.join(output_dir, 'build_log.json'), build_log)
        AppSettings.logger.debug(f"Final build_log.json: {json.dumps(build_log)[:256]} …")

        # Clear out the door43.org bucket's commit dir
        AppSettings.logger.info(f"Deleting all files in the website bucket directory: {AppSettings.door43_bucket_name}/{s3_commit_key} …")
        AppSettings.door43_s3_handler().bucket.objects.filter(Prefix=s3_commit_key).delete()

        # Upload all files to the S3 door43.org bucket
        AppSettings.logger.info(f"Uploading all files to the website bucket directory: {AppSettings.door43_bucket_name}/{s3_commit_key} …")
        has_index_file = False
        for root, _dirs, files in os.walk(output_dir):
            for filename in sorted(files):
                filepath = os.path.join(root, filename)
                if os.path.isdir(filepath):
                    continue
                if filename == 'index.html':
                    has_index_file = True  # repo supplies its own index page, so skip the redirect below
                key = s3_commit_key + filepath.replace(output_dir, '').replace(os.path.sep, '/')
                AppSettings.logger.debug(f"Uploading {filename} to {AppSettings.door43_bucket_name} bucket {key} …")
                AppSettings.door43_s3_handler().upload_file(filepath, key, cache_time=0)

        redirect_to_file = "index.html"
        html_files = get_sorted_Bible_html_filepath_list(output_dir)
        if len(html_files) > 0 and os.path.join(output_dir, "index.html") not in html_files:
            redirect_to_file = html_files[0].replace(output_dir, "").lstrip("/")

        # Now we place json files and redirect index.html for the whole repo to this index.html file
        AppSettings.logger.info("Copying json files and setting up redirect…")
        try:
            AppSettings.door43_s3_handler().copy(from_key=f'{s3_repo_key}/project.json', from_bucket=AppSettings.cdn_bucket_name)
            AppSettings.door43_s3_handler().copy(from_key=f'{s3_commit_key}/manifest.json', to_key=f'{s3_repo_key}/manifest.json')
            master_exists = AppSettings.door43_s3_handler().object_exists(f'{s3_repo_key}/master/index.html')
            main_exists = AppSettings.door43_s3_handler().object_exists(f'{s3_repo_key}/main/index.html')
            if commit_id == 'master' or commit_id == 'main' or (not master_exists and not main_exists):
                AppSettings.door43_s3_handler().redirect(key=s3_repo_key, location=f"/{s3_commit_key}/{redirect_to_file}")
                AppSettings.door43_s3_handler().redirect(key=s3_repo_key + '/index.html', location=f"/{s3_commit_key}/{redirect_to_file}")
            AppSettings.door43_s3_handler().redirect(key=s3_commit_key, location=f"/{s3_commit_key}/{redirect_to_file}")
            if not has_index_file:
                AppSettings.door43_s3_handler().redirect(key=f"{s3_commit_key}/index.html", location=f"/{s3_commit_key}/{redirect_to_file}")

            self.write_data_to_file_and_upload_to_CDN(output_dir, s3_commit_key, fname='deployed', data=' ')  # flag that deploy has finished
        except Exception as e:
            AppSettings.logger.critical(f"Deployer threw an exception: {e}: {traceback.format_exc()}")

        elapsed_seconds = int(time.time() - start)
        AppSettings.logger.debug(f"Deploy completed in {elapsed_seconds} seconds.")
        self.close()
        return True
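A hedged invocation sketch; the build_log keys shown are exactly the ones this method reads (repo_owner_username, repo_name, commit_id, resource_type, warnings), the values are hypothetical, and the ProjectDeployer construction mirrors the setUp example earlier on this page:

import tempfile

# Hypothetical build log; only keys consumed above are included.
build_log = {
    'repo_owner_username': 'door43',
    'repo_name': 'en-obs',
    'commit_id': '12345678',
    'resource_type': 'obs',
    'warnings': [],
}
deployer = ProjectDeployer(tempfile.mkdtemp(prefix='deploy_'))
if not deployer.deploy_revision_to_door43(build_log):
    AppSettings.logger.error('Deploy failed during templating')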