Example #1
    def test_downloadCompressedDirectory(self):
        tempPath = os.path.join(os.getcwd(), "tests", "temp")
        compressPath = os.path.join(tempPath, "compress")
        extractPath = os.path.join(tempPath, "extract")

        m = MockS3Resource()
        m.bind_bucket_method(lambda name: MockS3Bucket(name))

        s = S3Interface(m, "b", tempPath)
        os.makedirs(compressPath)
        os.makedirs(extractPath)
        try:

            def func(keyName, localPath):
                ext = os.path.splitext(localPath)[1]
                self.assertEqual(keyName, "keyPrefix/docpath/docName" + ext)

            s.bucket.bind_download_file_method(func)

            for i in range(1, 10):
                with open(os.path.join(compressPath, "tempfile{0}".format(i)),
                          'w') as f:
                    f.write("testfile contents {0}".format(i))
            # create the archive locally up front: the mocked download_file
            # above is a no-op, so downloadCompressed will find this file
            s.archiveFileOrDirectory(compressPath, "docpath_docName")
            s.downloadCompressed("keyPrefix", "docpath/docName", extractPath)
            for i in range(1, 10):
                with open(os.path.join(extractPath, "tempfile{0}".format(i)),
                          'r') as f:
                    self.assertEqual(f.read(),
                                     "testfile contents {0}".format(i))
        finally:
            shutil.rmtree(tempPath)
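The test above implies how downloadCompressed composes the other helpers: the S3 key keeps its "/" separators, the local archive name flattens them, and the downloaded archive is unpacked into the destination. A minimal sketch consistent with the assertions, assuming a fixed ".zip" extension for directory archives (the extension choice and the body are assumptions; downloadFile and unpackFileOrDirectory do appear in the other examples on this page):

    def downloadCompressed(self, keyPrefix, documentName, destination):
        # assumption: the local archive name flattens any key separators
        localName = documentName.replace("/", "_") + ".zip"
        localPath = os.path.join(self.localTempDir, localName)
        self.downloadFile(keyPrefix + "/" + documentName + ".zip", localPath)
        self.unpackFileOrDirectory(localPath, destination)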
Example #2
    def test_constructor_creates_bucket_and_assigns_tempdir(self):
        m = MockS3Resource()
        m.bind_bucket_method(lambda name: MockS3Bucket(name))
        s = S3Interface(m, "b", os.path.join(os.getcwd(), "temp"))
        self.assertIsInstance(s.bucket, MockS3Bucket)
        self.assertEqual(s.bucket.name, "b")
        self.assertEqual(s.localTempDir, os.path.join(os.getcwd(), "temp"))
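Example #2 pins down the constructor: S3Interface asks the resource for a bucket by name and stores the local temp directory. A minimal sketch satisfying those assertions, assuming the resource exposes a boto3-style Bucket(name) accessor (which is exactly what MockS3Resource.bind_bucket_method stands in for):

    class S3Interface:
        def __init__(self, s3, bucketName, localTempDir):
            # the resource's Bucket(name) accessor returns the bucket object
            self.bucket = s3.Bucket(bucketName)
            self.localTempDir = localTempDir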
Example #3
    def test_uploadCompressedDirectory(self):
        tempPath = os.path.join(os.getcwd(), "tests", "temp")
        compressPath = os.path.join(tempPath, "compress")

        m = MockS3Resource()
        m.bind_bucket_method(lambda name: MockS3Bucket(name))

        s = S3Interface(m, "b", tempPath)
        os.makedirs(compressPath)
        try:

            def func(localPath, keyName):
                ext = os.path.splitext(localPath)[1]
                self.assertEqual(keyName, "keyPrefix/docName" + ext)
                result = glob.glob("{0}.*".format(
                    os.path.join(tempPath, "docName")))
                self.assertEqual(len(result), 1)
                self.assertEqual(result[0], localPath)

            s.bucket.bind_upload_file_method(func)

            for i in range(1, 10):
                with open(os.path.join(compressPath, "tempfile{0}".format(i)),
                          'w') as f:
                    f.write("testfile contents {0}".format(i))

            s.uploadCompressed("keyPrefix", "docName", compressPath)
        finally:
            shutil.rmtree(tempPath)
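The assertions in Example #3 spell out a small contract for uploadCompressed: archive the source into localTempDir under the document name, then upload the result under keyPrefix with the archive's extension appended. A hedged sketch of a method meeting that contract (the body is an assumption; archiveFileOrDirectory and uploadFile are exercised by the other tests here):

    def uploadCompressed(self, keyPrefix, documentName, sourcePath):
        # the archive lands in localTempDir as <documentName>.<ext>
        archivePath = self.archiveFileOrDirectory(sourcePath, documentName)
        ext = os.path.splitext(archivePath)[1]
        self.uploadFile(archivePath, keyPrefix + "/" + documentName + ext)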
Example #4
def main():
    """Entry point to the database integrity tests."""
    cli_arguments = cli_parser.parse_args()
    set_log_level(cli_arguments.log_level)

    s3_tests_enabled = not cli_arguments.disable_s3_tests
    gremlin_tests_enabled = not cli_arguments.disable_gremlin_tests

    s3interface = None
    if s3_tests_enabled:
        s3configuration = S3Configuration()
        s3interface = S3Interface(s3configuration)
        s3interface.connect()

    gremlinInterface = None
    if gremlin_tests_enabled:
        gremlinConfiguration = GremlinConfiguration()
        gremlinInterface = GremlinInterface(gremlinConfiguration)

    initial_checks(s3interface, gremlinInterface)

    if cli_arguments.check:
        logging.info("Only initial check is performed, exiting")
        sys.exit()

    check_packages_in_s3(s3interface)
Example #5
    def __init__(self, s3, manifestPath, localWorkingDir):
        self.manifestPath = manifestPath
        self.manifest = Manifest(manifestPath)
        self.s3interface = S3Interface(s3, self.manifest.GetBucketName(),
                                       localWorkingDir)
        metafac = InstanceMetadataFactory(self.manifest)
        self.instanceManager = InstanceManager(self.s3interface,
                                               self.manifest, metafac)
        self.manifestKey = "/".join([self.manifest.GetS3KeyPrefix(),
                                     "manifest.json"])
Example #6
    def test_uploadFileCallsBucketMethod(self):
        m = MockS3Resource()
        m.bind_bucket_method(lambda name: MockS3Bucket(name))
        s = S3Interface(m, "b", os.path.join(os.getcwd(), "temp"))

        def func(localPath, keyName):
            self.assertEqual(localPath, "path")
            self.assertEqual(keyName, "key")

        s.bucket.bind_upload_file_method(func)
        s.uploadFile("path", "key")
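All of these tests share the same bind_* test-double pattern: a bound callable replaces the corresponding boto3 call, so assertions can run inside the callback. A minimal sketch of mocks compatible with every call on this page (the project's real mocks may record calls or enforce more):

    class MockS3Bucket:
        def __init__(self, name):
            self.name = name

        def bind_upload_file_method(self, func):
            self._upload = func

        def bind_download_file_method(self, func):
            self._download = func

        # boto3-style signatures that S3Interface is expected to call
        def upload_file(self, localPath, keyName):
            self._upload(localPath, keyName)

        def download_file(self, keyName, localPath):
            self._download(keyName, localPath)

    class MockS3Resource:
        def bind_bucket_method(self, func):
            self._bucket = func

        def Bucket(self, name):
            return self._bucket(name)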
Example #7
def main():
    """Entry point to the performance tests."""
    cli_arguments = cli_parser.parse_args()
    check_environment_variables()

    coreapi_url = os.environ.get('F8A_API_URL', None)
    jobs_api_url = os.environ.get('F8A_JOB_API_URL', None)
    gremlin_api_url = os.environ.get('F8A_GREMLIN_URL', None)

    recommender_api_token = os.environ.get('RECOMMENDER_API_TOKEN')
    job_api_token = os.environ.get('JOB_API_TOKEN')

    aws_access_key_id = os.environ.get('AWS_ACCESS_KEY_ID')
    aws_secret_access_key = os.environ.get('AWS_SECRET_ACCESS_KEY')
    s3_region_name = os.environ.get('S3_REGION_NAME')
    deployment_prefix = os.environ.get('DEPLOYMENT_PREFIX', 'STAGE')

    core_api = CoreApi(coreapi_url, recommender_api_token)
    jobs_api = JobsApi(jobs_api_url, job_api_token)
    gremlin_api = GremlinApi(gremlin_api_url)

    s3 = S3Interface(aws_access_key_id, aws_secret_access_key, s3_region_name, deployment_prefix)

    check_system(core_api, jobs_api, s3)

    # set the flag that enables dumping JSON responses into files
    # that allow us to further analyze the data
    core_api.dump_json_responses = cli_arguments.dump
    jobs_api.dump_json_responses = cli_arguments.dump

    # if user specifies the manifest file for the stack analysis, set
    # the appropriate attribute
    core_api.stack_analysis_manifest = cli_arguments.manifest

    if cli_arguments.sla:
        run_benchmarks_sla(core_api, jobs_api, s3)
    else:
        run_benchmarks(core_api, jobs_api, gremlin_api, s3,
                       cli_arguments.stack_analysis_benchmark,
                       cli_arguments.component_analysis_benchmark,
                       cli_arguments.package_query_to_graph_benchmark,
                       cli_arguments.package_version_query_to_graph_benchmark,
                       cli_arguments.parallel,
                       cli_arguments.thread_max)
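check_environment_variables runs before any environment variable is read, so it presumably fails fast when a required one is unset. One possible sketch using the names read later in main (the required set and the error type are assumptions; DEPLOYMENT_PREFIX is left out because main falls back to 'STAGE' for it, and a module-level os import is assumed):

    def check_environment_variables():
        required = ('F8A_API_URL', 'F8A_JOB_API_URL', 'F8A_GREMLIN_URL',
                    'RECOMMENDER_API_TOKEN', 'JOB_API_TOKEN',
                    'AWS_ACCESS_KEY_ID', 'AWS_SECRET_ACCESS_KEY',
                    'S3_REGION_NAME')
        missing = [name for name in required if os.environ.get(name) is None]
        if missing:
            raise EnvironmentError('missing environment variables: '
                                   + ', '.join(missing))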
Example #8
    def test_archive_and_extract_dir(self):
        tempPath = os.path.join(os.getcwd(), "tests", "temp")
        compressPath = os.path.join(tempPath, "compress")
        extractPath = os.path.join(tempPath, "extract")

        m = MockS3Resource()
        m.bind_bucket_method(lambda name: MockS3Bucket(name))
        s = S3Interface(m, "b", tempPath)

        os.makedirs(compressPath)
        os.makedirs(extractPath)
        try:
            for i in range(1, 10):
                # add files to the zip root dir
                with open(os.path.join(compressPath, "tempfile{0}".format(i)),
                          'w') as f:
                    f.write("testfile contents {0}".format(i))
                # now add some subdirectories and files
                subdir = os.path.join(compressPath, str(i))
                os.makedirs(subdir)
                with open(os.path.join(subdir, "subdir_tempfile{0}".format(i)),
                          'w') as f:
                    f.write("subdir testfile contents {0}".format(i))

            name = s.archiveFileOrDirectory(compressPath, "tempfile")
            s.unpackFileOrDirectory(name, extractPath)
            for i in range(1, 10):
                with open(os.path.join(extractPath, "tempfile{0}".format(i)),
                          'r') as f:
                    self.assertEqual(f.read(),
                                     "testfile contents {0}".format(i))
                subdir = os.path.join(extractPath, str(i))
                with open(os.path.join(subdir, "subdir_tempfile{0}".format(i)),
                          'r') as f:
                    self.assertEqual(f.read(),
                                     "subdir testfile contents {0}".format(i))
        finally:
            shutil.rmtree(tempPath)
Example #9
    def test_archive_and_extract_file(self):
        tempPath = os.path.join(os.getcwd(), "tests", "temp")
        compressPath = os.path.join(tempPath, "compress")
        extractPath = os.path.join(tempPath, "extract")

        m = MockS3Resource()
        m.bind_bucket_method(lambda name: MockS3Bucket(name))
        s = S3Interface(m, "b", tempPath)

        os.makedirs(compressPath)
        os.makedirs(extractPath)
        try:
            fn = os.path.join(compressPath, "tempfile")
            with open(fn, 'w') as f:
                f.write("testfile contents")
            name = s.archiveFileOrDirectory(fn, "tempfile")
            s.unpackFileOrDirectory(name, os.path.join(extractPath,
                                                       "tempfile"))
            with open(os.path.join(extractPath, "tempfile"), 'r') as f:
                self.assertEqual(f.read(), "testfile contents")
        finally:
            shutil.rmtree(tempPath)
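Taken together, Examples #8 and #9 constrain the archive helpers: a directory round-trips through an archive written to localTempDir (Example #3 globs for exactly one "<name>.*" file there), while a single file unpacks to the exact destination path. One hypothetical implementation consistent with both tests, assuming zip for directories and gzip for single files (the format choices and bodies are assumptions; gzip, os, and shutil imports are required):

    def archiveFileOrDirectory(self, sourcePath, archiveName):
        base = os.path.join(self.localTempDir, archiveName)
        if os.path.isdir(sourcePath):
            # make_archive returns the full path, extension included
            return shutil.make_archive(base, "zip", root_dir=sourcePath)
        # single files are gzip-compressed to <localTempDir>/<archiveName>.gz
        archivePath = base + ".gz"
        with open(sourcePath, "rb") as src, \
                gzip.open(archivePath, "wb") as dst:
            shutil.copyfileobj(src, dst)
        return archivePath

    def unpackFileOrDirectory(self, archivePath, destination):
        if archivePath.endswith(".gz"):
            # for a single file, destination is the output file path
            with gzip.open(archivePath, "rb") as src, \
                    open(destination, "wb") as dst:
                shutil.copyfileobj(src, dst)
        else:
            # for a directory archive, destination is the output directory
            shutil.unpack_archive(archivePath, destination)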
Example #10
def main():
    """to be run on by each instance as a startup command"""
    import argparse, sys
    import boto3
    #from powershell_s3 import powershell_s3
    from s3interface import S3Interface
    from manifest import Manifest
    from instancemanager import InstanceManager
    from instancemetadatafactory import InstanceMetadataFactory
    from loghelper import LogHelper
    parser = argparse.ArgumentParser(
        description="AWS Instance bootstrapper" +
        "Loads manifest which contains data and commands to run on this instance,"
        + "downloads data from S3, runs commands, and uploads results to S3")

    parser.add_argument("--bucketName",
                        help="the name of the S3 bucket to work with",
                        required=True)
    parser.add_argument(
        "--manifestKey",
        help="the key pointing to the manifest file in the s3 bucket",
        required=True)
    parser.add_argument(
        "--instanceId",
        help="the id of this instance as defined in the manifest file",
        required=True)
    parser.add_argument(
        "--localWorkingDir",
        help="a directory to store working files; it will be created on "
             "the instance if it does not exist",
        required=True)

    try:
        # boto3.set_stream_logger(name='botocore')
        # define before any statement that can raise, so the except
        # handler below can safely test it
        bootstrapper = None
        args = vars(parser.parse_args())

        bucketName = args["bucketName"]
        manifestKey = args["manifestKey"]
        instanceId = int(args["instanceId"])
        localWorkingDir = args["localWorkingDir"]

        if not os.path.exists(localWorkingDir):
            os.makedirs(localWorkingDir)
        logPath = LogHelper.instanceLogPath(localWorkingDir, instanceId)
        LogHelper.start_logging(logPath)
        logging.info("startup")
        logging.info("creating boto3 s3 resource")
        s3 = boto3.resource('s3')

        logging.info("creating S3Interface")
        s3interface = S3Interface(s3, bucketName, localWorkingDir)

        localManifestPath = os.path.join(localWorkingDir, "manifest.json")
        logging.info("downloading manifest from S3")
        s3interface.downloadFile(manifestKey, localManifestPath)
        manifest = Manifest(localManifestPath)
        metafac = InstanceMetadataFactory(manifest)
        instancemanager = InstanceManager(s3interface, manifest, metafac)
        metadata = instancemanager.downloadMetaData(instanceId)
        bootstrapper = AWSInstanceBootStrapper(instanceId, manifest,
                                               s3interface, instancemanager,
                                               metadata)
        bootstrapper.DownloadS3Documents()
        bootstrapper.RunCommands()
        bootstrapper.UploadS3Documents()
    except Exception:
        logging.exception("error in bootstrapper")
        if bootstrapper is not None:
            bootstrapper.UploadStatus()
        sys.exit(1)
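A hypothetical invocation, assuming the module is saved as bootstrap.py on the instance (the bucket name, key, and paths below are placeholders):

    python bootstrap.py --bucketName my-bucket \
        --manifestKey experiments/run1/manifest.json \
        --instanceId 0 \
        --localWorkingDir /tmp/bootstrap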