    def setUp(self):
        self.master_b2_api = B2Api(
            StubAccountInfo(),
            None,
            api_config=B2HttpApiConfig(_raw_api_class=RawSimulator))
        self.raw_api = self.master_b2_api.session.raw_api
        (self.master_account_id,
         self.master_key) = self.raw_api.create_account()
        self.master_b2_api.authorize_account('production',
                                             self.master_account_id,
                                             self.master_key)
        self.lock_enabled_bucket = self.master_b2_api.create_bucket(
            'lock-enabled-bucket', 'allPrivate', is_file_lock_enabled=True)
        self.lock_disabled_bucket = self.master_b2_api.create_bucket(
            'lock-disabled-bucket', 'allPrivate', is_file_lock_enabled=False)
        new_key = self.master_b2_api.create_key([
            'listKeys',
            'listBuckets',
            'listFiles',
            'readFiles',
        ], 'restricted')
        self.restricted_key_id, self.restricted_key = new_key.id_, new_key.application_key

        self.restricted_b2_api = B2Api(StubAccountInfo(), None)
        self.restricted_b2_api.session.raw_api = self.raw_api
        self.restricted_b2_api.authorize_account('production',
                                                 self.restricted_key_id,
                                                 self.restricted_key)

        self.stdout = StringIO()
        self.stderr = StringIO()
        self.console_tool = ConsoleTool(self.master_b2_api, self.stdout,
                                        self.stderr)
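The restricted key created above carries only list/read capabilities, so a test built on this fixture can assert the grant directly. A minimal sketch (hypothetical test method, assuming b2sdk's account-info get_allowed() accessor):

    def test_restricted_key_capabilities(self):  # hypothetical test name
        # get_allowed() returns the key's grant, including its capabilities.
        allowed = self.restricted_b2_api.account_info.get_allowed()
        assert 'readFiles' in allowed['capabilities']
        assert 'writeFiles' not in allowed['capabilities']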
Example 2
import os
import time

from b2sdk.v2 import B2Api, InMemoryAccountInfo  # b2sdk v2 public API assumed


def clean_old_files_b2():
    """Clean the old files in B2."""
    b2_token_id = os.environ['B2_STORAGE_ID']
    b2_token_key = os.environ['B2_STORAGE_KEY']

    #
    # This method is highly API intensive: it lists
    # every file in the bucket, so use it with caution.
    #

    info = InMemoryAccountInfo()
    b2_api = B2Api(info)
    bucket_name = "kubeinit-ci"
    b2_api.authorize_account("production", b2_token_id, b2_token_key)

    bucket = b2_api.get_bucket_by_name(bucket_name)

    older_than = (10 * 24 * 3600 * 1000)  # 10 days in milliseconds
    compare_older_than = int(round(time.time() * 1000) - int(older_than))

    for file_version, folder_name in bucket.ls(recursive=True):
        # The following condition removes only PR job files older than
        # 10 days; periodic job files and the main index file are skipped.
        if 'jobs' in file_version.file_name and 'pr' in file_version.file_name:
            if compare_older_than > int(file_version.upload_timestamp):  # older than 10 days
                print("'kubeinit_ci_utils.py' ==> Deleting files from: " + str(folder_name))
                b2_api.delete_file_version(file_version.id_, file_version.file_name)
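Because the listing above walks the whole bucket, a narrower walk is cheaper when only one folder matters; bucket.ls accepts a folder_to_list argument (used the same way in the render_index example below). A minimal sketch:

# Sketch: restrict the walk to the 'jobs' folder instead of the whole
# bucket, reducing the number of list calls the warning above refers to.
for file_version, folder_name in bucket.ls(folder_to_list='jobs', recursive=True):
    if 'pr' in file_version.file_name and compare_older_than > int(file_version.upload_timestamp):
        b2_api.delete_file_version(file_version.id_, file_version.file_name)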
Example 3
import os

from b2sdk.v2 import B2Api, InMemoryAccountInfo  # b2sdk v2 public API assumed


def upload_files_to_b2(job_path, prefix='jobs/'):
    """Upload the CI results to Backblaze B2."""
    return_code = 0

    # The prefix should be jobs/

    b2_token_id = os.environ['B2_STORAGE_ID']
    b2_token_key = os.environ['B2_STORAGE_KEY']

    try:
        info = InMemoryAccountInfo()
        b2_api = B2Api(info)
        bucket_name = "kubeinit-ci"
        b2_api.authorize_account("production", b2_token_id, b2_token_key)

        bucket = b2_api.get_bucket_by_name(bucket_name)
        print("'kubeinit_ci_utils.py' ==> ----Uploading logs to B2----")

        print("'kubeinit_ci_utils.py' ==> Path at terminal when executing this file")
        print(os.getcwd() + "\n")

        print("'kubeinit_ci_utils.py' ==> This file path, relative to os.getcwd()")
        print(__file__ + "\n")

        file_list = []
        path_to_upload = os.path.join(os.getcwd(), job_path)
        print("'kubeinit_ci_utils.py' ==> Path to upload: " + path_to_upload)

        for r, _d, f in os.walk(path_to_upload):
            for file in f:
                file_list.append(os.path.join(r, file))

        prefix_path = os.getcwd() + '/'
        print("'kubeinit_ci_utils.py' ==> The initial path: " + prefix_path + " will be removed")

        for entry in file_list:
            try:
                # blob = bucket.blob('jobs/' + entry.replace(prefix_path, ''))
                # blob.upload_from_filename(entry)
                file_info = {'how': 'good-file'}
                bucket.upload_local_file(
                    local_file=entry,
                    file_name=prefix + entry.replace(prefix_path, ''),
                    file_infos=file_info,
                )
            except Exception as e:
                print("'kubeinit_ci_utils.py' ==> An exception hapened adding the initial log files, some files could not be added")
                print(e)
                return_code = 1
    except Exception as e:
        print("'kubeinit_ci_utils.py' ==> An exception hapened uploading files to Backblaze B2")
        print(e)
        return_code = 1
    return return_code
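upload_files_to_b2 returns 0 on success and 1 on any failure, so its result can drive a CI exit code directly; a usage sketch with a hypothetical job path:

import sys

# 'logs/job-0001' is a hypothetical results directory relative to the
# current working directory, as the function expects.
sys.exit(upload_files_to_b2('logs/job-0001', prefix='jobs/'))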
Example 4
    def __init__(self, bucket, key_id, app_id):
        # holds all the account info in memory.
        info = InMemoryAccountInfo()
        self.api = B2Api(info)

        # authorize the account through the Backblaze B2 API
        # (authorize_account returns None, so the result is not kept).
        self.api.authorize_account("production", key_id, app_id)
        self.bucket = self.api.get_bucket_by_name(bucket)
        self.sorted_files = self.get_sorted_files()
        self.files_per_db = self.get_files_per_db(self.sorted_files)
        self.filenames_to_obj_map = {}
        super().__init__()
Example 5
import os
from datetime import datetime

from b2sdk.v2 import B2Api, InMemoryAccountInfo  # b2sdk v2 public API assumed


def clean_old_files_b2():
    """Clean the old files in B2."""
    b2_token_id = os.environ['B2_STORAGE_ID']
    b2_token_key = os.environ['B2_STORAGE_KEY']

    info = InMemoryAccountInfo()
    b2_api = B2Api(info)
    bucket_name = "kubeinit-ci"
    b2_api.authorize_account("production", b2_token_id, b2_token_key)

    bucket = b2_api.get_bucket_by_name(bucket_name)

    dt = datetime.now()
    older_than = (10 * 24 * 3600 * 1000)  # 10 days in milliseconds
    compare_older_than = int(dt.timestamp() * 1000) - older_than

    for file_version, folder_name in bucket.ls(recursive=True):
        if compare_older_than > int(file_version.upload_timestamp):  # older than 10 days
            print("'kubeinit_ci_utils.py' ==> Deleting files from: " + folder_name)
            b2_api.delete_file_version(file_version.id_,
                                       file_version.file_name)
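The cutoff arithmetic is easy to sanity-check on its own: B2 upload timestamps are milliseconds since the epoch, so the threshold is simply the current epoch time in milliseconds minus ten days. A minimal standalone sketch:

from datetime import datetime, timedelta

# 10 days expressed in milliseconds: 10 * 24 * 3600 * 1000 = 864_000_000
cutoff = int((datetime.now() - timedelta(days=10)).timestamp() * 1000)
# A file_version is older than ten days when
# int(file_version.upload_timestamp) < cutoff.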
Example 6
import os
import re
import tempfile

import requests
from b2sdk.v2 import B2Api, InMemoryAccountInfo  # b2sdk v2 public API assumed
from google.cloud import storage
from jinja2 import Environment, FileSystemLoader


def render_index(destination='b2'):
    """Render and upload the index file."""
    if destination == 'gcp':
        # Google Cloud Storage init
        os.environ["GOOGLE_APPLICATION_CREDENTIALS"] = os.environ[
            'GC_STORAGE_KEY']
        bucket_name = "kubeinit-ci"
        client = storage.Client()
        data_url = 'https://storage.googleapis.com/kubeinit-ci/jobs/'
        prefix = 'jobs/'
        delimiter = None

        root_blobs = list(
            client.list_blobs(bucket_name, prefix=prefix, delimiter=delimiter))
        filtered = list(
            dict.fromkeys([
                re.sub('/.*', '', sub.name.replace(prefix, ''))
                for sub in root_blobs
            ]))

    if destination == 'b2':
        # The prefix should be jobs/

        b2_token_id = os.environ['B2_STORAGE_ID']
        b2_token_key = os.environ['B2_STORAGE_KEY']
        info = InMemoryAccountInfo()
        b2_api = B2Api(info)
        bucket_name = "kubeinit-ci"
        b2_api.authorize_account("production", b2_token_id, b2_token_key)
        bucket = b2_api.get_bucket_by_name(bucket_name)
        data_url = 'https://ci.kubeinit.org/file/kubeinit-ci/jobs/'
        prefix = 'jobs/'
        root_blobs = []
        for file_version, folder_name in bucket.ls(folder_to_list='jobs',
                                                   latest_only=True):
            print("'kubeinit_ci_utils.py' ==> Folder name: " + folder_name)
            root_blobs.append(file_version.file_name)
        filtered = list(
            dict.fromkeys([
                re.sub('/.*', '', sub.replace(prefix, ''))
                for sub in root_blobs
            ]))

    jobs = []

    print("'kubeinit_ci_utils.py' ==> Rendering CI jobs index page")
    print("'kubeinit_ci_utils.py' ==> Filtered blobs")
    print(filtered)

    print("'kubeinit_ci_utils.py' ==> Rendering page indexes")
    for idx, blob in enumerate(filtered):
        print(str(blob))
        fields = blob.split("-")
        stat = fields[9]
        if stat == '0':
            status = 'Passed'
            badge = 'success'
        elif stat == '1':
            status = 'Failed'
            badge = 'danger'
        # stat == 'u' means this is a periodic job; check the index
        # content to verify it didn't fail.
        elif stat == 'u':
            index_data_url = data_url + blob + '/index.html'
            resp = requests.get(url=index_data_url, timeout=5, verify=False)
            m = re.search("btn-danger", resp.text)
            if m:
                print("'kubeinit_ci_utils.py' ==> The periodic job failed...")
                status = 'Failed'
                badge = 'danger'
            else:
                print("'kubeinit_ci_utils.py' ==> The periodic job passed...")
                status = 'Passed'
                badge = 'success'
        else:
            status = 'Running'
            badge = 'warning'

        extra_data_date_url = data_url + blob + '/records/1.html'
        resp = requests.get(url=extra_data_date_url, timeout=5, verify=False)

        m = re.search(
            "[0-9][0-9][0-9][0-9]\\.[0-9][0-9]\\.[0-9][0-9]\\.[0-9][0-9]\\.[0-9][0-9]\\.[0-9][0-9]",
            resp.text)
        # stat == 'u' means that this is a periodic job
        if m and stat == 'u':
            date = str(m.group(0))
        else:
            date = fields[8]

        m = re.search(
            "https:\\/\\/gitlab\\.com\\/kubeinit\\/kubeinit\\/-\\/jobs\\/[0-9]+",
            resp.text)
        if m:
            job_id = str(m.group(0))
        else:
            job_id = 'Missing field in the record data.'

        m = re.search("The pull request is: [0-9]+", resp.text)
        if m:
            pr_number = str(m.group(0).split(' ')[-1])
        else:
            pr_number = 'Periodic'

        jobs.append({
            'status': status,
            'index': idx,
            'distro': fields[0],
            'driver': fields[1],
            'masters': fields[2],
            'workers': fields[3],
            'hypervisors': fields[4],
            'launch_from': fields[5],
            'job_type': fields[6],
            'id': job_id,
            'pr_number': pr_number,
            'date': date,
            'badge': badge,
            'url': data_url + blob + '/index.html'
        })

    path = os.path.dirname(__file__)
    file_loader = FileSystemLoader(searchpath=path)
    env = Environment(loader=file_loader)
    template_index = "kubeinit_ci_logs.html.j2"
    print("'kubeinit_ci_utils.py' ==> The path for the template is: " + path)
    template = env.get_template(template_index)
    output = template.render(jobs=jobs)

    if destination == 'gcp':
        bucket = client.get_bucket(bucket_name)
        blob = bucket.blob('index.html')
        blob.upload_from_string(output, content_type='text/html')
    if destination == 'b2':
        tmp = tempfile.NamedTemporaryFile()
        with open(tmp.name, 'w') as f:
            f.write(output)
        file_info = {'type': 'Main index file'}
        bucket.upload_local_file(
            local_file=tmp.name,
            file_name='index.html',
            file_infos=file_info,
        )
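render_index reads the storage credentials from the environment before doing any work, so a caller only has to export them first; a hedged usage sketch (placeholder values):

import os

# Placeholder credentials; B2_STORAGE_ID and B2_STORAGE_KEY are the
# variables the B2 branch of render_index() reads.
os.environ['B2_STORAGE_ID'] = '<application key id>'
os.environ['B2_STORAGE_KEY'] = '<application key>'
render_index(destination='b2')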
Example 7
    def b2Api(self) -> B2Api:
        if not hasattr(self, "_b2Api"):
            self._accountInfo = self._getAccountInfo()
            self._b2Api = B2Api(account_info=self._accountInfo, cache=AuthInfoCache(self._accountInfo))
            self._b2Api.authorize_account(**self._authInfo)
        return self._b2Api
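The hasattr guard above is the classic memoized-property pattern; on Python 3.8+ the same behavior can be written with functools.cached_property. A minimal sketch (hypothetical host class; _getAccountInfo and _authInfo come from the snippet above, and the b2sdk.v2 import path for AuthInfoCache is assumed):

from functools import cached_property

from b2sdk.v2 import B2Api, AuthInfoCache  # import path assumed


class B2Client:  # hypothetical class hosting the property
    @cached_property
    def b2_api(self) -> B2Api:
        account_info = self._getAccountInfo()
        api = B2Api(account_info=account_info, cache=AuthInfoCache(account_info))
        api.authorize_account(**self._authInfo)
        return api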
    def assertLegalHoldRepr(self, file_id: str, api: B2Api,
                            expected_repr: str):
        file_version = api.get_file_info(file_id)
        assert DownloadCommand._represent_legal_hold(
            file_version.legal_hold) == expected_repr

    def assertRetentionRepr(self, file_id: str, api: B2Api,
                            expected_repr: str):
        file_version = api.get_file_info(file_id)
        assert DownloadCommand._represent_retention(
            file_version.file_retention) == expected_repr

    def __post_init__(self):
        info = InMemoryAccountInfo()
        cache = InMemoryCache()
        self.api = B2Api(info, cache=cache)
        self.api.authorize_account(self.realm, self.account_id,
                                   self.application_key)
Example 11
    def __init__(self, parsed_url):
        u"""
        Authorize to the B2 API and set up needed variables
        """
        duplicity.backend.Backend.__init__(self, parsed_url)

        global DownloadDestLocalFile, FileVersionInfoFactory

        try:  # figure out what version of b2sdk we have
            from b2sdk import __version__ as VERSION
            v_split = VERSION.split(u'.')
            self.v_num = [int(x) for x in v_split]
        except:  # if the version cannot be parsed, treat it as unknown
            self.v_num = [0, 0, 0]

        try:  # public API v2 is recommended, if available
            from b2sdk.v2 import B2Api
            from b2sdk.v2 import InMemoryAccountInfo
            from b2sdk.v2.exception import NonExistentBucket
        except ImportError:
            try:  # if public API v2 not found, try to use public API v1
                from b2sdk.v1 import B2Api
                from b2sdk.v1 import InMemoryAccountInfo
                from b2sdk.v1 import DownloadDestLocalFile
                from b2sdk.v1.exception import NonExistentBucket

                if self.v_num < [1, 9, 0]:
                    from b2sdk.v1.file_version import FileVersionInfoFactory
            except ImportError:
                try:  # try to import the new b2sdk internal API if available (and public API isn't)
                    from b2sdk.api import B2Api
                    from b2sdk.account_info import InMemoryAccountInfo
                    from b2sdk.download_dest import DownloadDestLocalFile
                    from b2sdk.exception import NonExistentBucket
                    from b2sdk.file_version import FileVersionInfoFactory
                except ImportError as e:
                    if u'b2sdk' in getattr(e, u'name', u'b2sdk'):
                        raise
                    try:  # fall back to import the old b2 client
                        from b2.api import B2Api
                        from b2.account_info import InMemoryAccountInfo
                        from b2.download_dest import DownloadDestLocalFile
                        from b2.exception import NonExistentBucket
                        from b2.file_version import FileVersionInfoFactory
                    except ImportError:
                        if u'b2' in getattr(e, u'name', u'b2'):
                            raise
                        raise BackendException(
                            u'B2 backend requires B2 Python SDK (pip install b2sdk)'
                        )

        self.service = B2Api(InMemoryAccountInfo())
        self.parsed_url.hostname = u'B2'

        account_id = parsed_url.username
        account_key = self.get_password()

        self.url_parts = [
            x for x in parsed_url.path.replace(u"@", u"/").split(u'/')
            if x != u''
        ]
        if self.url_parts:
            self.username = self.url_parts.pop(0)
            bucket_name = self.url_parts.pop(0)
        else:
            raise BackendException(u"B2 requires a bucket name")
        self.path = u"".join([url_part + u"/" for url_part in self.url_parts])
        self.service.authorize_account(u'production', account_id, account_key)

        try:
            log.Log(
                u"B2 Backend (path= %s, bucket= %s, recommended_part_size= %s)"
                % (self.path, bucket_name,
                   self.service.account_info.get_recommended_part_size()),
                log.INFO)
        except AttributeError:
            log.Log(
                u"B2 Backend (path= %s, bucket= %s, minimum_part_size= %s)" %
                (self.path, bucket_name,
                 self.service.account_info.get_minimum_part_size()), log.INFO)

        try:
            self.bucket = self.service.get_bucket_by_name(bucket_name)
            log.Log(u"Bucket found", log.INFO)
        except NonExistentBucket:
            try:
                log.Log(u"Bucket not found, creating one", log.INFO)
                self.bucket = self.service.create_bucket(
                    bucket_name, u'allPrivate')
            except:  # any failure while creating the bucket is fatal
                raise FatalBackendException(u"Bucket cannot be created")
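The cascading try/except ImportError chain above is a compact way to support several generations of the SDK from one code base; stripped of the duplicity specifics, the pattern looks like this (a minimal sketch using only the imports the snippet itself references):

# Sketch: prefer the newest public API and fall back to older ones.
try:
    from b2sdk.v2 import B2Api, InMemoryAccountInfo
except ImportError:
    try:
        from b2sdk.v1 import B2Api, InMemoryAccountInfo
    except ImportError as e:
        raise ImportError(
            'B2 backend requires the B2 Python SDK (pip install b2sdk)'
        ) from e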