Example #1
    def download_artifact(self, project_id, job_id, store):
        # project_id = '13539397'
        # job_id = '261939258'
        is_cache = self.config.get('cache')
        save_dir = os.path.dirname(store)
        if is_cache:
            cache_files = db.collection('artifacts').find({
                'project_id': project_id,
                'job_id': job_id,
                'app_type': 'gitlab'
            })
            cache_files = list(cache_files)
            if cache_files:
                for record in cache_files:
                    file_id = record.get('file_id')
                    if not file_id:
                        continue

                    filename = os.path.join(save_dir, record['filename'])
                    with open(filename, 'wb') as stream:
                        db.fs_bucket().download_to_stream(file_id, stream)
                    # extract only after the stream is closed, so the
                    # archive is fully flushed to disk before unpacking
                    extract(filename, save_dir)
                    os.unlink(filename)

                return True

        project = self.projects.get(project_id)
        job = project.jobs.get(job_id)
        if job.status != 'success':
            raise Exception('gitlab job status must be success, got {}'.format(job.status))

        with open(store, 'wb') as f:
            job.artifacts(streamed=True, action=f.write)

        if is_cache:
            with open(store, mode='rb') as fd:
                name = os.path.basename(store)
                file_id = db.save_file(filename=name, fileobj=fd)
                store_info = {
                    'app_type': 'gitlab',  # must match the cache lookup filter above
                    'file_id': file_id,  # store the ObjectId so the cached download works
                    'project_id': project_id,
                    'job_id': job_id,
                    'filename': name,
                    'created_at': time.time()
                }
                db.collection('artifacts').insert_one(store_info)

        with zipfile.ZipFile(store, 'r') as zip_ref:
            zip_ref.extractall(os.path.dirname(store))
        os.unlink(store)

        return True
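
Every cache branch in these examples calls an extract(filename, target_dir) helper that is not shown. A minimal sketch, assuming the artifacts are plain zip archives (the real helper may support more formats):

    import zipfile

    def extract(filename, target_dir):
        # Unpack a downloaded artifact archive into target_dir.
        # Assumption: archives are zip files; tarballs would need tarfile.
        with zipfile.ZipFile(filename, 'r') as archive:
            archive.extractall(target_dir)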
Example #2
    def build_book_from_history(self, build_id):
        history = db.collection('build_history').find_one(
            {'_id': ObjectId(build_id)})
        task_id = history.get('task_id')
        file_id = history.get('file_id')
        job_info = history.get('job_info')
        book = Book.find_one({'_id': ObjectId(job_info.get('book_id'))})
        bookspace = self.get_book_space(book.get('name'))
        bookspace = os.path.join(bookspace, md5(str(task_id)))
        self.mkdir(bookspace)
        save_file = NamedTemporaryFile(delete=False, suffix='.zip').name
        with open(save_file, 'wb') as fd:
            db.fs_bucket().download_to_stream(ObjectId(file_id), fd)

        extract(save_file, bookspace)
        os.unlink(save_file)

        return bookspace
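
The md5() helper seen here (and in the examples below) hashes a string to build a unique directory name. A minimal sketch with hashlib, assuming the helper returns a hex digest:

    import hashlib

    def md5(text):
        # Hex digest of a string; bytes input is hashed as-is.
        if isinstance(text, str):
            text = text.encode('utf8')
        return hashlib.md5(text).hexdigest()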
Example #3
 def load_book_from_db(self, name, roles=None):
     wk = Workspace()
     workspace = wk.workspace
     wk.check_workspace()
     books = list(db.collection('playbook').find({
         'book_name': name
     }).sort('seq_no', pymongo.ASCENDING))
     for item in books:
         if roles:
             folder = item.get('name')
             if folder and folder not in roles:
                 continue
         filename = workspace + item['path']
         if item['is_dir']:
             if os.path.isdir(filename):
                 continue
             else:
                 os.mkdir(filename, 0o755)  # 0o600 would leave the directory untraversable
         else:
             if os.path.isfile(filename):
                 file_hash = self.file_md5(filename)
                 if item.get('md5') and item['md5'] == file_hash:
                     continue
             dirname = os.path.dirname(filename)
             if not os.path.exists(dirname):
                 os.makedirs(dirname)
             if item['is_edit']:
                 db.collection('playbook').update_one(
                     {'_id': item['_id']},
                     {'$set': {
                         'md5': md5(item['content'].encode('utf8'))
                     }})
                 with open(filename, 'w') as stream:
                     stream.write(item['content'])
             else:
                 with open(filename, 'wb') as stream:
                     db.fs_bucket().download_to_stream(
                         item['file_id'], stream)
     return books
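
The loop above only reads a handful of fields from each playbook document. Reconstructed from those reads, a document has roughly this shape (illustrative values, not the project's actual schema):

    {
        'book_name': 'demo',       # book the entry belongs to
        'name': 'webserver',       # role folder, used for role filtering
        'path': '/roles/webserver/tasks/main.yml',
        'seq_no': 3,               # ordering within the book
        'is_dir': False,           # directories are created, files are written
        'is_edit': True,           # edited entries carry inline 'content'
        'content': '- name: install nginx\n  apt: name=nginx',
        'md5': 'd41d8cd98f00b204e9800998ecf8427e',  # change detection
        'file_id': None,           # GridFS ObjectId when is_edit is False
    }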
Example #4
    def setup():
        wk = Workspace()
        workspace = wk.workspace
        wk.check_workspace()
        if not os.path.exists(workspace):
            os.makedirs(workspace, 0o755)
        books = db.collection('playbook').find().sort('path', pymongo.ASCENDING)
        start = time.time()
        for item in books:
            filename = workspace + item['path']
            if item['is_dir']:
                if os.path.isdir(filename):
                    continue
                else:
                    os.makedirs(filename, 0o755)
            else:
                if os.path.isfile(filename):
                    file_hash = file_md5(filename)
                    if item.get('md5') and item['md5'] == file_hash:
                        continue
                dirname = os.path.dirname(filename)
                if not os.path.exists(dirname):
                    os.makedirs(dirname)
                if item['is_edit']:
                    db.collection('playbook').update_one(
                        {'_id': item['_id']},
                        {'$set': {'md5': md5(item['content'])}})
                    with open(filename, 'w') as stream:
                        stream.write(item['content'])
                else:
                    with open(filename, 'wb') as stream:
                        db.fs_bucket().download_to_stream(item['file_id'], stream)
        end = time.time()

        return jsonify({
            "message": "ok",
            "code": 0,
            'runtime': end - start
        })
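
file_md5() is the counterpart of md5() for on-disk files: setup skips any file whose current hash matches the stored md5 field. A minimal sketch, assuming chunked reads:

    import hashlib

    def file_md5(path):
        # MD5 hex digest of a file's contents, read in chunks so large
        # files are not loaded into memory at once.
        digest = hashlib.md5()
        with open(path, 'rb') as fd:
            for chunk in iter(lambda: fd.read(8192), b''):
                digest.update(chunk)
        return digest.hexdigest()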
Example #5
    def load_book_from_db(self, name, roles=None, build_id=False):
        book = Book.find_one({'name': name})
        if not book:
            return False
        files = Model.build_model('playbook').find({'book_id': str(book['_id'])})\
            .sort([('is_edit', pymongo.ASCENDING), ('path', pymongo.ASCENDING)])
        files = list(files)
        if not files:
            return False
        if build_id:
            bookspace = os.path.join(self.book, md5(str(build_id)))
        else:
            bookspace = self.get_book_space(name)

        def parse_register(record):
            register = record.get('register')
            if not register:
                return record

            c_ids = [ObjectId(i) for i in register]
            cfg_records = Configuration.find({'_id': {'$in': c_ids}})
            if not cfg_records:
                return record

            try:
                variables = {}
                content = yaml.safe_load(record.get('content', ''))
                if not content:
                    return record

                vault = Vault({'vault_pass': config.vault.get('secret')})
                for cfg in cfg_records:
                    config_vars = cfg.get('variables')
                    if not config_vars:
                        continue

                    for k, v in config_vars.items():
                        key = '_'.join(
                            ['ECLOGUE', 'CONFIG',
                             cfg.get('name', ''), k])
                        is_encrypt = Vault.is_encrypted(v)
                        value = v
                        if is_encrypt:
                            value = vault.decrypt_string(value)

                        variables[key] = value

                content = dict(content)
                content.update(variables)
                record['content'] = yaml.safe_dump(content)
            except Exception as e:
                print(e)
            return record

        self.check_workspace(path=self._check_make(bookspace))
        for item in files:
            item = parse_register(item)
            if roles and item.get('folder'):
                folder = item.get('folder')
                if folder and folder not in roles:
                    continue
            filename = bookspace + item.get('path')
            if item['is_dir']:
                if os.path.isdir(filename):
                    continue

                self.mkdir(filename)
            else:
                if os.path.isfile(filename):
                    file_hash = file_md5(filename)
                    if item.get('md5') and item['md5'] == file_hash:
                        continue
                dirname = os.path.dirname(filename)
                if not os.path.exists(dirname):
                    os.makedirs(dirname)
                if item['is_edit']:
                    Model.build_model('playbook').update_one(  # same collection as the find() above
                        {'_id': item['_id']},
                        {'$set': {
                            'md5': md5(item['content'])
                        }})
                    with open(filename, 'w') as stream:
                        stream.write(item['content'])
                else:
                    with open(filename, 'wb') as stream:
                        db.fs_bucket().download_to_stream(
                            item['file_id'], stream)
        return bookspace
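
parse_register() amounts to a YAML round-trip: load the playbook content, merge in the decrypted ECLOGUE_CONFIG_* variables, and dump it back. The round-trip in isolation (standalone illustration; the variable name is made up):

    import yaml

    content = yaml.safe_load('foo: 1\n') or {}
    content.update({'ECLOGUE_CONFIG_demo_token': 's3cr3t'})
    print(yaml.safe_dump(content))  # foo: 1 plus the injected variable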
Example #6
    def save_artifacts(self,
                       save_dir,
                       job_name,
                       build_id=None,
                       strict_validation=False,
                       artifact_name=None):
        is_cache = self.config.get('cache')
        self.logger.info('use cache: {}'.format(is_cache))
        if is_cache:
            cache_files = db.collection('artifacts').find({
                'job_name': job_name,
                'build_id': build_id,
                'app_type': 'jenkins'
            })
            cache_files = list(cache_files)
            if cache_files:
                for record in cache_files:
                    file_id = record.get('file_id')
                    if not file_id:
                        continue

                    msg = 'load file from cache, save_dir:{}, filename:{}'.format(
                        save_dir, record['filename'])
                    self.logger.info(msg)
                    filename = os.path.join(save_dir, record['filename'])
                    with open(filename, 'wb') as stream:
                        db.fs_bucket().download_to_stream(file_id, stream)
                    # extract only after the stream is closed, so the
                    # archive is fully flushed to disk before unpacking
                    extract(filename, save_dir)
                    os.unlink(filename)

                return True

        artifacts = self.get_artifacts(job_name, build_id)
        store_files = []
        for artifact in artifacts:
            if artifact_name and artifact.filename != artifact_name:
                continue

            file_path = artifact.save_to_dir(save_dir, strict_validation)
            msg = 'download artifacts from {}, save_dir:{}, filename:{}'
            msg = msg.format(self.config.get('base_url'), save_dir,
                             artifact.filename)
            self.logger.info(msg)
            extract(file_path, save_dir)
            store_files.append({
                'filename': artifact.filename,
                'path': file_path
            })

        for file in store_files:
            filename = file['filename']
            path = file['path']
            if is_cache:
                with open(path, mode='rb') as stream:
                    file_id = db.save_file(filename=filename, fileobj=stream)
                    store_info = {
                        'app_type': 'jenkins',
                        'file_id': file_id,
                        'job_name': job_name,
                        'build_id': build_id,
                        'filename': os.path.basename(filename),
                        'created_at': time.time()
                    }
                    db.collection('artifacts').insert_one(store_info)

            os.unlink(path)
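
All six examples lean on db.fs_bucket() and db.save_file() for binary storage. A minimal sketch of what such helpers could look like on top of MongoDB GridFS (connection details and the database name are assumptions, not the project's actual db module):

    import gridfs
    from pymongo import MongoClient

    client = MongoClient('mongodb://localhost:27017')
    database = client['eclogue']

    def fs_bucket():
        # GridFSBucket provides upload_from_stream/download_to_stream,
        # matching the calls made throughout the examples above.
        return gridfs.GridFSBucket(database)

    def save_file(filename, fileobj):
        # Returns the ObjectId of the stored file, which the examples
        # persist as file_id in the 'artifacts' collection.
        return fs_bucket().upload_from_stream(filename, fileobj)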