def dowload_artifact(self, project_id, job_id, store):
    """Download a GitLab job's artifact archive and extract it next to *store*.

    When caching is enabled, previously stored artifacts are served from
    GridFS instead of GitLab; fresh downloads are written back to the cache.

    Args:
        project_id: GitLab project id, e.g. '13539397'.
        job_id: GitLab job id, e.g. '261939258'.
        store: Filesystem path where the artifact zip is written before
            extraction; the archive is extracted into its directory and
            then deleted.

    Returns:
        True on success.

    Raises:
        Exception: if the GitLab job did not finish with status 'success'.
    """
    is_cache = self.config.get('cache')
    save_dir = os.path.dirname(store)

    if is_cache:
        cache_files = list(db.collection('artifacts').find({
            'project_id': project_id,
            'job_id': job_id,
            'app_type': 'gitlab',
        }))
        if cache_files:
            # Serve every cached artifact from GridFS; no GitLab round-trip.
            for record in cache_files:
                file_id = record.get('file_id')
                if not file_id:
                    continue
                filename = os.path.join(save_dir, record['filename'])
                with open(filename, 'wb') as stream:
                    db.fs_bucket().download_to_stream(file_id, stream)
                extract(filename, save_dir)
                os.unlink(filename)
            return True

    project = self.projects.get(project_id)
    job = project.jobs.get(job_id)
    if job.status != 'success':
        # BUG FIX: the original used a '%s' placeholder with str.format(),
        # so the status was never interpolated into the message.
        raise Exception(
            'gitlab job status must be success, {} got'.format(job.status))

    with open(store, 'wb') as f:
        job.artifacts(streamed=True, action=f.write)

    if is_cache:
        with open(store, mode='rb') as fd:
            name = os.path.basename(store)
            # BUG FIX: store the raw file id (not str()) so the cache-read
            # path above can hand it straight to GridFS, matching the
            # Jenkins saver; and tag the record 'gitlab' (was 'jenkins')
            # so the cache lookup above can ever match.
            file_id = db.save_file(filename=name, fileobj=fd)
            store_info = {
                'app_type': 'gitlab',
                'file_id': file_id,
                'project_id': project_id,
                'job_id': job_id,
                'filename': name,
                'created_at': time.time(),
            }
            db.collection('artifacts').insert_one(store_info)

    with zipfile.ZipFile(store, 'r') as zip_ref:
        zip_ref.extractall(os.path.dirname(store))
    os.unlink(store)
    return True
def get_book(self, home_path):
    """Load playbook entries from *home_path*, dedupe them, and upload raw files.

    For each entry loaded by ``Dumper.load_from_dir``:
    - tags it with the default book name;
    - deletes the stored record when the same path was already seen
      (duplicate cleanup);
    - uploads the file content to GridFS for plain-file entries that are
      neither editable, already stored, nor directories;
    - stamps created_at (epoch seconds) / updated_at (ISO string).

    NOTE(review): items are mutated in place and nothing is returned or
    persisted here beyond the GridFS uploads and duplicate deletes — confirm
    the caller picks up the mutations, otherwise a ``return book`` may be
    missing.
    """
    book = Dumper.load_from_dir(home_path)
    # Set for O(1) duplicate detection; the original used a list, making the
    # membership test O(n) per item.
    seen_paths = set()
    for item in book:
        item['book_name'] = 'default'
        if item.get('path') in seen_paths:
            # Duplicate path: remove the stale playbook record.
            db.collection('playbook').delete_one({'_id': item['_id']})
        seen_paths.add(item.get('path'))
        if not item.get('is_edit') and not item.get(
                'file_id') and not item.get('is_dir'):
            # item['path'] starts with '/', so plain concatenation is
            # required (os.path.join would discard home_path for an
            # absolute second component).
            path_name = home_path + item['path']
            with open(path_name, mode='rb') as fd:
                file_id = db.save_file(item['path'], fd)
            item['file_id'] = file_id
        # NOTE(review): timestamps applied to every item; the original
        # formatting was ambiguous about whether this sat inside the
        # file-upload branch — confirm against callers.
        item['created_at'] = int(time.time())
        item['updated_at'] = datetime.datetime.now().isoformat()
def upload_playbook(_id):
    """Upload a single playbook file into book *_id* (Flask route handler).

    Strips the leading directory component from the uploaded filename,
    creates any missing ancestor directory nodes, then upserts the file
    node — storing binary content in GridFS and editable text inline
    (vault-encrypted when the content is an Ansible Vault blob).

    Returns:
        A (json, status) tuple: 400 on a missing book or file part,
        otherwise ``{"message": "ok", "code": 0}``.
    """
    files = request.files
    record = Book.find_by_id(_id)
    if not record:
        return jsonify({
            "message": "book not found",
            "code": 104004,
        }), 400
    # Robustness: also reject a multipart body that lacks the 'file' part —
    # the original indexed files['file'] and raised KeyError.
    if not files or 'file' not in files:
        return jsonify({
            'message': 'invalid files params',
            'code': 104001
        }), 400
    file = files['file']
    # Drop the first path component (the book folder name) and normalise to
    # an absolute-style '/sub/path' key.
    filename = file.filename.lstrip('/')
    path_list = filename.split('/')
    filename = '/' + '/'.join(path_list[1:])
    home_path, basename = os.path.split(filename)

    # Ensure every ancestor directory of the file has a node record.
    for dirname in set(_make_path(filename)):
        check = Playbook.find_one({
            'book_id': _id,
            'path': dirname,
        })
        if not check:
            parent_path, name = os.path.split(dirname)
            parent_path = parent_path if parent_path != '/' else None
            parent = {
                'path': dirname,
                'is_dir': True,
                'is_edit': False,
                'book_id': _id,
                'parent': parent_path,
                'name': name,
                'created_at': time.time(),
            }
            meta = get_meta(dirname)
            parent.update(meta)
            parent['additions'] = meta
            Playbook.insert_one(parent)

    data = {
        'path': filename,
        'is_dir': False,
        'parent': home_path or None,
        'book_id': _id
    }
    can_edit = is_edit(file)
    if not can_edit:
        # Non-editable (binary) content goes to GridFS.
        data['file_id'] = db.save_file(filename=filename, fileobj=file)
    else:
        content = file.stream.read()
        content = content.decode('utf-8')
        data['is_encrypt'] = Vault.is_encrypted(content)
        if data['is_encrypt']:
            # @todo vault password
            vault = Vault()
            data['content'] = vault.encrypt_string(content)
        else:
            data['content'] = content
        # md5 is always taken over the plaintext — hoisted out of both
        # branches (the original duplicated this assignment).
        data['md5'] = md5(content)

    meta = get_meta(data['path'])
    data.update(meta)
    data['additions'] = meta
    data['is_edit'] = can_edit
    data['created_at'] = time.time()
    data['updated_at'] = time.time()
    Playbook.update_one({
        'path': filename,
        'book_id': _id
    }, {'$set': data}, upsert=True)
    return jsonify({
        "message": "ok",
        "code": 0,
    })
def upload():
    """Upload a file under an existing playbook node (legacy variant).

    ``parent`` form field is either '/' (upload at book root, requires
    ``bookId``) or the ObjectId of an existing playbook node. Binary
    content is stored in GridFS; editable text is stored inline. The
    record is upserted by (path, book_id).

    Returns:
        A (json, status) tuple: 400/404 on bad params or missing records,
        otherwise ``{"message": "ok", "code": 0}``.
    """
    files = request.files
    form = request.form
    if not form or not form.get('parent'):
        return jsonify({
            'message': 'illegal param',
            'code': 104000,
        }), 400
    # Robustness: the original indexed files['files'] without checking,
    # raising KeyError on a missing part (the newer variant of this route
    # guards it the same way).
    if not files.get('files'):
        return jsonify({
            'message': 'illegal param',
            'code': 104001,
        }), 400
    parent_id = form.get('parent')
    book_id = form.get('bookId')
    if parent_id == '/' and book_id:
        book = db.collection('books').find_one({'_id': ObjectId(book_id)})
        if not book:
            return jsonify({
                "message": "record not found",
                "code": 104040,
            }), 404
        parent = {
            'path': '',
            'book_id': book_id
        }
    else:
        parent = db.collection('playbook').find_one({'_id': ObjectId(parent_id)})
        if not parent:
            return jsonify({
                "message": "parent path not found",
                "code": 104004,
            }), 400

    file = files['files']
    filename = file.filename
    path = parent['path'] + '/' + filename
    record = {
        'book_id': parent.get('book_id'),
        'path': path,
        'is_dir': False,
    }
    record.update(get_meta(path))
    can_edit = is_edit(file)
    if not can_edit:
        record['file_id'] = db.save_file(filename=filename, fileobj=file)
    else:
        content = file.read()
        record['content'] = content.decode('utf-8')
    record['is_edit'] = can_edit
    record['created_at'] = int(time.time())
    record['updated_at'] = datetime.datetime.now().isoformat()
    # NOTE(review): the filter matches book_id as ObjectId while $set writes
    # it back as whatever type parent carried (a str at the '/' branch), so a
    # fresh upsert may persist a string book_id that later ObjectId queries
    # miss — confirm the canonical type against the rest of the schema.
    db.collection('playbook').update_one({
        'path': path,
        'book_id': ObjectId(parent['book_id']),
    }, {
        '$set': record,
    }, upsert=True)
    return jsonify({
        "message": "ok",
        "code": 0,
    })
def upload():
    """Upload a file under a playbook node (current variant).

    ``parent`` form field is either '/' (upload at book root, requires
    ``bookId``) or the ObjectId of an existing playbook node. Binary
    content goes to GridFS; editable text is stored inline. The assembled
    record is handed to ``check_playbook_node`` for persistence.

    Returns:
        A (json, status) tuple: 400/404 on bad params or missing records,
        otherwise ``{'message': 'ok', 'code': 0}``.
    """
    files = request.files
    form = request.form
    if not form or not form.get('parent'):
        return jsonify({
            'message': 'illegal param',
            'code': 104000,
        }), 400
    if not files.get('files'):
        return jsonify({
            'message': 'illegal param',
            'code': 104001,
        }), 400
    parent_id = form.get('parent')
    book_id = form.get('bookId')
    if parent_id == '/' and book_id:
        book = Book.find_one({'_id': ObjectId(book_id)})
        if not book:
            return jsonify({
                "message": "record not found",
                "code": 104040,
            }), 404
        current = {'path': '/', 'book_id': book_id}
    else:
        current = Playbook.find_one({'_id': ObjectId(parent_id)})
        if not current:
            return jsonify({
                "message": "current path not found",
                "code": 104004,
            }), 400
        # BUG FIX: when uploading under an existing node the form may omit
        # bookId; fall back to the node's own book_id so the record is not
        # written with book_id=None (the legacy variant of this route reads
        # it from the parent node for the same reason).
        book_id = book_id or current.get('book_id')

    file = files['files']
    filename = file.filename
    path = os.path.join(current['path'], filename)
    record = {
        'book_id': book_id,
        'path': path,
        'is_dir': False,
        'parent': os.path.dirname(path),
    }
    record.update(get_meta(path))
    can_edit = is_edit(file)
    if not can_edit:
        record['file_id'] = db.save_file(filename=filename, fileobj=file)
    else:
        content = file.read()
        record['content'] = content.decode('utf-8')
    record['is_edit'] = can_edit
    record['created_at'] = int(time.time())
    record['updated_at'] = datetime.datetime.now().isoformat()
    # Persistence (upsert by path/book_id) is delegated to the shared helper.
    check_playbook_node(record)
    return jsonify({
        'message': 'ok',
        'code': 0,
    })
def save_artifacts(self, save_dir, job_name, build_id=None,
                   strict_validation=False, artifact_name=None):
    """Fetch Jenkins build artifacts into *save_dir*.

    When caching is enabled and cached copies exist, they are restored from
    GridFS and extracted (returns True). Otherwise each matching artifact is
    downloaded from Jenkins, extracted into *save_dir*, optionally written
    back to the cache, and its temporary archive removed.
    """
    is_cache = self.config.get('cache')
    self.logger.info('use cached:{}'.format(is_cache))

    if is_cache:
        cached = list(db.collection('artifacts').find({
            'job_name': job_name,
            'build_id': build_id,
            'app_type': 'jenkins',
        }))
        if cached:
            for entry in cached:
                fid = entry.get('file_id')
                if not fid:
                    continue
                msg = 'load file from cached, save_dir:{}, filename:{}'.format(
                    save_dir, entry['filename'])
                self.logger.info(msg)
                target = os.path.join(save_dir, entry['filename'])
                with open(target, 'wb') as out:
                    db.fs_bucket().download_to_stream(fid, out)
                extract(target, save_dir)
                os.unlink(target)
            return True

    # Download, extract, and remember each archive we pulled from Jenkins.
    downloaded = []
    for artifact in self.get_artifacts(job_name, build_id):
        if artifact_name and artifact.filename != artifact_name:
            continue
        saved_path = artifact.save_to_dir(save_dir, strict_validation)
        msg = 'download artifacts from {}, save_dir:{}, filename:{}'
        msg = msg.format(self.config.get('base_url'), save_dir,
                         artifact.filename)
        self.logger.info(msg)
        extract(saved_path, save_dir)
        downloaded.append({
            'filename': artifact.filename,
            'path': saved_path
        })

    # Cache the raw archives (when enabled) and clean up the temp files.
    for item in downloaded:
        name, path = item['filename'], item['path']
        if is_cache:
            with open(path, mode='rb') as stream:
                fid = db.save_file(filename=name, fileobj=stream)
                db.collection('artifacts').insert_one({
                    'app_type': 'jenkins',
                    'file_id': fid,
                    'job_name': job_name,
                    'build_id': build_id,
                    'filename': os.path.basename(name),
                    'created_at': time.time(),
                })
        os.unlink(path)