def send_verify_mail():
    """Send an email-verification link to the logged-in user.

    Looks up the session user, builds a tokenised verification URL,
    mails it via SMTP and records the outgoing message in the
    ``mail_verify`` collection.

    :return: JSON response (403 when the session user is unknown).
    """
    user_id = login_user.get('user_id')
    record = User().find_by_id(user_id)
    if not record:
        return jsonify({
            'message': 'invalid user',
            'code': 104033
        }), 403
    email = record.get('email')
    # NOTE(review): the token is the md5 of the request id — a request id
    # may be predictable; confirm this is acceptable for a verify token.
    token = md5(str(current_request_id))
    # NOTE(review): 'dommain' looks like a typo for 'domain' — confirm the
    # config attribute really uses this spelling before changing it.
    url = config.dommain + '/users/email/verify?token=' + token
    message = '[Eclogue]Please click url to verify your email:<a href="{}">{}</a>'.format(url, url)
    smtp = SMTP()
    smtp.send(message, email, subtype='html')
    # persist the outgoing mail so the token can be matched on callback
    data = {
        'user_id': user_id,
        'token': token,
        'created_at': time.time(),
        'email': email,
        'content': message,
    }
    db.collection('mail_verify').insert_one(data)
    return jsonify({
        'message': 'ok',
        'code': 0
    })
def dispatch(payload):
    """Queue an ansible run described by ``payload``.

    Registers a tasktiger task (locked on the payload hash so identical
    payloads cannot run concurrently) and records a pending ``Perform``
    entry before releasing the task.

    :param payload: dict with at least 'inventory' and 'tasks'.
    :return: inserted Perform record id, or None on an invalid payload.
    """
    if not payload.get('inventory') or not payload.get('tasks'):
        return None
    lock_key = md5(str(json.dumps(payload)))
    run_id = payload.get('req_id') or str(uuid.uuid4())
    task = Task(tiger, func=run, args=[run_id, payload],
                queue='book_runtime', unique=True, lock=True,
                lock_key=lock_key)
    record = {
        'uid': lock_key,
        'run_id': run_id,
        'run_by': payload.get('username'),
        'options': payload,
        'result': '',
        'state': 'pending',
        'created_at': time.time(),
        'updated_at': time.time(),
    }
    inserted = Perform.insert_one(record)
    task.delay()
    return inserted.inserted_id
def load_from_dir(home_path, exclude=None, links=False, book_name=None):
    """Build playbook records from the directory tree at ``home_path``.

    :param home_path: root directory to scan.
    :param exclude: glob-like patterns to skip; defaults to
        ``['*.retry']``. ``None`` replaces the former shared mutable
        default argument.
    :param links: follow symlinks while walking.
    :param book_name: book name (defaults to basename of ``home_path``).
    :return: list of dir/file record dicts.
    """
    if exclude is None:
        exclude = ['*.retry']
    bucket = []
    cursor = 0
    parent = home_path
    book_name = book_name or os.path.basename(home_path)
    # '*.retry' -> '.*?.retry' (glob-ish to regex)
    pattern = '|'.join(exclude).replace('*', '.*?')
    for current, dirs, files in os.walk(home_path, topdown=True,
                                        followlinks=links):
        pathname = current.replace(home_path, '') or '/'
        if exclude:
            match = re.search(pattern, pathname)
            if match:
                continue
        dir_record = {
            'book_name': book_name,
            'path': pathname,
            'is_dir': True,
            'is_edit': False,
            'seq_no': cursor,
            'parent': None,
            'created_at': int(time.time()),
        }
        if not current == home_path:
            dir_record['parent'] = parent
        meta = get_meta(pathname=pathname)
        dir_record.update(meta)
        parent = pathname
        bucket.append(dir_record)
        for file in files:
            pathname = parent.rstrip('/') + '/' + file
            if exclude:
                # NOTE(review): directories use re.search but files use
                # re.match — confirm the asymmetry is intended.
                match = re.match(pattern, pathname)
                if match:
                    continue
            cursor += 1
            filename = current + '/' + file
            is_edit = Dumper.is_read(filename)
            file_record = dir_record.copy()
            file_record['is_edit'] = is_edit
            file_record['path'] = pathname
            file_record['parent'] = parent
            file_record['is_dir'] = False
            file_record['seq_no'] = cursor
            if is_edit:
                # readable text file: store content inline with its hash
                with open(filename, 'r', encoding='utf-8') as fd:
                    file_record['content'] = fd.read()
                file_record['md5'] = md5(file_record['content'])
                file_record['is_encrypt'] = Vault.is_encrypted(
                    file_record['content'])
            meta = get_meta(file_record['path'])
            file_record.update(meta)
            file_record['meta'] = meta
            bucket.append(file_record)
            cursor += 1
    return bucket
def build_book_from_history(self, build_id):
    """Restore a book workspace from an archived build.

    Downloads the zipped book stored in GridFS for the given build
    history record and extracts it into a per-task workspace.

    :param build_id: ObjectId string of the ``build_history`` record.
    :return: path of the extracted book workspace.
    """
    history = db.collection('build_history').find_one(
        {'_id': ObjectId(build_id)})
    task_id = history.get('task_id')
    file_id = history.get('file_id')
    job_info = history.get('job_info')
    book = Book.find_one({'_id': ObjectId(job_info.get('book_id'))})
    bookspace = self.get_book_space(book.get('name'))
    bookspace = os.path.join(bookspace, md5(str(task_id)))
    self.mkdir(bookspace)
    # Only the filename is needed; close the handle immediately instead
    # of leaking the open NamedTemporaryFile descriptor (the previous
    # `NamedTemporaryFile(...).name` never closed it).
    tmp = NamedTemporaryFile(delete=False, suffix='.zip')
    tmp.close()
    save_file = tmp.name
    with open(save_file, 'wb') as fd:
        db.fs_bucket().download_to_stream(ObjectId(file_id), fd)
    extract(save_file, bookspace)
    os.unlink(save_file)
    return bookspace
def install(self, workspace='job'):
    """Copy the container's working directory into a local workspace.

    Streams a tar archive out of the container, unpacks it under the
    job (or build) space and removes the temporary archive afterwards.

    :param workspace: 'job' for the job space, anything else for the
        build space.
    """
    app_path = self.image.replace(':', '/')
    home_path = (self.job_space(app_path) if workspace == 'job'
                 else self.build_space(app_path))
    Workspace.mkdir(home_path)
    # random archive name avoids collisions inside the workspace
    archive = home_path + '/' + md5(str(uuid.uuid4())) + '.tar'
    stream, _stat = self.get_archive(self.config.get('working_dir'))
    with open(archive, 'wb') as fd:
        for chunk in stream:
            fd.write(chunk)
    extract(archive, home_path)
    os.unlink(archive)
    # @todo store to mongodb gridfs
    if self.config.get('task_id'):
        pass
def load_book_from_db(self, name, roles=None):
    """Materialise playbook files for book ``name`` into the workspace.

    Iterates the book's ``playbook`` records (ordered by seq_no) and
    writes directories/files to disk, skipping files whose md5 already
    matches. Binary files are streamed from GridFS; editable files get
    their md5 refreshed in the collection.

    :param name: book name.
    :param roles: optional whitelist of role folder names.
    :return: the pymongo cursor of playbook records.
    """
    wk = Workspace()
    workspace = wk.workspace
    wk.check_workspace()
    books = db.collection('playbook').find({
        'book_name': name
    }).sort('seq_no', pymongo.ASCENDING)
    for item in books:
        if roles:
            folder = item.get('name')
            if folder and folder not in roles:
                continue
        filename = workspace + item['path']
        if item['is_dir']:
            if not os.path.isdir(filename):
                # was os.mkdir(filename, 0o600): mode 0o600 lacks the
                # execute bit so the directory could not be entered, and
                # mkdir fails when parents are missing — use makedirs
                # with 0o755, consistent with setup().
                os.makedirs(filename, 0o755)
            continue
        if os.path.isfile(filename):
            file_hash = self.file_md5(filename)
            if item.get('md5') and item['md5'] == file_hash:
                continue
        dirname = os.path.dirname(filename)
        if not os.path.exists(dirname):
            os.makedirs(dirname)
        if item['is_edit']:
            # refresh the stored md5 before writing the content out
            db.collection('playbook').update_one(
                {'_id': item['_id']},
                {'$set': {
                    'md5': md5(item['content'].encode('utf8'))
                }})
            with open(filename, 'w') as stream:
                stream.write(item['content'])
        else:
            with open(filename, 'wb') as stream:
                db.fs_bucket().download_to_stream(
                    item['file_id'], stream)
    return books
def edit_file(_id):
    """Update a playbook file's content, archiving the previous version.

    The current record is copied into ``file_history`` (with its _id
    preserved as ``uid``) before the content/md5 update is applied.

    :param _id: ObjectId string of the playbook record.
    :return: JSON response.
    """
    # was `if not id:` — that tested the builtin `id`, which is always
    # truthy, so the empty-param guard never fired
    if not _id:
        return jsonify({
            'message': 'illegal param',
            'code': 104000,
        }), 400
    body = request.get_json()
    content = body.get('content', '')
    vault = body.get('vault', False)
    if vault:
        # @todo encrypt by vauld
        content = content
    update = {
        '$set': {
            'content': content,
            'md5': md5(content.encode('utf8'))
        }
    }
    collection = db.collection('playbook')
    record = collection.find_one({'_id': ObjectId(_id)})
    if not record:
        return jsonify({
            'message': 'record not found',
            'code': 104040
        }), 404
    # archive the current version before overwriting it
    record['uid'] = record['_id']
    record.pop('_id', None)
    db.collection('file_history').insert_one(record)
    collection.update_one({'_id': ObjectId(_id)}, update)
    return jsonify({
        'message': 'ok',
        'code': 0,
        'data': record,
    })
def upload_playbook(_id):
    """Upload one file into book ``_id``, creating missing parent dirs.

    The client's top-level folder name is stripped from the upload path.
    Text files are stored inline (re-encrypted when already vaulted);
    binary files go to GridFS. The record is upserted by (path, book_id).

    :param _id: book id string.
    :return: JSON response.
    """
    files = request.files
    record = Book.find_by_id(_id)
    if not record:
        return jsonify({
            "message": "book not found",
            "code": 104004,
        }), 400
    if not files:
        return jsonify({
            'message': 'invalid files params',
            'code': 104001
        }), 400
    file = files['file']
    # drop the client-side top-level folder from the upload path and
    # re-anchor the remainder at '/'
    filename = file.filename.lstrip('/')
    path_list = filename.split('/')
    filename = '/'.join(path_list[1:])
    filename = '/' + filename
    home_path, basename = os.path.split(filename)
    # ensure every ancestor directory has a playbook record
    file_list = set(_make_path(filename))
    for dirname in file_list:
        check = Playbook.find_one({
            'book_id': _id,
            'path': dirname,
        })
        if not check:
            parent_path, name = os.path.split(dirname)
            parent_path = parent_path if parent_path != '/' else None
            parent = {
                'path': dirname,
                'is_dir': True,
                'is_edit': False,
                'book_id': _id,
                'parent': parent_path,
                'name': name,
                'created_at': time.time(),
            }
            meta = get_meta(dirname)
            parent.update(meta)
            parent['additions'] = meta
            Playbook.insert_one(parent)
    data = {
        'path': filename,
        'is_dir': False,
        'parent': home_path or None,
        'book_id': _id
    }
    can_edit = is_edit(file)
    if not can_edit:
        # binary payload: keep it in GridFS and reference it by id
        file_id = db.save_file(filename=filename, fileobj=file)
        data['file_id'] = file_id
    else:
        content = file.stream.read()
        content = content.decode('utf-8')
        data['is_encrypt'] = Vault.is_encrypted(content)
        if data['is_encrypt']:
            # @todo vault password
            vault = Vault()
            # NOTE(review): content already detected as encrypted is
            # encrypted again, while md5 is taken from the input text —
            # confirm both are intended.
            data['content'] = vault.encrypt_string(content)
            data['md5'] = md5(content)
        else:
            data['content'] = content
            data['md5'] = md5(content)
    meta = get_meta(data['path'])
    data.update(meta)
    data['additions'] = meta
    data['is_edit'] = can_edit
    data['created_at'] = time.time()
    data['updated_at'] = time.time()
    Playbook.update_one({
        'path': filename,
        'book_id': _id
    }, {'$set': data}, upsert=True)
    return jsonify({
        "message": "ok",
        "code": 0,
    })
def setup():
    """Sync every playbook record from MongoDB onto the local workspace.

    Creates directories, writes editable files (refreshing their stored
    md5) and streams binary files from GridFS. Responds with the total
    runtime.

    :return: JSON response including 'runtime' in seconds.
    """
    wk = Workspace()
    workspace = wk.workspace
    # NOTE(review): sibling code calls wk.check_workspace(); confirm a
    # module-level check_workspace() exists, otherwise this raises
    # NameError.
    check_workspace()
    if not os.path.exists(workspace):
        os.makedirs(workspace, 0o755)
    books = db.collection('playbook').find().sort('path', pymongo.ASCENDING)
    # books = collection_array(books)
    start = time.time()
    for item in books:
        filename = workspace + item['path']
        if item['is_dir']:
            if os.path.isdir(filename):
                continue
            else:
                os.makedirs(filename, 0o755)
        else:
            # skip files already on disk with a matching hash
            if os.path.isfile(filename):
                file_hash = file_md5(filename)
                if item.get('md5') and item['md5'] == file_hash:
                    continue
            dirname = os.path.dirname(filename)
            if not os.path.exists(dirname):
                os.makedirs(dirname)
            if item['is_edit']:
                # refresh the stored md5 before writing the content out
                db.collection('playbook').update_one({'_id': item['_id']},
                                                     {'$set': {'md5': md5(item['content'])}})
                with open(filename, 'w+') as stream:
                    stream.write(item['content'])
            else:
                with open(filename, 'wb') as stream:
                    db.fs_bucket().download_to_stream(item['file_id'], stream)
    end = time.time()
    return jsonify({
        "message": "ok",
        "code": 0,
        'runtime': end - start
    })
def load_book_from_db(self, name, roles=None, build_id=False):
    """Write the playbook files of book ``name`` into a book workspace.

    Register variables referenced by a file are resolved from the
    Configuration collection (vault-decrypting encrypted values) and
    merged into the file's YAML content before it is written to disk.

    :param name: book name.
    :param roles: optional whitelist of role folders.
    :param build_id: when set, files go to a per-build workspace.
    :return: workspace path, or False when the book/files are missing.
    """
    book = Book.find_one({'name': name})
    if not book:
        return False
    files = Model.build_model('playbook').find({'book_id': str(book['_id'])})\
        .sort([('is_edit', pymongo.ASCENDING), ('path', pymongo.ASCENDING)])
    files = list(files)
    if not files:
        return False
    if build_id:
        bookspace = os.path.join(self.book, md5(str(build_id)))
    else:
        bookspace = self.get_book_space(name)

    def parse_register(record):
        # Merge registered configuration variables into the record's YAML.
        register = record.get('register')
        if not register:
            return record
        c_ids = map(lambda i: ObjectId(i), register)
        cfg_records = Configuration.find({'_id': {'$in': list(c_ids)}})
        if not cfg_records:
            return record
        try:
            variables = {}
            content = yaml.safe_load(record.get('content', ''))
            if not content:
                return record
            vault = Vault({'vault_pass': config.vault.get('secret')})
            for cfg in cfg_records:
                config_vars = cfg.get('variables')
                if not config_vars:
                    continue
                for k, v in config_vars.items():
                    key = '_'.join(
                        ['ECLOGUE', 'CONFIG', cfg.get('name', ''), k])
                    is_encrypt = Vault.is_encrypted(v)
                    value = v
                    if is_encrypt:
                        value = vault.decrypt_string(value)
                    variables[key] = value
            content = dict(content)
            content.update(variables)
            record['content'] = yaml.safe_dump(content)
        except Exception as e:
            # best-effort: fall back to the raw content on parse errors
            print(e)
        return record

    self.check_workspace(path=self._check_make(bookspace))
    for item in files:
        item = parse_register(item)
        if roles and item.get('folder'):
            folder = item.get('folder')
            if folder and folder not in roles:
                continue
        filename = bookspace + item.get('path')
        if item['is_dir']:
            if os.path.isdir(filename):
                continue
            self.mkdir(filename)
        else:
            if os.path.isfile(filename):
                file_hash = file_md5(filename)
                if item.get('md5') and item['md5'] == file_hash:
                    continue
            dirname = os.path.dirname(filename)
            if not os.path.exists(dirname):
                os.makedirs(dirname)
            if item['is_edit']:
                # was build_model('playbooks') — the records are read
                # from the 'playbook' collection above, so the md5
                # refresh targeted a different (likely wrong) collection
                Model.build_model('playbook').update_one(
                    {'_id': item['_id']},
                    {'$set': {
                        'md5': md5(item['content'])
                    }})
                with open(filename, 'w') as stream:
                    stream.write(item['content'])
            else:
                with open(filename, 'wb') as stream:
                    db.fs_bucket().download_to_stream(
                        item['file_id'], stream)
    return bookspace
def import_book_from_dir(self, home_path, book_id, exclude=None,
                         links=False, prefix='/'):
    """Import a directory tree into the ``playbook`` collection.

    Walks ``home_path`` building one record per directory/file, deletes
    db records whose file no longer exists on disk, and upserts the
    rest (preserving previously stored ``additions``). The book is
    disabled when no entry role is found.

    :param home_path: directory to import.
    :param book_id: target book id.
    :param exclude: glob-like skip patterns (default ``['*.retry']``).
    :param links: follow symlinks while walking.
    :param prefix: path prefix for imported records.
    :return: list of built records.
    """
    book_id = str(book_id)
    exclude = exclude or ['*.retry']
    bucket = []
    cursor = 0
    home_path = home_path.rstrip('/')
    parent = home_path
    book_record = Book.find_by_id(book_id)
    model = Model.build_model('playbook')
    # Materialise the cursor: it is iterated again below to build the
    # path -> record mapping. The original pymongo cursor was exhausted
    # by the first pass, which left the mapping empty.
    playbooks = list(model.find({'book_id': book_id}))
    paths = [i['path'] for i in playbooks]
    pattern = '|'.join(exclude).replace('*', '.*?')
    home_path = '/'.join([home_path, ''])
    for current, dirs, files in os.walk(home_path, topdown=True,
                                        followlinks=links):
        pathname = current.replace(home_path, '')
        if pathname != '/':
            pathname = os.path.join(prefix, pathname)
        if exclude:
            match = re.search(pattern, pathname)
            if match:
                continue
        # a path still on disk must not be purged below
        if pathname in paths:
            index = paths.index(pathname)
            paths.pop(index)
        dir_record = {
            'book_id': str(book_record.get('_id')),
            'path': pathname,
            'is_dir': True,
            'is_edit': False,
            'seq_no': cursor,
            'parent': None,
            'created_at': int(time.time()),
        }
        if not current == home_path:
            dir_record['parent'] = parent
        meta = get_meta(pathname)
        dir_record.update(meta)
        dir_record['additions'] = meta
        parent = pathname
        bucket.append(dir_record)
        for file in files:
            pathname = parent.rstrip('/') + '/' + file
            if exclude:
                match = re.match(pattern, pathname)
                if match:
                    continue
            cursor += 1
            filename = current + '/' + file
            can_edit = is_edit(filename)
            file_record = dir_record.copy()
            file_record['is_edit'] = can_edit
            file_record['path'] = pathname
            file_record['parent'] = parent
            file_record['is_dir'] = False
            file_record['seq_no'] = cursor
            # was `if is_edit:` — that tested the helper function itself
            # (always truthy), so binary files were also read as utf-8
            if can_edit:
                with open(filename, 'r', encoding='utf-8') as fd:
                    file_record['content'] = fd.read()
                file_record['md5'] = md5(file_record['content'])
                file_record['is_encrypt'] = Vault.is_encrypted(
                    file_record['content'])
            meta = get_meta(file_record['path'])
            file_record['additions'] = meta
            file_record.update(meta)
            bucket.append(file_record)
            cursor += 1
    is_entry = filter(lambda i: i.get('role') == 'entry', bucket)
    is_entry = list(is_entry)
    # if not entry set book status to disable
    if not is_entry:
        Book.update_one({'_id': ObjectId(book_id)},
                        {'$set': {
                            'status': 0
                        }})
    # purge records whose files vanished from disk
    for path in paths:
        model.delete_one({'book_id': book_id, 'path': path})
    # was `map(lambda i: {mapping['path']: i}, playbooks)` — the lazy map
    # was never consumed (and keyed the wrong dict), so mapping stayed
    # empty and every record was re-inserted instead of updated
    mapping = {i['path']: i for i in playbooks}
    for item in bucket:
        record = mapping.get(item['path'])
        if not record:
            model.insert_one(item)
            continue
        # inherit old additions (guard the key: old records may lack it)
        if record.get('additions'):
            item['additions'].update(record['additions'])
        model.update_one({'_id': record['_id']}, {'$set': item})
    return bucket
def edit_file(_id):
    """
    edit playbook file
    @todo add lock
    :param _id: ObjectId string
    :return: json
    """
    params = request.get_json() or request.form
    if not params:
        return jsonify({
            'message': 'invalid params',
            'code': 154000,
        }), 400
    edit_type = params.get('type')
    if edit_type == 'upload':
        return upload_file(_id)
    name = params.get('name')
    role = params.get('role')
    description = params.get('description')
    status = params.get('status', 1)
    maintainer = params.get('maintainer', [])
    can_edit = params.get('is_edit')
    is_dir = params.get('is_dir')
    is_encrypt = params.get('is_encrypt')
    folder = params.get('folder')
    register = params.get('register')
    content = params.get('content')
    record = Playbook.find_by_id(_id)
    if not record:
        return jsonify({
            'message': 'record not found',
            'code': 154041,
        }), 404
    # only submitted fields are written; status always is
    data = {
        'status': status,
    }
    if name:
        data['name'] = name
    if role:
        data['role'] = role
    if folder:
        data['folder'] = folder
    if content:
        data['content'] = content
        data['md5'] = md5(content)
    if description:
        data['description'] = description
    if maintainer:
        data['maintainer'] = maintainer
    if can_edit is not None:
        # was bool(is_edit) — that was the imported helper function
        # (always truthy), forcing is_edit to True regardless of the
        # submitted value
        data['is_edit'] = bool(can_edit)
        data['is_encrypt'] = bool(is_encrypt)
    if is_dir is not None:
        data['is_dir'] = bool(is_dir)
    if register:
        # materialise the ids: pymongo cannot encode a lazy map object
        # inside the $in clause
        c_ids = [ObjectId(i) for i in register]
        where = {'_id': {'$in': c_ids}}
        register_config = Configuration.find(where)
        if not register_config:
            return jsonify({
                'message': 'invalid register config',
                'code': 154042,
            }), 404
        data['register'] = register
    Playbook.update_one({'_id': ObjectId(_id)}, {'$set': data}, upsert=True)
    data['_id'] = _id
    book = Book.find_one({'_id': ObjectId(record['book_id'])})
    wk = Workspace()
    wk.write_book_file(book.get('name'), record)
    return jsonify({
        'message': 'ok',
        'code': 0,
        'data': data,
    })
def import_book_from_dir(self, home_path, book_id, exclude=None,
                         links=False):
    """Build playbook records from the directory tree at ``home_path``.

    Produces one record per directory/file. When no entry role is
    present a default ``/entry.yml`` record is appended so the book
    stays runnable.

    :param home_path: directory to import.
    :param book_id: ObjectId string of the book.
    :param exclude: glob-like skip patterns; defaults to
        ``['*.retry']`` (``None`` replaces the former shared mutable
        default argument).
    :param links: follow symlinks while walking.
    :return: list of record dicts.
    """
    if exclude is None:
        exclude = ['*.retry']
    bucket = []
    cursor = 0
    parent = home_path
    book_record = Book.find_one({'_id': ObjectId(book_id)})
    pattern = '|'.join(exclude).replace('*', '.*?')
    for current, dirs, files in os.walk(home_path, topdown=True,
                                        followlinks=links):
        pathname = current.replace(home_path, '') or '/'
        if exclude:
            match = re.search(pattern, pathname)
            if match:
                continue
        dir_record = {
            'book_id': str(book_record.get('_id')),
            'path': pathname,
            'is_dir': True,
            'is_edit': False,
            'seq_no': cursor,
            'parent': None,
            'created_at': int(time.time()),
        }
        if not current == home_path:
            dir_record['parent'] = parent
        meta = Workspace.get_meta(pathname=pathname)
        dir_record.update(meta)
        dir_record['additions'] = meta
        parent = pathname
        bucket.append(dir_record)
        for file in files:
            pathname = parent.rstrip('/') + '/' + file
            if exclude:
                match = re.match(pattern, pathname)
                if match:
                    continue
            cursor += 1
            filename = current + '/' + file
            can_edit = is_edit(filename)
            file_record = dir_record.copy()
            file_record['is_edit'] = can_edit
            file_record['path'] = pathname
            file_record['parent'] = parent
            file_record['is_dir'] = False
            file_record['seq_no'] = cursor
            # was `if is_edit:` — that tested the helper function itself
            # (always truthy), so binary files were also read as utf-8
            if can_edit:
                with open(filename, 'r', encoding='utf-8') as fd:
                    file_record['content'] = fd.read()
                file_record['md5'] = md5(file_record['content'])
                file_record['is_encrypt'] = Vault.is_encrypted(
                    file_record['content'])
            meta = self._get_role(file_record['path'])
            file_record.update(meta)
            file_record['additions'] = meta
            bucket.append(file_record)
            cursor += 1
    is_entry = list(filter(lambda i: i.get('role') == 'entry', bucket))
    if not is_entry:
        # no entry role found: append a default editable entry point
        path = '/entry.yml'
        entry = {
            'book_id': str(book_record.get('_id')),
            'path': path,
            'is_dir': False,
            'is_edit': True,
            'seq_no': 0,
            'content': '',
            'parent': None,
            'created_at': int(time.time()),
        }
        meta = self._get_role(path)
        entry.update(meta)
        entry['additions'] = meta
        bucket.append(entry)
    return bucket