def deploy_docker():
    """Upgrade the SaaS docker container across all consul cluster nodes.

    Stops the running nomad job, pushes the new docker image archive to every
    node, loads it there, then restarts the nomad job.  Progress is reported
    through the global queue ``q``.

    :return: None
    """
    create_influxdb()
    from app_projects.deploy.get_config import copy_template
    copy_template()
    # Stop the running container before replacing its image.
    os.system('nomad stop saas-container')
    q.put({"type": "Docker升级", "speed": "stop", "percentage": "{}".format(2)})
    # `consul members` output: first split item is the header line, the last
    # one is the empty string after the trailing newline — both are dropped.
    node_list = os.popen("consul members | awk '{ print $1 }'").read().split(
        '\n')[1:-1]
    # f/l are the lower/upper bounds of the synthetic progress percentage
    # reported per node.
    f = l = 13
    for item in node_list:
        # NOTE(review): structure reconstructed from a whitespace-collapsed
        # source — which of the two increments this condition guards should
        # be confirmed against version control.
        if l >= 87:
            f += (100 - 13) / len(node_list) - 5
        l += (100 - 13) / len(node_list) + 5
        q.put({
            "type": "Docker升级",
            "speed": "stop",
            "percentage": "{}".format(generate_random(f, l))
        })
        # Drop the old image, copy the new archive over, and load it.
        ssh_popen_1(item, 'docker rmi panacube:3.0')
        os.system('scp {docker_path} {node}:/var/admin'.format(
            docker_path=PathDir.docker_path(), node=item))
        ssh_popen_1(
            item, 'cd /var/admin && docker load -i {}'.format(
                config.get('version').get('dockerImage')))
    # Start the docker image again via nomad.
    os.system('nomad run /etc/kolla/nomad/saas-container.hcl')
def get_security_group_id(self):
    """Create the ``panacube3.0`` security group and return its id.

    If creation fails with code ``'02310100'`` (presumably "already
    exists" — TODO confirm against the awstack API docs), the ``default``
    group is looked up instead, its rules are opened and its id returned.

    :return: ``{'id': <group id>}`` on the fallback path, otherwise the raw
        creation response payload.
    """
    CREATE_SECURITY_GROUPS = "http://{}/awstack-resource/v1/security_groups".format(
        GetModel.awcloud_ip())
    security_group = {"name": "panacube3.0", "description": "3.0项目部署"}
    q.put({"type": "云平台", "speed_p": "stop", "percentage": "69"})
    resp = requests.post(CREATE_SECURITY_GROUPS,
                         data=json.dumps(security_group),
                         headers=self.append_headers(),
                         verify=False)
    # ['id]
    group_obj = resp.json()
    if 'code' in group_obj:
        if group_obj['code'] == '02310100':
            # Fallback: reuse the existing ``default`` security group.
            _url = "http://{}/awstack-resource/v1/security_groups".format(
                GetModel.awcloud_ip())
            data = requests.get(_url,
                                headers=self.append_headers(),
                                verify=False)
            js_data = data.json()['data']['data']
            _id = None
            for i in js_data:
                if i['name'] == 'default':
                    # Open the required ports on the default group.
                    self.set_rule(i['id'], OPEN_PORT)
                    _id = i['id']
            return {'id': _id}
    # Creation succeeded: open the required ports on the new group.
    if group_obj['data'] is not None:
        self.set_rule(group_obj['data']['data']['id'], OPEN_PORT)
    return group_obj
def upload_storage_image():
    """Upload the storage image, record the step as done and report progress.

    :return: JSON string ``{"code": 0, "message": "Successful"}``
    """
    from app_projects.tools.image import update_storage_image
    # Progress frames sent to the UI before and after the upload.
    started = {"type": "镜像上传", "size": "上传大小",
               "speed": "start", "percentage": "0"}
    finished = {"type": "镜像上传", "size": "上传大小",
                "speed": "stop", "percentage": "100"}
    q.put(started)
    update_storage_image()
    q.put(finished)
    global default_dict
    default_dict['upload_storage_image'] = 'yes'
    deployment_complete()
    return json.dumps({"code": 0, 'message': 'Successful'})
def deplot_docker_env():
    """Run the docker environment upgrade, then mark it complete.

    (Function name typo "deplot" is kept — callers depend on it.)

    :return: JSON string ``{"code": 0, "message": "Successful"}``
    """
    common = {"type": "Docker升级", "size": "上传大小"}
    q.put(dict(common, speed="start", percentage="0"))
    from app_projects.tools.env_deploy import deploy_docker
    deploy_docker()
    q.put(dict(common, speed="stop", percentage="100"))
    push_message()
    global default_dict
    default_dict['deploy_docker'] = 'yes'
    deployment_complete()
    return json.dumps({"code": 0, 'message': 'Successful'})
def start_bigdata():
    """Start the big-data deployment.

    Emits a start progress frame, runs :func:`deploy_bigdata`, records the
    step in ``default_dict`` and emits a completion frame.

    :return: JSON string ``{"code": 0, "message": "Successful"}``
    """
    global default_dict
    q.put({"type": "大数据", "size": "上传大小", "speed": "start",
           "percentage": "{}".format(0)})
    deploy_bigdata()
    default_dict['bigData'] = 'yes'
    deployment_complete()
    # Bug fix: this frame previously reported speed "start" at 100%; every
    # sibling step (docker, image upload, cloud upgrade) reports "stop" on
    # completion, so the UI never saw this stage finish.
    q.put({"type": "大数据", "size": "上传大小", "speed": "stop",
           "percentage": "{}".format(100)})
    return json.dumps({"code": 0, 'message': 'Successful'})
def push_bigdata_db():
    """Write the bigdata / panaocs / node configuration rows into the DB.

    For docker deployments the rows are written with shell commands (after
    clearing old rows on every consul node); otherwise they go through the
    ORM helpers ``delete_sql`` / ``insert_sqls``.
    """
    # get_nodes()
    time.sleep(4)  # presumably lets earlier services settle — TODO confirm
    panaocs = GetModel.get_panaocs()
    # Convert the netmask via exchange_mask (assumed dotted-quad <-> prefix
    # conversion from the helper's name — verify against its definition).
    panaocs['netmask'] = exchange_mask(panaocs['netmask'])
    # del panaocs['panacube_ip']
    data = [
        {
            "param_name": "bigdata",
            "param_value": panaocs,
            "description": None,
            "level": 1,
            "update_time": "0000-00-00 00:00:00.000000",
            "parent_id": None
        },
        {
            "param_name": "panaocs",
            "param_value": panaocs,
            "description": None,
            "level": 1,
            "update_time": "0000-00-00 00:00:00.000000",
            "parent_id": None
        },
        {
            "param_name": "node",
            "param_value": GetModel.get_node(),
            "description": None,
            "level": 1,
            "update_time": "0000-00-00 00:00:00.000000",
            "parent_id": None
        }
    ]
    if GetModel.deploy_type() == 'docker':
        if os.path.exists('/usr/local/udsafe/parent_bigdata_info'):
            # Existing install: clear stale rows on every cluster node and
            # locally before rewriting them.
            node_list = os.popen("consul members | awk '{ print $1 }'").read().split('\n')[1:-1]
            for node in node_list:
                ssh_popen_1(node, del_param_name)
            os.system(del_param_name)
        for _list in data:
            os.system(write_db.format(_list['param_name'],
                                      json.dumps(_list['param_value'])))
    else:
        from app_projects.models.m_insert import insert_sqls, delete_sql
        if os.path.exists('/usr/local/udsafe/parent_bigdata_info'):
            delete_sql()
        insert_sqls(data)
    current_app.logger.info('写入数据配置开始')
    current_app.logger.info(data)
    current_app.logger.info('写入数据配置结束')
    q.put({"type": "大数据", "speed_p": "stop", "percentage": "100"})
def kvm_image_upload():
    """Upload the panacube image (requires the 2.0 account credentials).

    Stores the returned image id under ``panacube_img_id`` and emits two
    intermediate progress frames.
    """
    cloud = ClearAwCloud(GetModel.get_auth_header())
    uploaded = cloud.upload_image(PathDir.image_path(None), 'panacube3.0')
    q.put({
        "type": "镜像上传",
        "size": "上传大小",
        "speed": "conducting",
        "percentage": "{}".format(generate_random(4, 15))
    })
    current_app.logger.info('upload kvm image: {}'.format(uploaded))
    model.update_or_create('panacube_img_id', uploaded['id'])
    q.put({
        "type": "镜像上传",
        "size": "上传大小",
        "speed": "conducting",
        "percentage": "{}".format(generate_random(16, 32))
    })
def create_cloud():
    """Deploy the cloud VM.

    Uploads the kvm image, creates the VM, and — when the big-data step has
    already run — pushes the big-data configuration into the database.

    :return: JSON string ``{"code": 0, "message": "Successful"}``
    """
    from app_projects.tools.image import kvm_image_upload
    q.put({"type": "云主机部署", "size": "上传大小", "speed": "start",
           "percentage": "{}".format(0)})
    kvm_image_upload()
    from app_projects.tools.env_deploy import deploy_kvm
    deploy_kvm()
    global default_dict
    if 'bigData' in default_dict:
        push_bigdata_db()
    default_dict['cloudPlatform'] = 'yes'
    deployment_complete()
    # Removed dead local ``w_kvm`` ({'ip': GetModel.kvm_ip()}): it was built
    # but never read or returned.
    q.put({"type": "云主机部署", "size": "上传大小", "speed": "stop",
           "percentage": "{}".format(100)})
    return json.dumps({"code": 0, 'message': 'Successful'})
def create_flavor(self):
    """Create the VM flavor, reusing an existing one on a duplicate error.

    On response code ``'02013201'`` (presumably "flavor already exists" —
    TODO confirm against the awstack API docs) the flavor list is queried
    and the one named ``panacube3.0`` is returned instead.

    :return: ``{"id": ...}`` for an existing flavor, otherwise the raw
        creation response payload (which contains the new flavor's data).
    """
    CREATE_FLAVOR = "http://{}/awstack-resource/v1/flavor".format(
        GetModel.awcloud_ip())
    q.put({"type": "云平台", "speed_p": "stop", "percentage": "49"})
    resp = requests.post(CREATE_FLAVOR,
                         data=json.dumps(flavor_info),
                         headers=self.append_headers(),
                         verify=False)
    js_data = resp.json()
    current_app.logger.info(
        'create flavor return data: {}'.format(js_data))
    if js_data['code'] == '02013201':
        # Duplicate: find the existing flavor by name.
        d = requests.get("http://{}/awstack-resource/v1/flavors".format(
            GetModel.awcloud_ip()),
            headers=self.headers,
            verify=False)
        for i in d.json()['data']['data']:
            if i['name'] == 'panacube3.0':
                return {"id": i['id']}
    return js_data
def update_storage_image():
    """Upload and register the intelligent-storage image.

    Two paths depending on ``GetModel.deploy_type()``:

    * ``'kvm'``  — upload via the awcloud API, poll ``update_image_info``
      until it stops returning HTTP 415, then tag the image via a shell
      script.
    * otherwise — same upload/poll/tag sequence, additionally recording
      ``awcloud_ip``, then rsync/scp the storage code to every consul node.

    Progress percentages sent to ``q`` are synthetic (random within moving
    bounds).

    :return: None
    """
    q.put({
        "type": "镜像上传",
        "size": "上传大小",
        "speed": "conducting",
        "percentage": "{}".format(generate_random(1, 6))
    })
    if GetModel.deploy_type() == 'kvm':
        awcloud = ClearAwCloud(GetModel.get_auth_header())
        q.put({
            "type": "镜像上传",
            "size": "上传大小",
            "speed": "conducting",
            "percentage": "{}".format(generate_random(17, 36))
        })
        image_obj = awcloud.upload_image(PathDir.image_path())
        print '上传的镜像信息: %s' % image_obj
        image_id = image_obj['id']
        q.put({
            "type": "镜像上传",
            "size": "上传大小",
            "speed": "conducting",
            "percentage": "{}".format(generate_random(38, 52))
        })
        model.update_or_create('storage_img_id', image_id)
        q.put({
            "type": "镜像上传",
            "size": "上传大小",
            "speed": "conducting",
            "percentage": "{}".format(generate_random(57, 78))
        })
        # f/l: moving bounds for the synthetic progress percentage.
        f = l = 79
        while True:
            resp = awcloud.update_image_info(image_id)
            f += 4
            l += 10
            # NOTE(review): structure reconstructed from a collapsed source;
            # confirm which branch applies the -2/-3 pull-back.
            if l >= 90:
                q.put({
                    "type": "镜像上传",
                    "size": "上传大小",
                    "speed": "conducting",
                    "percentage": "{}".format(generate_random(f, l))
                })
            else:
                f -= 2
                l -= 3
                q.put({
                    "type": "镜像上传",
                    "size": "上传大小",
                    "speed": "conducting",
                    "percentage": "{}".format(generate_random(f, l))
                })
            # 415 means the image is not ready yet — keep polling.
            if resp.get('status') != 415:
                if resp['code'] == '0':
                    current_app.logger.info(
                        'update storage image property successful')
                else:
                    current_app.logger.info(
                        'update storage image property fail')
                break
            else:
                time.sleep(2)
        # Tag the image via the shell helper script.
        os.system("sh {set_tag} {openstack} {id}".format(
            set_tag=PathDir.tag_shell_path(),
            openstack=PathDir.openstack(),
            id=image_id))
    else:
        awcloud = ClearAwCloud(GetModel.get_auth_header())
        q.put({
            "type": "镜像上传",
            "size": "上传大小",
            "speed": "conducting",
            "percentage": "23"
        })
        model.update_or_create('awcloud_ip', GetModel.awcloud_ip())
        q.put({
            "type": "镜像上传",
            "size": "上传大小",
            "speed": "conducting",
            "percentage": "48"
        })
        image_id = awcloud.upload_image(PathDir.image_path()).get('id')
        q.put({
            "type": "镜像上传",
            "size": "上传大小",
            "speed": "conducting",
            "percentage": "{}".format(generate_random(56, 88))
        })
        model.update_or_create('storage_img_id', image_id)
        # Poll until the image property update succeeds (415 = not ready).
        while True:
            resp = awcloud.update_image_info(image_id)
            if resp.get('status') != 415:
                if resp['code'] == '0':
                    current_app.logger.info(
                        'update storage image property successful')
                else:
                    current_app.logger.info(
                        'update storage image property fail')
                break
            else:
                time.sleep(2)
        os.system("sh {set_tag} {openstack} {id}".format(
            set_tag=PathDir.tag_shell_path(),
            openstack=PathDir.openstack(),
            id=image_id))
        # Distribute the storage code to every consul node.
        from app_projects.deploy.get_config import move_storage_code
        storage_code_name = move_storage_code()
        node_list = os.popen("consul members | awk '{ print $1 }'").read().split(
            '\n')[1:-1]
        for item in node_list:
            os.system("scp -r /var/deploy/%s %s:/usr/local/udsafe/%s" %
                      (storage_code_name, item, storage_code_name))
        q.put({
            "type": "镜像上传",
            "size": "上传大小",
            "speed": "conducting",
            "percentage": "{}".format(generate_random(89, 96))
        })
def deploy_kvm(update=False):
    """Deploy (or upgrade) the panacube virtual machine.

    :param update: ``False`` to create a fresh VM via the awcloud API;
        ``True`` to upgrade an existing one whose IP was recorded in
        ``/usr/local/udsafe/automatedkvm.txt``.
    :return: None on success; on a failed VM creation, a JSON error string.
    """
    print update, '正在执行操作'
    if update is False:
        panacube_ip = GetModel.kvm_ip()
    else:
        # Reuse the IP written by the original deployment run.
        with open('/usr/local/udsafe/automatedkvm.txt', 'r') as f:
            panacube_ip = f.read()
    if update is False:
        awcloud_obj = ClearAwCloud(GetModel.get_auth_header())
        q.put({
            "type": "云主机部署",
            "speed": "conducting",
            "percentage": "{}".format(generate_random(33, 37))
        })
        res_data = awcloud_obj.run()  # 创建云主机 (create the cloud VM)
        if res_data['code'] != u'0':
            return json.dumps({"code": 1, 'message': res_data['message']})
    # Replace the IPs in the config file (sed edits below).
    if update is True:
        parent_speed = 98
        _type = '云主机升级'
    else:
        parent_speed = 86
        _type = '云主机部署'
    q.put({
        "type": _type,
        "speed": "conducting",
        "percentage": "{}".format(generate_random(81, 86))
    })
    # Rewrite IS_DOCKER / SAAS_IP / MYSQL_IP / MYSQL_PORT in local settings
    # by grepping the current value and sed-substituting the new one.
    is_docker = get_cmd("cat {} | grep 'IS_DOCKER = ' | grep -v os".format(
        PathDir.local_settings())).split('=')[-1]
    os.system("""sed -i "s/{}/ {}/" {file}""".format(
        is_docker, "False", file=PathDir.local_settings()))
    saas_ip = get_cmd("cat {} | grep 'SAAS_IP = ' | grep -v os".format(
        PathDir.local_settings())).split('= ')[-1]
    os.system("""sed -i "s/{}/{}/" {file} | grep -v os""".format(
        saas_ip, "\\'{}\\'".format(GetModel.awcloud_ip()),
        file=PathDir.local_settings()))
    mysql_ip = get_cmd("cat {} | grep 'MYSQL_IP = ' | grep -v open".format(
        PathDir.local_settings()))
    os.system(
        """sed -i "s/{}/ MYSQL_IP = {}/" {file} | grep -v os""".format(
            mysql_ip, "\\'{}\\'".format(panacube_ip),
            file=PathDir.local_settings()))
    mysql_port = get_cmd("cat {} | grep 'MYSQL_PORT = ' | grep -v os".format(
        PathDir.local_settings())).split('= ')[-1]
    os.system("""sed -i "s/{}/{}/" {file}""".format(
        mysql_port, 3306, file=PathDir.local_settings()))
    if update is False:
        cloud_id = res_data.get('data').get('data')[0]
        # Poll until the VM is up, emitting a monotonically-capped synthetic
        # progress percentage.
        f = l = 86
        while True:
            if l >= 97:
                f -= 7
                l -= 15
            else:
                f += 7
                l += 15
            if l == f:
                f -= 3
            d = generate_random(f, l)
            if d > parent_speed:
                parent_speed = d
            if parent_speed > 100:
                parent_speed = 98
            q.put({
                "type": "云主机部署",
                "speed": "conducting",
                "percentage": "{}".format(parent_speed)
            })
            current_app.logger.info('正在等待虚拟机启动')
            if awcloud_obj.get_cloud_status(cloud_id) is True:
                current_app.logger.info('虚拟机启动成功')
                break
            time.sleep(3)
    chmod_id_rsa = 'chmod 600 {id_rsa}'.format(id_rsa=PathDir.panacube_idrsa())
    os.system(chmod_id_rsa)
    # Add the mysql config file (disabled):
    # append_mariadb = mariadb_con.format(id_rsa=PathDir.panacube_idrsa(), panacube_ip=panacube_ip)
    # os.system(append_mariadb)
    copy_panacube = "scp -i {id_rsa} -r {project_path} root@{panacube_ip}:/home/udsafe/".format(
        id_rsa=PathDir.panacube_idrsa(),
        project_path=PathDir.project_path(),
        panacube_ip=panacube_ip)
    # presumably waits for sshd inside the fresh VM — TODO confirm
    time.sleep(60)
    os.system(copy_panacube)
    # Restart mariadb.
    os.system(
        restart_mariadb.format(id_rsa=PathDir.panacube_idrsa(),
                               panacube_ip=panacube_ip))
    # Update the database schema.
    os.system(
        update_db.format(id_rsa=PathDir.panacube_idrsa(),
                         panacube_ip=panacube_ip,
                         sql_name=config.get('pancube_sql')['sql_name']))
    if 'TrueOS' in get_all_path()['bigdataPath']:
        gateway_system = 'FreeBSD'
    else:
        gateway_system = 'CentOS'
    # os.system(del_param_name.format(id_rsa=PathDir.panacube_idrsa(), panacube_ip=panacube_ip))
    os.system(
        set_bigdata_type.format(id_rsa=PathDir.panacube_idrsa(),
                                panacube_ip=panacube_ip,
                                gateway_system=gateway_system))
    print '更新3.0 packages安装包'
    os.system(
        update_pip_packages.format(id_rsa=PathDir.panacube_idrsa(),
                                   panacube_ip=panacube_ip))
    print 'pip 安装结束'
    # Restart all supervised services on the VM.
    os.system(
        restart_supervisord_all.format(id_rsa=PathDir.panacube_idrsa(),
                                       panacube_ip=panacube_ip))
    # if update is False:
    # (update the database)
    # os.system(update_db.format(id_rsa=PathDir.panacube_idrsa(), panacube_ip=panacube_ip))
    if update is False:
        # Register message push.
        push_message()
        # Register the big-data configuration.
        push_bigdata_db()
def update_cloud():
    """Upgrade the cloud VM, bracketed by begin/end progress frames.

    :return: JSON string ``{"code": 0, "message": "Successful"}``
    """
    frame = {"type": "云主机升级", "size": "上传大小", "speed": "stop"}
    q.put(dict(frame, percentage="{}".format(6)))
    from app_projects.tools.env_deploy import deploy_kvm
    deploy_kvm(True)
    q.put(dict(frame, percentage="{}".format(100)))
    return json.dumps({"code": 0, 'message': 'Successful'})
def deploy_bigdata():
    """Upload the lxc install package and install the lxc environment.

    Original parameter notes (translated):
    :d gateway
    :y number of reserved cluster IPs
    :network_name NIC name
    :netmask subnet mask
    :bigdata_size storage size
    """
    # Total CPU count of the host.
    count_cpu = GetModel.cpu_count()
    panaocs_data = db_update_panaocs()
    node = GetModel.get_node()
    current_app.logger.info('node 节点信息 >>> {}'.format(node))
    insert_node = {}
    deploy_path = PathDir.deploy()
    exclude_path = PathDir.exclude_list()
    q.put({"type": "大数据", "size": "上传大小", "speed": "start",
           "percentage": "{}".format(generate_random(5, 13))})
    # Synthetic progress counters.  NOTE(review): _stop is incremented but
    # never read — generate_random(_start, _start) below looks like it was
    # meant to be generate_random(_start, _stop); confirm before changing.
    _start = 14
    _stop = 17
    for item in node:
        # Record per-node CPU/memory allocation for later DB insertion.
        insert_node[item['node-name']] = {
            "cpu_start": int(count_cpu) - int(item['cpu']) + 1,
            "mem": item['mem'],
            "cpu_stop": count_cpu
        }
        set_system_info(item['ip'], item['cpu'], item['mem'])
        q.put({"type": "大数据", "size": "上传大小", "speed": "start",
               "percentage": "{}".format(generate_random(_start, _start))})
        _start += 4
        _stop += 11
        namenode = ssh_popen_1(item['ip'], "hostname")
        install_path = '/var/deploy/install.py'
        if namenode == "node-1":
            # node-1 is local: move the deploy tree into /var directly.
            current_app.logger.info('节点一开始移动deploy到/var目录')
            from app_projects.deploy.get_config import move_file
            move_file()
            # if os.path.exists('/var/deploy'):
            #     shutil.rmtree('/var/deploy')
            #     shutil.copytree(deploy_path, '/var/deploy')
            # else:
            #     shutil.copytree(deploy_path, '/var/deploy')
            shutil.copy(PathDir.install(), '/var/deploy')
            current_app.logger.info('node-1 项目copy完成')
            q.put({"type": "大数据", "size": "", "speed": "start",
                   "percentage": "{}".format(generate_random(_start, _start))})
            _start += 4
            _stop += 11
        else:
            # Remote node: rsync the deploy tree over ssh.
            current_app.logger.info(
                "{node} move deploy to /var".format(node=item['node-name']))
            os.system(
                'rsync -av -e ssh {deploy} --exclude-from={exclude_path} {node_name}:/var/'.format(
                    exclude_path=exclude_path,
                    deploy='/var/deploy',
                    node_name=item['node-name']
                ))
            q.put({"type": "大数据", "size": "上传大小", "speed": "start",
                   "percentage": "{}".format(generate_random(_start, _start))})
            _start += 4
            _stop += 11
            current_app.logger.info(
                'rsync -av -e ssh {deploy} --exclude-from={exclude_path} {node_name}:/var/'.format(
                    exclude_path=exclude_path,
                    deploy='/var/deploy',
                    node_name=item['node-name']
                )
            )
        current_app.logger.info(item['node-name'] + '>>>文件移动完成')
        q.put({"type": "大数据", "size": "上传大小", "speed": "start",
               "percentage": "{}".format(generate_random(_start, _start))})
        _start += 4
        _stop += 11
        # install_cmd = 'sh /var/deploy/install.sh -d {namenode} {bigdata_size} {network_name} {ip} {netmask} {d} {y}'
        install_cmd = 'python {py_shell} {size} {network_name} {ip} {netmask} {geteway} {repo_ip} {pack_path}'
        dev_path = None
        # When the node lists data disks, pass them via --disk.
        if isinstance(item.get('dev_path'), list) and item.get('dev_path'):
            dev_path = ','.join(item.get('dev_path'))
            install_cmd = 'python {py_shell} {size} {network_name} {ip} {netmask} {geteway} {repo_ip} {pack_path} --disk={disk}'
        install_yum_or_create_network = install_cmd.format(
            py_shell=install_path,
            size=item.get('bigdata', None),
            network_name=item.get('network_name'),
            ip=item.get('network_ip'),  # lxc NIC IP
            netmask=item.get('netmask'),
            disk=dev_path,
            geteway=panaocs_data.get('network_segment').replace('*', '1'),
            repo_ip=panaocs_data.get('network_segment').replace(
                '*', str(panaocs_data.get('repo_ip'))),
            pack_path='/var/deploy'
        )
        q.put({"type": "大数据", "size": "上传大小", "speed": "start",
               "percentage": "{}".format(generate_random(_start, _start))})
        _start += 4
        _stop += 11
        current_app.logger.info(
            'deploy bigdata install params: {}'.format(install_yum_or_create_network))
        ssh_popen_1(item['ip'], "\cp /var/deploy/rsync.pass /usr/local/udsafe/")
        ssh_popen_1(item['ip'], "\cp /var/deploy/lxc-create-bigdata.py /usr/local/udsafe/")
        # Run the installer locally on node-1, remotely elsewhere.
        if namenode == "node-1":
            os.system(install_yum_or_create_network)
        else:
            os.system(ssh_node.format(item['ip'], install_yum_or_create_network))
        q.put({"type": "大数据", "speed": "start",
               "percentage": "{}".format(generate_random(_start, _start))})
        _start += 4
        _stop += 11
        r = ssh_popen_1(item['ip'], "lxc-ls")
        if r == "repo":
            current_app.logger.info("""
            ——————————————————————————————————————————————————
            |                已经有数据了                    |
            ——————————————————————————————————————————————————
            """)
    current_app.logger.info('更新写入数据库的node数据 >>> {}'.format(insert_node))
    model.update_or_create('node', insert_node)
    current_app.logger.info('update node information')
    q.put({"type": "大数据", "size": "上传大小", "speed": "start",
           "percentage": "{}".format(generate_random(_start, _start))})
    from app_projects.deploy.get_config import copy_template
    copy_template()
    push_bigdata_db()
def run(self):
    """Create the panacube3 cloud VM via the awstack server API.

    Creates the project, flavor and security group, builds the server
    payload, posts it, and persists the chosen kvm IP to
    ``/usr/local/udsafe/automatedkvm.txt`` for later upgrades.

    :raises ValueError: when the API reports code ``'02010303'``
        (insufficient capacity for the chosen flavor).
    :return: parsed JSON response of the server-creation request.
    """
    q.put({
        "type": "云主机部署",
        "percentage": "37",
        "speed": "conducting",
        "size": "上传大小"
    })
    url = "http://{}/awstack-resource/v1/server".format(
        GetModel.awcloud_ip())
    q.put({
        "type": "云主机部署",
        "size": "上传大小",
        "speed": "conducting",
        "percentage": "{}".format(generate_random(37, 58))
    })
    self.create_project()
    flavor_id = self.create_flavor()['id']
    q.put({
        "type": "云主机部署",
        "size": "上传大小",
        "speed": "conducting",
        "percentage": "{}".format(generate_random(59, 71))
    })
    current_app.logger.info("flavor ID: {}".format(flavor_id))
    server_info = {
        "admin_pass": "",
        "name": "panacube3",
        "hostname": "panacube3",
        "fixed_ip": GetModel.kvm_ip(),
        "count": 1,
        "network_id": GetModel.network_id(),
        "keypair_id": "",
        "use_local": False,
        "flavor": flavor_id,
        "security_id": self.get_security_group_id()['id'],
        "dataVolumes": [],
        "block_device_mapping": {
            "disk_bus": ""
        },  # disk controller mapping
        "os_type": "linux",
        "image_id": GetModel.image_id(None),  # image ID
        "volumeSize": 100,  # disk size (GB, presumably — TODO confirm)
        "volume_type": self.get_volume_type()[0]['volumeTypeId']  # disk type ID
    }
    current_app.logger.info("create cloud info: {}".format(server_info))
    resp = requests.post(url,
                         data=json.dumps(server_info),
                         headers=self.append_headers(),
                         verify=False)
    current_app.logger.info('create cloud return data: {}'.format(
        resp.json()))
    # 02010303: the selected flavor cannot be satisfied.
    if resp.json()['code'] == '02010303':
        raise ValueError("创建云主机所选规格不足")
    current_app.logger.info('创建云主机{}'.format(resp.json()))
    q.put({
        "type": "云主机部署",
        "size": "上传大小",
        "speed": "conducting",
        "percentage": "{}".format(generate_random(72, 80))
    })
    # Persist the VM IP so deploy_kvm(update=True) can find it later.
    with open('/usr/local/udsafe/automatedkvm.txt', 'w') as f:
        f.write(GetModel.kvm_ip())
    return resp.json()