def get_nodes(ver="7", arch="x86_64", count=4):
    """
    Utility function to get nodes from CICO infrastructure.

    Request given count of CentOS of particular version and
    architecture. CICO returns JSON, with hostnames of nodes
    provisioned. duffy.key is needed for requesting nodes.

    Args:
        ver (str): Version of CentOS to be installed on node viz
            ["6", "7"]. Defaults to "7".
        arch (str): Architecture of CentOS to be installed on node.
            Defaults to "x86_64".
        count (int): Number of CentOS nodes to be requested.
            Defaults to 4.

    Returns:
        List of hostnames received from CICO.

    Note:
        This function also appends DUFFY_SSID to a local file
        env.properties. Also prints the output received from CICO.
    """
    out = run_cmd('export CICO_API_KEY=`cat ~/duffy.key` && '
                  'cico node get --arch %s --release %s --count %s '
                  '--format json' % (arch, ver, count))
    _print('Get nodes output: %s' % out)
    hosts = json.loads(out)
    # All hosts in a request share one session id (the 'comment'
    # field); record it so the nodes can be returned to the pool later.
    with open('env.properties', 'a') as f:
        f.write('DUFFY_SSID=%s' % hosts[0]['comment'])
    # Fix: removed redundant f.close() -- the with statement already
    # closes the file on exit.
    return [host['hostname'] for host in hosts]
def get_nodes(ver="7", arch="x86_64", count=4):
    """
    Utility function to get nodes from CICO infrastructure.

    Request given count of CentOS of particular version and
    architecture. CICO returns JSON, with hostnames of nodes
    provisioned. duffy.key is needed for requesting nodes.

    Args:
        ver (str): Version of CentOS to be installed on node viz
            ["6", "7"]. Defaults to "7".
        arch (str): Architecture of CentOS to be installed on node.
            Defaults to "x86_64".
        count (int): Number of CentOS nodes to be requested.
            Defaults to 4.

    Returns:
        List of hostnames received from CICO.

    Note:
        This function also appends DUFFY_SSID to a local file
        env.properties. Also prints the output received from CICO.
    """
    out = run_cmd(
        'export CICO_API_KEY=`cat ~/duffy.key` && '
        'cico node get --arch %s --release %s --count %s '
        '--format json' % (arch, ver, count))
    _print('Get nodes output: %s' % out)
    hosts = json.loads(out)
    # The Duffy session id lives in the 'comment' field of every host;
    # persist it for later teardown of the session.
    with open('env.properties', 'a') as f:
        f.write('DUFFY_SSID=%s' % hosts[0]['comment'])
    # Fix: dropped the redundant f.close() inside the with block --
    # the context manager closes the file.
    return [host['hostname'] for host in hosts]
def get_nodes(ver="7", arch="x86_64", count=4):
    """
    Request `count` CentOS nodes of the given version and architecture
    from the CICO infrastructure.

    CICO returns JSON with the hostnames of the provisioned nodes;
    ~/duffy.key is required for the request.

    Args:
        ver (str): CentOS version to install, e.g. "6" or "7".
            Defaults to "7".
        arch (str): CentOS architecture to install. Defaults to
            "x86_64".
        count (int): Number of nodes to request. Defaults to 4.

    Returns:
        List of hostnames received from CICO.

    Note:
        Appends DUFFY_SSID to the local file env.properties and prints
        the raw CICO output.
    """
    out = run_cmd(
        'export CICO_API_KEY=`cat ~/duffy.key` && '
        'cico node get --arch %s --release %s --count %s '
        '--format json' % (arch, ver, count))
    _print('Get nodes output: %s' % out)
    hosts = json.loads(out)
    # Record the Duffy session id (shared 'comment' field) so the
    # nodes can be returned to the pool at teardown.
    with open('env.properties', 'a') as f:
        f.write('DUFFY_SSID=%s' % hosts[0]['comment'])
    # Fix: removed redundant f.close() -- the with statement already
    # closes the file.
    return [host['hostname'] for host in hosts]
def run_cmd(self, cmd, user=None, host=None, stream=False):
    """
    Run command on local or remote machine (over SSH).

    Connection details not supplied explicitly fall back to the
    configuration of this object's current node.

    Args:
        cmd (str): Command to execute
        user (str): Remote user to execute command as
        host (str): Remote host
        stream (bool): Whether to stream output or not

    Returns:
        Output string

    Raises:
        Exception if command execution fails
    """
    info = self.hosts.get(self.node)
    # Fall back to the node's configured credentials/host when the
    # caller did not provide them.
    user = user or info['remote_user']
    host = host or info['host']
    # Delegate to the module-level run_cmd helper.
    return run_cmd(cmd,
                   user=user,
                   host=host,
                   private_key=info.get('private_key'),
                   stream=stream)
def setup_controller(controller):
    """
    Provision the controller node.

    Copies the local SSH private key to the controller, installs the
    tooling needed for provisioning (git, pip, build dependencies,
    jenkins-job-builder, ansible), and ships the current source tree
    to /root/container-pipeline-service on the controller.

    Args:
        controller (str): Hostname/IP of the controller node.
    """
    # Push our SSH key so the controller can reach the other nodes.
    run_cmd(
        "scp -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no "
        "~/.ssh/id_rsa root@%s:/root/.ssh/id_rsa" % controller)

    # Install the provisioning toolchain on the controller itself.
    install_cmd = (
        "yum install -y git epel-release && "
        "yum install -y python-pip && "
        "yum install -y gcc libffi-devel python-devel openssl-devel && "
        "yum install -y python2-jenkins-job-builder && "
        "pip install ansible==2.1.1"
    )
    run_cmd(install_cmd, host=controller)

    # Ship the current checkout to the controller's working directory.
    run_cmd(
        "scp -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no -r "
        "./ root@%s:/root/container-pipeline-service" % controller)
s = f.read() _print('\n'.join(s.splitlines()[3:])) if __name__ == '__main__': try: nodes = get_nodes(count=5) data = setup(nodes, options={ 'nfs_share': NFS_SHARE }) test(data) teardown() except Exception as e: _print('Build failed: %s' % e) _print(run_cmd('cat /srv/pipeline-logs/cccp.log', host=nodes[1])) if DEBUG: _print('Reserving nodes for debugging...') _print('=' * 10 + 'Node Info' + '=' * 10) print_nodes() try: _print('Sleeping for %s seconds for debugging...' % 7200) import time time.sleep(int(7200)) except Exception as e: _print(e) pass with open('env.properties', 'a') as f: f.write('\nBUILD_FAILED=true\n') sys.exit(1)
def run():
    """
    Provision a five-node CI deployment and run the test suite.

    Fetches five nodes from CICO, records their roles in
    env.properties, flushes firewalls on the service nodes, prepares
    the controller and runs provisioning, then executes the CI tests
    with the host layout exported via CCCP_CI_* environment variables.
    """
    # Start from a clean slate in case a previous run left state behind.
    os.environ.pop('CCCP_CI_PROVISIONED', None)
    os.environ.pop('CCCP_CI_HOSTS', None)

    nodes = get_nodes(count=5)
    (jenkins_master_host, jenkins_slave_host, openshift_host,
     scanner_host, controller) = nodes

    env_lines = (
        "\nJENKINS_MASTER_HOST=%s\n"
        "JENKINS_SLAVE_HOST=%s\n"
        "OPENSHIFT_HOST=%s\n"
        "CONTROLLER=%s\n"
        "SCANNER_HOST=%s\n"
    ) % (jenkins_master_host, jenkins_slave_host, openshift_host,
         controller, scanner_host)
    with open('env.properties', 'a') as env_file:
        env_file.write(env_lines)

    hosts_data = {
        'openshift': {
            'host': openshift_host,
            'remote_user': '******'
        },
        'jenkins_master': {
            'host': jenkins_master_host,
            'remote_user': '******'
        },
        'jenkins_slave': {
            'host': jenkins_slave_host,
            'remote_user': '******'
        },
        'controller': {
            'host': controller,
            'user': '******',
            'workdir': '/root/container-pipeline-service',
            # relative to this workdir
            'inventory_path': 'hosts'
        }
    }
    _print(hosts_data)

    generate_ansible_inventory(jenkins_master_host,
                               jenkins_slave_host,
                               openshift_host,
                               scanner_host)

    # Flush firewall rules that would block inter-node traffic.
    run_cmd('iptables -F', host=openshift_host)
    run_cmd('iptables -F', host=jenkins_slave_host)

    setup_controller(controller)
    provision(hosts_data['controller'])

    # Expose the layout to the test processes, run them, then clean up
    # the environment again.
    os.environ['CCCP_CI_PROVISIONED'] = "true"
    os.environ['CCCP_CI_HOSTS'] = json.dumps(hosts_data)
    run_cmd('~/venv/bin/nosetests ci/tests', stream=True)
    os.environ.pop('CCCP_CI_PROVISIONED', None)
    os.environ.pop('CCCP_CI_HOSTS', None)
def clean_nodes():
    """
    Return all nodes of the current Duffy session to the CICO pool.

    Uses the module-level DUFFY_SSID recorded at provisioning time and
    prints the CICO output.
    """
    _print("Cleaning nodes")
    done_cmd = ('export CICO_API_KEY=`cat ~/duffy.key` && '
                'cico node done %s' % DUFFY_SSID)
    _print(run_cmd(done_cmd))
except Exception as e: _print('Build failed as pep8 checks failed tests failed.') _if_debug() sys.exit(1) try: # deploy service on given set of nodes # TODO: export deployment logs in a file data = setup(nodes, options={ 'nfs_share': NFS_SHARE }) except Exception as e: _print('Build failed during deployment:\n%s' % e) _if_debug() # first cat the deployment logs, nodes[4]=controller node _print(run_cmd('cat %s' % DEPLOY_LOGS_PATH, host=nodes[4])) sys.exit(1) try: # run cccp-index job and run test CI projects, nodes[0]=jenkins_master run_cccp_index_job(jenkins_master=nodes[0]) except Exception as e: _print("Error running cccp-index job and test builds:\n%s" % e) _if_debug() # then cat the cccp.log, nodes[1]=jenkins slave _print(run_cmd('cat /srv/pipeline-logs/cccp.log', host=nodes[1])) sys.exit(1) try: # run the given tests _print("Running the tests..")
if __name__ == '__main__': sys.path.append( os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))) argc = len(sys.argv) if argc == 1 or argc > 1 and '-h' in sys.argv: print HELP_MESSAGE sys.exit(0) elif sys.argv[1] not in ('setup', 'test', 'teardown', 'sync'): print HELP_MESSAGE sys.exit(0) cmd = sys.argv[1] if cmd == 'setup': cmd_str = 'onevm show %s | grep -i eth0_ip | cut -d "\\"" -f 2' user = getpass.getuser() nodes = [ run_cmd(cmd_str % vmid, user=user).strip() for vmid in sys.argv[2:7] ] data = setup(nodes, options={'nfs_share': '/nfsshare'}) with open('test.json', 'w') as f: f.write(json.dumps(data)) elif cmd == 'test': test_path = ' '.join(sys.argv[2:]) if argc > 2 else '' with open('test.json') as f: data = json.load(f) test(data, test_path) elif cmd == 'teardown': teardown() elif cmd == 'sync': with open('test.json') as f: data = json.load(f)
def clean_nodes():
    """
    Release the nodes of the current Duffy session back to CICO.

    The session id (DUFFY_SSID) was recorded when the nodes were
    provisioned; the CICO output is printed.
    """
    _print("Cleaning nodes")
    output = run_cmd('export CICO_API_KEY=`cat ~/duffy.key` && '
                     'cico node done %s' % DUFFY_SSID)
    _print(output)
os.path.abspath( os.path.join(os.path.dirname(__file__), '..') ) ) argc = len(sys.argv) if argc == 1 or argc > 1 and '-h' in sys.argv: print HELP_MESSAGE sys.exit(0) elif sys.argv[1] not in ('setup', 'test', 'teardown', 'sync'): print HELP_MESSAGE sys.exit(0) cmd = sys.argv[1] if cmd == 'setup': cmd_str = 'onevm show %s | grep -i eth0_ip | cut -d "\\"" -f 2' user = getpass.getuser() nodes = [run_cmd(cmd_str % vmid, user=user).strip() for vmid in sys.argv[2:7]] data = setup(nodes, options={ 'nfs_share': '/nfsshare' }) with open('test.json', 'w') as f: f.write(json.dumps(data)) elif cmd == 'test': test_path = ' '.join(sys.argv[2:]) if argc > 2 else '' with open('test.json') as f: data = json.load(f) test(data, test_path) elif cmd == 'teardown': teardown() elif cmd == 'sync': with open('test.json') as f:
# run the pep8 checks on source code run_pep8_gate(nodes[4]) except Exception as e: _print('Build failed as pep8 checks failed tests failed.') _if_debug() sys.exit(1) try: # deploy service on given set of nodes # TODO: export deployment logs in a file data = setup(nodes, options={'nfs_share': NFS_SHARE}) except Exception as e: _print('Build failed during deployment:\n%s' % e) _if_debug() # first cat the deployment logs, nodes[4]=controller node _print(run_cmd('cat %s' % DEPLOY_LOGS_PATH, host=nodes[4])) sys.exit(1) try: # run cccp-index job and run test CI projects, nodes[0]=jenkins_master run_cccp_index_job(jenkins_master=nodes[0]) except Exception as e: _print("Error running cccp-index job and test builds:\n%s" % e) _if_debug() # then cat the cccp.log, nodes[1]=jenkins slave _print(run_cmd('cat /srv/pipeline-logs/cccp.log', host=nodes[1])) sys.exit(1) try: # run the given tests _print("Running the tests..")