def start_timeout(self, ip, minutes, username):
    """Push the warning script to the client and arm warn + kill countdowns."""
    deploy.sendfile(WARNING_PY, "/tmp/warning.py", ips=[ip])
    # warn 5 minutes before the kill, but never less than 1 minute in
    warn_after = minutes - 5 if minutes > 5 else 1
    deploy.deploy([REMOTE_START_WARN.format(duration=warn_after, username=username)], ips=[ip])
    deploy.deploy([REMOTE_START_KILL.format(duration=minutes)], ips=[ip])
# NOTE(review): one-line collapsed method kept verbatim — branch extents are ambiguous
# in this form. Handles POST: ignores paths not starting with /payload, parses the body
# with cgi.FieldStorage, and passes the X-Hub-Signature + secret to deploy() for
# verification. NOTE: the cgi module is deprecated (PEP 594, removed in Python 3.13) —
# consider migrating to email/multipart parsing; TODO confirm with maintainers.
def do_POST(self): """Handle post request on /""" if not str(self.path).startswith('/payload'): self._send_response() return content_length = int(self.headers['Content-Length']) request_body = self.rfile.read(content_length) try: form = cgi.FieldStorage( BytesIO(request_body), headers=self.headers, environ={ 'REQUEST_METHOD': 'POST', 'CONTENT_TYPE': self.headers['Content-Type'], }, ) except Exception as e: logger.info(f"Exception,\n" f"{e}") self._send_response() return payload_str = form.getvalue('payload') secret = environ['GITHUB_WEBHOOKS_SECRET'] github_sig = self.headers.get('X-Hub-Signature', '') deploy(github_sig, secret, request_body, payload_str) logger.info(f"POST request,\n" f"Path: {str(self.path)}\n" f"Headers:\n{str(self.headers)}" f"Keys:{form.keys()}\n" f"payload data:{form.getlist('payload')}\n") self._send_response('{"status": "success"}')
def dp(targets, conf_file):
    '''
    Deploy the project.
    '''
    if not targets:
        iprint('require targets!, exit', color='r')
        # Bug fix: the message promises an exit, but execution previously fell
        # through and called deploy.deploy with an empty target list.
        return
    deploy.deploy(targets, conf_file)
# NOTE(review): collapsed one-liner kept verbatim. Alexa-style handler: delegates the
# dialog until COMPLETED, then (when the intent is CONFIRMED) triggers deploy() with
# placeholder app/image/env values. Whether the CONFIRMED check nests inside the
# COMPLETED branch or sits beside it cannot be recovered from this collapsed form —
# TODO confirm against the original indentation before refactoring.
def deploy_fn(body): response = None end_session = False directives = [{'type': 'Dialog.Delegate'}] req = body.get('request', {}) dialog_state = req.get('dialogState') intent_status = req.get('intent', {}).get('confirmationStatus') if dialog_state == 'COMPLETED': directives = [] end_session = True response = 'OK! You can ask me to deploy your app again sometime.' if intent_status == 'CONFIRMED': response = "Yeehaw! You're app is now being deployed." deploy( name='insert-app-name', path='insert-app-src-dir', image='insert-image-name', # Get values from "minikube docker-env". env={ 'DOCKER_TLS_VERIFY': '', 'DOCKER_HOST': '', 'DOCKER_CERT_PATH': '', 'DOCKER_API_VERSION': '', }, ) return respond(response, end_session=end_session, directives=directives)
def run(self):
    """Deploy 'test-disease' into a scratch target dir and verify the build output."""
    import shutil  # local import keeps this snippet self-contained
    if os.path.exists(TEST_TARGET):
        # Previous run may have failed; remove leftovers.
        # Fix: shutil.rmtree replaces the shell round-trip os.system("rm -rf " + ...),
        # which silently ignored failures and was unsafe for odd path names.
        shutil.rmtree(TEST_TARGET)
    os.mkdir(TEST_TARGET)
    # Note: set 'quiet' to True for debugging failures.
    # (review: the note likely means False for debugging — TODO confirm)
    deploy('test-disease', 'dev', os.path.join(os.getcwd(), TEST_TARGET), quiet=True)
    self.check(self.target_file_contains("index.html", "google-analytics"),
               "The deployed index file should contain analytics code")
    self.check(self.target_file_contains("index.html", "<body"),
               "The deployed index file should contain the basic HTML")
    self.check(self.target_file_exists(JAVASCRIPT_BUNDLE),
               "Javascript should get compiled as part of deployment.")
    self.check(not self.target_file_contains(JAVASCRIPT_BUNDLE, "debugger"),
               "Compiled JS should not contain 'debugger' statements.")
    self.check(not self.target_file_exists("js/main.js"),
               "Original Javascript files shouldn't be copied to "
               "the target.")
def run():
    """Parse global CLI args and dispatch to the requested build/deploy step.

    Fixes: '== None' / '!= None' comparisons replaced with 'is (not) None'
    (identity check, PEP 8); the repetitive flag-parsing boilerplate is condensed
    without changing semantics (string flags are truthy only when 'true',
    case-insensitive).
    """
    _parseArgs()
    cmd = args[ARG_CMD]
    workroot = args[ARG_WORKROOT]
    platform = args[ARG_PLATFORM]
    svnver = args[ARG_SVNVER]
    if svnver is None:
        # fall back to the working copy's SVN revision
        import utsvn
        svnver = utsvn.getVersion(workroot)
    pkgusage = args[ARG_PKGUSAGE]
    # string-valued flags: only the literal 'true' (any case) enables them
    download = args[ARG_DOWNLOAD] is not None and args[ARG_DOWNLOAD].lower() == 'true'
    onlybuiltin = args[ARG_ONLYBUILTIN] is not None and args[ARG_ONLYBUILTIN].lower() == 'true'
    onlybinary = args[ARG_ONLYBINARY] is not None and args[ARG_ONLYBINARY].lower() == 'true'
    sendDingding = args[ARG_SEND_DINGDING] is not None and args[ARG_SEND_DINGDING].lower() == 'true'
    # optional descriptions default to None when the flag is absent
    pkgdesc = args[ARG_PKG_DESC]
    uabdesc = args[ARG_UAB_DESC]
    print(cmd, workroot, pkgusage, platform, svnver, download, onlybuiltin, onlybinary)
    # publish parsed settings for the sub-modules imported below
    common.workroot = workroot
    common.platform = platform
    common.svnversion = svnver
    common.pkgusage = pkgusage
    if cmd == CMD_BUILD_BINARY:
        import binarybuild
        binarybuild.build()
    elif cmd == CMD_DEPLOY:
        import deploy
        deploy.deploy(download, onlybinary)
    elif cmd == CMD_COPY2STREAM:
        import copy2streaming
        copy2streaming.copy(onlybuiltin)
    elif cmd == CMD_POSTBUILD_PKG:
        import post_build_pkg
        post_build_pkg.build(sendDingding, pkgdesc)
    elif cmd == CMD_POSTBUILD_UAB:
        import post_build_uab
        post_build_uab.build(sendDingding, uabdesc)
def start_lightdm(self, ip, session):
    """Write an autologin lightdm config for the session's user and push it to the host."""
    # Fix: context manager replaces open/write/close, guaranteeing the file is
    # flushed and closed (even on error) before sendfile reads it.
    with open("/tmp/lightdm.conf", "w") as f:
        f.write(LIGHTDM_CONF.format(username=session['username']))
    deploy.sendfile("/tmp/lightdm.conf", "/etc/lightdm/autologin.conf", ips=[ip])
    deploy.deploy(REMOTE_START_SESSION, ips=[ip])
def deploy_fun():
    """Prompt for a network interface (default eth0) and deploy to it."""
    iface = input("input interface name [eth0]:")
    try:
        if not iface:  # idiom: empty-string check instead of len()
            iface = "eth0"
        deploy(iface)
    # Fix: narrowed from a bare 'except:' so SystemExit/KeyboardInterrupt
    # are no longer swallowed; still best-effort for everything else.
    except Exception:
        print("Abort!")
# NOTE(review): collapsed one-liner kept verbatim — whether the first assertEqual sits
# inside or after the create-loop cannot be recovered from this form; TODO confirm.
# Test: deploys the fixture contract, creates the test-case entries, asserts
# check_all_entries() passes, deletes one entry, and asserts it still passes.
def testCheckAllEntriesEntry(self): deploy.deploy(_TEST_CONFIG) test_key = 'testCheckAllEntriesEntry' test_db = ProvedDB(_TEST_CONFIG, 'json') for test_case in self.TEST_DATA[test_key]: test_db.create(test_case) self.assertEqual(test_db.check_all_entries(), True, 'There should pass the checking') test_db.delete(list(self.TEST_DATA[test_key][1])[0]) self.assertEqual(test_db.check_all_entries(), True, 'There should pass the checking')
def post(self):
    """GitHub webhook endpoint: deploy the repository named in the payload."""
    # Reject callers outside GitHub's published source ranges.
    if self.request.remote_ip not in GitHub_POST_IPs:
        self.send_error(status_code=403)
        return
    self.finish()  # is this necessary?
    payload = json_decode(self.get_argument('payload'))
    deploy.deploy(payload['repository']['name'])
# NOTE(review): collapsed one-liner kept verbatim — the extents of the two
# 'with cd(...)' blocks (in particular whether deploy(config) runs inside the second)
# cannot be recovered from this form; TODO confirm. Fabric-style setup: clones the
# repo into /var/www/python/guidcoin-<config>, creates a virtualenv, then deploys.
def setup_deployment(config, repo): PYTHON_DIR = "/var/www/python" repo_dir = "{0}/guidcoin-{1}".format(PYTHON_DIR, config) with cd(PYTHON_DIR): run("git clone {0} guidcoin-{1}".format(repo, config)) with cd(repo_dir): run("virtualenv --system-site-packages venv") deploy(config)
def main():
    """Run the one-click deploy pipeline: configure, fetch, compile, deploy."""
    elevate_to_admin()
    utils.log('Starting one_click_deploy')
    run_confirmation()
    # Each stage consumes the same options bundle.
    options = configure.get_config_values()
    source.download_code(options)
    compile.compile(options)
    deploy.deploy(options)
    utils.log('Done!')
def deploy(self, destination, signing_key):
    """Validate constraints, open every named release, and hand off to deploy.deploy."""
    self.check_constraints()
    node_groups = groups.NodeGroups(self._groups_path())
    # Open each release bundle referenced by this deployment.
    opened_releases = [
        release.open_bismark_release(self._release_path(name))
        for name in self.releases
    ]
    deploy.deploy(self._root, destination, signing_key, opened_releases,
                  self._experiments, node_groups)
def compact_with_list(names):
    """Parse each CSV log (in bofxvi order), tag rows with the file's date, then deploy.

    The date is recovered from the last three dash-separated fields of the
    file name (month-day-hour).
    """
    names.sort(key=functools.cmp_to_key(bofxvi_compare))
    datas = []
    dirpath = "data"
    for name in names:
        month, day, hour = name[:name.index(".")].split("-")[-3:]
        date = "{0}/{1} {2}:00".format(month, day, hour)
        print(date)
        with open(os.path.join(dirpath, name), encoding="utf-8") as f:
            for idx, row in enumerate(csv.reader(f)):
                if idx > 0:  # skip the header row
                    datas.append(turn_data_into_target(row, date))
    deploy.deploy(target_type, datas)
def on_shutdown(self, *args):
    """Ask every known remote client to power off, excluding this machine."""
    self.get_widget("button_quit").set_sensitive(False)
    self.alive = True
    self.log("Starting to connect. %d addresses in range. Will try to ask all these to shut down." % len(deploy.REMOTE_IPS))
    try:
        # Don't shut ourselves down mid-broadcast.
        deploy.REMOTE_IPS.remove(self.external_ip)
    # Fix: narrowed from a bare 'except:' so SystemExit/KeyboardInterrupt are no
    # longer swallowed; still best-effort (our own IP may not be in the list).
    except Exception:
        pass
    deploy.deploy(["poweroff"])
    self.log("Contacted clients and asked them to shutdown. Please confirm manually by observing the computers.")
    self.get_widget("button_quit").set_sensitive(True)
def run(self):
    """Deploy into a scratch target dir and verify index and geo-data output."""
    import shutil  # local import keeps this snippet self-contained
    if os.path.exists(TEST_TARGET):
        # Previous run may have failed; remove leftovers.
        # Fix: shutil.rmtree replaces os.system("rm -rf " + ...), which shelled
        # out, ignored failures, and was unsafe for unusual path names.
        shutil.rmtree(TEST_TARGET)
    os.mkdir(TEST_TARGET)
    # Note: set 'quiet' to True for debugging failures.
    # (review: the note likely means False for debugging — TODO confirm)
    deploy(TEST_TARGET, quiet=True)
    self.check(self.target_file_contains("index.html", "google-analytics"),
               "The deployed index file should contain analytics code")
    self.check(self.target_file_contains("index.html", "<body>"),
               "The deployed index file should contain the basic HTML")
    self.check(self.target_file_contains("location_info.data", "Berlin,DE"),
               "The location info file should contain geo information")
# NOTE(review): collapsed one-liner kept verbatim — the per-row loop extent is
# ambiguous in this form. Scrapes a Shift-JIS page, extracts table headers and rows
# via XPath, appends the current hour to each row, and writes a CSV through
# deploy.deploy. 'dir' appears to be a module-level variable shadowing the builtin —
# TODO confirm and consider renaming at its definition site.
def getList(self): self.action = "sp" date = time.strftime("%m/%d %H:00", time.localtime()) file = time.strftime("%m-%d-%H", time.localtime()) response = requests.get(self.url_pattern.format(self.action,0,self.event_num)) response.encoding = 's_jisx0213' html = etree.HTML(response.text,etree.HTMLParser()) headers = html.xpath('//table/thead/*/th/text()') dataxmls = html.xpath('//table/tbody/tr') #print(dataxmls) datas = [] for data in dataxmls: temp = data.xpath('td//text()') temp.append(date) datas.append(temp) #print(datas) deploy.deploy(headers,datas,os.path.join(dir,self.event_title + "-" + file + ".csv")) #Get().getList()
def main():
    """Promote the staged changeset to production and run schema migrations."""
    # Read the changeset the staging environment is currently running.
    with open(os.path.join(STAGING, 'changeset')) as fd:
        changeset = fd.read()
    os.chdir(REPOSITORY)
    # Pin the repository to exactly what stage is running.
    subprocess.check_call(['hg', 'up', '-r', changeset])
    # Deploy that code.
    deploy('PRODUCTION')
    # Update the database schema.
    os.chdir(os.path.join(os.environ['REPOSITORY'], 'masterapp',
                          'masterapp', 'model', 'manage'))
    subprocess.check_call(['python', 'mysqlprodmgr.py', 'upgrade'])
def on_shutdown(self, *args):
    """Broadcast a poweroff request to all remote clients (skipping our own IP)."""
    self.get_widget("button_quit").set_sensitive(False)
    self.alive = True
    self.log(
        "Starting to connect. %d addresses in range. Will try to ask all these to shut down."
        % len(deploy.REMOTE_IPS))
    try:
        deploy.REMOTE_IPS.remove(self.external_ip)
    # Fix: was a bare 'except:', which also swallowed SystemExit and
    # KeyboardInterrupt; removal stays best-effort for ordinary errors.
    except Exception:
        pass
    deploy.deploy(["poweroff"])
    self.log(
        "Contacted clients and asked them to shutdown. Please confirm manually by observing the computers."
    )
    self.get_widget("button_quit").set_sensitive(True)
# NOTE(review): long collapsed one-liner kept verbatim — the with-block extents are
# ambiguous in this form. CI driver: parses deployment args, deploys a zone bundle,
# then (inside a destroy-on-exit context) checks out a git repo and runs a python
# script on the ICAT node; exits 1 if the ansible run failed.
def main(): library.register_log_handlers() library.convert_sigterm_to_exception() parser = argparse.ArgumentParser( description= 'Run irods_consortium_continuous_integration_build_hook.py on icat') parser.add_argument('--deployment_name', type=str, required=True) parser.add_argument('--zone_bundle_input', type=str, required=True) parser.add_argument('--version_to_packages_map', type=str, nargs='+', required=True) parser.add_argument( '--install_dev_package', type=library.make_argparse_true_or_false('--install_dev_package'), required=True) parser.add_argument('--git_repository', type=str, required=True) parser.add_argument('--git_commitish', type=str, required=True) parser.add_argument('--python_script', type=str, required=True) parser.add_argument('--leak_vms', type=library.make_argparse_true_or_false('--leak_vms'), required=True) parser.add_argument('--passthrough_arguments', default=[], nargs=argparse.REMAINDER) args = parser.parse_args() version_to_packages_map = list_to_dict(args.version_to_packages_map) passthru_args_dict = list_to_dict(args.passthrough_arguments) mungefs_packages_dir = 'None' if '--mungefs_packages_dir' in passthru_args_dict: mungefs_packages_dir = passthru_args_dict['--mungefs_packages_dir'] with open(args.zone_bundle_input) as f: zone_bundle = json.load(f) deployed_zone_bundle = deploy.deploy( zone_bundle, args.deployment_name, version_to_packages_map, mungefs_packages_dir, install_dev_package=args.install_dev_package) with destroy.deployed_zone_bundle_manager( deployed_zone_bundle, on_exception=not args.leak_vms, on_regular_exit=not args.leak_vms): ansible_result = checkout_git_repo_and_run_python_script_on_icat( deployed_zone_bundle, args.git_repository, args.git_commitish, args.python_script, args.passthrough_arguments) if library.ansible_run_failed(ansible_result): sys.exit(1)
# NOTE(review): long collapsed one-liner kept verbatim — with-block extents ambiguous.
# CI driver: deploys a zone bundle from args, then (inside a destroy-on-exit context)
# installs the plugin packages and runs the selected test type.
def main(): library.register_log_handlers() library.convert_sigterm_to_exception() parser = argparse.ArgumentParser( description='Run tests on resource server') parser.add_argument('--deployment_name', type=str, required=True) parser.add_argument('--zone_bundle_input', type=str, required=True) parser.add_argument('--version_to_packages_map', type=str, nargs='+', required=True) parser.add_argument( '--install_dev_package', type=library.make_argparse_true_or_false('--install_dev_package'), required=True) parser.add_argument('--leak_vms', type=library.make_argparse_true_or_false('--leak_vms'), required=True) parser.add_argument('--test_type', type=str, required=True, choices=[ 'standalone_icat', 'topology_icat', 'topology_resource', 'federation' ]) parser.add_argument('--irods_plugin_packages_directory', type=str, required=True) parser.add_argument('--mungefs_packages_dir', type=str) parser.add_argument('--output_directory', type=str, required=True) args = parser.parse_args() version_to_packages_map = list_to_dict(args.version_to_packages_map) with open(args.zone_bundle_input) as f: zone_bundle = json.load(f) deployed_zone_bundle = deploy.deploy( zone_bundle, args.deployment_name, version_to_packages_map, args.mungefs_packages_dir, install_dev_package=args.install_dev_package) with destroy.deployed_zone_bundle_manager( deployed_zone_bundle, on_exception=not args.leak_vms, on_regular_exit=not args.leak_vms): install_plugin_and_run_tests(deployed_zone_bundle, args.irods_plugin_packages_directory, args.test_type, args.output_directory)
# NOTE(review): collapsed one-liner kept verbatim — if-branch extents (e.g. whether
# destroy.sh runs inside the train branch) are ambiguous in this form. GitHub Action
# entry point: reads credentials/flags from env, exports ARM_* vars, then shells out
# to deploy.sh or calls deploy(). BUG(review): after a JSONDecodeError the function
# only prints the ::error hint and falls through to azure_credentials_common[...],
# which will raise KeyError/TypeError — should return or re-raise; TODO confirm.
def main(): azure_credentials_ml = os.environ.get("INPUT_AZURE_CREDENTIALS_ML", default={}) azure_credentials_common = os.environ.get("INPUT_AZURE_CREDENTIALS_COMMON", default={}) train_action = os.environ.get('INPUT_TRAIN') deploy_action = os.environ.get('INPUT_DEPLOY') repo = os.environ.get('REPOSITORY_NAME') model_version = os.environ.get("INPUT_MODEL_VERSION", default=None) try: #azure_credentials_ml = json.loads(azure_credentials_ml) azure_credentials_common = json.loads(azure_credentials_common) except JSONDecodeError: print( "::error::Please paste output of `az ad sp create-for-rbac --name <your-sp-name> --role contributor --scopes /subscriptions/<your-subscriptionId>/resourceGroups/<your-rg> --sdk-auth` as value of secret variable: AZURE_CREDENTIALS" ) tenant_id = azure_credentials_common['tenantId'] app_id = azure_credentials_common['clientId'] app_secret = azure_credentials_common['clientSecret'] subscription_id = azure_credentials_common['subscriptionId'] os.environ['ARM_CLIENT_ID'] = app_id os.environ['ARM_CLIENT_SECRET'] = app_secret os.environ['ARM_SUBSCRIPTION_ID'] = subscription_id os.environ['ARM_TENANT_ID'] = tenant_id github_password = os.environ.get('INPUT_GITHUB_PASSWORD') print(f'::debug::Train Action is {train_action}') if train_action == 'yes': os.system( f'/app/shell/deploy.sh {tenant_id} {app_id} {app_secret} {subscription_id} {repo} {model_version} {github_password}' ) os.system('/app/shell/destroy.sh') print(f'::debug::Deploy Action is {deploy_action}') if deploy_action == 'yes': deploy()
# NOTE(review): mock-heavy pytest test kept verbatim — the exact shell-command strings
# and call order are the assertions, so any reformatting risk outweighs the benefit.
# Patches env vars, os.listdir, and deploy._shell, runs deploy.deploy('PROD'), and
# asserts the full sequence of git/twine commands.
def test_deploy(mocker): """Tests deploy.deploy""" version_return = subprocess.CompletedProcess([], returncode=0, stdout=b'version') mocker.patch.dict( os.environ, { deploy.CIRCLECI_ENV_VAR: '1', 'CIRCLE_BRANCH': 'master', 'PROD_PYPI_USERNAME': '******', 'PROD_PYPI_PASSWORD': '******' }) mocker.patch('os.listdir', return_value=[]) mock_shell = mocker.patch('deploy._shell', autospec=True, side_effect=[ None, None, None, version_return, None, None, None, None, None, None, None ]) deploy.deploy('PROD') assert mock_shell.call_args_list == [ # NOTE: This git config call would normally have its value rendered by cookiecutter mocker.call( 'git config --global user.email "{{cookiecutter.support_email}}"'), mocker.call('git config --global user.name "Circle CI"'), mocker.call('git config push.default current'), mocker.call('make version', stdout=subprocess.PIPE), mocker.call('git tag -f -a version -m "Version version"'), mocker.call( 'sed -i.bak "s/^__version__ = .*/__version__ = \'version\'/" */version.py' ), mocker.call('python setup.py sdist bdist_wheel'), mocker.call('git add ChangeLog AUTHORS */version.py'), mocker.call( 'git commit --no-verify -m "Merge autogenerated files [skip ci]"'), mocker.call("twine upload 'dist/*'"), mocker.call('git push --follow-tags'), ]
# NOTE(review): collapsed one-liner kept verbatim — whether deploy.deploy sits inside
# or after the per-file loop is ambiguous in this form (the sibling compact_with_list
# suggests after; TODO confirm). Walks data/ for files starting with Event, sorts and
# filters them, parses each CSV tagging rows with the filename date, then deploys.
# The inner 'if dirpath.startswith('data')' is redundant under os.walk('data').
def compact(Event="BOFXV"): print("Compacting {0}...".format(Event)) names = [] datas = [] for dirpath, dirnames, filenames in os.walk('data'): if dirpath.startswith('data'): for fn in filenames: if fn.startswith(Event): names.append(fn) dirpath = 'data' names.sort(key=functools.cmp_to_key(bofxvi_compare)) names = filter_log_list(dirpath, names) for name in names: times = name[:name.index(".")].split("-")[-3:] date = "{0}/{1} {2}:00".format(times[0], times[1], times[2]) print(date) with open(os.path.join(dirpath, name), encoding="utf-8") as f: r = csv.reader(f) for index, row in enumerate(r): if index > 0: datas.append(turn_data_into_target(row, date)) deploy.deploy(target_type, datas) print("Compact Over")
def main():
    """Deploy a zone bundle, upgrade it, optionally enable SSL, run tests, gather logs.

    Fix: removed the dead statement `if not args.output_directory:
    args.zone_bundle_output` — the branch is unreachable because
    --output_directory is required, and the body was a bare attribute
    expression with no effect (and `zone_bundle_output` is not a defined arg).
    """
    library.register_log_handlers()
    library.convert_sigterm_to_exception()
    parser = argparse.ArgumentParser(description='Run topology upgrade tests on resource server')
    parser.add_argument('--deployment_name', type=str, required=True)
    parser.add_argument('--zone_bundle_input', type=str, required=True)
    parser.add_argument('--version_to_packages_map', type=str, nargs='+', required=True)
    parser.add_argument('--mungefs_packages_root_dir', type=str, required=False, default=None)
    parser.add_argument('--leak_vms', type=library.make_argparse_true_or_false('--leak_vms'), required=True)
    parser.add_argument('--use_ssl', action='store_true')
    parser.add_argument('--upgrade_packages_root_directory', type=str, required=True)
    parser.add_argument('--test_type', type=str, required=True,
                        choices=['standalone_icat', 'topology_icat', 'topology_resource', 'federation'])
    parser.add_argument('--output_directory', type=str, required=True)
    args = parser.parse_args()
    version_to_packages_map = list_to_dict(args.version_to_packages_map)
    with open(args.zone_bundle_input) as f:
        zone_bundle = json.load(f)
    zone_bundle_name = args.deployment_name + '.json'
    zone_bundle_output = os.path.join(args.output_directory, zone_bundle_name)
    deployed_zone_bundle = deploy.deploy(zone_bundle, args.deployment_name,
                                         version_to_packages_map,
                                         args.mungefs_packages_root_dir,
                                         zone_bundle_output)
    # VMs are torn down on exit unless --leak_vms is set.
    with destroy.deployed_zone_bundle_manager(deployed_zone_bundle,
                                              on_exception=not args.leak_vms,
                                              on_regular_exit=not args.leak_vms):
        upgrade.upgrade(deployed_zone_bundle, args.upgrade_packages_root_directory)
        if args.use_ssl:
            enable_ssl.enable_ssl(deployed_zone_bundle)
        tests_passed = test.test(deployed_zone_bundle, args.test_type,
                                 args.use_ssl, False, args.output_directory)
        gather.gather(deployed_zone_bundle, args.output_directory)
    if not tests_passed:
        sys.exit(1)
# NOTE(review): Python 2 script (print statements) kept verbatim. Broadcasts a
# dpkg-configure + apt-get remove of the package named in argv[1] to all clients
# after a 10-second grace period.
#!/usr/bin/python import datetime import os import subprocess, shlex import re import sys import time from deploy import deploy, sendfile if __name__ == '__main__': COMMANDS = ["""dpkg --configure -a && apt-get remove -y {pkg:s}""".format(pkg=sys.argv[1]),] print "Going to run:\n\n" print "\n".join(COMMANDS) print "\n\n" print "MAKE SURE ALL CLIENT COMPUTERS ARE TURNED ON!" print "In 10s this script will start to ask all clients to download and rerun the postinstallation." time.sleep(10) deploy(COMMANDS)
# NOTE(review): fragment kept verbatim — references sys, logging, and shutdown that
# are defined outside this view, so it is incomplete here. Dispatches 'deploy' vs
# 'runserver' subcommands via tornado.options; sig_handler schedules shutdown on the
# IOLoop.
import tornado.httpserver import tornado.ioloop import tornado.options import tornado.web from app import create_app from test import Test from deploy import deploy cmd = sys.argv[1] if cmd == 'deploy': tornado.options.define("server", default="", type=str, help="服务器域名") tornado.options.parse_command_line(sys.argv[1:]) server = tornado.options.options.server deploy(server) elif cmd == 'runserver': tornado.options.define( "mode", default="develop", help="以何种方式运行(develop/test/product), 默认为 develop") tornado.options.define( "port", default=8000, type=int, help="侦听端口, 默认为 8000") tornado.options.parse_command_line(sys.argv[1:]) if __name__ == "__main__": def sig_handler(sig, frame): logging.warning('Caught signal: %s', sig) tornado.ioloop.IOLoop.instance().add_callback(shutdown)
# NOTE(review): Python 2 script (print statements) kept verbatim. Pushes CUPS printer
# configuration files to all clients, then restarts cups remotely.
#!/usr/bin/python import datetime import os import subprocess, shlex import re import sys import time from deploy import deploy, sendfile if __name__ == '__main__': print "MAKE SURE ALL CLIENT COMPUTERS ARE TURNED ON!" print "In 5s this script will start sending a printer configuration file to alle clients." time.sleep(5) sendfile("printers.conf", "/etc/cups/printers.conf") sendfile("printer-1.ppd", "/etc/cups/ppd/printer-1.ppd") deploy(["/etc/init.d/cups restart"])
# NOTE(review): fragment kept verbatim — it begins mid-dict (the `deploys = {` opener
# is outside this view) and uses Python 2 print. Maps example names to tag/src/tgt
# entries, then deploys each via deploy.deploy.
"tag" : "ex_askusingform", "src" : ["py_askusingform.py"], "tgt" : "../examples/ex_askusingform.py" }, "ex_cli example" : { "tag" : "ex_cli_ex1", "src" : ["py_cli.py"], "tgt" : "../examples/ex_cli.py" }, "ex_expr example" : { "tag" : "ex_expr", "src" : ["py_expr.py"], "tgt" : "../examples/ex_expr.py" }, "ex_custview.py example" : { "tag" : "py_custviewerex1", "src" : ["py_custview.py"], "tgt" : "../examples/ex_custview.py" } } import deploy for name in deploys: data = deploys[name] print "Deploying %s" % name deploy.deploy(data["tag"], data["src"], data["tgt"])
# NOTE(review): script fragment kept verbatim — args/model/train come from outside
# this view and branch extents are ambiguous. Trains when args.type != "deploy"
# (building a run name from model/loss/size settings), otherwise resolves default
# data/save paths and calls deploy(model, args).
model = model.cuda() if args.type != "deploy": print(args) name = args.model + "_" + args.type + '_' + args.loss + '_' + \ str (args.size [0]) + '_' + str (args.dilation) + str (args.erosion) # weight0_str = str (args.weight [0]) # if (len (weight0_str) > 5): # weight0_str = weight0_str [0:5] # name = name + '_' + weight0_str print('Training: ', args.type, '\tloss: ' + args.loss) if args.weight is not None: print('weight: ', args.weight) print('dilation: ', args.dilation, '\terosion: ', args.erosion) print('output size: ', args.size) train(model, args, name) else: print("Deploy!\n") assert (args.load != None) if args.save_path is None: if args.data_type != "train": args.data_path = "../Data/test/A/test-input.tif" args.save_path = "../Data/test/deploy/" + args.name + ".tif" else: args.data_path = "../Data/train/A/train-input.tif" args.save_path = "../Data/train/deploy/" + args.name + ".tif" deploy(model, args)
# NOTE(review): fragment kept verbatim — parser construction begins outside this view.
# Parses test options, deploys the zone bundle, optionally upgrades and enables SSL,
# runs tests inside a destroy-on-exit context, and exits 1 on failure.
parser.add_argument('--version_to_packages_map', type=str, required=True, nargs='+') parser.add_argument('--mungefs_packages_root_dir', type=str, required=False, default=None) parser.add_argument('--test_type', type=str, required=True, choices=['standalone_icat', 'topology_icat', 'topology_resource', 'federation']) parser.add_argument('--use_ssl', action='store_true') parser.add_argument('--use_mungefs', action='store_true') parser.add_argument('--upgrade_test', nargs='+') parser.add_argument('--leak_vms', type=library.make_argparse_true_or_false('--leak_vms'), required=False) parser.add_argument('--output_directory', type=str, required=True) args = parser.parse_args() version_to_packages_map = list_to_dict(args.version_to_packages_map) with open(args.zone_bundle_input) as f: zone_bundle = json.load(f) zone_bundle_output = os.path.join(args.output_directory, 'deployed_zone_bundle.json') deployed_zone_bundle = deploy.deploy(zone_bundle, args.deployment_name, version_to_packages_map, args.mungefs_packages_root_dir, zone_bundle_output) with destroy.deployed_zone_bundle_manager(deployed_zone_bundle, on_exception=not args.leak_vms, on_regular_exit=not args.leak_vms): if args.upgrade_test: for pd in args.upgrade_test: upgrade.upgrade(deployed_zone_bundle, pd) if args.use_ssl: enable_ssl.enable_ssl(deployed_zone_bundle) tests_passed = test.test(deployed_zone_bundle, args.test_type, args.use_ssl, args.use_mungefs, args.output_directory) gather.gather(deployed_zone_bundle, args.output_directory) if not tests_passed: sys.exit(1)
def setUpClass(cls):
    """Deploy the fixture contract once for the whole test class."""
    deploy.deploy(_TEST_CONFIG)
# NOTE(review): fragment kept verbatim — parser setup and the trailing failure check
# are outside this view. Same deploy/upgrade/test/gather flow as the sibling chunks.
parser.add_argument('--leak_vms', type=library.make_argparse_true_or_false('--leak_vms'), required=False) parser.add_argument('--output_directory', type=str, required=True) args = parser.parse_args() version_to_packages_map = list_to_dict(args.version_to_packages_map) with open(args.zone_bundle_input) as f: zone_bundle = json.load(f) zone_bundle_output = os.path.join(args.output_directory, 'deployed_zone_bundle.json') deployed_zone_bundle = deploy.deploy(zone_bundle, args.deployment_name, version_to_packages_map, args.mungefs_packages_root_dir, zone_bundle_output) with destroy.deployed_zone_bundle_manager( deployed_zone_bundle, on_exception=not args.leak_vms, on_regular_exit=not args.leak_vms): if args.upgrade_test: for pd in args.upgrade_test: upgrade.upgrade(deployed_zone_bundle, pd) if args.use_ssl: enable_ssl.enable_ssl(deployed_zone_bundle) tests_passed = test.test(deployed_zone_bundle, args.test_type, args.use_ssl, args.use_mungefs, args.output_directory) gather.gather(deployed_zone_bundle, args.output_directory)
# NOTE(review): Python 2 script (print statements) kept verbatim. Broadcasts a
# poweroff command to all clients after a 20-second grace period.
#!/usr/bin/python import datetime import os import subprocess, shlex import re import sys import time from deploy import deploy, sendfile if __name__ == '__main__': COMMANDS = [ """poweroff""", ] print "MAKE SURE ALL CLIENT COMPUTERS ARE TURNED ON!" print "In 20s this script will start to ask all clients to shutdown!" time.sleep(20) deploy(COMMANDS)
def kill_timeout(self, ip):
    """Restore the autologin config on the host and cancel its warn/kill timers."""
    deploy.sendfile("/tmp/lightdm.conf", "/etc/lightdm/autologin.conf", ips=[ip])
    # Cancel both scheduled actions in a single remote call.
    deploy.deploy([REMOTE_STOP_KILL, REMOTE_STOP_WARN], ips=[ip])
# NOTE(review): flat script kept verbatim — the if/else extents around the deploy
# call are ambiguous in collapsed form. When to_deploy is True, registers and deploys
# a knn model to AzureML and captures the endpoint URL; otherwise the URL must be
# filled in manually. Then POSTs min-max-scaled sample data to the endpoint.
# NOTE: fit_transform on a single sample makes every feature 0 — presumably the
# scaler should be fit on training data; TODO confirm.
import requests from deploy import deploy import json from sklearn.preprocessing import MinMaxScaler pip_packages = ['pandas==1.1.5', 'azureml-defaults', 'joblib==0.17.0'] conda_packages = ['scikit-learn==0.23.2'] to_deploy = False if to_deploy: url = deploy(ws_name='myworkspace', model_name='knnClassifierModel', path_to_model='model/knn.pkl', environment_name='env', register_environment=True, pip_packages=pip_packages, conda_packages=conda_packages, cpu_cores=1, memory_gb=1, path_to_entry_script='score.py', service_name='test_deploy_service') else: # replace with endpoint url = '' scaler = MinMaxScaler() data = {'data': scaler.fit_transform([[0.9, 0.1, 0.1, 0.9]]).tolist()} headers = {'Content-Type': 'application/json'} r = requests.post(url, str.encode(json.dumps(data)), headers=headers) print(r.status_code) print(r.json())
def kill_lightdm(self, ip):
    """Cancel the host's timers, then stop its remote session."""
    self.kill_timeout(ip)
    deploy.deploy(REMOTE_STOP_SESSION, ips=[ip])
# NOTE(review): script fragment kept verbatim — imports and surrounding context are
# outside this view. Updates the hg repo, recompresses javascript, deploys to STAGING
# (with optional debug flag), migrates the DB schema, and records the deployed
# changeset into the staging 'changeset' file.
REPOPATH = os.path.join(os.environ['REPOSITORY'], 'masterapp') #Change to repository os.chdir(REPOPATH) #Update repository subprocess.check_call(['hg', 'up']) #Update compressed javascript os.chdir('./helpers') sys.path.append('.') import compressor compressor.main() #Install changes in stage server deploy('STAGING', '-d' in sys.argv or '--debug' in sys.argv) #Update database schema os.chdir(os.path.join( os.environ['REPOSITORY'], 'masterapp', 'masterapp', 'model', 'manage') ) subprocess.check_call(['python', 'mysqlmgr.py', 'upgrade']) #Record the current version. repo = localrepository(ui(), os.environ['REPOSITORY']) fd = open(os.path.join(os.environ['STAGING'], 'changeset'), 'w') fd.write(str(repo.changectx())) fd.close()
def start_timeout(self, ip, minutes, username):
    """Send warning.py to the client and schedule the warning and kill commands."""
    deploy.sendfile(WARNING_PY, "/tmp/warning.py", ips=[ip])
    if minutes > 5:
        # give the user a 5-minute heads-up
        warn_duration = minutes - 5
    else:
        warn_duration = 1
    warn_cmd = REMOTE_START_WARN.format(duration=warn_duration, username=username)
    kill_cmd = REMOTE_START_KILL.format(duration=minutes)
    deploy.deploy([warn_cmd], ips=[ip])
    deploy.deploy([kill_cmd], ips=[ip])
# NOTE(review): script fragment kept verbatim — executor/sc/preprocessing/ml come from
# outside this view. Fans out script fetching across an executor, preprocesses every
# raw script into score/norm/graph outputs, then classifies data, random-searches a
# keras model, and deploys it.
for prefix in prefix_list: futures.append(executor.submit(sc.work, prefix)) wait(futures) get_script_time = time.time() print('end -', get_script_time - start_time, 'sec') """ Preprocessing """ start_time = time.time() os.makedirs('score', exist_ok=True) os.makedirs('norm', exist_ok=True) os.makedirs('graph', exist_ok=True) filepath_list = os.listdir('raw_script') for filepath in filepath_list: preprocessing.work(filepath, image=False) end_time = time.time() print(end_time - start_time, 'sec') """ keras model configure """ os.makedirs('training', exist_ok=True) os.makedirs('test', exist_ok=True) os.makedirs('deploy', exist_ok=True) os.makedirs('deploy_graph', exist_ok=True) ml.classify_data() # classify training, test, deploy data model = ml.random_search() deploy.deploy(model, image=False)
# NOTE(review): fragment kept verbatim — it begins mid-list (the opener is outside
# this view). Configures the cross-platform deploy module for Rockbox Utility
# (per-OS executables, regex version substitutions, qmake specs, make commands),
# then runs deploy.deploy().
"tools/wavtrim.c", "tools/sapi_voice.vbs" ] deploy.useupx = False deploy.bundlecopy = { "icons/rbutilqt.icns": "Contents/Resources/", "Info.plist": "Contents/" } deploy.progexe = { "win32": "release/RockboxUtility.exe", "darwin": "RockboxUtility.app", "linux2": "RockboxUtility" } deploy.regreplace = { "rbutil/rbutilqt/version.h": [["SVN \$.*\$", "SVN r%REVISION%"], ["(^#define BUILDID).*", "\\1 \"-%BUILDID%\""]], "rbutil/rbutilqt/Info.plist": [["SVN \$.*\$", "SVN r%REVISION%"]], } # OS X 10.6 defaults to gcc 4.2. Building universal binaries that are # compatible with 10.4 requires using gcc-4.0. deploy.qmakespec = {"win32": "", "darwin": "macx-g++40", "linux2": ""} deploy.make = {"win32": "mingw32-make", "darwin": "make", "linux2": "make"} # all files of the program. Will get put into an archive after building # (zip on w32, tar.bz2 on Linux). Does not apply on Mac which uses dmg. # progexe will get added automatically. deploy.programfiles = [] deploy.nsisscript = "" deploy.deploy()
# NOTE(review): script fragment kept verbatim — site/backup/deploy classes and the
# session come from outside this view. Builds ORM records for a tomcat site, its
# backup, and a deploy referencing that backup, then persists and commits them.
# 4 - create site site01 = site("tomcat-01", "PRO", "server01", "http://svn.server.local/trunk/", "/usr/local/tomcat/tomcat01/webapps", "0.0.0.0", "8080", "http://app01.server01.local") #def __init__(self, aplicacion, entorno, ubicacion, svn_path, path, ip, puerto, url): # 5 - create backup #def __init__(self, site, path): backup01 = backup("tomcat-01", "/var/backups/tomcat-01/") # 6 - create deploy #def __init__(self, site, entorno, backup_id): deploy01 = deploy("tomcat-01", "PRO", backup01.backup_id) # 6 - add data to deploy deploy01.ejecutor = "Daniel" # persist data session.add(site01) session.add(backup01) session.add(deploy01) # 10 - commit and close session session.commit() session.close()
# NOTE(review): fragment kept verbatim — an older variant of the Rockbox Utility
# deploy configuration (SVN $Rev$ substitution instead of "SVN r%REVISION%");
# sets per-OS executables, regex replacements, qmake specs and make commands,
# then runs deploy.deploy().
deploy.progexe = { "win32" : "release/RockboxUtility.exe", "darwin" : "RockboxUtility.app", "linux2" : "RockboxUtility" } deploy.regreplace = { "rbutil/rbutilqt/version.h" : [["\$Rev\$", "%REVISION%"], ["(^#define BUILDID).*", "\\1 \"%BUILDID%\""]], "rbutil/rbutilqt/Info.plist" : [["\$Rev\$", "%REVISION%"]], } # OS X 10.6 defaults to gcc 4.2. Building universal binaries that are # compatible with 10.4 requires using gcc-4.0. deploy.qmakespec = { "win32" : "", "darwin" : "macx-g++40", "linux2" : "" } deploy.make = { "win32" : "mingw32-make", "darwin" : "make", "linux2" : "make" } # all files of the program. Will get put into an archive after building # (zip on w32, tar.bz2 on Linux). Does not apply on Mac which uses dmg. # progexe will get added automatically. deploy.programfiles = [ ] deploy.nsisscript = "" deploy.deploy()
# NOTE(review): fragment kept verbatim — begins mid-dict (the `deploys = {` opener is
# outside this view) and uses Python 2 print. Maps example names to tag/src/tgt
# entries, then deploys each via deploy.deploy.
"src": ["py_askusingform.py"], "tgt": "../../formchooser/formchooser.py" }, "ex_askusingform": { "tag": "ex_askusingform", "src": ["py_askusingform.py"], "tgt": "../examples/ex_askusingform.py" }, "ex_cli example": { "tag": "ex_cli_ex1", "src": ["py_cli.py"], "tgt": "../examples/ex_cli.py" }, "ex_expr example": { "tag": "ex_expr", "src": ["py_expr.py"], "tgt": "../examples/ex_expr.py" }, "ex_custview.py example": { "tag": "py_custviewerex1", "src": ["py_custview.py"], "tgt": "../examples/ex_custview.py" } } import deploy for name in deploys: data = deploys[name] print "Deploying %s" % name deploy.deploy(data["tag"], data["src"], data["tgt"])
# NOTE(review): Flask fragment kept verbatim — app and the get_*_data/send_* helpers
# are defined outside this view. Exposes block-detail, transaction-detail, and
# raw-transaction endpoints; deploy() runs once before the dev server starts.
# NOTE: debug=True with host 0.0.0.0 exposes the debugger externally — development
# only; TODO confirm this never reaches production.
# 根据区块哈希，获取区块的详情 @app.route('/query_info/block_detail', methods=['GET']) def get_block_detail(): blockHash = request.args.get('blockHash', '') txresponse = get_block_detail_data(blockHash) return json.dumps(txresponse) # 根据交易哈希，获取交易详情和交易回执 @app.route('/query_info/transaction_detail', methods=['GET']) def get_transaction_detail(): transactionHash = request.args.get('transactionHash', '') result = get_transaction_detail_data(transactionHash) return json.dumps(result) # 上传json格式数据，发送交易上链 @app.route('/sendTrans/rawTrans', methods=['POST']) def send_transaction(): requestData = json.loads(request.get_data().decode()) txhash = send_transaction_get_txhash(requestData) # print("receipt:",receipt) return txhash if __name__ == "__main__": deploy() app.run(port=5555, debug=True, host="0.0.0.0")
def setUp(self):
    """Create fresh deploy and config objects for each test."""
    self.object = deploy()
    self.config = config()