def runSetup():
    """Import and run the project-local ``setup`` module, reporting failure.

    Best-effort: any failure during import or execution is reported on
    stdout instead of propagating to the caller.  Returns None.
    """
    print("Initiating setup.....")
    try:
        import setup
        setup.run()
        # import gps
    except Exception:
        # Was a bare `except:` — that would also swallow SystemExit and
        # KeyboardInterrupt; catch Exception so those still propagate.
        print("Run setup.....[Fail]")
def main():
    """Parse the command-line arguments and hand them to ``setup.run``."""
    arg_parser = argparse.ArgumentParser(
        description='Script to setup experiment dataset')
    arg_parser.add_argument(
        '--sqlite-path',
        required=True,
        help='The path to the SQLite database')
    arg_parser.add_argument(
        '--num-roots',
        type=int,
        default=1000,
        help='The number of root packages (sorted by number of downloads). Default = 1000')
    setup.run(arg_parser.parse_args())
def dailyBuild(fmatvecBranch, hdf5serieBranch, openmbvBranch, mbsimBranch, ext):
    """Run the three daily build containers for one branch combination.

    The dailydebug build is started first and waited on last, so it runs
    concurrently with both release builds.  Returns 0 if all three builds
    succeed, 1 otherwise.
    """
    branch_args = dict(fmatvecBranch=fmatvecBranch,
                       hdf5serieBranch=hdf5serieBranch,
                       openmbvBranch=openmbvBranch,
                       mbsimBranch=mbsimBranch)
    common = dict(printLog=False, detach=True,
                  addCommands=["--forceBuild"], **branch_args)
    # linux64-dailydebug (longest job; overlaps the two release builds)
    contldd = setup.run("build-linux64-dailydebug" + ext, 6, **common)
    # win64-dailyrelease
    contwdr = setup.run("build-win64-dailyrelease" + ext, 3, **common)
    retwdr = setup.waitContainer(contwdr)
    # linux64-dailyrelease
    contldr = setup.run("build-linux64-dailyrelease" + ext, 3, **common)
    retldr = setup.waitContainer(contldr)
    retldd = setup.waitContainer(contldd)
    return 0 if (retldd, retldr, retwdr) == (0, 0, 0) else 1
# Init the loading screen / get test case data gui = GUI() fname = gui.initInputFile() # Processing and get setting board_size, list_bomb = reader(fname) board = generate_board(board_size, list_bomb) facts = generate_facts(board_size, list_bomb) gui.set_size(board_size) gui.set_board(board) # Run the Knowledge Based System # Parse Result history, logs = run(CLP_FILE, facts, board_size) for log in logs: pprint(log) init = [[-1 for i in range(board_size)] for i in range(board_size)] init[0][0] = 0 history.insert(0, init) logs.insert(0, ["START"]) # render the gui gui.render() history.append([[0 for i in range(board_size)] for i in range(board_size)]) logs.append(["DONE"]) position = 0 gui.update(history[position], logs[position]) while True:
# slipstream credentials into config file from local.include import load_with_includes passthrough = ['node_access_key', 'node_secret_key', 'bucket', 'egg_file_name', 'package_name'] config = load_with_includes('vmesh-config.txt', passthrough=passthrough) if local.get_arg('config_only'): print config sys.exit(0) if local.get_arg('local'): ret = launch_local(config) sys.exit(ret) # build the egg old_argv = sys.argv # trick setup into thinking it was executed at the command line sys.argv = ['setup.py', '-q', 'bdist_egg'] dist = setup.run() egg_path = os.path.join('dist', dist.get_fullname()) egg_path += '-py%d.%d.egg' % (sys.version_info[0], sys.version_info[1]) # upload the egg b = local.s3.get_bucket(bucketname) if not b: b = local.s3.create_bucket(bucketname) k = b.get_key(eggfilename) if not k: k = b.new_key(eggfilename) print 'Uploading %s as s3:%s/%s' % (egg_path, bucketname, eggfilename) def report_progress(at, total): print '\r%d%%' % ((at/total)*100), sys.stdout.flush()
# Release helper: verify CHANGELOG/version consistency, sync package.json,
# then build an sdist via the project's setup module.
# NOTE(review): asserts are stripped under `python -O`; these checks would then
# silently pass — consider explicit raises if this ever runs optimized.
assert lux.VERSION[3] == 'final'

# The top line of CHANGELOG.rst must be "Ver. <version> - <YYYY-Mon-DD>".
with open('CHANGELOG.rst', 'r') as f:
    changelog = f.read()
top = changelog.split('\n')[0]
version_date = top.split(' - ')
assert len(version_date) == 2, 'Top of CHANGELOG.rst must be version and date'
version, datestr = version_date
dt = datetime.strptime(datestr, '%Y-%b-%d').date()
assert dt == date.today()  # release date must be today
assert version == 'Ver. %s' % lux.__version__

# Mirror the package version/description into package.json.
with open('package.json', 'r') as f:
    pkg = json.loads(f.read())
pkg['version'] = lux.__version__
pkg['description'] = lux.__doc__
with open('package.json', 'w') as f:
    f.write(json.dumps(pkg, indent=4))

# Run setup.py
script = os.path.abspath(setup.__file__)
argv = [script, 'sdist'] + sys.argv[1:]  # forward any extra CLI flags
setup.run(argv=argv)
print('%s %s ready!' % (pkg['name'], lux.__version__))
def test_sudo(self):
    """`sudo -v` (validate cached credentials) should exit with status 0."""
    # assertEquals is a deprecated alias; assertEqual is the supported name.
    self.assertEqual(run('sudo -v'), 0)
def main(port: int) -> None:
    """Wire the setup module into the app, then serve on all interfaces.

    :param port: TCP port to listen on.
    """
    import setup
    # Presumably registers routes/config on the module-level `app` — the
    # setup module is project-local; TODO confirm what run(app) does.
    setup.run(app)
    # NOTE(review): debug=True on 0.0.0.0 exposes the debugger to the
    # network — acceptable only in development.
    app.run('0.0.0.0', port, debug=True)
def init() -> None:
    """Create all tables registered on BaseNode's metadata, then run setup.

    Side effects only: issues DDL against `engine` and executes the
    project-local setup module.
    """
    BaseNode.metadata.create_all(bind=engine)
    import setup
    setup.run()
import setup

# Walk every row of xs, its link rows in ts, and the linked ys row,
# printing one line per (x, y, memo) triple.
with setup.run(":memory:") as conn:
    for xrow in conn.execute("select id, name from xs").fetchall():
        links = conn.execute(
            "select y_id, memo from ts where x_id = ?", (xrow["id"], )).fetchall()
        for link in links:
            yrows = conn.execute(
                "select id, name from ys where id = ?", (link["y_id"], )).fetchall()
            for yrow in yrows:
                print("@", xrow["id"], yrow["id"],
                      xrow["name"], yrow["name"], link["memo"])
return 0 if retldd == 0 and retldr == 0 and retwdr == 0 else 1  # NOTE(review): tail of dailyBuild() — its def is outside this chunk

ret = 0  # accumulated failure count across all builds; exit code below
# build the master branch combi first (if it exists)
if service.models.DailyBranches.objects.filter(
        fmatvecBranch="master", hdf5serieBranch="master",
        openmbvBranch="master", mbsimBranch="master").count() > 0:
    ret = ret + dailyBuild("master", "master", "master", "master", "")
# build doc
contd = setup.run("builddoc", 2, printLog=False, detach=True,
                  addCommands=["--forceBuild"])
# abs() so a negative wait status still counts as one failure
ret = ret + abs(setup.waitContainer(contd))
# now build all others
for db in service.models.DailyBranches.objects.all():
    # skip the master branch combi (already done first, see above)
    if db.fmatvecBranch == "master" and db.hdf5serieBranch == "master" and \
       db.openmbvBranch == "master" and db.mbsimBranch == "master":
        continue
    # build the db branch combi
    ret = ret + dailyBuild(db.fmatvecBranch, db.hdf5serieBranch,
                           db.openmbvBranch, db.mbsimBranch, "-nonedefbranches")
sys.exit(ret)
# NOTE(review): chunk starts mid-`if` — the head of the condition (the
# fmatvec/hdf5serie branch checks) is outside this view.
ciq.openmbvBranch is not None and ciq.openmbvBranch != "" and \
ciq.mbsimBranch is not None and ciq.mbsimBranch != "":
    print("Found something to build: " + ciq.fmatvecBranch + ", " +
          ciq.hdf5serieBranch + ", " + ciq.openmbvBranch + ", " +
          ciq.mbsimBranch + " starting in, at most, " + str(waitTime))
    sys.stdout.flush()
    # Only start once the queue entry has aged past the debounce window.
    if django.utils.timezone.now() - ciq.recTime > waitTime:
        print("Start build: " + ciq.fmatvecBranch + ", " +
              ciq.hdf5serieBranch + ", " + ciq.openmbvBranch + ", " +
              ciq.mbsimBranch)
        sys.stdout.flush()
        # run linux64-ci
        # NOTE(review): ciq.delete() removes the DB row but the in-memory
        # object keeps its attributes, which are read below — confirm intended.
        ciq.delete()
        enforceConfigure = False  # if this is set to True the CI build enforces a configure run which is sometime required.
        ret = setup.run("build-linux64-ci", args.jobs, printLog=False,
                        enforceConfigure=enforceConfigure,
                        fmatvecBranch=ciq.fmatvecBranch,
                        hdf5serieBranch=ciq.hdf5serieBranch,
                        openmbvBranch=ciq.openmbvBranch,
                        mbsimBranch=ciq.mbsimBranch)
        sys.exit(ret)
elif os.environ[
        "MBSIMENVTAGNAME"] == "staging" and ciq.buildCommitID is not None and ciq.buildCommitID != "":
    print("Start build of staging build-system: " + ciq.buildCommitID)
    sys.stdout.flush()
    # run rebuild build-system
    ciq.delete()
    ret = setup.run("builddocker", args.jobs, printLog=False,
                    builddockerBranch=ciq.buildCommitID)
    sys.exit(ret)
def main(PORT: int, debug: bool) -> None:
    """Wire the setup module into the app, then serve on all interfaces.

    :param PORT: TCP port to listen on.
    :param debug: whether to enable the development debugger/reloader.
    """
    import setup
    # Presumably registers routes/config on the module-level `app` — the
    # setup module is project-local; TODO confirm what run(app) does.
    setup.run(app)
    app.run('0.0.0.0', PORT, debug=debug)
def __init__(self, cfg): setup.run(self, cfg) # Initializes necessary variables. self.proteinDB = [] # Archive of all the scores evaluated
def test_for_retrieve_error_code(self): try: run('git add') except InvalidCommand, e: self.assertEquals(e.message, "Failure on command: git add")
from flask import Flask

app = Flask(__name__)

import setup
# Presumably registers routes/config on `app` — setup module is project-local;
# TODO confirm what run(app) does.
setup.run(app)
# NOTE(review): the port is passed as the *string* '8443'; Werkzeug expects an
# int — confirm this works as intended. Also debug=True on 0.0.0.0 exposes the
# interactive debugger to the network; never use in production.
app.run('0.0.0.0', '8443', debug=True)
def test_wget_ok(self):
    """Downloading a known-good URL via wget should exit with status 0."""
    # assertEquals is a deprecated alias; assertEqual is the supported name.
    self.assertEqual(run(mock_link("wget http://c758482.r82.cf2.rackcdn."
                                   "com/Sublime%20Text%202.0.1%20x64.tar.bz2")), 0)
# Release helper: clean build artifacts, verify CHANGELOG/version consistency,
# then build an sdist via the project's setup module.
import os
from datetime import datetime, date

import setup
from clean import rmfiles

rmfiles()  # remove stale build artifacts before packaging
script = os.path.abspath(setup.__file__)
# NOTE(review): asserts are stripped under `python -O`; these release checks
# would then silently pass.
assert setup.mod.VERSION[3] == 'final'

# The top line of CHANGELOG.rst must be "Ver. <version> - <YYYY-Mon-DD>".
with open('CHANGELOG.rst', 'r') as f:
    changelog = f.read()
top = changelog.split('\n')[0]
version_date = top.split(' - ')
assert len(version_date) == 2, 'Top of CHANGELOG.rst must be version and date'
version, datestr = version_date
dt = datetime.strptime(datestr, '%Y-%b-%d').date()
assert dt == date.today()  # release date must be today
assert version == 'Ver. %s' % setup.mod.__version__

setup.run(argv=[script, 'sdist'])
print('%s %s ready!' % (setup.package_name, setup.mod.__version__))
# Small driver script: authenticate a Spotify client via the project-local
# setup module, then run a song search and pretty-print the result.
import os
import sys
import json
import spotipy
import webbrowser
import spotipy.util as util
from json.decoder import JSONDecodeError

import setup

# Presumably returns an authenticated spotipy client — TODO confirm.
spotifyObject = setup.run()

import methods
import pprint

#print("output:", methods.currentlyPlaying(spotifyObject))
#print("output:", methods.userInfo(spotifyObject))
#print("output:", methods.searchArtist(spotifyObject, "tuxx"))

a = methods.searchSong(spotifyObject, "post malone")
pprint.pprint(a, width=1)  # width=1 forces one element per line
# NOTE(review): chunk starts mid-try — the `try:`/`except` head is outside
# this view; `f` is presumably an io.StringIO capturing build output and
# `failed` tracks whether the preceding build raised.
    failed = True
finally:
    print("Save build result output in database", file=f)
    if not failed:
        print("and restart services.", file=f)
    sys.stdout.flush()
    # Persist the captured log as the single current Info record.
    info, _ = service.models.Info.objects.get_or_create(id=args.commitID)
    info.longInfo = f.getvalue()
    info.save()
    service.models.Info.objects.exclude(id=args.commitID).delete(
    )  # remove everything except the currently running Info object
    django.db.connections.close_all(
    )  # close all connections before restarting
if not failed:
    print("Restart now.")
    print("Stopping service now.")
    sys.stdout.flush()
    if setup.run(
            'service', 6, daemon="stop",
            keepBuildDockerContainerRunning=True) != 0:
        raise RuntimeError("Stopping service failed.")
    print("Restarting service now.")
    sys.stdout.flush()
    if setup.run('service', 6, daemon="start") != 0:
        raise RuntimeError("Starting service failed.")
    print("All done.")
    sys.stdout.flush()
import setup

with setup.run(":memory:") as c:
    # Nested-loop equivalent of the single query:
    # select T.x_id, T.y_id, X.name, Y.name, T.memo from ts as T JOIN xs as X on T.x_id = X.id JOIN ys as Y on T.y_id = Y.id
    for t in c.execute("select id, x_id, y_id, memo from ts").fetchall():
        for x in c.execute("select name from xs where id = ?", (t["x_id"], )).fetchall():
            for y in c.execute("select name from ys where id = ?", (t["y_id"], )).fetchall():
                print("@", t["x_id"], t["y_id"], x["name"], y["name"], t["memo"])
        # NOTE(review): chunk starts mid-method of a Turn state machine — the
        # enclosing def/class header is outside this view, so the indentation
        # below (including whether the direction checks nest under
        # TURN_END_STAGE) is reconstructed; verify against the full file.
        # Spin in place: opposite signs on the two motors.
        state[m2n] = HIGH_SPEED
        state[m1n] = -HIGH_SPEED
        if self.state is START:
            self.state = TURN_STAGE
            self.start_angle = angle  # remember heading at turn start
        if self.state is TURN_STAGE:
            if abs(angle - self.start_angle) > ANGLE_THRESHOLD:
                self.state = TURN_END_STAGE
        if self.state is TURN_END_STAGE:
            # Slow down for the final approach to the line.
            state[m2n] = LOW_SPEED
            state[m1n] = -LOW_SPEED
        # NOTE(review): `self.direction is 'l'` compares identity against a
        # string literal — works only via CPython interning; should be `==`.
        if self.direction is 'l' and state['onL']:
            self.state = INACTIVE
            state['restart_homing'](state['p'])
            self.cb()  # notify completion
        if self.direction is 'r' and state['onR']:
            self.state = INACTIVE
            state['restart_homing'](state['p'])
            self.cb()
        return state

if __name__ == '__main__':
    import setup
    from stateMachines import Path, LineFollowing, Forward, Center
    from testPaths import TestPaths
    p = Path(TestPaths.leftRight,
             state_machines=[LineFollowing(), Forward(), Center(), Turn()],
             repeat=True)
    setup.run(p)
    # NOTE(review): chunk starts mid-list — the list head (and its name,
    # presumably the sensor-data URL list) is outside this view.
    'https://zenodo.org/record/1014028/files/all_c3_h_us_nivus.txt',
    'https://zenodo.org/record/1014028/files/all_c3_h_us_nivus.txt',
    'https://zenodo.org/record/1014028/files/all_c3_h_us_nivus.txt',
    'https://zenodo.org/record/1014028/files/all_c3_h_us_nivus.txt',
    'https://zenodo.org/record/1014028/files/all_c3_h_us_nivus.txt',
    'https://zenodo.org/record/1014028/files/all_s3_h_us_maxbotix.txt',
    'https://zenodo.org/record/1014028/files/all_s3_h_us_maxbotix.txt',
    'https://zenodo.org/record/1014028/files/all_s3_h_us_maxbotix.txt',
    'https://zenodo.org/record/1014028/files/all_s6_h_us_maxbotix.txt',
    'https://zenodo.org/record/1014028/files/all_s6_h_us_maxbotix.txt',
    'https://zenodo.org/record/1014028/files/all_s6_h_us_maxbotix.txt'
]
camera_time_offset_url = 'https://zenodo.org/record/1039631/files/temporal_offsets_of%20cameras.txt'

## Set up folder structure
setup.run(working_dir)

work_types = [
    'train'
]  # select what to do from: extract, label, train, test, predict (order is important)

if 'extract' in work_types:
    ## Fetch videos from repositories (only downloaded if necessary)
    video_folders = []
    # for url in video_archive_urls:
    #     video_folders.append(fetch_videos.sync(os.path.join(working_dir, s.stages[0]), url))
    # Get information about temporal offset of videos, so they can be compared to sensor data
    time_offset = extract_frames.load_video_time_offsets(
        camera_time_offset_url)
# lib.constants.py # Rushy Panchal and George Georges # Cellular Automata Project # Contains the main constants ### Imports import xmlparse as xml import pysqlite as sql import setup import json import os ### Main constants SETUP_STATUS = setup.run() # set up the program files CUR_DIR = os.getcwd() USER_DIR = os.path.join(CUR_DIR, "user") LIB_DIR = os.path.join(CUR_DIR, "lib") IMAGES_DIR = os.path.join(CUR_DIR, "img") DEFAULT_DIR = os.path.join(LIB_DIR, "defaults") DATA_PATH = os.path.join(LIB_DIR, "data.xml") SETTINGS_PATH = os.path.join(USER_DIR, "settings.json") DEFAULT_SETTINGS_PATH = os.path.join(DEFAULT_DIR, "settings.json") DATABASE_PATH = os.path.join(USER_DIR, "history.db") IMAGE_PATHS = {name: os.path.join(IMAGES_DIR, name) for name in os.listdir(IMAGES_DIR)} # retrieve data from the various files DATA = xml.dictionary(DATA_PATH, xml.FILE, contains = xml.ALL)
# Release helper: verify CHANGELOG/version consistency, sync package.json,
# then build an sdist by faking argv for the project's setup module.
# NOTE(review): asserts are stripped under `python -O`.
assert lux.VERSION[3] == 'final'

# The top line of CHANGELOG.rst must be "Ver. <version> - <YYYY-Mon-DD>".
with open('CHANGELOG.rst', 'r') as f:
    changelog = f.read()
top = changelog.split('\n')[0]
version_date = top.split(' - ')
assert len(version_date) == 2, 'Top of CHANGELOG.rst must be version and date'
version, datestr = version_date
dt = datetime.strptime(datestr, '%Y-%b-%d').date()
assert dt == date.today()  # release date must be today
assert version == 'Ver. %s' % lux.__version__

# Mirror the package version/description into package.json.
with open('package.json', 'r') as f:
    pkg = json.loads(f.read())
pkg['version'] = lux.__version__
pkg['description'] = lux.__doc__
with open('package.json', 'w') as f:
    f.write(json.dumps(pkg, indent=4))

# Run setup.py
script = os.path.abspath(setup.__file__)
# setup.run() reads sys.argv, so rewrite it to look like `setup.py sdist ...`.
sys.argv = [script, 'sdist'] + sys.argv[1:]
setup.run()
print('%s %s ready!' % (pkg['name'], lux.__version__))
def test_system_valid_command(self):
    """A trivial valid shell command should exit with status 0."""
    # assertEquals is a deprecated alias; assertEqual is the supported name.
    self.assertEqual(run('echo "teste"'), 0)
# NOTE(review): chunk starts with a dangling `else:` — its `if` head (the CLI
# selection) is outside this view.
else:
    util.cli = "riff"

# Validate push/pull secret arguments and copy them into the util module.
if args.push_secret is None or len(args.push_secret) <= 0:
    util.push_secret = ""
else:
    # A push secret is only usable together with an image prefix.
    if args.image_prefix is None or len(args.image_prefix) <= 0:
        raise Exception("An --image-prefix must be provided when using --push-secret")
    util.push_secret = args.push_secret
    util.image_prefix = args.image_prefix
if args.pull_secret is None or len(args.pull_secret) <= 0:
    util.pull_secret = ""
else:
    util.pull_secret = args.pull_secret
if args.manifest is None or len(args.manifest) <= 0:
    if args.pfs:
        raise Exception("A manifest must be provided for PFS")
    util.manifest = "stable"
else:
    util.manifest = args.manifest

# Test phases: environment setup, the two suites, then teardown.
import setup, teardown, functions, eventing
setup.run()
functions.run()
eventing.run()
teardown.run()

# Report wall-clock duration (start_time set outside this view).
elapsed_time = time.time() - start_time
elapsed_min = int(elapsed_time / 60)
elapsed_sec = int(elapsed_time - (elapsed_min * 60))
print("DONE in {m} min {s} sec".format(m=elapsed_min, s=elapsed_sec))
def test_for_a_valid_system_call(self):
    """A valid system call should exit with status 0."""
    # assertEquals is a deprecated alias; assertEqual is the supported name.
    self.assertEqual(run('ls -a'), 0)
waitForWWW(10) # create cert if not existing or renew if already existing subprocess.check_call(["/usr/bin/certbot-2", "--agree-tos", "--email", "*****@*****.**", "certonly", "-n", "--webroot", "-w", "/var/www/html/certbot", "--cert-name", "mbsim-env", "-d", os.environ["MBSIMENVSERVERNAME"]]) # adapt web server config to use the letsencrypt certs for line in fileinput.FileInput("/etc/httpd/conf.d/ssl.conf", inplace=1): if line.lstrip().startswith("SSLCertificateFile "): line="SSLCertificateFile /etc/letsencrypt/live/mbsim-env/cert.pem\n" if line.lstrip().startswith("SSLCertificateKeyFile "): line="SSLCertificateKeyFile /etc/letsencrypt/live/mbsim-env/privkey.pem\n"+\ "SSLCertificateChainFile /etc/letsencrypt/live/mbsim-env/chain.pem\n" print(line, end="") # reload web server config subprocess.check_call(["httpd", "-k", "graceful"]) if os.environ["MBSIMENVTAGNAME"]=="staging": # for staging service run the CI at service startup (just for testing a build) print("Starting linux-ci build.") setup.run("build-linux64-ci", args.jobs, printLog=False, detach=True, addCommands=["--forceBuild"], fmatvecBranch="master", hdf5serieBranch="master", openmbvBranch="master", mbsimBranch="master") # wait for the web server to finish (will never happen) and return its return code print("Service up and running.") sys.stdout.flush() httpd.wait() sys.exit(httpd.returncode)
# Launcher: start the webapprun container and block until it finishes,
# stopping it when SIGUSR1 arrives (delivered when the parent dies).
webapprun = None

def terminate(a, b):
    """SIGUSR1 handler: stop the running webapprun container, if any."""
    if webapprun is None:
        return
    webapprun[0].stop()

signal.signal(signal.SIGUSR1, terminate)
# Ask the kernel to send us SIGUSR1 when our parent process dies, so the
# container is torn down if the supervisor goes away.
libc = ctypes.CDLL("libc.so.6")
PR_SET_PDEATHSIG = 1
if libc.prctl(PR_SET_PDEATHSIG, signal.SIGUSR1, 0, 0, 0) != 0:
    raise auth_plugins.AuthenticationError(log_msg="Cannot call prctl.")

# start vnc and other processes in a new container (being reachable as hostname)
# NOTE(review): sys.argv is indexed without validation — running with fewer
# than 3 arguments raises IndexError.
networkID = sys.argv[1]
token = sys.argv[2]
hostname = sys.argv[3]
webapprun = setup.run("webapprun", -1, addCommands=["--token", token],
                      networkID=networkID, hostname=hostname,
                      wait=False, printLog=False)
ret = setup.runWait(webapprun, printLog=False)
sys.exit(ret)