def create_patch(oldDir, newDir, outDir):
    """Build a patch between two game versions and package it as a .7z.

    Validates both input directories and the tool environment, stages the
    patch content in a "patch_temp" directory under outDir, and compresses
    that directory into "patch_temp.7z".

    oldDir -- directory holding the previous version of the game
    newDir -- directory holding the new version of the game
    outDir -- directory where patch_temp and the final archive are created

    Returns the path of the created .7z archive, or None when either input
    directory is missing, the environment check fails, or a leftover
    non-empty patch_temp directory is in the way.
    """
    patchDir = os.path.join(outDir, "patch_temp")
    # Guard clauses: bail out early on any precondition failure.
    # (print() with a single argument behaves identically on Python 2 and 3.)
    if not os.path.exists(oldDir):
        print("Directory to the old version is invalid! Aborting.")
        return None
    if not os.path.exists(newDir):
        print("Directory to the new version is invalid! Aborting.")
        return None
    if not validate_environment():
        return None
    if not os.path.exists(patchDir):
        os.mkdir(patchDir)
    elif not is_empty_directory(patchDir):
        # Refuse to reuse a dirty staging directory rather than mixing
        # stale patch data into a new patch.
        print("patch_temp directory is not empty! Aborting.")
        return None
    # Tree diffing is delegated to helpers defined elsewhere; presumably
    # walk_old_dir handles removed/changed files and walk_new_dir handles
    # added files, with merge_index combining their bookkeeping.
    walk_old_dir(oldDir, newDir, patchDir)
    walk_new_dir(oldDir, newDir, patchDir)
    merge_index(patchDir)
    zip_directory(patchDir, patchDir + ".7z")
    return patchDir + ".7z"
def create_patch(oldDir, newDir, outDir):
    """Build a patch between two game versions and package it as a .7z.

    NOTE(review): this is a duplicate of the earlier create_patch definition
    (identical apart from quote style); at import time this later definition
    shadows the earlier one. Consider removing one of the two.

    oldDir -- directory holding the previous version of the game
    newDir -- directory holding the new version of the game
    outDir -- directory where patch_temp and the final archive are created

    Returns the path of the created .7z archive, or None on any
    precondition failure.
    """
    patchDir = os.path.join(outDir, 'patch_temp')
    # Guard clauses: bail out early on any precondition failure.
    # (print() with a single argument behaves identically on Python 2 and 3.)
    if not os.path.exists(oldDir):
        print('Directory to the old version is invalid! Aborting.')
        return None
    if not os.path.exists(newDir):
        print('Directory to the new version is invalid! Aborting.')
        return None
    if not validate_environment():
        return None
    if not os.path.exists(patchDir):
        os.mkdir(patchDir)
    elif not is_empty_directory(patchDir):
        # Refuse to reuse a dirty staging directory.
        print('patch_temp directory is not empty! Aborting.')
        return None
    # Tree diffing is delegated to helpers defined elsewhere; presumably
    # walk_old_dir covers removed/changed files, walk_new_dir added files.
    walk_old_dir(oldDir, newDir, patchDir)
    walk_new_dir(oldDir, newDir, patchDir)
    merge_index(patchDir)
    zip_directory(patchDir, patchDir + '.7z')
    return patchDir + '.7z'
def zip_full_game():
    """Package the complete game build into OUT_DIR/latest.7z.

    Temporarily renames NEW_DIR to TEMP_DIR/bin before zipping — presumably
    so the archive's top-level directory is named "bin" (confirm against the
    patcher that consumes the archive) — then renames it back. Any
    pre-existing bin directory or latest.7z archive is replaced.
    """
    print('zipping game')
    binDir = os.path.join(TEMP_DIR, 'bin')
    if os.path.exists(binDir):
        print('WARNING: Replacing directory ', binDir)
        shutil.rmtree(binDir)
    outFile = os.path.join(OUT_DIR, 'latest.7z')
    if os.path.exists(outFile):
        print('WARNING: Replacing archive ', outFile)
        os.remove(outFile)
    os.rename(NEW_DIR, binDir)
    try:
        zip_directory(binDir, outFile)
    finally:
        # Always restore the original name, even if zipping raises;
        # otherwise a failed run would leave NEW_DIR renamed away.
        os.rename(binDir, NEW_DIR)
def run_simulation(mongo_host, binary_id, executable, arguments):
    """Fetch a pre-built model binary from GridFS, run one simulation,
    and store the zipped results back into GridFS under the RQ job id.

    mongo_host -- address of the MongoDB server holding the "opp" database
    binary_id  -- GridFS id of the zipped model binary to run
    executable -- path (inside the unzipped tree) of the binary to execute
    arguments  -- extra command-line arguments appended to the invocation

    Runs inside an RQ worker: the current job's id is used as the GridFS
    _id of the stored results archive.
    """
    client = pymongo.MongoClient(mongo_host)
    try:
        gfs = gridfs.GridFS(client.opp)
        binary_zip = gfs.get(binary_id).read()
        # Start from a clean working directory.
        shutil.rmtree(MODEL_DIR, ignore_errors=True)
        os.makedirs(MODEL_DIR)
        os.chdir(MODEL_DIR)
        utils.unzip_bytes(binary_zip)
        # Presumably the unzip step loses the executable bit; restore it.
        # (Original comment: "eh" — TODO confirm this is still needed.)
        subprocess.call(["chmod", "+x", executable])
        subprocess.call([executable] + arguments)
        # The simulation is expected to write into ./results.
        results_zip = utils.zip_directory("results")
        # Leave MODEL_DIR before deleting it: removing the process's cwd
        # would make any later relative-path operation fail.
        os.chdir("..")
        shutil.rmtree(MODEL_DIR, ignore_errors=True)
        job_id = rq.get_current_job().get_id()
        gfs.put(results_zip, _id=job_id)
    finally:
        # Don't leak one MongoDB connection per processed job.
        client.close()
def build_model(mongo_host, source_id):
    """Fetch zipped model sources from GridFS, compile them with make, and
    store the zipped build tree back into GridFS under the RQ job id.

    mongo_host -- address of the MongoDB server holding the "opp" database
    source_id  -- GridFS id of the zipped source tree to build

    Runs inside an RQ worker: the current job's id is used as the GridFS
    _id of the stored binary archive.
    """
    client = pymongo.MongoClient(mongo_host)
    try:
        gfs = gridfs.GridFS(client.opp)
        source_zip = gfs.get(source_id).read()
        # Start from a clean working directory.
        shutil.rmtree(MODEL_DIR, ignore_errors=True)
        os.makedirs(MODEL_DIR)
        os.chdir(MODEL_DIR)
        utils.unzip_bytes(source_zip)
        # "make clean" first, just to be sure nothing stale is reused.
        subprocess.call(["make", "clean"])
        subprocess.call(["make", "MODE=release"])
        # Archive the whole build tree (sources + produced binaries).
        binary_zip = utils.zip_directory(".")
        # Leave MODEL_DIR before deleting it: removing the process's cwd
        # would make any later relative-path operation fail.
        os.chdir("..")
        shutil.rmtree(MODEL_DIR, ignore_errors=True)
        job_id = rq.get_current_job().get_id()
        gfs.put(binary_zip, _id=job_id)
    finally:
        # Don't leak one MongoDB connection per processed job.
        client.close()
def run_simulation(source_zip, executable, arguments):
    """Build the model from zipped sources, run one simulation, and return
    the zipped contents of its results directory.

    NOTE(review): this shares its name with the GridFS-based run_simulation
    elsewhere in this code but takes the source archive directly; verify
    which variant callers intend.

    source_zip -- bytes of a zip archive containing the model sources
    executable -- path (inside the unzipped tree) of the binary to execute
    arguments  -- extra command-line arguments appended to the invocation

    Returns the zipped "results" directory as produced by
    utils.zip_directory.
    """
    # Start from a clean working directory.
    shutil.rmtree(MODEL_DIR, ignore_errors=True)
    os.makedirs(MODEL_DIR)
    os.chdir(MODEL_DIR)
    utils.unzip_bytes(source_zip)
    # "make clean" first, just to be sure nothing stale is reused.
    subprocess.call(["make", "clean"])
    subprocess.call(["make", "MODE=release"])
    subprocess.call([executable] + arguments)
    # The simulation is expected to write into ./results.
    results_zip = utils.zip_directory("results")
    # Leave MODEL_DIR before deleting it: removing the process's cwd
    # would make any later relative-path operation fail.
    os.chdir("..")
    shutil.rmtree(MODEL_DIR, ignore_errors=True)
    return results_zip
def run(target, outpath, output_directory=False):
    """Prepare a KA Lite source tree for distribution and package it.

    Marks the tree as built, compiles .pyo files, disables i18n/l10n and
    DEBUG via local_settings, prunes blacklisted files, collects static
    files, pregenerates the database with a default facility, and finally
    either copies or zips the result.

    target  -- path-like (supports .parent) to the kalite directory,
               modified in place
    outpath -- path-like (supports /) output location; the artifact is
               written to outpath / "ka-lite"
    output_directory -- when True, copy the built tree to outpath instead
               of zipping it

    All prints use single-argument print() — identical on Python 2 and 3.
    """
    print("Marking the directory as built by the build process")
    utils.mark_as_built(target)
    print("Generating pyo files")
    utils.generate_pyo_files(target)
    print("disabling i18n")
    utils.append_to_local_settings(target, "USE_I18N = False")
    utils.append_to_local_settings(target, "USE_L10N = False")
    print("Disabling DEBUG mode")
    utils.append_to_local_settings(target, "DEBUG = False")
    print("Deleting some blacklisted files")
    utils.delete_blacklisted_files(target)
    print("Collecting static files")
    utils.collectstatic(target)
    print("Deleting extra static files")
    # Second blacklist pass restricted to static files — presumably to
    # prune what collectstatic just gathered; confirm before removing.
    utils.delete_blacklisted_files(target, removejuststatic=True)
    # Django doesn't like deleted py files. Have to investigate more
    # print("Deleting the .py files")
    # utils.delete_py_files(target)
    print("Pregenerating the database")
    utils.generate_db(target)
    print("Adding in a default facility")
    utils.create_default_facility(target)
    # zip up the tmp directory by getting kalite's grandparent
    top_build_directory = target.parent.parent
    outpath = outpath / "ka-lite"
    if output_directory:
        print("Copying built KA Lite to {}".format(outpath))
        shutil.copytree(str(top_build_directory), str(outpath))
    else:
        print("Zipping up everything")
        utils.zip_directory(top_build_directory, out=outpath)
default="localhost", help="""the address of the Redis server to use (default: localhost)""") args = parser.parse_args() print("Connecting to Redis at '" + args.redis_host + "'...") redis_conn = Redis( host=args.redis_host) # Tell RQ what Redis connection to use q = Queue(connection=redis_conn) # no args implies the default queue runs = get_runs_from_filter(args.configuration, args.runfilter) print("Matched runs: " + ", ".join(runs)) model_source_zip = utils.zip_directory( ".", exclude_dirs=["results", "frames", "out"]) print("Size of sources: " + str(len(model_source_zip)) + "B") jobs = [] print("Enqueueing " + str(len(runs)) + " jobs...") for r in runs: j = q.enqueue(worker.run_simulation, model_source_zip, args.executable, ["-c", args.configuration, "-r", r]) j.meta['runnumber'] = r j.save_meta() jobs.append(j) print("Waiting for results...") while jobs: