def handle_request(r):
    ftp.init(r)
    buildlogs.init(r)
    build.build_all(r, build_rpm)
    report.send_report(r, is_src=False)
    ftp.flush()
    notify.send(r)

def parse_cmd(cmd):
    words = cmd.split()
    if words[0] == 'build':
        if len(words) == 2:
            if words[1] == 'all':
                build_all()
    else:
        print('ignored ...')

def handle_request(r):
    os.mkdir(path.srpms_dir + '/' + r.id)
    os.chmod(path.srpms_dir + '/' + r.id, 0o755)
    ftp.init(r)
    buildlogs.init(r)
    build.build_all(r, build_srpm)
    report.send_report(r, is_src=True)
    report.send_cia_report(r, is_src=True)
    store_binary_request(r)
    ftp.flush()
    notify.send(r)

def main():
    # create temp directory if it doesn't exist
    os.makedirs('temp', exist_ok=True)

    # read the configuration and pick up an overridden Quartus path if present
    config_parameters = config_parser.read_config()
    quartus_path = 'C:/intelFPGA/18.1/quartus/bin64'
    for x in config_parameters:
        if "QUARTUS PATH" in x[0].upper():
            quartus_path = x[1]

    # exit if quartus is not installed in the expected location
    if not os.path.isdir(quartus_path):
        print('Quartus is not installed in the expected location: ', quartus_path)
        print('If you are in the TLA, you can use the computers by the soldering irons, or the lab across the hallway')
        print('Additionally, if you are attempting a custom install, make sure to install Quartus Prime')
        exit(1)

    # list vhd files to include in the quartus project
    vhd_list = gp.find_vhd_files(dir='src')
    if vhd_list == []:
        print('no vhd files were found')
        exit(1)

    # remove any previous quartus working directory; if another process is
    # using the directory we need to exit
    try:
        shutil.rmtree('internal/QuartusWork')
    except FileNotFoundError:
        pass
    except Exception as e:
        print("Could not delete QuartusWork", e)
        exit(1)
    os.makedirs('internal/QuartusWork')

    # generate the quartus project files
    gp.write_qsf(vhd_list, dir='internal/QuartusWork')
    gp.write_qpf(dir='internal/QuartusWork')
    gp.write_sdc(dir='internal/QuartusWork')

    build_success = build.build_all()
    if not build_success:
        print(no_timings_message)
        exit(1)

    parse_success = parse_timings.parse_timings()
    if not parse_success:
        exit(1)

    # Use Popen to start notepad in a non-blocking manner
    subprocess.Popen(['Notepad', 'temp/timing.txt'])

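A side note on the last line: subprocess.Popen launches Notepad without waiting for it, so the build script can finish while the timing report stays open. A small illustrative sketch of the difference; the file path is taken from the snippet above, everything else is hypothetical:

import subprocess

# Non-blocking: Popen returns immediately after launching Notepad,
# so the calling script does not wait for the editor to close.
subprocess.Popen(['notepad', 'temp/timing.txt'])

# Blocking alternative: run() waits until Notepad exits before returning.
# subprocess.run(['notepad', 'temp/timing.txt'], check=False)
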
def build():
    import build
    ret = build.build_all()
    if ret != 0:
        print("ERROR: build_all failed")
    return ret

import sys, os, glob

# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath('..'))
sys.path.insert(1, os.path.abspath('.'))

import labtronyx

# -- Build labtronyx ------------------------------------------------------
import build
build.build_all()

# -- Build dynamic documentation ------------------------------------------
import docbuild
docbuild.main()

on_rtd = os.environ.get('READTHEDOCS', None) == 'True'

# -- General configuration ------------------------------------------------

# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'

# Add any Sphinx extension module names here, as strings. They can be

# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.

from build import build_all
from rpcbenchmark import RPCBenchmarkServer
from SimpleXMLRPCServer import SimpleXMLRPCServer
from SimpleXMLRPCServer import SimpleXMLRPCRequestHandler

# first we should build everything
build_all()

print("Starting XML-RPC server")


# Restrict to a particular path.
class RequestHandler(SimpleXMLRPCRequestHandler):
    rpc_paths = ('/rpc',)


# Create server
server = SimpleXMLRPCServer(("0.0.0.0", 8000), requestHandler=RequestHandler)
server.register_introspection_functions()
server.register_instance(RPCBenchmarkServer())

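For reference, a minimal client sketch for the server above, assuming it is running locally on port 8000. The RequestHandler restricts requests to the '/rpc' path, so the proxy URL must include it; the sketch only calls the introspection method enabled by register_introspection_functions(), since the methods exposed by RPCBenchmarkServer are not shown in this excerpt.

import xmlrpclib  # use xmlrpc.client on Python 3

# The server only accepts requests on the '/rpc' path.
proxy = xmlrpclib.ServerProxy('http://localhost:8000/rpc')

# register_introspection_functions() exposes system.listMethods,
# which lists whatever RPCBenchmarkServer makes callable.
print(proxy.system.listMethods())
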
def builder(target):
    if target == 'all':
        projects = list_projects(None)  # todo refactor this
        build_all(projects, config)
    else:
        print('todo: ')

def main():
    opts = Options()
    opts.process_opts()
    check_prerequisites(opts)
    print(opts)

    log("Top directory: " + opts.top_dir)

    if opts.print_platform_info:
        print("Required blender + python archive name:")
        print("Base: " + opts.prebuilt_blender_w_python_base)
        print("Override: " + opts.prebuilt_blender_w_python_override)
        print("Resulting bundle name:")
        print(opts.result_bundle_archive_path)
        return

    # clean
    if opts.clean:
        if os.path.exists(opts.work_dir):
            log("Cleaning '" + opts.work_dir + "'")
            shutil.rmtree(opts.work_dir)
        else:
            log("Nothing to clean in '" + opts.work_dir + "'")
        sys.exit(0)

    # 1) get all the sources
    if opts.do_repos:
        repositories.get_or_update(opts)

    # 2) build
    # returns dictionary repo name -> where it was built
    if opts.do_build:
        # generate version file
        repositories.create_version_file(opts)
        # keys are REPO_NAME_MCELL and REPO_NAME_CELLBLENDER
        install_dirs = build.build_all(opts)
    else:
        # testing will use defaults
        install_dirs = {}

    # 3) create bundle
    # overwrite install_dirs with new values
    if opts.do_bundle:
        if opts.only_cellblender_mcell:
            cellblender_mcell_plugin.create_package(opts)
            install_dirs = cellblender_mcell_plugin.extract_resulting_package(opts)
        elif opts.only_pypi_wheel:
            install_dirs = pypi_wheel.create_pypi_wheel(opts)
        else:
            bundle.create_bundle(opts)
            # also extract it right away if testing is needed
            install_dirs = bundle.extract_resulting_bundle(opts)

    # 4) test
    if opts.do_test:
        test_all(opts, install_dirs)

    # 5) store the release
    if opts.store_build:
        if opts.release_version != INTERNAL_RELEASE_NO_VERSION:
            # release
            if os.path.exists(opts.mcell_build_infrastructure_releases_dir):
                log("Copying release '" + opts.result_bundle_archive_path +
                    "' to '" + opts.mcell_build_infrastructure_releases_dir + "'.")
                shutil.copy(opts.result_bundle_archive_path,
                            opts.mcell_build_infrastructure_releases_dir)
            else:
                fatal_error("Could not find directory '" +
                            opts.mcell_build_infrastructure_releases_dir +
                            "', release was not stored but can be found as '" +
                            opts.result_bundle_archive_path + "'.")
        else:
            if os.path.exists(opts.mcell_build_infrastructure_builds_dir):
                log("Copying release '" + opts.result_bundle_archive_path +
                    "' to '" + opts.mcell_build_infrastructure_builds_dir + "'.")
                shutil.copy(opts.result_bundle_archive_path,
                            opts.mcell_build_infrastructure_builds_dir)
            else:
                fatal_error("Could not find directory '" +
                            opts.mcell_build_infrastructure_builds_dir +
                            "', release was not stored but can be found as '" +
                            opts.result_bundle_archive_path + "'.")

    log("--- All tasks finished successfully ---")