Beispiel #1
0
def publish_cmd(args):
    """Build, sign, package, and publish a release of the project.

    Uses the "pre" label when --prerelease is set. --force skips the
    clean-tree/branch checks and the version bump, and force-moves the tag.
    """
    # Releases carry no label; prereleases are labelled "pre".
    label = "pre" if args.prerelease else None

    project = Project(os.getcwd(), "release", label)

    # Fixed: the original mixed a Python-2 print statement with print() calls.
    print("Preparing release {}".format(project.release_tag_name()))
    if not args.force:
        helpers.assert_clean()
        helpers.assert_branch(project.release_branch())
        helpers.set_version(project.build_version(), project.label())

    print("Building: {}".format(project.build_product()))
    build.build(project)

    print("Signing product with identity \"{}\"".format(project.codesign_identity()))
    sign.sign_everything_in_app(project.build_product(), project.codesign_identity())

    print("Packaging {} to {} as {}".format(project.build_product(), project.image_path(), project.image_name()))
    package(project.build_product(), project.image_path(), project.image_name())

    print("Generating release notes...")
    generate_release_notes(project)

    print("Committing and tagging \"{}\"".format(project.release_tag_name()))
    if not args.force:  # We didn't set_version, so nothing should have changed
        commit_release(project)
    tag_release(project.release_tag_name(), args.force)

    publish_release(project, args.prerelease, args.draft, args.dry_run)
Beispiel #2
0
def test_cgen(testcase):
    """Compile the test program to C, run it, and compare its output."""
    source = '\n'.join(testcase.input)
    program = '_program.py'
    try:
        build.build(source, program, target=build.C, alt_lib_path='lib')
        # Execute the compiled binary and capture stdout+stderr.
        binary = './_program'
        raw = subprocess.check_output([binary], stderr=subprocess.STDOUT)
        # Decode and strip trailing newline characters from each line.
        out = [line.rstrip('\n\r') for line in str(raw, 'utf8').splitlines()]
        # Clean up the binary.
        os.remove(binary)
    except errors.CompileError as e:
        out = e.messages
    # Line-end comments in the input count as expected output.
    # (A '#' inside a string literal can confuse this heuristic.)
    for line in testcase.input:
        match = re.search(' #(.*)', line)
        if match:
            testcase.output.append(match.group(1).strip())
    # Verify output.
    assert_string_arrays_equal(testcase.output, out,
                               'Invalid output ({}, line {})'.format(
                                   testcase.file, testcase.line))
def main():
    """Build the StatusReporter mod, hiding its config file from the package."""
    options = dict(
        mods=["StatusReporter"],
        files_to_hide=["Config/UTStatusReporter.ini"],
        merges=[],
        cooking=["StatusReporter"],
    )
    build.build(**options)
Beispiel #4
0
 def run_test(self, testcase):
     """Type check the test program and compare errors to the expected output."""
     messages = []
     try:
         source = '\n'.join(testcase.input)
         build(source, 'main', True, test_temp_dir, True)
     except CompileError as err:
         messages = normalize_error_messages(err.messages)
     assert_string_arrays_equal(
         testcase.output, messages,
         'Invalid type checker output ({}, line {})'.format(
             testcase.file, testcase.line))
Beispiel #5
0
	def make (self, packageBuildInfo):
		"""Configure and run a build in a fresh directory under the tmp base path.

		Fixed: the Python-2-only print statement now uses the parenthesized
		form, valid in both Python 2 and 3.
		"""
		print("    Building %s" % self.baseConfig.getBuildDir())

		# Create config with full build dir path
		config = BuildConfig(os.path.join(packageBuildInfo.getTmpBasePath(), self.baseConfig.getBuildDir()),
							 self.baseConfig.getBuildType(),
							 self.baseConfig.getArgs(),
							 srcPath = os.path.join(packageBuildInfo.dstBasePath, "src"))

		# The build directory must not exist yet; build() is expected to create it.
		assert not os.path.exists(config.getBuildDir())
		build(config, self.generator, self.targets)
def autoinstall(path, destination):
  """Build the extension at *path* and POST it to a listening install server.

  *destination* is either "host:port" or a bare port (host then defaults
  to localhost).
  """
  sys.path.insert(0, basedir)
  import build

  payload = StringIO.StringIO()
  build.build(path, payload)

  if ":" in destination:
    host, port = destination.split(":", 1)
  else:
    host, port = "localhost", destination
  urllib.urlopen('http://%s:%s/' % (host, port), data=payload.getvalue())
Beispiel #7
0
def run():
    """Build a Docker image from a Jenkins workspace and push it.

    Returns 0 on success, 1 on any failure (the exception is printed).
    """
    try:
        # Read CL arguments
        parser = argparse.ArgumentParser()
        parser.add_argument('repository', help='Repository of the built image, e.g. timmy/testy_image')
        parser.add_argument('-p', '--port', help='Port in docker configuration for the daemon. Defaulted to :4243.',
            type=int, default=4243)
        parser.add_argument('-w', '--workspace', help='Location of project with Dockerfile at its root. Defaulted to $WORKSPACE.',
            default=os.getenv('WORKSPACE'))
        parser.add_argument('-t', '--tag', help='Tag for the image. Defaulted to y-m-d-h-m-s (now).',
            default=datetime.now().strftime('%Y-%m-%d-%H-%M-%S'))
        parser.add_argument('-cp', '--config_path', help='Path to the docker config file. Default is /home/jenkins/.dockercfg',
            default='/home/jenkins/.dockercfg')

        # Look up gateway IP for docker daemon
        code, lines = shell("netstat -nr | grep '^0\.0\.0\.0' | awk '{print $2}'")
        assert code == 0, 'failure to lookup our gateway IP ?'
        host = lines[0]

        args = parser.parse_args()
        repo = args.repository
        print("Repository set to {0}".format(repo))

        port = args.port
        host = "{0}:{1}".format(host, port)
        print("Daemon host set to {0}".format(host))

        workspace = args.workspace
        # Fixed: os.getenv returns None (not '') when the variable is unset,
        # so the original "!= ''" check could never fail; test truthiness.
        assert os.getenv('WORKSPACE'), 'No $WORKSPACE environment variable. Are you calling this from the Jenkins master?'
        print("Workspace set to {0}".format(workspace))

        tag = args.tag
        print("Tag set to {0}".format(tag))

        config_path = args.config_path
        print("Path to docker config set to {0}".format(config_path))

        authenticator = Authenticator()

        # Enter workflow below
        build(workspace, host, repo, tag, authenticator=authenticator)
        push(host, repo, tag, authenticator=authenticator)

        return 0

    except Exception as e:
        print(e)
        return 1
Beispiel #8
0
def test_semanal_error(testcase):
    """Run a test case that must produce semantic-analysis errors."""
    try:
        program = '\n'.join(testcase.input)
        build(program, 'main', True, test_temp_dir)
    except CompileError as err:
        # A compile error is the expected outcome; compare normalized messages.
        assert_string_arrays_equal(
            testcase.output, normalize_error_messages(err.messages),
            'Invalid compiler output ({}, line {})'.format(testcase.file,
                                                           testcase.line))
    else:
        raise AssertionError('No errors reported in {}, line {}'.format(
            testcase.file, testcase.line))
Beispiel #9
0
 def run_test(self, testcase):
     """Type check the input program and compare messages with expectations."""
     actual = []
     try:
         program = '\n'.join(testcase.input)
         build.build(program, 'main',
                     target=build.TYPE_CHECK,
                     test_builtins=True,
                     alt_lib_path=test_temp_dir)
     except CompileError as err:
         actual = normalize_error_messages(err.messages)
     assert_string_arrays_equal(
         testcase.output, actual,
         'Invalid type checker output ({}, line {})'.format(
             testcase.file, testcase.line))
Beispiel #10
0
def test_semanal(testcase):
    """Run a semantic analysis test case.

    The testcase argument contains a description of the test case
    (inputs and output).
    """
    try:
        program = '\n'.join(testcase.input)
        result = build.build(program, 'main',
                             target=build.SEMANTIC_ANALYSIS,
                             test_builtins=True,
                             alt_lib_path=test_temp_dir)
        actual = []
        # The actual output is the string form of each analyzed file, except
        # the builtins module and specially marked paths.
        # TODO the test is not reliable
        for name in sorted(result.files.keys()):
            tree = result.files[name]
            stem = os.path.splitext(os.path.basename(tree.path))[0]
            if (not tree.path.endswith(os.sep + 'builtins.py')
                    and not os.path.basename(tree.path).startswith('_')
                    and not stem.endswith('_')):
                actual += str(tree).split('\n')
    except CompileError as e:
        actual = e.messages
    assert_string_arrays_equal(
        testcase.output, actual,
        'Invalid semantic analyzer output ({}, line {})'.format(testcase.file,
                                                                testcase.line))
Beispiel #11
0
 def run_test(self, testcase):
     """Type check a program and dump the inferred types of matching nodes.

     A leading '##' line in the input restricts output to nodes whose
     short type name (or NameExpr name) matches the given regexp.
     """
     actual = []
     try:
         header = testcase.input[0]
         mask = ''
         if header.startswith('##'):
             mask = '(' + header[2:].strip() + ')$'

         program = '\n'.join(testcase.input)
         typemap = build(program, 'main', True, testconfig.test_temp_dir, True)[3]
         selected = []
         for node in typemap.keys():
             if node.line is not None and node.line != -1 and typemap[node]:
                 if (re.match(mask, short_type(node))
                         or (isinstance(node, NameExpr)
                             and re.match(mask, node.name))):
                     selected.append(node)
         for node in sorted(selected,
                            key=lambda n: (n.line, short_type(n),
                                           str(n) + str(typemap[n]))):
             text = str(typemap[node]).replace('*', '')  # Remove erased tags
             text = text.replace('__main__.', '')
             actual.append('{}({}) : {}'.format(short_type(node), node.line,
                                                text))
     except CompileError as e:
         actual = e.messages
     assert_string_arrays_equal(
         testcase.output, actual,
         'Invalid type checker output ({}, line {})'.format(testcase.file,
                                                            testcase.line))
Beispiel #12
0
def view_build(request, typename):
    """Render a build-cost breakdown page for an item type."""
    profile = request.user.profile
    ownerid = profile.corpid
    corpname = profile.corp

    t = get_object_or_404(Type, typeName=typename)
    basket = build(ownerid, t)
    if basket is None or len(basket) == 0:
        # No component basket: fall back to the direct-cost page.
        return direct_to_template(request, 'industry/build_direct.html',
                                  extra_context={
                'tab': 'build',
                'corpname': corpname,
                'typename': t.typeName,
                'totalcost': cost(ownerid, t)})
    components = []
    for component, quantity in basket.items():
        unit_price = cost(ownerid, component)
        components.append((component, quantity, unit_price,
                           unit_price * quantity))
    components.sort(key=lambda item: item[0].typeName)
    # Total is sum of quantity * unit price over all components.
    totalcost = sum(quantity * unit for _, quantity, unit, _ in components)
    return direct_to_template(request, 'industry/build_detail.html',
                              extra_context={
            'tab': 'build',
            'corpname': corpname,
            'typename': t.typeName,
            'component_list': components,
            'totalcost': totalcost,
            'source': basket.note,
            })
Beispiel #13
0
def test_semanal(testcase):
    """Run a semantic analysis test case.

    The testcase argument contains a description of the test case
    (inputs and output).
    """
    try:
        program = '\n'.join(testcase.input)
        trees, symtable, infos, types = build(program, 'main', True,
                                              test_temp_dir)
        actual = []
        # Collect the string form of each analyzed file, skipping the
        # builtins module and specially marked paths.
        # TODO the test is not reliable
        for tree in trees:
            stem = os.path.splitext(os.path.basename(tree.path))[0]
            if (not tree.path.endswith(os.sep + 'builtins.py')
                    and not os.path.basename(tree.path).startswith('_')
                    and not stem.endswith('_')):
                actual += str(tree).split('\n')
    except CompileError as e:
        actual = e.messages
    assert_string_arrays_equal(
        testcase.output, actual,
        'Invalid semantic analyzer output ({}, line {})'.format(testcase.file,
                                                                testcase.line))
Beispiel #14
0
 def builder(**kwargs):
     """Run build.build with the server settings captured from the closure."""
     # "clean" defaults to a full rebuild unless dirty-reloading.
     clean = kwargs.pop("clean", livereload != "dirtyreload")
     serving_live = livereload in ["dirtyreload", "livereload"]
     return build.build(
         live_server=serving_live, site_dir=tempdir, pages=pages, page_keys=page_keys, clean_site_dir=clean, **kwargs
     )
Beispiel #15
0
def test_transform(testcase):
    """Perform a runtime checking transformation test case."""
    expected = remove_comment_lines(testcase.output)

    func_names = get_func_names(expected)

    try:
        # Join the input lines into one source string.
        program = '\n'.join(testcase.input)
        # Parse and type check the input program.
        result = build.build(program, program_path='main',
                             target=build.ICODE,
                             alt_lib_path=test_temp_dir)
        actual = []
        for name in func_names:
            actual.append('def {}:'.format(name))
            try:
                funccode = result.icode[name]
            except KeyError:
                raise RuntimeError('no icode for %s (%s)' % (
                    name, list(result.icode.keys())))
            actual.extend(icode.render(funccode))
    except CompileError as e:
        actual = e.messages
    assert_string_arrays_equal_wildcards(
        expected, actual,
        'Invalid source code output ({}, line {})'.format(testcase.file,
                                                          testcase.line))
Beispiel #16
0
def test_transform(testcase):
    """Perform a runtime checking transformation test case."""
    expected = remove_comment_lines(testcase.output)
    try:
        # Join the input lines into one source string.
        program = "\n".join(testcase.input)
        # Parse and type check the input program. Perform transform manually
        # so that we can skip some files.
        result = build.build(program_text=program, program_path="main", target=build.TRANSFORM, alt_lib_path=test_temp_dir)
        actual = []
        first = True
        # Transform each file separately.
        for name in sorted(result.files.keys()):
            tree = result.files[name]
            # Skip the builtins module and files with '_skip.' in the path.
            if tree.path.endswith("/builtins.py") or "_skip." in tree.path:
                first = False
                continue
            if not first:
                # Display path for files other than the first.
                actual.append("{}:".format(remove_prefix(tree.path, test_temp_dir)))

            # Pretty print the transformed tree.
            printer = PrettyPrintVisitor()
            tree.accept(printer)
            rendered = printer.output()
            if rendered != "":
                actual += rendered.split("\n")
            first = False
    except CompileError as e:
        actual = e.messages
    assert_string_arrays_equal_wildcards(
        expected, actual, "Invalid source code output ({}, line {})".format(testcase.file, testcase.line)
    )
Beispiel #17
0
def test_op_gen(testcase):
    """Perform a type operation support data and code generation test case.

    Fixed: the original declared 'any a' (old mypy-dialect syntax), which is
    a syntax error in standard Python; 'a' is simply assigned below on both
    the success and the error path, so the declaration is dropped.
    """
    expected = remove_comment_lines(testcase.output)
    try:
        src = '\n'.join(testcase.input)
        # Parse and type check the input program.
        trees, symtable, infos, types = build(src, 'main', False,
                                              test_temp_dir, True)
        a = []
        first = True
        # Transform each file separately.
        for t in trees:
            # Skip the builtins module and files with '_skip.' in the path.
            if not t.path.endswith('/builtins.py') and '_skip.' not in t.path:
                if not first:
                    # Display path for files other than the first.
                    a.append('{}:'.format(
                        remove_prefix(t.path, test_temp_dir)))

                # Transform parse tree and produce the code for operations.
                # Note that currently we generate this for each file
                # separately; this needs to be fixed eventually.
                v = DyncheckTransformVisitor(types, symtable, True)
                t.accept(v)
                s = generate_runtime_support(t)
                if s != '':
                    a += s.split('\n')
            first = False
    except CompileError as e:
        a = e.messages
    assert_string_arrays_equal_wildcards(
        expected, a,
        'Invalid source code output ({}, line {})'.format(testcase.file,
                                                          testcase.line))
Beispiel #18
0
    def getLatestBuild(self):
        """Fetch the newest build for the configured build definition.

        Stores its number in self.data['ts']['Build'] and raises if the
        latest build did not succeed.

        Fixed: Python-2-only 'except Exception, details' syntax replaced
        with 'except Exception as details'.
        """
        try:
            builddf_obj = buildDefinition(self.rally, self.data)
            builddf = builddf_obj.getBuildDefinitionByName()
            data_with_bddf_ref = deepcopy(self.data)
            data_with_bddf_ref['build'].update({'BuildDefinition': builddf._ref})
            build_obj = build(self.rally, data_with_bddf_ref)
            bds = build_obj.getAllBuilds()
            # Newest first by creation date.
            sorted_bds = sorted(bds, key=lambda x: x.CreationDate, reverse=True)
            bd = sorted_bds[0]
            self.data['ts']['Build'] = bd.Number
            self.logger.info("Latest build name %s number %s is obtained" % (bd.Name, bd.Number))

            if bd.Status == "SUCCESS":
                return
            else:
                raise Exception('Build failed')

        except Exception as details:
            # Re-raise when invoked from a test; otherwise log and exit.
            if 'test_' in inspect.stack()[1][3] or 'test_' in inspect.stack()[2][3]:
                raise
            else:
                self.logger.error('ERROR: %s \n' % details, exc_info=True)
                sys.exit(1)
Beispiel #19
0
    def updateBuildInfo(self):
        """Create a build record in Rally, creating its definition if missing.

        Fixed: Python-2-only 'except Exception, details' syntax replaced
        with 'except Exception as details'.
        """
        try:
            builddf_obj = buildDefinition(self.rally, self.data)
            builddfs = builddf_obj.getAllBuildDefinitions()
            for builddf in builddfs:
                if builddf.Name == self.data['builddf']['Name']:
                    break
            else:
                # No matching definition found: create one.
                # NOTE(review): the code below still references builddf._ref
                # (the last iterated definition), not new_builddf._ref —
                # confirm whether that is intentional.
                new_builddf = builddf_obj.createBuildDefinition()
                self.logger.info("New build definition name %s is created" % (new_builddf.Name))

            data_with_bddf_ref = deepcopy(self.data)
            data_with_bddf_ref['build'].update({'BuildDefinition': builddf._ref})
            build_obj = build(self.rally, data_with_bddf_ref)
            bd = build_obj.createBuild()
            self.logger.info("Build name %s number %s is created" % (bd.Name, bd.Number))

            if bd.Status == "SUCCESS":
                return
            else:
                raise Exception('Build failed')

        except Exception as details:
            # Re-raise when invoked from a test; otherwise log and exit.
            if 'test_' in inspect.stack()[1][3] or 'test_' in inspect.stack()[2][3]:
                raise
            else:
                self.logger.error('ERROR: %s \n' % details, exc_info=True)
                sys.exit(1)
Beispiel #20
0
def main():
    """Build the project, then run the MCP reobfuscation release step.

    Fixed: Python-2-only print statements and 'except SystemExit, e'
    syntax; the bare 'except' around int() now catches only ValueError.
    """
    global version_str
    build_num = 0
    if len(sys.argv) > 1:
        try:
            build_num = int(sys.argv[1])
        except ValueError:
            # Non-numeric argument: keep the default build number.
            pass
    ret = 0
    ret = build(build_num)
    if ret != 0:
        sys.exit(ret)

    print('=================================== Release Start =================================')
    error_level = 0
    try:
        os.chdir(mcp_dir)
        reset_logger()
        print('reobf start')
        reobfuscate(None, False, True, True)
        print('reobf end')
        reset_logger()
        os.chdir(forge_dir)
    except SystemExit as e:
        print('Reobfusicate Exception: %d ' % e.code)
        error_level = e.code
Beispiel #21
0
def main():
    """Entry point for the android packaging helper.

    Dispatches on the command argument: installsdk, configure, setconfig,
    build, logcat, or test.
    """
    # Run from the directory containing this script.
    root = os.path.abspath(os.path.dirname(sys.argv[0]))
    os.chdir(root)

    # Parse the arguments.
    parser = argparse.ArgumentParser(description="Build an android package.")
    parser.add_argument("command", help="The command to run. One of install_sdk, configure, or build.")
    parser.add_argument("argument", nargs='*', help="The arguments to the selected command.")

    args = parser.parse_args()

    iface = interface.Interface()

    def check_args(n):
        # Fail unless exactly n positional arguments were supplied.
        if len(args.argument) != n:
            iface.fail("The {} command expects {} arguments.".format(args.command, n))
        return args.argument

    if args.command == "installsdk":
        check_args(0)
        install_sdk.install_sdk(iface)

    elif args.command == "configure":
        directory, = check_args(1)
        configure.configure(iface, directory)

    elif args.command == "setconfig":
        directory, var, value = check_args(3)
        configure.set_config(iface, directory, var, value)

    elif args.command == "build":
        if len(args.argument) < 2:
            iface.fail("The build command expects at least 2 arguments.")
        build.build(iface, args.argument[0], args.argument[1:])

    elif args.command == "logcat":
        subprocess.call([plat.adb, "logcat", "-s", "python:*"] + args.argument)

    elif args.command == "test":
        iface.success("All systems go!")

    else:
        parser.error("Unknown command: " + args.command)
Beispiel #22
0
def test_cgen_compile(testcase):
    """Compile the test program to C and compare the generated source.

    Fixed: the output file is now opened with a context manager so the
    handle is closed even if reading raises.
    """
    # Build the program.
    text = '\n'.join(testcase.input)
    try:
        build.build(text, '_program.py', target=build.C, alt_lib_path='lib',
                    flags=[build.COMPILE_ONLY])
        outfile = '_program.c'
        with open(outfile) as f:
            out = [s.rstrip('\n\r') for s in f.readlines()]
        os.remove(outfile)
    except errors.CompileError as e:
        out = e.messages
    # Verify output.
    assert_string_arrays_equal_wildcards(testcase.output, out,
                               'Invalid output ({}, line {})'.format(
                                   testcase.file, testcase.line))
Beispiel #23
0
 def build(self):
     """Parse site-build arguments and invoke build.build.

     Parses sys.argv[2:] because this runs as a subcommand (argv[0] is
     the program and argv[1] the subcommand name).

     Fixed: Python-2-only print statements replaced with print() calls.
     """
     parser = argparse.ArgumentParser(
         description='Builds the site from a template and config file')
     # Arguments
     parser.add_argument('--site,s', dest='site', default='./_site')
     parser.add_argument('--template,t', dest='template', default='./_template')
     parser.add_argument('--config,c', dest='config', default='./_config.yml')
     parser.add_argument('--data,d', dest='data', default='./_data')
     # now that we're inside a subcommand, ignore the first
     # TWO argvs, ie the command (git) and the subcommand (commit)
     args = parser.parse_args(sys.argv[2:])
     if not os.path.isdir(args.template):
         print('\033[91m\r', 'Site template "', args.template, '" does not exist', '\033[0m')
         return
     if not os.path.isfile(args.config):
         print('\033[91m\r', 'Config file "', args.config, '" does not exist', '\033[0m')
         return
     build.build(args.template, args.site, args.config, args.data)
Beispiel #24
0
def deploy():
    """Compile translations, build the site into a fresh folder, swap ./www."""
    # Compile gettext messages
    local('msgfmt ./messages.po -o ./messages.mo')

    # Build site in separate folder
    build_dir = '_build-%s' % int(time.time())
    local('cp -r ./static ./%s' % build_dir)
    lessc_cmd = ('lessc --compress '
                 './%(build_dir)s/less/styles.less > '
                 './%(build_dir)s/css/styles.css' % {'build_dir': build_dir})
    local(lessc_cmd)
    build(build_dir)

    previous = os.path.exists('./www') and os.readlink('./www')

    # Point ./www at the new build, then drop the old one if present.
    local('ln -snf ./%s ./www' % build_dir)
    if previous:
        local('rm -rf %s' % previous)
Beispiel #25
0
def main():
    """Type check a program, translate it to Python, and run the result.

    Translated files go to __mycache__ next to the program (or a temp
    directory if that cannot be created); a temp directory is removed
    after the run.

    Fixed: both file handles are now managed with 'with' so they are
    closed even when reading or writing raises.
    """
    path, args = process_options()

    try:
        with open(path) as mainfile:
            text = mainfile.read()
    except IOError as ioerr:
        fail("mypy: can't read file '{}': {}".format(path,
                                                     ioerr.strerror))

    try:
        outputdir = os.path.join(os.path.dirname(path), '__mycache__')
        tempdir = False
        if not os.path.isdir(outputdir):
            try:
                os.mkdir(outputdir)
            except OSError:
                # Could not create a directory under program directory; must
                # fall back to a temp directory. It will be removed later.
                outputdir = tempfile.mkdtemp()
                tempdir = True

        try:
            # Parse and type check the program and dependencies.
            trees, symtable, infos, types = build(text, path, False, None,
                                                  True)

            # Translate each file in the program to Python.
            # TODO support packages
            for t in trees:
                if not is_stub(t.path):
                    out_path = os.path.join(outputdir,
                                            os.path.basename(t.path))
                    log('translate {} to {}'.format(t.path, out_path))
                    v = PythonGenerator(pyversion)
                    t.accept(v)
                    with open(out_path, 'w') as outfile:
                        outfile.write(v.output())

            # Run the translated program.
            status = subprocess.call(
                [interpreter,
                 '{}/{}'.format(outputdir, os.path.basename(path))] +
                args)
            sys.exit(status)
        finally:
            if tempdir:
                shutil.rmtree(outputdir)
    except CompileError as e:
        for m in e.messages:
            print(m)
        sys.exit(1)
def main():
    """Build the SpectatorUI mod package, merge its content, and patch it."""
    src_dir = "Src/{}".format(PACKAGE_NAME)
    if not os.path.exists(src_dir):
        print("Please create a symlink from 'Src/SpectatorUI' to 'Src/{}'".format(PACKAGE_NAME), file=sys.stderr)
        raise SystemExit(1)

    build.build(
        mods=[PACKAGE_NAME],
        files_to_hide=[
            "Config/UTSpectatorUI_Bookmarks.ini",
            "Config/UTSpectatorUI.ini",
        ],
        merges=[
            ("Unpublished/CookedPC/SpectatorUI_Content.upk", "Unpublished/CookedPC/Script/{}.u".format(PACKAGE_NAME)),
        ],
        cooking=[PACKAGE_NAME],
    )

    patch_package()
Beispiel #27
0
def main():
    """Load test records from CSV, print them, and build and print the tree."""
    with open('test/data.csv', newline='') as handle:
        records = [tuple(row) for row in csv.reader(handle)]

    for record in records:
        print('%s : %s' % (len(record), record))

    tree = build(records, (0, 1, 2, 3, 4))
    tree._print()
Beispiel #28
0
def deploy():
    """Build the site into a timestamped folder and swap the ./www symlink."""
    # Compile gettext messages
    local("msgfmt ./messages.po -o ./messages.mo")

    # Build site in separate folder
    build_dir = "_build-%s" % int(time.time())
    local("cp -r ./static ./%s" % build_dir)
    less_cmd = (
        "lessc --compress "
        "./%(build_dir)s/less/styles.less > "
        "./%(build_dir)s/css/styles.css" % {"build_dir": build_dir}
    )
    local(less_cmd)
    build(build_dir)

    old_build = os.path.exists("./www") and os.readlink("./www")

    # Point ./www at the new build, then remove the previous one if any.
    local("ln -snf ./%s ./www" % build_dir)
    if old_build:
        local("rm -rf %s" % old_build)
Beispiel #29
0
def buildNativeLibrary (config, abiName):
	"""Configure and build the native deqp library for one Android ABI."""
	def makeNDKVersionString (version):
		# e.g. (17, 2) -> "r17b"; a minor version of 0 gets no letter suffix.
		suffix = (chr(ord('a') + version[1]) if version[1] > 0 else "")
		return "r%d%s" % (version[0], suffix)

	def getBuildArgs (config, abiName):
		toolchain = 'ndk-%s' % makeNDKVersionString((config.env.ndk.version[0], 0))
		return ['-DDEQP_TARGET=android',
				'-DDEQP_TARGET_TOOLCHAIN=%s' % toolchain,
				'-DCMAKE_C_FLAGS=-Werror',
				'-DCMAKE_CXX_FLAGS=-Werror',
				'-DANDROID_NDK_HOST_OS=%s' % config.env.ndk.hostOsName,
				'-DANDROID_NDK_PATH=%s' % config.env.ndk.path,
				'-DANDROID_ABI=%s' % abiName,
				'-DDE_ANDROID_API=%s' % config.nativeApi,
				'-DGLCTS_GTF_TARGET=%s' % config.gtfTarget]

	buildPath = getNativeBuildPath(config, abiName)
	buildCfg = BuildConfig(buildPath, config.nativeBuildType, getBuildArgs(config, abiName))

	build(buildCfg, config.cmakeGenerator, ["deqp"])
Beispiel #30
0
def distribute_rpm(pkg_name):
    """Build an RPM for pkg_name and scp it to the distribution server."""
    remote_path = '%s:/repo/blik-products/' % DISTR_SERVER

    pkg_file, version = build(pkg_name, RHEL_DISTR)

    local_path = '%s/%s' % (LOCAL_DIST, pkg_file)

    print ('Copying package %s to %s' % (pkg_file, remote_path))
    status = os.system('scp %s %s' % (local_path, remote_path))

    # Non-zero exit from scp means the transfer failed.
    if status:
        print ('ERROR: distribution package %s failed!' % pkg_name)
        sys.exit(1)
Beispiel #31
0
# Steam Workshop content directory the built mod gets copied into.
DIST = r'E:\Steam\steamapps\workshop\content\289070\1671978687'


def remove_copy(src, dst):
    """Replace *dst* with a fresh copy of the *src* directory tree.

    Permission and not-a-directory problems are reported and swallowed
    (best effort); other OS errors are re-raised unless their errno is in
    the ignorable set.

    Fixed: removed the dead 'pass' statements that followed the print
    calls in the first two except blocks.
    """
    print('Trying to copy mod to steam folder...')
    try:
        if os.path.exists(dst):
            shutil.rmtree(dst)
        shutil.copytree(src, dst)
        print('Copy success.')
    except PermissionError:
        print('PermissionError')
    except NotADirectoryError:
        print('NotADirectoryError')
    except OSError as e:
        ignorable = (
            e.errno in (errno.ENOTDIR, errno.EACCES, errno.ENOENT)
            # or -> getattr(e, "winerror", None) == 267
        )
        if ignorable:
            print('Error')
        else:
            raise


if __name__ == "__main__":
    # Deploy to the Workshop folder only when the build reports success.
    # NOTE(review): build() and MOD_PATH are presumably defined/imported
    # elsewhere in this file — confirm.
    if build():
        remove_copy(MOD_PATH, DIST)
Beispiel #32
0
from setuptools import setup
from torch.utils.cpp_extension import BuildExtension, CUDAExtension
import os
from build import build

# The PyPI long description comes straight from the README.
with open("README.md", "r") as fh:
    long_description = fh.read()

# Compile the CUDA sources before configuring the extension; CUDA_HOME
# falls back to the standard system install location.
cuda_home = os.getenv("CUDA_HOME", "/usr/local/cuda")
print(f"Using CUDA_HOME={cuda_home}")
build(cuda_home=cuda_home)

setup(
    name='torch_radon',
    version="0.0.1",
    author="Matteo Ronchetti",
    author_email="*****@*****.**",
    description="Radon transform in PyTorch",
    long_description=long_description,
    long_description_content_type="text/markdown",
    url="https://github.com/matteo-ronchetti/torch-radon",
    packages=['torch_radon'],
    package_dir={
        'torch_radon': './torch_radon',
    },
    ext_modules=[
        CUDAExtension(
            'torch_radon_cuda',
            [os.path.abspath('src/pytorch.cpp')],
            include_dirs=[os.path.abspath('include')],
            library_dirs=[os.path.abspath("objs")],
Beispiel #33
0
import socketserver
import webbrowser
from http.server import SimpleHTTPRequestHandler

import build


class Handler(SimpleHTTPRequestHandler):
    """Serve every request out of the generated ``build/`` directory."""

    def do_GET(self):
        # Map the requested path into the build output directory, then let
        # the standard handler serve the file.
        self.path = "/build" + self.path
        return super().do_GET()


print("Building the website...")
build.build()
print("Now serving on http://localhost:8000/")
socketserver.TCPServer.allow_reuse_address = True
socketserver.TCPServer(("127.0.0.1", 8000), Handler).serve_forever()
    # run indexer
    args = [
        b, "--nthreads", 1, "--paths-meta", ":".join(meta_dirs),
        "--path-global", global_dir
    ]

    if opts.gdb:
        args = ["gdb", "--args"] + args
    run(args)


if __name__ == "__main__":
    # parse options
    parser = optparse.OptionParser()
    parser.add_option("--dataset", default=conf.SG_DATASET)
    parser.add_option("--nmic", default=conf.SG_NMIC)
    parser.add_option("--debug",
                      dest="debug",
                      action="store_true",
                      default=conf.SG_DEBUG)
    parser.add_option("--gdb", dest="gdb", action="store_true", default=False)
    parser.add_option("--print-only",
                      action="store_true",
                      dest="print_only",
                      default=conf.SG_PRINT_ONLY)
    (opts, args) = parser.parse_args()

    print("# Generating graph")
    build.build(False, False)
    generate_index(opts)
    def build(self, deps=None, nesting_count=0, mail=None):
        """Build every configuration listed in this build set.

        deps: when not None, no installing is done; each config's include
              dependencies are appended to this list instead.
        nesting_count: recursion depth; 1 identifies the outermost set.
        mail: optional dict accumulating a report to be mailed at the end.
        """

        # Set when build.build() itself raises so the outer handler does not
        # log the same error twice.
        build_error = False

        nesting_count += 1

        if self.mail_active(mail, nesting_count):
            # Fresh report state for this (outermost) build.
            mail['output'].clear()
            mail['log'] = ''
            mail['reports'] = []
            mail['failure'] = None

        log.trace('_bset: %2d: %s: make' % (nesting_count, self.bset))
        log.notice('Build Set: %s' % (self.bset))

        # Saved so the finally clause can restore PATH after the staging
        # prepends made below.
        current_path = os.environ['PATH']

        start = datetime.datetime.now()

        mail_report = False
        have_errors = False
        interrupted = False

        #
        # If this is the outermost build set its files are installed. Nested
        # build sets stage their installed files. The staged files are
        # installed when the outermost build finishes.
        #
        if nesting_count != 1:
            if self.installing():
                self.macros['install_mode'] = 'staging'

        #
        # Only the outer build set can have staging to install. Get the staging
        # root via the config because it could require a valid config.
        #
        have_staging = False

        try:
            configs = self.load()

            log.trace('_bset: %2d: %s: configs: %s' %
                      (nesting_count, self.bset, ', '.join(configs)))

            if nesting_count == 1 and len(configs) > 1:
                #
                # Prepend staging areas, bin directory to the
                # path. Lets the later package depend on the earlier
                # ones.
                #
                pathprepend = ['%{stagingroot}/bin'] + \
                    macro_expand(self.macros, '%{_pathprepend}').split(':')
                pathprepend = [pp for pp in pathprepend if len(pp)]
                if len(pathprepend) == 1:
                    self.macros['_pathprepend'] = pathprepend[0]
                else:
                    self.macros['_pathprepend'] = ':'.join(pathprepend)

            sizes_valid = False
            builds = []
            for s in range(0, len(configs)):
                b = None
                try:
                    #
                    # Each section of the build set gets a separate set of
                    # macros so we do not contaminate one configuration with
                    # another.
                    #
                    opts = copy.copy(self.opts)
                    macros = copy.copy(self.macros)
                    if configs[s].endswith('.bset'):
                        # Nested build set: recurse with the same deps/mail.
                        log.trace('_bset: %2d: %s %s' %
                                  (nesting_count, configs[s], '=' *
                                   (74 - len(configs[s]))))
                        bs = buildset(configs[s], self.configs, opts, macros)
                        bs.build(deps, nesting_count, mail)
                        if self.installing():
                            have_staging = True
                        del bs
                    elif configs[s].endswith('.cfg'):
                        if mail:
                            mail_report = True
                        log.trace('_bset: %2d: %s %s' %
                                  (nesting_count, configs[s], '=' *
                                   (74 - len(configs[s]))))
                        try:
                            b = build.build(
                                configs[s],
                                self.opts.get_arg('--pkg-tar-files'), opts,
                                macros)
                        except:
                            build_error = True
                            raise
                        if b.macros.get('%{_disable_reporting}'):
                            mail_report = False
                        if deps is None:
                            self.build_package(configs[s], b)
                            self.report(configs[s],
                                        b,
                                        copy.copy(self.opts),
                                        copy.copy(self.macros),
                                        mail=mail)
                            # Always produce an XML report.
                            self.report(configs[s],
                                        b,
                                        copy.copy(self.opts),
                                        copy.copy(self.macros),
                                        format='xml',
                                        mail=mail)
                            if s == len(configs) - 1 and not have_errors:
                                self.bset_tar(b)
                        else:
                            # Dependency scan only: record the includes.
                            deps += b.config.includes()
                        builds += [b]
                        #
                        # Dump post build macros.
                        #
                        log.trace('_bset:   : macros post-build')
                        log.trace(str(b.macros))
                    else:
                        raise error.general('invalid config type: %s' %
                                            (configs[s]))
                except error.general as gerr:
                    have_errors = True
                    if b is not None:
                        if self.build_failure is None:
                            self.build_failure = b.name()
                        self.write_mail_header('')
                        self.write_mail_header('= ' * 40)
                        self.write_mail_header('Build FAILED: %s' % (b.name()))
                        self.write_mail_header('- ' * 40)
                        self.write_mail_header(str(log.default))
                        self.write_mail_header('- ' * 40)
                        if self.opts.keep_going():
                            log.notice(str(gerr))
                            if self.opts.always_clean():
                                builds += [b]
                        else:
                            raise
                    else:
                        raise
            #
            # Installing or staging ...
            #
            log.trace('_bset: %2d: %s: deps:%r no-install:%r' % \
                      (nesting_count, self.install_mode(),
                       deps is None, self.opts.no_install()))
            log.trace('_bset: %2d: %s: builds: %s' % \
                      (nesting_count, self.install_mode(),
                       ', '.join([b.name() for b in builds])))
            if deps is None and not self.opts.no_install() and not have_errors:
                for b in builds:
                    log.trace('_bset:   : %s: %r' %
                              (self.install_mode(), b.installable()))
                    if b.installable():
                        prefix = b.config.expand('%{_prefix}')
                        buildroot = path.join(b.config.expand('%{buildroot}'),
                                              prefix)
                        if self.staging():
                            prefix = b.config.expand('%{stagingroot}')
                        self.install(self.install_mode(), b.name(), buildroot,
                                     prefix)

            #
            # Sizes ...
            #
            if len(builds) > 1:
                size_build = 0
                size_installed = 0
                size_build_max = 0
                for b in builds:
                    s = b.get_build_size()
                    size_build += s
                    if s > size_build_max:
                        size_build_max = s
                    size_installed += b.get_installed_size()
                size_sources = 0
                for p in builds[0].config.expand('%{_sourcedir}').split(':'):
                    size_sources += path.get_size(p)
                size_patches = 0
                for p in builds[0].config.expand('%{_patchdir}').split(':'):
                    size_patches += path.get_size(p)
                size_total = size_sources + size_patches + size_installed
                build_max_size_human = build.humanize_number(
                    size_build_max + size_installed, 'B')
                build_total_size_human = build.humanize_number(size_total, 'B')
                build_sources_size_human = build.humanize_number(
                    size_sources, 'B')
                build_patches_size_human = build.humanize_number(
                    size_patches, 'B')
                build_installed_size_human = build.humanize_number(
                    size_installed, 'B')
                build_size = 'usage: %s' % (build_max_size_human)
                build_size += ' total: %s' % (build_total_size_human)
                build_size += ' (sources: %s' % (build_sources_size_human)
                build_size += ', patches: %s' % (build_patches_size_human)
                build_size += ', installed %s)' % (build_installed_size_human)
                sizes_valid = True
            #
            # Cleaning ...
            #
            if deps is None and \
                    (not self.opts.no_clean() or self.opts.always_clean()):
                for b in builds:
                    if not b.disabled():
                        log.notice('cleaning: %s' % (b.name()))
                        b.cleanup()
            #
            # Log the build size message
            #
            if len(builds) > 1:
                log.notice('Build Sizes: %s' % (build_size))
            #
            # Clear out the builds ...
            #
            for b in builds:
                del b

            #
            # If builds have been staged install into the final prefix.
            #
            if have_staging and not self.opts.no_install() and not have_errors:
                stagingroot = macro_expand(self.macros, '%{stagingroot}')
                have_stagingroot = path.exists(stagingroot)
                log.trace('_bset: %2d: install staging, present: %s' % \
                          (nesting_count, have_stagingroot))
                if have_stagingroot:
                    prefix = macro_expand(self.macros, '%{_prefix}')
                    self.install(self.install_mode(), self.bset, stagingroot,
                                 prefix)
                    staging_size = path.get_size(stagingroot)
                    if not self.opts.no_clean() or self.opts.always_clean():
                        log.notice('clean staging: %s' % (self.bset))
                        log.trace('removing: %s' % (stagingroot))
                        if not self.opts.dry_run():
                            if path.exists(stagingroot):
                                path.removeall(stagingroot)
                    log.notice('Staging Size: %s' % \
                               (build.humanize_number(staging_size, 'B')))
        except error.general as gerr:
            if not build_error:
                log.stderr(str(gerr))
            raise
        except KeyboardInterrupt:
            interrupted = True
            raise
        except:
            self.build_failure = 'RSB general failure'
            interrupted = True
            raise
        finally:
            # Always restore the caller's PATH and, when enabled and not
            # interrupted, assemble and send the mail report.
            end = datetime.datetime.now()
            os.environ['PATH'] = current_path
            build_time = str(end - start)
            if self.mail_single_report() and nesting_count == 1:
                mail_report = True
            if interrupted or self.macros.defined('mail_disable'):
                mail_report = False
            if mail_report and mail is not None:
                if self.installing():
                    self.write_mail_header('Build Time: %s' % (build_time),
                                           True)
                    self.write_mail_header('', True)
                    self.write_mail_header(mail['header'], True)
                    self.write_mail_header('')
                    log.notice('Mailing report: %s' % (mail['to']))
                    mail['log'] += self.get_mail_header()
                    if sizes_valid:
                        mail['log'] += 'Sizes' + os.linesep
                        mail['log'] += '=====' + os.linesep + os.linesep
                        mail['log'] += \
                            'Maximum build usage: ' + build_max_size_human + os.linesep
                        mail['log'] += \
                            'Total size: ' + build_total_size_human + os.linesep
                        mail['log'] += \
                            'Installed : ' + build_installed_size_human + os.linesep
                        mail[
                            'log'] += 'Sources: ' + build_sources_size_human + os.linesep
                        mail[
                            'log'] += 'Patches: ' + build_patches_size_human + os.linesep
                    mail['log'] += os.linesep
                    mail['log'] += 'Output' + os.linesep
                    mail['log'] += '======' + os.linesep + os.linesep
                    mail['log'] += os.linesep.join(mail['output'].get())
                    mail['log'] += os.linesep + os.linesep
                    mail['log'] += 'Report' + os.linesep
                    mail['log'] += '======' + os.linesep + os.linesep
                mail['reports'] += [self.get_mail_report()]
                if self.build_failure is not None:
                    mail['failure'] = self.build_failure
                if self.mail_active(mail, nesting_count):
                    try:
                        self.mail_send(mail)
                    except error.general as gerr:
                        log.notice('Mail Send Failure: %s' % (gerr))

            log.notice('Build Set: Time %s' % (build_time))
def main():
    """Translate the input task, run the C++ search component and, on
    success, optionally validate the resulting plan.

    Returns the search component's exit code (0 means a plan was found).
    """
    CPP_EXTRA_OPTIONS = []
    PYTHON_EXTRA_OPTIONS = []

    options = parse_options()

    build_dir = os.path.join(PROJECT_ROOT, 'builds',
                             'debug' if options.debug else 'release')

    if options.build:
        build(options.debug)

    # Create build path
    if not os.path.exists(build_dir):
        raise OSError("Planner not built!")

    # If it is the lifted heuristic, we need to obtain the Datalog model
    if options.heuristic == 'add' or options.heuristic == 'hmax':
        PYTHON_EXTRA_OPTIONS += [
            '--build-datalog-model', '--datalog-file', options.datalog_file
        ]
        if options.keep_action_predicates:
            PYTHON_EXTRA_OPTIONS.append('--keep-action-predicates')
        if options.keep_duplicated_rules:
            PYTHON_EXTRA_OPTIONS.append('--keep-duplicated-rules')
        if options.add_inequalities:
            PYTHON_EXTRA_OPTIONS.append('--add-inequalities')
        CPP_EXTRA_OPTIONS += ['--datalog-file', options.datalog_file]

    # Checks if unit-cost flag is true
    if options.unit_cost:
        PYTHON_EXTRA_OPTIONS += ["--unit-cost"]

    # Invoke the Python preprocessor
    subprocess.call([os.path.join(build_dir, 'translator', 'translate.py'),
                     options.domain, options.instance, '--output-file', options.translator_file] + \
                    PYTHON_EXTRA_OPTIONS)

    # Invoke the C++ search component
    cmd = []
    if options.debug:
        # Debug builds run under gdbserver so a debugger can attach on :1234.
        cmd += ['gdbserver', ':1234']
    cmd += [os.path.join(build_dir, 'search', 'search'),
           '-f', options.translator_file,
           '-s', options.search,
           '-e', options.heuristic,
           '-g', options.generator,
           '-r', options.state,
           '--landmark-file', options.landmark,
           '--seed', str(options.seed)] + \
           CPP_EXTRA_OPTIONS
    if (options.ordering):
        cmd += ['-o']
    if (options.lama):
        cmd += ['--lama']

    print(f'Executing "{" ".join(cmd)}"')
    code = subprocess.call(cmd)

    # If we found a plan, try to validate it
    if code == 0 and options.validate:
        validate(options.domain, options.instance, 'sas_plan')

    return code
Beispiel #37
0
# File: build_isl.py
# 
# Copyright (c) 2018 The SydOS Team
# 
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# 
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
# 
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.

import build

import build_gmp

ver = "0.22.1"
url =  "http://isl.gforge.inria.fr/isl-" + ver + ".tar.gz"

build.build(url=url, project="isl", configargs="--with-gmp-prefix=" + build.installpath)
     f"mcwzh-meme_compatible_sfw_v{pack_version}.zip",
     f"mcwzh-meme_compatible_nomod_sfw_v{pack_version}.zip",
     f"mcwzh-meme_compatible_nomod_noresource_sfw_v{pack_version}.zip",
     f"mcwzh-meme_legacy_nomod_noresource_sfw_v{pack_version}.zip"
 ]
 pack_counter = 0
 perfect_pack_counter = 0
 base_folder = "builds"
 if exists(base_folder) and not isdir(base_folder):
     remove(base_folder)
 if not exists(base_folder):
     mkdir(base_folder)
 for file in listdir(base_folder):
     remove(join(base_folder, file))
 for args, name in zip(preset_args, preset_name):
     pack_name, warning_count, error = build.build(args)
     if not error:
         pack_counter += 1
         if warning_count == 0:
             perfect_pack_counter += 1
         if name != pack_name:
             rename(join(base_folder, pack_name),
                    join(base_folder, name))
             print(f"Renamed pack to {name}.")
     else:
         print(f"Failed to build pack {name}.")
         build_unsuccessful = 1
 print(
     f"Built {pack_counter} packs with {perfect_pack_counter} pack(s) no warning."
 )
 exit(build_unsuccessful)
Beispiel #39
0
def _path(opt):
    # Ensure the dataset is built/downloaded first, then record the
    # TalkTheWalk data location in the options dict.
    build(opt)
    opt['ttw_data'] = os.path.join(opt['datapath'], 'TalkTheWalk')
Beispiel #40
0
    def build(self, host, nesting_count=0):
        """Build every configuration in this build set for *host*.

        host: the target the packages are built for.
        nesting_count: recursion depth of nested build sets.
        """

        # Set when build.build() itself raises so the outer handler does not
        # log the same error twice.
        build_error = False

        nesting_count += 1

        log.trace('_bset: %s for %s: make' % (self.bset, host))
        log.notice('Build Set: %s for %s' % (self.bset, host))

        # NOTE(review): assembled but not used in the visible code — possibly
        # consumed by reporting elsewhere; confirm before removing.
        mail_subject = '%s on %s' % (self.bset, self.macros.expand('%{_host}'))

        # Saved so the finally clause can restore the caller's PATH.
        current_path = os.environ['PATH']

        start = datetime.datetime.now()

        have_errors = False

        try:
            configs = self.load()

            log.trace('_bset: %s: configs: %s' %
                      (self.bset, ','.join(configs)))

            sizes_valid = False
            builds = []
            for s in range(0, len(configs)):
                b = None
                try:
                    #
                    # Each section of the build set gets a separate set of
                    # macros so we do not contaminate one configuration with
                    # another.
                    #
                    opts = copy.copy(self.opts)
                    macros = copy.copy(self.macros)
                    self.set_host_details(host, opts, macros)
                    if configs[s].endswith('.bset'):
                        # Nested build set: recurse for the same host.
                        log.trace('_bset: == %2d %s' %
                                  (nesting_count + 1, '=' * 75))
                        bs = buildset(configs[s], self.configs, opts, macros)
                        bs.build(host, nesting_count)
                        del bs
                    elif configs[s].endswith('.cfg'):
                        log.trace('_bset: -- %2d %s' %
                                  (nesting_count + 1, '-' * 75))
                        try:
                            b = build.build(configs[s], False, opts, macros)
                        except:
                            build_error = True
                            raise
                        self.build_package(configs[s], b)
                        builds += [b]
                        #
                        # Dump post build macros.
                        #
                        log.trace('_bset: macros post-build')
                        log.trace(str(macros))
                    else:
                        raise error.general('invalid config type: %s' %
                                            (configs[s]))
                except error.general as gerr:
                    have_errors = True
                    if b is not None:
                        if self.build_failure is None:
                            self.build_failure = b.name()
                    raise
            #
            # Clear out the builds ...
            #
            for b in builds:
                del b
        except error.general as gerr:
            if not build_error:
                log.stderr(str(gerr))
            raise
        except KeyboardInterrupt:
            raise
        except:
            self.build_failure = 'RSB general failure'
            raise
        finally:
            # Restore PATH and report the elapsed time even on failure.
            end = datetime.datetime.now()
            os.environ['PATH'] = current_path
            build_time = str(end - start)
            log.notice('Build Set: Time %s' % (build_time))
def configure():
    # Configuration is just a full build for this project.
    build()
Beispiel #42
0
    "tomlkit>=0.7.0,<0.8.0",
    "tqdm>=4.53.0,<5.0.0",
]

# importlib-metadata backport is only required on interpreters < 3.8.
extras_require = {':python_version < "3.8"': ["importlib-metadata>=2.0.0,<3.0.0"]}

entry_points = {"console_scripts": ["mckit = mckit.cli.runner:mckit"]}

# packages / package_data / install_requires are defined earlier in this file.
setup_kwargs = {
    "name": "mckit",
    "version": "0.5.1a0",
    "description": "Tools to process MCNP models and results",
    "long_description": None,
    "author": "rrn",
    "author_email": "*****@*****.**",
    "maintainer": "dpv2015",
    "maintainer_email": "*****@*****.**",
    "url": "https://github.com/rorni/mckit",
    "packages": packages,
    "package_data": package_data,
    "install_requires": install_requires,
    "extras_require": extras_require,
    "entry_points": entry_points,
    "python_requires": ">=3.7,<4.0",
}
# The project's build hook mutates setup_kwargs (e.g. adds extension
# modules) before setuptools takes over.
from build import build

build(setup_kwargs)

setup(**setup_kwargs)
Beispiel #43
0
         optimization="-Os",  #default "-Os",
         verbose=True,
         skip_programming=True,
         dry_run=True,
         board_db=boards,
         blob_stream=hexstream)
     print(hexstream.getvalue())
 elif x == 1:
     rc, = build(
         "16u2@8M",
         os.getcwd(),
         wdir_recurse=True,
         aux_src_dirs=[],  #[ src_dir_t(AUX_SRC_DIR, False) ],
         aux_src_files=[],
         boards_txt=None,  #BOARDS_TXT,
         ignore_file="amkignore",
         prog_port="/dev/ttyACM0",
         prog_driver="avrdude",
         prog_adapter=None,  #"arduino", #None for dfu-programmer
         optimization="-Os",  #default "-Os",
         verbose=False,
         skip_programming=True,
         dry_run=True,
         board_db=boards)
 elif x == 2:
     rc, = build(
         "uno_dfu",
         os.getcwd(),
         wdir_recurse=True,
         aux_src_dirs=[],  #[ src_dir_t(AUX_SRC_DIR, False) ],
         boards_txt=None,  #BOARDS_TXT,
         ignore_file="amkignore",
Beispiel #44
0
def build_and_deploy(domain, stack_name, template_file, site_dir, cache_age):
    # Build the static site, then deploy it; returns deploy()'s result.
    build(site_dir)
    return deploy(domain, stack_name, template_file, site_dir, cache_age)
	def run(self, env):
		"""Generate this case's build config, specialized for *env*, and build it."""
		out_dir = os.path.join(env.tmpDir, self.buildDir)
		config = self.configGen.getBuildConfig(env, out_dir)
		build(config, self.generator)
Beispiel #46
0
import os
import sys

# Make the sibling ``build`` directory importable so we can use its helper.
sys.path.append(os.path.join(os.path.dirname(__file__), '.', 'build'))
from build import build

# Build a Debug configuration with the 'gui' component from this script's
# own directory.
source = os.path.dirname(os.path.realpath(__file__))
build(source, 'Debug', ['gui'])
Beispiel #47
0
    def build(self, deps=None, nesting_count=0):

        build_error = False

        nesting_count += 1

        log.trace('_bset: %s: make' % (self.bset))
        log.notice('Build Set: %s' % (self.bset))

        if self.opts.get_arg('--mail'):
            mail_report_subject = '%s %s' % (self.bset,
                                             self.macros.expand('%{_host}'))

        current_path = os.environ['PATH']

        start = datetime.datetime.now()

        mail_report = False
        have_errors = False

        try:
            configs = self.load()

            log.trace('_bset: %s: configs: %s' %
                      (self.bset, ','.join(configs)))

            builds = []
            for s in range(0, len(configs)):
                b = None
                try:
                    #
                    # Each section of the build set gets a separate set of
                    # macros so we do not contaminate one configuration with
                    # another.
                    #
                    opts = copy.copy(self.opts)
                    macros = copy.copy(self.macros)
                    if configs[s].endswith('.bset'):
                        log.trace('_bset: == %2d %s' %
                                  (nesting_count + 1, '=' * 75))
                        bs = buildset(configs[s], self.configs, opts, macros)
                        bs.build(deps, nesting_count)
                        del bs
                    elif configs[s].endswith('.cfg'):
                        mail_report = self.opts.get_arg('--mail')
                        log.trace('_bset: -- %2d %s' %
                                  (nesting_count + 1, '-' * 75))
                        try:
                            b = build.build(
                                configs[s],
                                self.opts.get_arg('--pkg-tar-files'), opts,
                                macros)
                        except:
                            build_error = True
                            raise
                        if b.macros.get('%{_disable_reporting}'):
                            mail_report = False
                        if deps is None:
                            self.build_package(configs[s], b)
                            self.report(configs[s], b, copy.copy(self.opts),
                                        copy.copy(self.macros))
                            # Always product an XML report.
                            self.report(configs[s],
                                        b,
                                        copy.copy(self.opts),
                                        copy.copy(self.macros),
                                        format='xml')
                            if s == len(configs) - 1 and not have_errors:
                                self.bset_tar(b)
                        else:
                            deps += b.config.includes()
                        builds += [b]
                    else:
                        raise error.general('invalid config type: %s' %
                                            (configs[s]))
                except error.general, gerr:
                    have_errors = True
                    if b is not None:
                        if self.build_failure is None:
                            self.build_failure = b.name()
                        self.write_mail_header('')
                        self.write_mail_header('= ' * 40)
                        self.write_mail_header('Build FAILED: %s' % (b.name()))
                        self.write_mail_header('- ' * 40)
                        self.write_mail_header(str(log.default))
                        self.write_mail_header('- ' * 40)
                        if self.opts.keep_going():
                            log.notice(str(gerr))
                            if self.opts.always_clean():
                                builds += [b]
                        else:
                            raise
                    else:
                        raise
            #
            # Installing ...
            #
            log.trace('_bset: installing: deps:%r no-install:%r' % \
                      (deps is None, self.opts.no_install()))
            if deps is None \
               and not self.opts.no_install() \
               and not have_errors:
                for b in builds:
                    log.trace('_bset: installing: %r' % b.installable())
                    if b.installable():
                        self.install(b.name(), b.config.expand('%{buildroot}'),
                                     b.config.expand('%{_prefix}'))

            if deps is None and \
                    (not self.opts.no_clean() or self.opts.always_clean()):
                for b in builds:
                    if not b.disabled():
                        log.notice('cleaning: %s' % (b.name()))
                        b.cleanup()
            for b in builds:
                del b
Beispiel #48
0
        "buildVersion": int(current_time),
        "name": file_name,
        "md5": md5
    }
    with open(json_path, 'w', encoding='utf-8') as f:
        f.write(json.dumps(dict, indent=4))


def upload_file(file_path, url):
    """Upload the file at *file_path* to *url* via HTTP PUT.

    Raises for non-2xx responses; prints the status and body on success.
    """
    with open(file_path, 'rb') as payload:
        resp = requests.put(url=url, data=payload)
        resp.raise_for_status()
        print(resp.status_code, resp.text)


# Build all configured targets, then run the rest from this script's folder.
build.build(build.build_list)
os.chdir(sys.path[0])

sign = "D9CD3CA0Axxxxxxxxxxxxxxxxxxxxxxx069E"  # codesign identity hash (redacted)

# Stage the freshly built app bundle, clear the previous release, bundle the
# Qt frameworks with macdeployqt, and copy the result into releases/.
build.execute(
    "cp -af ./../../bin-64/release/MyApplication.app/ ./../packet/MyApplication.app/"
)
build.execute("rm -rf ./../releases/MyApplication.app")
build.execute("{0} ./../packet/MyApplication.app -always-overwrite".format(
    r'/Users/fxy/Qt/6.3.0/clang_64/bin/macdeployqt'))
build.execute(
    "cp -af ./../packet/MyApplication.app ./../releases/MyApplication.app")

build.execute(
    "codesign --options=runtime --timestamp --force --verify --verbose -f -s {0} {1} --deep"
Beispiel #49
0
import sys

from build import build

# Output .xpi path: first CLI argument when given, otherwise derived from
# the version declared in install.rdf.  Catch only the missing-argument
# case — a bare except would also swallow KeyboardInterrupt/SystemExit.
try:
    xpi = sys.argv[1]
except IndexError:
    from xml.dom.minidom import parse
    dom = parse("install.rdf")
    version = dom.getElementsByTagName("em:version")[0].firstChild.nodeValue
    dom.unlink()
    xpi = "mintrayr-{}.xpi".format(version)
sys.exit(build(".", xpi))
Beispiel #50
0
def setup(ext_modules=freeze([])):
    """Collect package metadata from the project config file and MANIFEST,
    then invoke distutils' core.setup with it.

    ext_modules: optional C extensions to build (frozen default, so the
    shared default object cannot be mutated between calls).

    NOTE(review): this code targets Python 2 (uses `unicode`) and pip's
    private `parse_requirements`/`PipSession` APIs — confirm before porting.
    """
    config = get_config()
    package_root = config.get_value('package_root')
    # Build
    if any(x in argv for x in ('build', 'install', 'sdist')):
        version = build(config)
    else:
        version = get_package_version(package_root)

    # Initialize variables
    package_name = config.get_value('package_name')
    if package_name is None:
        package_name = config.get_value('name')
    packages = [package_name]
    package_data = {package_name: []}

    # The sub-packages
    if config.has_value('packages'):
        subpackages = config.get_value('packages')
        for subpackage_name in subpackages:
            packages.append('%s.%s' % (package_name, subpackage_name))
    else:
        subpackages = []

    # Python files are included by default
    filenames = [x.strip() for x in open('MANIFEST').readlines()]
    filenames = [x for x in filenames if not x.endswith('.py')]

    # The data files: everything under package_root that is not a .py file,
    # routed to its sub-package when its first path component matches one.
    prefix = '' if package_root == '.' else package_root + '/'
    prefix_n = len(prefix)
    for line in filenames:
        if not line.startswith(prefix):
            continue
        line = line[prefix_n:]

        path = line.split('/')
        if path[0] in subpackages:
            subpackage = '%s.%s' % (package_name, path[0])
            files = package_data.setdefault(subpackage, [])
            files.append(join_path(*path[1:]))
        elif path[0] not in ('archive', 'docs', 'scripts', 'test'):
            # Top-level data file of the main package.
            package_data[package_name].append(line)

    # The scripts (resolved relative to the scripts/ directory)
    if config.has_value('scripts'):
        scripts = config.get_value('scripts')
        scripts = [join_path(*['scripts', x]) for x in scripts]
    else:
        scripts = []

    # Long description: prefer README.rst, fall back to the config summary.
    if exists('README.rst'):
        with codecs.open('README.rst', 'r', 'utf-8') as readme:
            long_description = readme.read()
    else:
        long_description = config.get_value('description')

    author_name = config.get_value('author_name')
    # Requires: install_requires is read from requirements.txt when present.
    install_requires = []
    if exists('requirements.txt'):
        install_requires = parse_requirements('requirements.txt',
                                              session=PipSession())
        install_requires = [str(ir.req) for ir in install_requires]
    # XXX Workaround buggy distutils ("sdist" don't likes unicode strings,
    # and "register" don't likes normal strings).
    if 'register' in argv:
        author_name = unicode(author_name, 'utf-8')
    # Drop empty classifier entries from the config.
    classifiers = [x for x in config.get_value('classifiers') if x]
    core.setup(
        name=package_name,
        version=version,
        # Metadata
        author=author_name,
        author_email=config.get_value('author_email'),
        license=config.get_value('license'),
        url=config.get_value('url'),
        description=config.get_value('title'),
        long_description=long_description,
        classifiers=classifiers,
        # Packages
        package_dir={package_name: package_root},
        packages=packages,
        package_data=package_data,
        # Requires / Provides
        requires=config.get_value('requires'),
        provides=config.get_value('provides'),
        install_requires=install_requires,
        # Scripts
        scripts=scripts,
        cmdclass={'build_ext': OptionalBuildExt},
        # C extensions
        ext_modules=ext_modules)
Beispiel #51
0
from nn_utils.model import Model
from build import build

import tensorflow as tf
from tensorflow.models.rnn import rnn, rnn_cell
import numpy as np
'''
LSTM RNN For Generating the Next Vector Element in a Sequence 
'''
# Build Data, Params: data set and hyper-parameter dict from build().
data, params = build()

# Parameters: number of steps to generate (key set by build()).
gen_seq_len = params['gen_steps']

print('Building Model...')
model = Model(params)

# Initializing the variables (legacy TF API, pre-1.0).
init = tf.initialize_all_variables()

# Saver for checkpointing / restoring model weights.
saver = tf.train.Saver()


def istate():
    """Return a zeroed initial LSTM state array.

    Shape is (batch_size, 2 * n_hidden): cell state and hidden state
    concatenated, as configured in the module-level `params` dict.
    """
    shape = (params['batch_size'], 2 * params['n_hidden'])
    return np.zeros(shape)


# Launch the graph
with tf.Session() as sess:
						"--deqp-target",
						dest="targetName",
						default=DEFAULT_TARGET,
						help="dEQP build target")
	parser.add_argument("-d",
						"--dst-path",
						dest="dstPath",
						default=DEFAULT_DST_DIR,
						help="Destination path")
	parser.add_argument("-u",
						"--target-vulkan-version",
						dest="vulkanVersion",
						default="1.2",
						choices=["1.0", "1.1", "1.2"],
						help="Target Vulkan version")
	return parser.parse_args()

if __name__ == "__main__":
	args = parseArgs()

	# Resolve the dEQP build configuration from the command-line options.
	generator	= ANY_GENERATOR
	buildCfg	= getBuildConfig(args.buildDir, args.targetName, args.buildType)
	module		= VULKAN_MODULE

	# Build only the helper binary that emits the SPIR-V programs.
	build(buildCfg, generator, ["vk-build-programs"])

	if not os.path.exists(args.dstPath):
		os.makedirs(args.dstPath)

	# Generate prebuilt Vulkan programs for the requested API version.
	execBuildPrograms(buildCfg, generator, module, args.dstPath, args.vulkanVersion)
Beispiel #53
0
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument('--lang', default='en,ru,zh,fa')
    arg_parser.add_argument('--docs-dir', default='.')
    arg_parser.add_argument('--theme-dir', default='mkdocs-material-theme')
    arg_parser.add_argument('--website-dir',
                            default=os.path.join('..', 'website'))
    arg_parser.add_argument('--output-dir', default='build')
    arg_parser.add_argument('--enable-stable-releases', action='store_true')
    arg_parser.add_argument('--version-prefix', type=str, default='')
    arg_parser.add_argument('--skip-single-page', action='store_true')
    arg_parser.add_argument('--skip-pdf', action='store_true')
    arg_parser.add_argument('--skip-website', action='store_true')
    arg_parser.add_argument('--save-raw-single-page', type=str)
    arg_parser.add_argument('--verbose', action='store_true')

    args = arg_parser.parse_args()
    args.docs_output_dir = os.path.join(os.path.abspath(args.output_dir),
                                        'docs')

    from github import choose_latest_releases
    args.stable_releases = choose_latest_releases(
    ) if args.enable_stable_releases else []

    logging.basicConfig(level=logging.DEBUG if args.verbose else logging.INFO,
                        stream=sys.stderr)

    logging.getLogger('MARKDOWN').setLevel(logging.INFO)

    from build import build
    build(args)
Beispiel #54
0
# Make the parent directory importable for the shared project modules.
sys.path.append(os.path.split(os.getcwd())[0])
from args import args
from config import config
from build import build

# Adjust model configuration before building the networks.
config.n_z = 12
config.n_gen_hidden_units = [2000, 1000, 500, 250]
config.n_dis_hidden_units = [600, 600, 600]
config.n_dec_hidden_units = [250, 500, 1000, 2000]
config.gen_encoder_type = "deterministic"
config.gen_enable_dropout = False
config.dis_enable_dropout = False
config.dec_enable_dropout = False

# Generator, discriminator and decoder networks (Python 2 script:
# note the print statements below).
gen, dis, dec = build(config)

# Optionally resume from a saved epoch; missing files are a hard error.
if args.load_epoch > 0:
    gen_filename = "%s/gen_epoch_%s.model" % (args.model_dir, args.load_epoch)
    if os.path.isfile(gen_filename):
        serializers.load_hdf5(gen_filename, gen)
        print gen_filename
    else:
        raise Exception("Failed to load generator.")

    dis_filename = "%s/dis_epoch_%s.model" % (args.model_dir, args.load_epoch)
    if os.path.isfile(dis_filename):
        serializers.load_hdf5(dis_filename, dis)
        print dis_filename
    else:
        raise Exception("Failed to load discriminator.")
Beispiel #55
0
def autobuild(region, reason, **kwargs):
    """Rebuild the Code Club projects site for *region* and publish it.

    Clones the curricula and output repositories, runs the HTML/PDF build,
    force-pushes the result to the builder account's gh-pages branch and
    opens a pull request against the upstream repository.

    region: 'uk' or 'world' (selects repository and output directory).
    reason: passed to get_reason_text() for the commit message / PR body.
    Recognised keyword arguments: verbose, rebuild, clean (default False).

    Requires GITHUB_USER and GITHUB_TOKEN in the environment; exits with a
    clear message when either is missing.
    """
    # Resolves the old TODO: exit with a readable message instead of an
    # unexplained KeyError traceback when credentials are not configured.
    try:
        gh_user = os.environ['GITHUB_USER']
        gh_token = os.environ['GITHUB_TOKEN']
    except KeyError as missing:
        sys.exit('autobuild: required environment variable %s is not set'
                 % missing)

    verbose = kwargs.get('verbose', False)
    rebuild = kwargs.get('rebuild', False)
    clean = kwargs.get('clean', False)

    # Files that must survive a clean of the output repository.
    dont_remove = [
        '.git', '.gitignore', '.travis.yml', 'CNAME', 'README.md',
        'requirements.txt'
    ]
    output_dir = 'output/codeclub%s' % region
    # case sensitivity issues
    pp_region = {'uk': 'UK', 'world': 'World'}[region]
    gh_repo = 'CodeClub%s-Projects' % pp_region

    r = Github(gh_user, gh_token).get_repo('CodeClub/%s' % gh_repo)

    pdf_generator = 'phantomjs'

    # clone the curricula repos (the lazy way)
    subprocess.call('make clone'.split())

    # clone the output repo
    subprocess.call(('git clone https://github.com/CodeClub/%s.git %s' %
                     (gh_repo, output_dir)).split())

    if clean:
        # delete everything in the output dir
        rm_files(output_dir, dont_remove)

    # init gitpython!
    repo = Repo(output_dir)

    # run the build
    build.build(pdf_generator,
                ['lessons/scratch', 'lessons/webdev', 'lessons/python'],
                region, output_dir, verbose, repo, rebuild)

    # add username and token to the push remote url (so we can write).
    # NOTE: the previous format string had been mangled to
    # '%[email protected]' (scrape artifact) — '%[' is not a valid printf
    # conversion and would raise ValueError; restored to the obvious
    # user:token@github.com form matching the four arguments.
    origin_url = repo.remotes.origin.url
    origin_url = 'https://%s:%s@github.com/%s/%s' % (gh_user, gh_token,
                                                     gh_user, origin_url[28:])
    repo.git.remote('set-url', '--push', 'origin', origin_url)

    # stage everything...
    repo.git.add('--all')
    # NB. it seems weird, but this reason can disagree
    # with the PR (since we force push)
    reason_text = get_reason_text(reason)

    try:
        # ... commit it...
        repo.git.commit('-m', 'Rebuild', '-m', reason_text)
        # ...and push!
        # TODO: Don't force push here!
        repo.git.push('-f', 'origin', 'gh-pages')
    except GitCommandError:
        # Nothing committed (no changes) or the push failed; either way
        # there is nothing further to publish.
        sys.exit()

    # submit pull request
    try:
        msg = "Hello!\n\n"
        msg += "I've been hard at work, rebuilding the Code Club %s projects website from the latest markdown.\n\n" % pp_region
        msg += "%s and I found some updates, so I thought I'd best send a pull request. You can view my updated version here:\nhttp://%s.github.io/%s/\n\n" % (
            reason_text, gh_user, gh_repo)
        msg += "Have a nice day!"
        r.create_pull(title='Rebuild',
                      body=msg,
                      head='%s:gh-pages' % gh_user,
                      base='gh-pages')
    except GithubException:
        # TODO: handle this.
        # Usually it just means the PR already exists, which is
        # nothing too much to worry about.
        pass
Beispiel #56
0
import os
import sys

# Make the sibling build/ helper package importable.
sys.path.append(os.path.join(os.path.dirname(__file__), '.', 'build'))
from build import build

# Build only the 'gui' target, Release configuration, from this file's dir.
source = os.path.dirname(os.path.realpath(__file__))
build(source, 'Release', ['gui'])
Beispiel #57
0
from sys import path
from time import ctime
from MySQLdb import Connect
# from memory_profiler import profile
# My Modules
print("\n  Benchmark Init: [{}]\n".format(ctime()))
# Make local C/Cython extension modules and helpers importable.
path.insert(0, 'c_modules')
path.insert(0, 'cython_modules')
path.insert(0, 'lib')
from build import build
from descriptorextractor import DescriptorExtractor
from settings import GLOBAL_SETTINGS, SEARCH_SETTINGS, DATABASE_SETTINGS, BINARY_FEATURE_DETECTOR_ALG_LIST, NUMERICAL_FEATURE_DETECTOR_ALG_LIST
from utils import c_hamming_distance, memory_usage_psutil, to_bit_string, chi2_distance

np.set_printoptions(suppress=False)
# Build the vocabulary-tree index once at startup.
VOCAB_TREE, N_IMAGES = build()
TOP_N_RESULTS = SEARCH_SETTINGS['top-n-results']
FEATURE_DETECTOR_ALG = GLOBAL_SETTINGS['feature-detector-alg']
LEVELS_TO_USE = GLOBAL_SETTINGS['levels-to-use']
HIST_COMP_METHOD = SEARCH_SETTINGS['hist-comp-method']

# Histogram of the query image.
QUERY_HISTOGRAM = {}
# Image histograms collected during search, used to compute the score.
MATCHED_IMAGES = {}


# Método para explorar a VocabTree
# @profile(precision=10, stream=open('logs/benchmark_memory_profiler.log','a'))
def explore(node, bit_string_des, ni):
    # Testa se o nodo está abaixo da linha de níveis a serem usados
from build import build
import os

# Build configuration shared by every target platform.
configuration = 'release'

# Paths are resolved relative to this script: binaries and libraries
# live in sibling bin/ and lib/ directories of its parent.
make_dir = os.path.dirname(os.path.realpath(__file__))
exe_root = os.path.join(make_dir, os.pardir, 'bin')
lib_root = os.path.join(make_dir, os.pardir, 'lib')

if __name__ == '__main__':
    # One release build per supported platform, each into its own lib dir.
    for target_platform in ['linux', 'android-armeabi-v7a']:
        target_lib_dir = os.path.join(lib_root, target_platform)
        build(configuration, target_platform, target_lib_dir)
Beispiel #59
0
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.

import build

import build_gmp
import build_mpfr

# GNU MPC release to fetch and its upstream tarball location.
ver = "1.1.0"
url = "https://ftp.gnu.org/gnu/mpc/mpc-" + ver + ".tar.gz"

# MPC links against GMP and MPFR, both previously installed under
# build.installpath (see build_gmp / build_mpfr imported above).
configargs = ("--with-gmp=" + build.installpath +
              " --with-mpfr=" + build.installpath)

build.build(url=url, project="mpc", configargs=configargs)
                        "--modules",
                        dest="modules",
                        help="Comma-separated list of modules to update")
    parser.add_argument("dst",
                        help="Destination directory for test case lists")
    return parser.parse_args()


if __name__ == "__main__":
    args = parseArgs()

    generator = ANY_GENERATOR
    buildCfg = getBuildConfig(args.buildDir, args.targetName, args.buildType)
    modules = None

    # Resolve the explicitly requested module names, if any.
    if args.modules:
        modules = []
        for m in args.modules.split(","):
            modules.append(getModuleByName(m))

    if modules:
        # Build just the binaries for the requested modules.
        build(buildCfg, generator, [m.binName for m in modules])
    else:
        # No selection: build everything, then discover what was built.
        build(buildCfg, generator)
        modules = getBuiltModules(buildCfg)

    # Emit a test case list per module (Python 2 print statement below —
    # this script targets Python 2).
    for module in modules:
        print "Generating test case list for %s" % module.name
        genAndCopyCaseList(buildCfg, generator, module, args.dst,
                           args.caseListType)